commit a2bc02b4c5

Cargo.lock (generated)
@@ -2973,8 +2973,8 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
 
 [[package]]
 name = "hwcodec"
-version = "0.1.1"
+version = "0.1.3"
-source = "git+https://github.com/21pages/hwcodec?branch=stable#82cdc15457e42feaf14e1b38622506b2d54baf76"
+source = "git+https://github.com/21pages/hwcodec?branch=stable#83300549075158e5a3fa6c59ea527af3330e48ff"
 dependencies = [
  "bindgen 0.59.2",
  "cc",
@@ -1248,25 +1248,41 @@ customImageQualityDialog(SessionID sessionId, String id, FFI ffi) async {
   double fpsInitValue = 30;
   bool qualitySet = false;
   bool fpsSet = false;
 
+  bool? direct;
+  try {
+    direct =
+        ConnectionTypeState.find(id).direct.value == ConnectionType.strDirect;
+  } catch (_) {}
+  bool hideFps = (await bind.mainIsUsingPublicServer() && direct != true) ||
+      versionCmp(ffi.ffiModel.pi.version, '1.2.0') < 0;
+  bool hideMoreQuality =
+      (await bind.mainIsUsingPublicServer() && direct != true) ||
+          versionCmp(ffi.ffiModel.pi.version, '1.2.2') < 0;
 
   setCustomValues({double? quality, double? fps}) async {
     if (quality != null) {
       qualitySet = true;
       await bind.sessionSetCustomImageQuality(
           sessionId: sessionId, value: quality.toInt());
+      print("quality:$quality");
     }
     if (fps != null) {
       fpsSet = true;
       await bind.sessionSetCustomFps(sessionId: sessionId, fps: fps.toInt());
+      print("fps:$fps");
     }
     if (!qualitySet) {
       qualitySet = true;
       await bind.sessionSetCustomImageQuality(
           sessionId: sessionId, value: qualityInitValue.toInt());
+      print("qualityInitValue:$qualityInitValue");
     }
-    if (!fpsSet) {
+    if (!hideFps && !fpsSet) {
       fpsSet = true;
       await bind.sessionSetCustomFps(
           sessionId: sessionId, fps: fpsInitValue.toInt());
+      print("fpsInitValue:$fpsInitValue");
     }
   }
 
@@ -1279,7 +1295,9 @@ customImageQualityDialog(SessionID sessionId, String id, FFI ffi) async {
   final quality = await bind.sessionGetCustomImageQuality(sessionId: sessionId);
   qualityInitValue =
       quality != null && quality.isNotEmpty ? quality[0].toDouble() : 50.0;
-  if (qualityInitValue < 10 || qualityInitValue > 2000) {
+  if ((hideMoreQuality && qualityInitValue > 100) ||
+      qualityInitValue < 10 ||
+      qualityInitValue > 2000) {
     qualityInitValue = 50;
   }
   // fps
@@ -1289,20 +1307,14 @@ customImageQualityDialog(SessionID sessionId, String id, FFI ffi) async {
   if (fpsInitValue < 5 || fpsInitValue > 120) {
     fpsInitValue = 30;
   }
-  bool? direct;
-  try {
-    direct =
-        ConnectionTypeState.find(id).direct.value == ConnectionType.strDirect;
-  } catch (_) {}
-  bool notShowFps = (await bind.mainIsUsingPublicServer() && direct != true) ||
-      versionCmp(ffi.ffiModel.pi.version, '1.2.0') < 0;
 
   final content = customImageQualityWidget(
       initQuality: qualityInitValue,
       initFps: fpsInitValue,
       setQuality: (v) => setCustomValues(quality: v),
       setFps: (v) => setCustomValues(fps: v),
-      showFps: !notShowFps);
+      showFps: !hideFps,
+      showMoreQuality: !hideMoreQuality);
   msgBoxCommon(ffi.dialogManager, 'Custom Image Quality', content, [btnClose]);
 }
 
@@ -10,7 +10,11 @@ customImageQualityWidget(
     required double initFps,
     required Function(double) setQuality,
     required Function(double) setFps,
-    required bool showFps}) {
+    required bool showFps,
+    required bool showMoreQuality}) {
+  if (!showMoreQuality && initQuality > 100) {
+    initQuality = 50;
+  }
   final qualityValue = initQuality.obs;
   final fpsValue = initFps.obs;
 
@@ -69,7 +73,7 @@ customImageQualityWidget(
             style: const TextStyle(fontSize: 15),
           )),
       // mobile doesn't have enough space
-      if (!isMobile)
+      if (showMoreQuality && !isMobile)
         Expanded(
             flex: 1,
             child: Row(
@@ -85,7 +89,7 @@ customImageQualityWidget(
             ))
        ],
      )),
-    if (isMobile)
+    if (showMoreQuality && isMobile)
       Obx(() => Row(
             children: [
               Expanded(
@@ -160,7 +164,8 @@ customImageQualitySetting() {
       setFps: (v) {
         bind.mainSetUserDefaultOption(key: fpsKey, value: v.toString());
       },
-      showFps: true);
+      showFps: true,
+      showMoreQuality: true);
 }
 
 Future<bool> setServerConfig(
@@ -583,32 +583,19 @@ class WindowActionPanelState extends State<WindowActionPanel>
   void onWindowClose() async {
     mainWindowClose() async => await windowManager.hide();
     notMainWindowClose(WindowController controller) async {
-      if (widget.tabController.length == 0) {
+      if (widget.tabController.length != 0) {
-        debugPrint("close emtpy multiwindow, hide");
-        await controller.hide();
-        await rustDeskWinManager
-            .call(WindowType.Main, kWindowEventHide, {"id": kWindowId!});
-      } else {
         debugPrint("close not emtpy multiwindow from taskbar");
         if (Platform.isWindows) {
           await controller.show();
           await controller.focus();
           final res = await widget.onClose?.call() ?? true;
-          if (res) {
+          if (!res) return;
-            Future.delayed(Duration.zero, () async {
-              // onWindowClose will be called again to hide
-              await WindowController.fromWindowId(kWindowId!).close();
-            });
-          }
         }
-        } else {
-          // ubuntu22.04 windowOnTop not work from taskbar
         widget.tabController.clear();
-          Future.delayed(Duration.zero, () async {
-            // onWindowClose will be called again to hide
-            await WindowController.fromWindowId(kWindowId!).close();
-          });
-        }
       }
+      await controller.hide();
+      await rustDeskWinManager
+          .call(WindowType.Main, kWindowEventHide, {"id": kWindowId!});
     }
 
     macOSWindowClose(
@@ -43,7 +43,7 @@ impl crate::TraitCapturer for Capturer {
             unsafe {
                 std::ptr::copy_nonoverlapping(buf.as_ptr(), self.rgba.as_mut_ptr(), buf.len())
             };
-            Ok(Frame::new(&self.rgba, self.height()))
+            Ok(Frame::new(&self.rgba, self.width(), self.height()))
         } else {
             return Err(io::ErrorKind::WouldBlock.into());
         }
@@ -51,16 +51,23 @@ impl crate::TraitCapturer for Capturer {
 }
 
 pub struct Frame<'a> {
-    pub data: &'a [u8],
+    data: &'a [u8],
-    pub stride: Vec<usize>,
+    width: usize,
+    height: usize,
+    stride: Vec<usize>,
 }
 
 impl<'a> Frame<'a> {
-    pub fn new(data: &'a [u8], h: usize) -> Self {
+    pub fn new(data: &'a [u8], width: usize, height: usize) -> Self {
-        let stride = data.len() / h;
+        let stride0 = data.len() / height;
-        let mut v = Vec::new();
+        let mut stride = Vec::new();
-        v.push(stride);
+        stride.push(stride0);
-        Frame { data, stride: v }
+        Frame {
+            data,
+            width,
+            height,
+            stride,
+        }
     }
 }
 
@@ -69,6 +76,14 @@ impl<'a> crate::TraitFrame for Frame<'a> {
         self.data
     }
 
+    fn width(&self) -> usize {
+        self.width
+    }
+
+    fn height(&self) -> usize {
+        self.height
+    }
+
     fn stride(&self) -> Vec<usize> {
         self.stride.clone()
    }
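Note: every platform Frame type now takes the frame's width as well as its height, and derives the single packed stride from the buffer length. A minimal usage sketch of that invariant, with a toy buffer and a trimmed-down Frame modelled on the dxgi version above (not the crate's actual module paths):

    // A packed BGRA capture of w x h pixels is w * 4 bytes per row.
    struct Frame<'a> {
        data: &'a [u8],
        width: usize,
        height: usize,
        stride: Vec<usize>,
    }

    impl<'a> Frame<'a> {
        fn new(data: &'a [u8], width: usize, height: usize) -> Self {
            // Bytes per row of the single packed plane.
            let stride0 = data.len() / height;
            Frame { data, width, height, stride: vec![stride0] }
        }
    }

    fn main() {
        let (w, h) = (8usize, 4usize);
        let buf = vec![0u8; w * h * 4]; // packed BGRA, no row padding
        let frame = Frame::new(&buf, w, h);
        assert_eq!(frame.stride[0], w * 4);
        assert_eq!(frame.data.len(), frame.stride[0] * frame.height);
        println!("{}x{} frame, stride {}", frame.width, frame.height, frame.stride[0]);
    }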
@@ -7,9 +7,9 @@
 include!(concat!(env!("OUT_DIR"), "/aom_ffi.rs"));
 
 use crate::codec::{base_bitrate, codec_thread_num, Quality};
-use crate::Pixfmt;
 use crate::{codec::EncoderApi, EncodeFrame, STRIDE_ALIGN};
 use crate::{common::GoogleImage, generate_call_macro, generate_call_ptr_macro, Error, Result};
+use crate::{EncodeYuvFormat, Pixfmt};
 use hbb_common::{
     anyhow::{anyhow, Context},
     bytes::Bytes,
@@ -54,6 +54,7 @@ pub struct AomEncoder {
     width: usize,
     height: usize,
     i444: bool,
+    yuvfmt: EncodeYuvFormat,
 }
 
 // https://webrtc.googlesource.com/src/+/refs/heads/main/modules/video_coding/codecs/av1/libaom_av1_encoder.cc
@@ -241,6 +242,7 @@ impl EncoderApi for AomEncoder {
                 width: config.width as _,
                 height: config.height as _,
                 i444,
+                yuvfmt: Self::get_yuvfmt(config.width, config.height, i444),
             })
         }
         _ => Err(anyhow!("encoder type mismatch")),
@@ -263,35 +265,7 @@ impl EncoderApi for AomEncoder {
     }
 
     fn yuvfmt(&self) -> crate::EncodeYuvFormat {
-        let mut img = Default::default();
+        self.yuvfmt.clone()
-        let fmt = if self.i444 {
-            aom_img_fmt::AOM_IMG_FMT_I444
-        } else {
-            aom_img_fmt::AOM_IMG_FMT_I420
-        };
-        unsafe {
-            aom_img_wrap(
-                &mut img,
-                fmt,
-                self.width as _,
-                self.height as _,
-                crate::STRIDE_ALIGN as _,
-                0x1 as _,
-            );
-        }
-        let pixfmt = if self.i444 {
-            Pixfmt::I444
-        } else {
-            Pixfmt::I420
-        };
-        crate::EncodeYuvFormat {
-            pixfmt,
-            w: img.w as _,
-            h: img.h as _,
-            stride: img.stride.map(|s| s as usize).to_vec(),
-            u: img.planes[1] as usize - img.planes[0] as usize,
-            v: img.planes[2] as usize - img.planes[0] as usize,
-        }
     }
 
     fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
@@ -400,6 +374,34 @@ impl AomEncoder {
 
         (q_min, q_max)
     }
+
+    fn get_yuvfmt(width: u32, height: u32, i444: bool) -> EncodeYuvFormat {
+        let mut img = Default::default();
+        let fmt = if i444 {
+            aom_img_fmt::AOM_IMG_FMT_I444
+        } else {
+            aom_img_fmt::AOM_IMG_FMT_I420
+        };
+        unsafe {
+            aom_img_wrap(
+                &mut img,
+                fmt,
+                width as _,
+                height as _,
+                crate::STRIDE_ALIGN as _,
+                0x1 as _,
+            );
+        }
+        let pixfmt = if i444 { Pixfmt::I444 } else { Pixfmt::I420 };
+        EncodeYuvFormat {
+            pixfmt,
+            w: img.w as _,
+            h: img.h as _,
+            stride: img.stride.map(|s| s as usize).to_vec(),
+            u: img.planes[1] as usize - img.planes[0] as usize,
+            v: img.planes[2] as usize - img.planes[0] as usize,
+        }
+    }
 }
 
 impl Drop for AomEncoder {
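Note: the encoder now computes its EncodeYuvFormat once in create() (via aom_img_wrap) and yuvfmt() returns a cached clone instead of re-wrapping an image on every call. The sketch below only illustrates what the cached struct describes for I420/I444; it assumes a simple 64-byte row alignment rather than reproducing libaom's actual layout, and the names are trimmed-down stand-ins for the crate's types:

    #[derive(Debug, Clone)]
    pub enum Pixfmt { I420, I444 }

    #[derive(Debug, Clone)]
    pub struct EncodeYuvFormat {
        pub pixfmt: Pixfmt,
        pub w: usize,
        pub h: usize,
        pub stride: Vec<usize>, // per-plane bytes per row: [Y, U, V]
        pub u: usize,           // byte offset of the U plane from the start of Y
        pub v: usize,           // byte offset of the V plane from the start of Y
    }

    const STRIDE_ALIGN: usize = 64; // assumed alignment for this sketch

    fn align(x: usize) -> usize {
        (x + STRIDE_ALIGN - 1) / STRIDE_ALIGN * STRIDE_ALIGN
    }

    fn get_yuvfmt(w: usize, h: usize, i444: bool) -> EncodeYuvFormat {
        let stride_y = align(w);
        let (stride_uv, uv_h) = if i444 {
            (align(w), h)                        // I444: full-resolution chroma
        } else {
            (align((w + 1) / 2), (h + 1) / 2)    // I420: chroma subsampled 2x2
        };
        let u = stride_y * h;       // U plane starts right after Y
        let v = u + stride_uv * uv_h; // V plane starts right after U
        EncodeYuvFormat {
            pixfmt: if i444 { Pixfmt::I444 } else { Pixfmt::I420 },
            w,
            h,
            stride: vec![stride_y, stride_uv, stride_uv],
            u,
            v,
        }
    }

    fn main() {
        println!("{:?}", get_yuvfmt(1920, 1080, false));
    }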
@@ -202,17 +202,31 @@ pub fn convert_to_yuv(
 ) -> ResultType<()> {
     let src = captured.data();
     let src_stride = captured.stride();
-    let captured_pixfmt = captured.pixfmt();
+    let src_pixfmt = captured.pixfmt();
-    if captured_pixfmt == crate::Pixfmt::BGRA || captured_pixfmt == crate::Pixfmt::RGBA {
+    let src_width = captured.width();
-        if src.len() < src_stride[0] * dst_fmt.h {
+    let src_height = captured.height();
+    if src_width > dst_fmt.w || src_height > dst_fmt.h {
         bail!(
-            "length not enough: {} < {}",
+            "src rect > dst rect: ({src_width}, {src_height}) > ({},{})",
+            dst_fmt.w,
+            dst_fmt.h
+        );
+    }
+    if src_pixfmt == crate::Pixfmt::BGRA || src_pixfmt == crate::Pixfmt::RGBA {
+        if src.len() < src_stride[0] * src_height {
+            bail!(
+                "wrong src len, {} < {} * {}",
                 src.len(),
-                src_stride[0] * dst_fmt.h
+                src_stride[0],
+                src_height
             );
         }
     }
-    match (captured_pixfmt, dst_fmt.pixfmt) {
+    let align = |x:usize| {
+        (x + 63) / 64 * 64
+    };
+
+    match (src_pixfmt, dst_fmt.pixfmt) {
         (crate::Pixfmt::BGRA, crate::Pixfmt::I420) | (crate::Pixfmt::RGBA, crate::Pixfmt::I420) => {
             let dst_stride_y = dst_fmt.stride[0];
             let dst_stride_uv = dst_fmt.stride[1];
@@ -220,7 +234,7 @@ pub fn convert_to_yuv(
             let dst_y = dst.as_mut_ptr();
             let dst_u = dst[dst_fmt.u..].as_mut_ptr();
             let dst_v = dst[dst_fmt.v..].as_mut_ptr();
-            let f = if captured_pixfmt == crate::Pixfmt::BGRA {
+            let f = if src_pixfmt == crate::Pixfmt::BGRA {
                 ARGBToI420
             } else {
                 ABGRToI420
@@ -234,17 +248,20 @@ pub fn convert_to_yuv(
                 dst_stride_uv as _,
                 dst_v,
                 dst_stride_uv as _,
-                dst_fmt.w as _,
+                src_width as _,
-                dst_fmt.h as _,
+                src_height as _,
             ));
         }
         (crate::Pixfmt::BGRA, crate::Pixfmt::NV12) | (crate::Pixfmt::RGBA, crate::Pixfmt::NV12) => {
             let dst_stride_y = dst_fmt.stride[0];
             let dst_stride_uv = dst_fmt.stride[1];
-            dst.resize(dst_fmt.h * (dst_stride_y + dst_stride_uv / 2), 0);
+            dst.resize(
+                align(dst_fmt.h) * (align(dst_stride_y) + align(dst_stride_uv / 2)),
+                0,
+            );
             let dst_y = dst.as_mut_ptr();
             let dst_uv = dst[dst_fmt.u..].as_mut_ptr();
-            let f = if captured_pixfmt == crate::Pixfmt::BGRA {
+            let f = if src_pixfmt == crate::Pixfmt::BGRA {
                 ARGBToNV12
             } else {
                 ABGRToNV12
@@ -256,19 +273,22 @@ pub fn convert_to_yuv(
                 dst_stride_y as _,
                 dst_uv,
                 dst_stride_uv as _,
-                dst_fmt.w as _,
+                src_width as _,
-                dst_fmt.h as _,
+                src_height as _,
             ));
         }
         (crate::Pixfmt::BGRA, crate::Pixfmt::I444) | (crate::Pixfmt::RGBA, crate::Pixfmt::I444) => {
             let dst_stride_y = dst_fmt.stride[0];
             let dst_stride_u = dst_fmt.stride[1];
             let dst_stride_v = dst_fmt.stride[2];
-            dst.resize(dst_fmt.h * (dst_stride_y + dst_stride_u + dst_stride_v), 0);
+            dst.resize(
+                align(dst_fmt.h) * (align(dst_stride_y) + align(dst_stride_u) + align(dst_stride_v)),
+                0,
+            );
             let dst_y = dst.as_mut_ptr();
             let dst_u = dst[dst_fmt.u..].as_mut_ptr();
             let dst_v = dst[dst_fmt.v..].as_mut_ptr();
-            let src = if captured_pixfmt == crate::Pixfmt::BGRA {
+            let src = if src_pixfmt == crate::Pixfmt::BGRA {
                 src
             } else {
                 mid_data.resize(src.len(), 0);
@@ -277,8 +297,8 @@ pub fn convert_to_yuv(
                     src_stride[0] as _,
                     mid_data.as_mut_ptr(),
                     src_stride[0] as _,
-                    dst_fmt.w as _,
+                    src_width as _,
-                    dst_fmt.h as _,
+                    src_height as _,
                 ));
                 mid_data
             };
@@ -291,13 +311,13 @@ pub fn convert_to_yuv(
                 dst_stride_u as _,
                 dst_v,
                 dst_stride_v as _,
-                dst_fmt.w as _,
+                src_width as _,
-                dst_fmt.h as _,
+                src_height as _,
             ));
         }
         _ => {
             bail!(
-                "convert not support, {captured_pixfmt:?} -> {:?}",
+                "convert not support, {src_pixfmt:?} -> {:?}",
                 dst_fmt.pixfmt
             );
         }
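Note: convert_to_yuv now sizes its destination buffers with a local align closure that rounds the plane height and the per-plane strides up to the next multiple of 64 before multiplying, so the encoder gets some slack instead of a tightly fitted buffer. A small worked example of the effect; the concrete resolution and strides below are assumptions for illustration, not values taken from the code:

    fn main() {
        let align = |x: usize| (x + 63) / 64 * 64;

        // Hypothetical NV12 target: 1920x1080, Y stride 1920, UV stride 1920.
        let (h, stride_y, stride_uv) = (1080usize, 1920usize, 1920usize);

        let old_len = h * (stride_y + stride_uv / 2);
        let new_len = align(h) * (align(stride_y) + align(stride_uv / 2));

        // Prints: old 3110400 bytes, new 3133440 bytes
        // (only the height changes here: 1080 rounds up to 1088).
        println!("old {} bytes, new {} bytes", old_len, new_len);
    }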
@@ -41,7 +41,7 @@ impl Capturer {
 impl TraitCapturer for Capturer {
     fn frame<'a>(&'a mut self, timeout: Duration) -> io::Result<Frame<'a>> {
         match self.inner.frame(timeout.as_millis() as _) {
-            Ok(frame) => Ok(Frame::new(frame, self.height)),
+            Ok(frame) => Ok(Frame::new(frame, self.width, self.height)),
             Err(ref error) if error.kind() == TimedOut => Err(WouldBlock.into()),
             Err(error) => Err(error),
         }
@@ -58,15 +58,22 @@ impl TraitCapturer for Capturer {
 
 pub struct Frame<'a> {
     data: &'a [u8],
+    width: usize,
+    height: usize,
     stride: Vec<usize>,
 }
 
 impl<'a> Frame<'a> {
-    pub fn new(data: &'a [u8], h: usize) -> Self {
+    pub fn new(data: &'a [u8], width: usize, height: usize) -> Self {
-        let stride = data.len() / h;
+        let stride0 = data.len() / height;
-        let mut v = Vec::new();
+        let mut stride = Vec::new();
-        v.push(stride);
+        stride.push(stride0);
-        Frame { data, stride: v }
+        Frame {
+            data,
+            width,
+            height,
+            stride,
+        }
     }
 }
 
@@ -75,6 +82,14 @@ impl<'a> crate::TraitFrame for Frame<'a> {
         self.data
     }
 
+    fn width(&self) -> usize {
+        self.width
+    }
+
+    fn height(&self) -> usize {
+        self.height
+    }
+
     fn stride(&self) -> Vec<usize> {
         self.stride.clone()
     }
@@ -167,7 +182,11 @@ impl CapturerMag {
 impl TraitCapturer for CapturerMag {
     fn frame<'a>(&'a mut self, _timeout_ms: Duration) -> io::Result<Frame<'a>> {
         self.inner.frame(&mut self.data)?;
-        Ok(Frame::new(&self.data, self.inner.get_rect().2))
+        Ok(Frame::new(
+            &self.data,
+            self.inner.get_rect().1,
+            self.inner.get_rect().2,
+        ))
     }
 
     fn is_gdi(&self) -> bool {
@@ -112,6 +112,10 @@ pub trait TraitCapturer {
 pub trait TraitFrame {
     fn data(&self) -> &[u8];
 
+    fn width(&self) -> usize;
+
+    fn height(&self) -> usize;
+
     fn stride(&self) -> Vec<usize>;
 
     fn pixfmt(&self) -> Pixfmt;
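Note: with width() and height() on TraitFrame, code that consumes captured frames can do the kind of bounds checking convert_to_yuv gained above without knowing the concrete platform Frame. A hedged sketch; check_frame is a hypothetical helper, and the trait here is a minimal stand-in for the one defined in the hunk above:

    // Minimal stand-in for scrap's TraitFrame (only the methods used here).
    trait TraitFrame {
        fn data(&self) -> &[u8];
        fn width(&self) -> usize;
        fn height(&self) -> usize;
        fn stride(&self) -> Vec<usize>;
    }

    // Validate a captured frame against a destination rectangle before converting.
    fn check_frame(f: &impl TraitFrame, dst_w: usize, dst_h: usize) -> Result<(), String> {
        if f.width() > dst_w || f.height() > dst_h {
            return Err(format!(
                "src rect > dst rect: ({}, {}) > ({}, {})",
                f.width(),
                f.height(),
                dst_w,
                dst_h
            ));
        }
        if f.data().len() < f.stride()[0] * f.height() {
            return Err(format!(
                "wrong src len, {} < {} * {}",
                f.data().len(),
                f.stride()[0],
                f.height()
            ));
        }
        Ok(())
    }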
@@ -125,6 +129,7 @@ pub enum Pixfmt {
     I444,
 }
 
+#[derive(Debug, Clone)]
 pub struct EncodeYuvFormat {
     pub pixfmt: Pixfmt,
     pub w: usize,
@@ -55,7 +55,12 @@ impl crate::TraitCapturer for Capturer {
             Some(mut frame) => {
                 crate::would_block_if_equal(&mut self.saved_raw_data, frame.inner())?;
                 frame.surface_to_bgra(self.height());
-                Ok(Frame(frame, PhantomData))
+                Ok(Frame {
+                    frame,
+                    data: PhantomData,
+                    width: self.width(),
+                    height: self.height(),
+                })
             }
 
             None => Err(io::ErrorKind::WouldBlock.into()),
@@ -69,16 +74,29 @@ impl crate::TraitCapturer for Capturer {
     }
 }
 
-pub struct Frame<'a>(pub quartz::Frame, PhantomData<&'a [u8]>);
+pub struct Frame<'a> {
+    frame: quartz::Frame,
+    data: PhantomData<&'a [u8]>,
+    width: usize,
+    height: usize,
+}
 
 impl<'a> crate::TraitFrame for Frame<'a> {
     fn data(&self) -> &[u8] {
-        &*self.0
+        &*self.frame
+    }
+
+    fn width(&self) -> usize {
+        self.width
+    }
+
+    fn height(&self) -> usize {
+        self.height
     }
 
     fn stride(&self) -> Vec<usize> {
         let mut v = Vec::new();
-        v.push(self.0.stride());
+        v.push(self.frame.stride());
         v
     }
 
@@ -8,7 +8,7 @@ use hbb_common::message_proto::{Chroma, EncodedVideoFrame, EncodedVideoFrames, V
 use hbb_common::ResultType;
 
 use crate::codec::{base_bitrate, codec_thread_num, EncoderApi, Quality};
-use crate::{GoogleImage, Pixfmt, STRIDE_ALIGN};
+use crate::{EncodeYuvFormat, GoogleImage, Pixfmt, STRIDE_ALIGN};
 
 use super::vpx::{vp8e_enc_control_id::*, vpx_codec_err_t::*, *};
 use crate::{generate_call_macro, generate_call_ptr_macro, Error, Result};
@@ -40,6 +40,7 @@ pub struct VpxEncoder {
     height: usize,
     id: VpxVideoCodecId,
     i444: bool,
+    yuvfmt: EncodeYuvFormat,
 }
 
 pub struct VpxDecoder {
@@ -175,6 +176,7 @@ impl EncoderApi for VpxEncoder {
                 height: config.height as _,
                 id: config.codec,
                 i444,
+                yuvfmt: Self::get_yuvfmt(config.width, config.height, i444),
             })
         }
         _ => Err(anyhow!("encoder type mismatch")),
@@ -202,35 +204,7 @@ impl EncoderApi for VpxEncoder {
     }
 
     fn yuvfmt(&self) -> crate::EncodeYuvFormat {
-        let mut img = Default::default();
+        self.yuvfmt.clone()
-        let fmt = if self.i444 {
-            vpx_img_fmt::VPX_IMG_FMT_I444
-        } else {
-            vpx_img_fmt::VPX_IMG_FMT_I420
-        };
-        unsafe {
-            vpx_img_wrap(
-                &mut img,
-                fmt,
-                self.width as _,
-                self.height as _,
-                crate::STRIDE_ALIGN as _,
-                0x1 as _,
-            );
-        }
-        let pixfmt = if self.i444 {
-            Pixfmt::I444
-        } else {
-            Pixfmt::I420
-        };
-        crate::EncodeYuvFormat {
-            pixfmt,
-            w: img.w as _,
-            h: img.h as _,
-            stride: img.stride.map(|s| s as usize).to_vec(),
-            u: img.planes[1] as usize - img.planes[0] as usize,
-            v: img.planes[2] as usize - img.planes[0] as usize,
-        }
     }
 
     fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
@@ -362,6 +336,34 @@ impl VpxEncoder {
 
         (q_min, q_max)
     }
+
+    fn get_yuvfmt(width: u32, height: u32, i444: bool) -> EncodeYuvFormat {
+        let mut img = Default::default();
+        let fmt = if i444 {
+            vpx_img_fmt::VPX_IMG_FMT_I444
+        } else {
+            vpx_img_fmt::VPX_IMG_FMT_I420
+        };
+        unsafe {
+            vpx_img_wrap(
+                &mut img,
+                fmt,
+                width as _,
+                height as _,
+                crate::STRIDE_ALIGN as _,
+                0x1 as _,
+            );
+        }
+        let pixfmt = if i444 { Pixfmt::I444 } else { Pixfmt::I420 };
+        EncodeYuvFormat {
+            pixfmt,
+            w: img.w as _,
+            h: img.h as _,
+            stride: img.stride.map(|s| s as usize).to_vec(),
+            u: img.planes[1] as usize - img.planes[0] as usize,
+            v: img.planes[2] as usize - img.planes[0] as usize,
+        }
+    }
 }
 
 impl Drop for VpxEncoder {
@@ -62,8 +62,8 @@ impl Capturer {
 impl TraitCapturer for Capturer {
     fn frame<'a>(&'a mut self, timeout: Duration) -> io::Result<Frame<'a>> {
         match self.1.capture(timeout.as_millis() as _).map_err(map_err)? {
-            PixelProvider::BGR0(_w, h, x) => Ok(Frame::new(x, crate::Pixfmt::BGRA, h)),
+            PixelProvider::BGR0(w, h, x) => Ok(Frame::new(x, crate::Pixfmt::BGRA, w, h)),
-            PixelProvider::RGB0(_w, h, x) => Ok(Frame::new(x, crate::Pixfmt::RGBA, h)),
+            PixelProvider::RGB0(w, h, x) => Ok(Frame::new(x, crate::Pixfmt::RGBA, w, h)),
             PixelProvider::NONE => Err(std::io::ErrorKind::WouldBlock.into()),
             _ => Err(map_err("Invalid data")),
         }
@@ -1,4 +1,4 @@
-use crate::{common::TraitCapturer, x11, TraitFrame, Pixfmt};
+use crate::{common::TraitCapturer, x11, Pixfmt, TraitFrame};
 use std::{io, time::Duration};
 
 pub struct Capturer(x11::Capturer);
@@ -25,18 +25,26 @@ impl TraitCapturer for Capturer {
     }
 }
 
-pub struct Frame<'a>{
+pub struct Frame<'a> {
     pub data: &'a [u8],
-    pub pixfmt:Pixfmt,
+    pub pixfmt: Pixfmt,
-    pub stride:Vec<usize>,
+    pub width: usize,
+    pub height: usize,
+    pub stride: Vec<usize>,
 }
 
 impl<'a> Frame<'a> {
-    pub fn new(data:&'a [u8], pixfmt:Pixfmt, h:usize) -> Self {
+    pub fn new(data: &'a [u8], pixfmt: Pixfmt, width: usize, height: usize) -> Self {
-        let stride = data.len() / h;
+        let stride0 = data.len() / height;
-        let mut v = Vec::new();
+        let mut stride = Vec::new();
-        v.push(stride);
+        stride.push(stride0);
-        Self { data, pixfmt, stride: v }
+        Self {
+            data,
+            pixfmt,
+            width,
+            height,
+            stride,
+        }
     }
 }
 
@@ -45,6 +53,14 @@ impl<'a> TraitFrame for Frame<'a> {
         self.data
     }
 
+    fn width(&self) -> usize {
+        self.width
+    }
+
+    fn height(&self) -> usize {
+        self.height
+    }
+
     fn stride(&self) -> Vec<usize> {
         self.stride.clone()
     }
@@ -102,7 +102,7 @@ impl Capturer {
         let result = unsafe { slice::from_raw_parts(self.buffer, self.size) };
         crate::would_block_if_equal(&mut self.saved_raw_data, result)?;
         Ok(
-            Frame::new(result, crate::Pixfmt::BGRA, self.display.h())
+            Frame::new(result, crate::Pixfmt::BGRA, self.display.w(), self.display.h())
         )
     }
 }
@@ -670,7 +670,6 @@ impl Connection {
                 conn.lr.my_id.clone(),
             );
             video_service::notify_video_frame_fetched(id, None);
-            scrap::codec::Encoder::update(id, scrap::codec::EncodingUpdate::Remove);
             if conn.authorized {
                 password::update_temporary_password();
             }
@@ -1173,7 +1172,7 @@ impl Connection {
                     sub_service = true;
                 }
             }
-            Self::on_remote_authorized();
+            self.on_remote_authorized();
         }
         let mut msg_out = Message::new();
         msg_out.set_login_response(res);
@@ -1212,9 +1211,10 @@ impl Connection {
         }
     }
 
-    fn on_remote_authorized() {
+    fn on_remote_authorized(&self) {
         use std::sync::Once;
         static _ONCE: Once = Once::new();
+        self.update_codec_on_login();
         #[cfg(any(target_os = "windows", target_os = "linux"))]
         if !Config::get_option("allow-remove-wallpaper").is_empty() {
             // multi connections set once
@@ -1412,8 +1412,8 @@ impl Connection {
         return Config::get_option(enable_prefix_option).is_empty();
     }
 
-    fn update_codec_on_login(&self, lr: &LoginRequest) {
+    fn update_codec_on_login(&self) {
-        if let Some(o) = lr.option.as_ref() {
+        if let Some(o) = self.lr.clone().option.as_ref() {
             if let Some(q) = o.supported_decoding.clone().take() {
                 scrap::codec::Encoder::update(
                     self.inner.id(),
@@ -1438,9 +1438,6 @@ impl Connection {
         if let Some(o) = lr.option.as_ref() {
             self.options_in_login = Some(o.clone());
         }
-        if lr.union.is_none() {
-            self.update_codec_on_login(&lr);
-        }
         self.video_ack_required = lr.video_ack_required;
     }
 
@@ -2969,18 +2966,6 @@ mod raii {
         fn drop(&mut self) {
             let mut active_conns_lock = ALIVE_CONNS.lock().unwrap();
             active_conns_lock.retain(|&c| c != self.0);
-            #[cfg(not(any(target_os = "android", target_os = "ios")))]
-            if active_conns_lock.is_empty() {
-                display_service::reset_resolutions();
-            }
-            #[cfg(all(windows, feature = "virtual_display_driver"))]
-            if active_conns_lock.is_empty() {
-                let _ = virtual_display_manager::reset_all();
-            }
-            #[cfg(all(windows))]
-            if active_conns_lock.is_empty() {
-                crate::privacy_win_mag::stop();
-            }
             video_service::VIDEO_QOS
                 .lock()
                 .unwrap()
@@ -2988,17 +2973,20 @@ mod raii {
         }
     }
 
-    pub struct AuthedConnID(i32);
+    pub struct AuthedConnID(i32, AuthConnType);
 
     impl AuthedConnID {
         pub fn new(id: i32, conn_type: AuthConnType) -> Self {
             AUTHED_CONNS.lock().unwrap().push((id, conn_type));
-            Self(id)
+            Self(id, conn_type)
         }
     }
 
     impl Drop for AuthedConnID {
         fn drop(&mut self) {
+            if self.1 == AuthConnType::Remote {
+                scrap::codec::Encoder::update(self.0, scrap::codec::EncodingUpdate::Remove);
+            }
            let mut lock = AUTHED_CONNS.lock().unwrap();
            lock.retain(|&c| c.0 != self.0);
            if lock.iter().filter(|c| c.1 == AuthConnType::Remote).count() == 0 {
@@ -3006,6 +2994,12 @@ mod raii {
                 {
                     *WALLPAPER_REMOVER.lock().unwrap() = None;
                 }
+                #[cfg(not(any(target_os = "android", target_os = "ios")))]
+                display_service::reset_resolutions();
+                #[cfg(all(windows, feature = "virtual_display_driver"))]
+                let _ = virtual_display_manager::reset_all();
+                #[cfg(all(windows))]
+                crate::privacy_win_mag::stop();
             }
         }
     }
|
|||||||
}
|
}
|
||||||
let frame_ptr = base.add(ADDR_CAPTURE_FRAME);
|
let frame_ptr = base.add(ADDR_CAPTURE_FRAME);
|
||||||
let data = slice::from_raw_parts(frame_ptr, (*frame_info).length);
|
let data = slice::from_raw_parts(frame_ptr, (*frame_info).length);
|
||||||
Ok(Frame::new(data, self.height))
|
Ok(Frame::new(data, self.width, self.height))
|
||||||
} else {
|
} else {
|
||||||
let ptr = base.add(ADDR_CAPTURE_WOULDBLOCK);
|
let ptr = base.add(ADDR_CAPTURE_WOULDBLOCK);
|
||||||
let wouldblock = utils::ptr_to_i32(ptr);
|
let wouldblock = utils::ptr_to_i32(ptr);
|
||||||
|