rustdesk 2023-03-23 14:31:50 +08:00
parent 0ea7019a16
commit b763ec3080
8 changed files with 20 additions and 147 deletions

View File

@@ -25,7 +25,6 @@ message VideoFrame {
     EncodedVideoFrames h264s = 10;
     EncodedVideoFrames h265s = 11;
   }
-  int64 timestamp = 9;
 }
 message IdPk {
@@ -513,7 +512,6 @@ message AudioFormat {
 message AudioFrame {
   bytes data = 1;
-  int64 timestamp = 2;
 }
 // Notify peer to show message box.
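
Note: with `timestamp` gone from both `VideoFrame` and `AudioFrame`, senders fill only the payload and let every other field default. A minimal sketch of the post-change construction, assuming the protobuf-generated types in `hbb_common::message_proto` (where the `bytes` field accepts a `Vec<u8>` via `Into`):

use hbb_common::message_proto::{AudioFrame, Message};

// Wrap an encoded audio payload; no timestamp is attached anymore,
// mirroring the send_f32 hunks later in this commit.
fn audio_frame_msg(data: Vec<u8>) -> Message {
    let mut msg_out = Message::new();
    msg_out.set_audio_frame(AudioFrame {
        data: data.into(),
        ..Default::default()
    });
    msg_out
}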

View File

@ -4,7 +4,7 @@
use hbb_common::anyhow::{anyhow, Context};
use hbb_common::message_proto::{EncodedVideoFrame, EncodedVideoFrames, Message, VideoFrame};
use hbb_common::{get_time, ResultType};
use hbb_common::ResultType;
use crate::STRIDE_ALIGN;
use crate::{codec::EncoderApi, ImageFormat};
@ -287,7 +287,6 @@ impl VpxEncoder {
frames: vp9s.into(),
..Default::default()
});
vf.timestamp = get_time();
msg_out.set_video_frame(vf);
msg_out
}
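
The video side follows the same pattern: `create_msg` now packages the encoded frames without stamping them. A sketch of the whole function after this change, reconstructing the lines the hunk elides on the assumption that they match the repository's usual form:

use hbb_common::message_proto::{EncodedVideoFrame, EncodedVideoFrames, Message, VideoFrame};

// Package VP9 frames into a Message; the deleted line used to set
// vf.timestamp = get_time() right before set_video_frame.
pub fn create_msg(vp9s: Vec<EncodedVideoFrame>) -> Message {
    let mut msg_out = Message::new();
    let mut vf = VideoFrame::new();
    vf.set_vp9s(EncodedVideoFrames {
        frames: vp9s.into(),
        ..Default::default()
    });
    msg_out.set_video_frame(vf);
    msg_out
}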

View File

@@ -1,7 +1,7 @@
 use std::{
     collections::HashMap,
     net::SocketAddr,
-    ops::{Deref, Not},
+    ops::Deref,
     str::FromStr,
     sync::{mpsc, Arc, Mutex, RwLock},
 };
@@ -39,7 +39,6 @@ use hbb_common::{
     tokio::time::Duration,
     AddrMangle, ResultType, Stream,
 };
-pub use helper::LatencyController;
 pub use helper::*;
 use scrap::{
     codec::{Decoder, DecoderCfg},
@@ -707,19 +706,9 @@ pub struct AudioHandler {
     #[cfg(not(any(target_os = "android", target_os = "linux")))]
     audio_stream: Option<Box<dyn StreamTrait>>,
     channels: u16,
-    latency_controller: Arc<Mutex<LatencyController>>,
-    ignore_count: i32,
 }
 impl AudioHandler {
-    /// Create a new audio handler.
-    pub fn new(latency_controller: Arc<Mutex<LatencyController>>) -> Self {
-        AudioHandler {
-            latency_controller,
-            ..Default::default()
-        }
-    }
     /// Start the audio playback.
     #[cfg(target_os = "linux")]
     fn start_audio(&mut self, format0: AudioFormat) -> ResultType<()> {
@@ -802,24 +791,8 @@ impl AudioHandler {
     }
     /// Handle audio frame and play it.
     #[inline]
     pub fn handle_frame(&mut self, frame: AudioFrame) {
-        if frame.timestamp != 0 {
-            if self
-                .latency_controller
-                .lock()
-                .unwrap()
-                .check_audio(frame.timestamp)
-                .not()
-            {
-                self.ignore_count += 1;
-                if self.ignore_count == 100 {
-                    self.ignore_count = 0;
-                    log::debug!("100 audio frames are ignored");
-                }
-                return;
-            }
-        }
         #[cfg(not(any(target_os = "android", target_os = "linux")))]
         if self.audio_stream.is_none() {
             return;
@@ -914,7 +887,6 @@ impl AudioHandler {
 /// Video handler for the [`Client`].
 pub struct VideoHandler {
     decoder: Decoder,
-    latency_controller: Arc<Mutex<LatencyController>>,
     pub rgb: Vec<u8>,
     recorder: Arc<Mutex<Option<Recorder>>>,
     record: bool,
@@ -922,7 +894,7 @@ pub struct VideoHandler {
 impl VideoHandler {
     /// Create a new video handler.
-    pub fn new(latency_controller: Arc<Mutex<LatencyController>>) -> Self {
+    pub fn new() -> Self {
         VideoHandler {
             decoder: Decoder::new(DecoderCfg {
                 vpx: VpxDecoderConfig {
@@ -930,7 +902,6 @@ impl VideoHandler {
                     num_threads: (num_cpus::get() / 2) as _,
                 },
             }),
-            latency_controller,
             rgb: Default::default(),
             recorder: Default::default(),
             record: false,
@@ -938,14 +909,8 @@ impl VideoHandler {
     }
     /// Handle a new video frame.
     #[inline]
     pub fn handle_frame(&mut self, vf: VideoFrame) -> ResultType<bool> {
-        if vf.timestamp != 0 {
-            // Update the latency controller with the latest timestamp.
-            self.latency_controller
-                .lock()
-                .unwrap()
-                .update_video(vf.timestamp);
-        }
         match &vf.union {
             Some(frame) => {
                 let res = self.decoder.handle_video_frame(
@@ -994,6 +959,7 @@ impl VideoHandler {
         } else {
             self.recorder = Default::default();
         }
+        self.record = start;
     }
 }
@@ -1696,11 +1662,8 @@ where
     let (video_sender, video_receiver) = mpsc::channel::<MediaData>();
     let mut video_callback = video_callback;
-    let latency_controller = LatencyController::new();
-    let latency_controller_cl = latency_controller.clone();
     std::thread::spawn(move || {
-        let mut video_handler = VideoHandler::new(latency_controller);
+        let mut video_handler = VideoHandler::new();
         loop {
             if let Ok(data) = video_receiver.recv() {
                 match data {
@@ -1723,19 +1686,16 @@ where
         }
         log::info!("Video decoder loop exits");
     });
-    let audio_sender = start_audio_thread(Some(latency_controller_cl));
+    let audio_sender = start_audio_thread();
     return (video_sender, audio_sender);
 }
 /// Start an audio thread
 /// Return a audio [`MediaSender`]
-pub fn start_audio_thread(
-    latency_controller: Option<Arc<Mutex<LatencyController>>>,
-) -> MediaSender {
-    let latency_controller = latency_controller.unwrap_or(LatencyController::new());
+pub fn start_audio_thread() -> MediaSender {
     let (audio_sender, audio_receiver) = mpsc::channel::<MediaData>();
     std::thread::spawn(move || {
-        let mut audio_handler = AudioHandler::new(latency_controller);
+        let mut audio_handler = AudioHandler::default();
         loop {
             if let Ok(data) = audio_receiver.recv() {
                 match data {
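
Taken together, these hunks decouple the two pipelines: `VideoHandler::new()` takes no controller, and `start_audio_thread()` builds an `AudioHandler::default()` internally. A condensed sketch of the resulting startup path, using the items defined in this file (`MediaData`, `MediaSender`, `VideoHandler`):

use std::sync::mpsc;

// Spawn the video decode loop and the audio thread independently;
// no LatencyController is shared between them anymore.
fn start_media_threads() -> (mpsc::Sender<MediaData>, MediaSender) {
    let (video_sender, video_receiver) = mpsc::channel::<MediaData>();
    std::thread::spawn(move || {
        let mut video_handler = VideoHandler::new();
        while let Ok(_data) = video_receiver.recv() {
            // decode via video_handler.handle_frame(..) and pass the RGB
            // buffer to the UI callback, as in the loop above
        }
        log::info!("Video decoder loop exits");
    });
    let audio_sender = start_audio_thread();
    (video_sender, audio_sender)
}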

View File

@@ -1,82 +1,8 @@
-use std::{
-    sync::{Arc, Mutex},
-    time::Instant,
-};
 use hbb_common::{
-    log,
-    message_proto::{video_frame, VideoFrame, Message, VoiceCallRequest, VoiceCallResponse}, get_time,
+    get_time,
+    message_proto::{video_frame, Message, VideoFrame, VoiceCallRequest, VoiceCallResponse},
 };
-const MAX_LATENCY: i64 = 500;
-const MIN_LATENCY: i64 = 100;
-/// Latency controller for syncing audio with the video stream.
-/// Only sync the audio to video, not the other way around.
-#[derive(Debug)]
-pub struct LatencyController {
-    last_video_remote_ts: i64, // generated on remote device
-    update_time: Instant,
-    allow_audio: bool,
-    audio_only: bool
-}
-impl Default for LatencyController {
-    fn default() -> Self {
-        Self {
-            last_video_remote_ts: Default::default(),
-            update_time: Instant::now(),
-            allow_audio: Default::default(),
-            audio_only: false
-        }
-    }
-}
-impl LatencyController {
-    /// Create a new latency controller.
-    pub fn new() -> Arc<Mutex<LatencyController>> {
-        Arc::new(Mutex::new(LatencyController::default()))
-    }
-    /// Set whether this [LatencyController] should be working in audio only mode.
-    pub fn set_audio_only(&mut self, only: bool) {
-        self.audio_only = only;
-    }
-    /// Update the latency controller with the latest video timestamp.
-    pub fn update_video(&mut self, timestamp: i64) {
-        self.last_video_remote_ts = timestamp;
-        self.update_time = Instant::now();
-    }
-    /// Check if the audio should be played based on the current latency.
-    pub fn check_audio(&mut self, timestamp: i64) -> bool {
-        // Compute audio latency.
-        let expected = self.update_time.elapsed().as_millis() as i64 + self.last_video_remote_ts;
-        let latency = if self.audio_only {
-            expected
-        } else {
-            expected - timestamp
-        };
-        // Set MAX and MIN, avoid fixing too frequently.
-        if self.allow_audio {
-            if latency.abs() > MAX_LATENCY {
-                log::debug!("LATENCY > {}ms cut off, latency:{}", MAX_LATENCY, latency);
-                self.allow_audio = false;
-            }
-        } else {
-            if latency.abs() < MIN_LATENCY {
-                log::debug!("LATENCY < {}ms resume, latency:{}", MIN_LATENCY, latency);
-                self.allow_audio = true;
-            }
-        }
-        // No video frame here, which means the update time is not up to date.
-        // We manually update the time here.
-        self.update_time = Instant::now();
-        self.allow_audio
-    }
-}
 #[derive(PartialEq, Debug, Clone)]
 pub enum CodecFormat {
     VP9,
@@ -135,4 +61,4 @@ pub fn new_voice_call_response(request_timestamp: i64, accepted: bool) -> Messag
     let mut msg = Message::new();
     msg.set_voice_call_response(resp);
     msg
-}
+}
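
For reference, the deleted controller extrapolated the remote video clock (the last remote video timestamp plus the locally elapsed time since it arrived) and compared each audio timestamp against it; two thresholds formed a hysteresis band so playback did not flap around a single cutoff. A self-contained distillation of that check, under a hypothetical name since the original type is gone:

use std::time::Instant;

const MAX_LATENCY: i64 = 500; // ms: cut audio once the drift exceeds this
const MIN_LATENCY: i64 = 100; // ms: resume once the drift falls below this

// Hypothetical stand-in for the removed LatencyController; same logic.
struct LatencyGate {
    last_video_remote_ts: i64, // generated on the remote device
    update_time: Instant,      // local arrival time of that timestamp
    allow_audio: bool,
}

impl LatencyGate {
    fn update_video(&mut self, timestamp: i64) {
        self.last_video_remote_ts = timestamp;
        self.update_time = Instant::now();
    }

    fn check_audio(&mut self, audio_ts: i64) -> bool {
        // Where the remote video clock "should" be by now, seen locally.
        let expected = self.update_time.elapsed().as_millis() as i64 + self.last_video_remote_ts;
        let latency = expected - audio_ts;
        // Hysteresis: cut off above MAX_LATENCY, resume only below MIN_LATENCY.
        if self.allow_audio && latency.abs() > MAX_LATENCY {
            self.allow_audio = false;
        } else if !self.allow_audio && latency.abs() < MIN_LATENCY {
            self.allow_audio = true;
        }
        self.update_time = Instant::now();
        self.allow_audio
    }
}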

View File

@@ -318,13 +318,11 @@ impl<T: InvokeUiSession> Remote<T> {
                     let mut msg = Message::new();
                     msg.set_audio_frame(frame.clone());
                     tx_audio.send(Data::Message(msg)).ok();
-                    log::debug!("send audio frame {}", frame.timestamp);
                 }
                 Some(message::Union::Misc(misc)) => {
                     let mut msg = Message::new();
                     msg.set_misc(misc.clone());
                     tx_audio.send(Data::Message(msg)).ok();
-                    log::debug!("send audio misc {:?}", misc.audio_format());
                 }
                 _ => {}
             },

View File

@@ -13,7 +13,6 @@
 // https://github.com/krruzic/pulsectl
 use super::*;
-use hbb_common::get_time;
 use magnum_opus::{Application::*, Channels::*, Encoder};
 use std::sync::atomic::{AtomicBool, Ordering};
@@ -349,7 +348,6 @@ fn send_f32(data: &[f32], encoder: &mut Encoder, sp: &GenericService) {
             let mut msg_out = Message::new();
             msg_out.set_audio_frame(AudioFrame {
                 data: data.into(),
-                timestamp: get_time(),
                 ..Default::default()
             });
             sp.send(msg_out);
@@ -369,7 +367,6 @@ fn send_f32(data: &[f32], encoder: &mut Encoder, sp: &GenericService) {
             let mut msg_out = Message::new();
             msg_out.set_audio_frame(AudioFrame {
                 data: data.into(),
-                timestamp: get_time(),
                 ..Default::default()
             });
             sp.send(msg_out);

View File

@@ -7,8 +7,7 @@ use crate::common::update_clipboard;
 use crate::portable_service::client as portable_client;
 use crate::{
     client::{
-        new_voice_call_request, new_voice_call_response, start_audio_thread, LatencyController,
-        MediaData, MediaSender,
+        new_voice_call_request, new_voice_call_response, start_audio_thread, MediaData, MediaSender,
     },
     common::{get_default_sound_input, set_sound_input},
     video_service,
@@ -843,15 +842,16 @@ impl Connection {
             pi.hostname = DEVICE_NAME.lock().unwrap().clone();
             pi.platform = "Android".into();
         }
-        let mut platform_additions = serde_json::Map::new();
         #[cfg(target_os = "linux")]
         {
+            let mut platform_additions = serde_json::Map::new();
             if crate::platform::current_is_wayland() {
                 platform_additions.insert("is_wayland".into(), json!(true));
             }
-        }
-        if !platform_additions.is_empty() {
-            pi.platform_additions = serde_json::to_string(&platform_additions).unwrap_or("".into());
+            if !platform_additions.is_empty() {
+                pi.platform_additions =
+                    serde_json::to_string(&platform_additions).unwrap_or("".into());
+            }
         }
         #[cfg(feature = "hwcodec")]
@@ -1612,11 +1612,7 @@ impl Connection {
         if !self.disable_audio {
             // Drop the audio sender previously.
             drop(std::mem::replace(&mut self.audio_sender, None));
-            // Start a audio thread to play the audio sent by peer.
-            let latency_controller = LatencyController::new();
-            // No video frame will be sent here, so we need to disable latency controller, or audio check may fail.
-            latency_controller.lock().unwrap().set_audio_only(true);
-            self.audio_sender = Some(start_audio_thread(Some(latency_controller)));
+            self.audio_sender = Some(start_audio_thread());
             allow_err!(self
                 .audio_sender
                 .as_ref()
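
Restarting playback on the server side thus reduces to dropping the previous sender and spawning a fresh thread; the audio-only knob disappears with the controller. Incidentally, the `drop(std::mem::replace(&mut self.audio_sender, None))` kept above is equivalent to `Option::take`, so the new body could equally read:

// take() moves the old sender out (closing its channel so the previous
// audio thread can wind down) before the replacement thread is started.
self.audio_sender.take();
self.audio_sender = Some(start_audio_thread());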

View File

@@ -787,7 +787,6 @@ fn create_msg(vp9s: Vec<EncodedVideoFrame>) -> Message {
         frames: vp9s.into(),
         ..Default::default()
     });
-    vf.timestamp = hbb_common::get_time();
     msg_out.set_video_frame(vf);
     msg_out
 }