Revert "use fullrange by default for yuv420p if supported ()" ()

This reverts commit 80afa98d667db773d9451aa1ecc9c1d53d2a5828.
RustDesk, 2023-12-11 23:46:32 +09:00, committed by GitHub
commit ebb14af488, parent 80afa98d66
15 changed files with 141 additions and 388 deletions

Cargo.lock generated

@@ -3013,7 +3013,7 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
 [[package]]
 name = "hwcodec"
 version = "0.1.3"
-source = "git+https://github.com/21pages/hwcodec?branch=stable#12fcf7208a2972050e2343a90eaea6e97d21beb0"
+source = "git+https://github.com/21pages/hwcodec?branch=stable#83300549075158e5a3fa6c59ea527af3330e48ff"
 dependencies = [
  "bindgen 0.59.2",
  "cc",

@@ -522,9 +522,8 @@ Future<List<TToggleMenu>> toolbarDisplayToggle(
   // 444
   final codec_format = ffi.qualityMonitorModel.data.codecFormat;
-  if (codec_format != null &&
-      bind.sessionIsCodecSupport444(
-          sessionId: sessionId, codec: codec_format)) {
+  if (versionCmp(pi.version, "1.2.4") >= 0 &&
+      (codec_format == "AV1" || codec_format == "VP9")) {
     final option = 'i444';
     final value =
         bind.sessionGetToggleOptionSync(sessionId: sessionId, arg: option);
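
Note: the revert replaces the per-codec capability query (sessionIsCodecSupport444) with a plain version-and-codec gate. A dependency-free Rust rendering of that gate, as a sketch; version_cmp and show_i444_toggle are hypothetical stand-ins for the Dart-side versionCmp and menu logic:

use std::cmp::Ordering;

// Compare dotted numeric versions the way the gate needs:
// "1.2.5" >= "1.2.4" holds, "1.2.3" does not.
fn version_cmp(a: &str, b: &str) -> Ordering {
    let parse = |s: &str| -> Vec<u64> {
        s.split('.').map(|p| p.parse().unwrap_or(0)).collect()
    };
    parse(a).cmp(&parse(b))
}

// Show the i444 toggle only for peers >= 1.2.4 running VP9 or AV1.
fn show_i444_toggle(peer_version: &str, codec_format: Option<&str>) -> bool {
    version_cmp(peer_version, "1.2.4") != Ordering::Less
        && matches!(codec_format, Some("AV1") | Some("VP9"))
}

fn main() {
    assert!(show_i444_toggle("1.2.5", Some("VP9")));
    assert!(!show_i444_toggle("1.2.3", Some("VP9")));
    assert!(!show_i444_toggle("1.2.5", Some("H264")));
}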

@@ -18,8 +18,8 @@ message YUV {
 }
 
 enum Chroma {
-  C420 = 0;
-  C444 = 1;
+  I420 = 0;
+  I444 = 1;
 }
 
 message VideoFrame {
@@ -96,28 +96,12 @@ message CodecAbility {
   bool h265 = 5;
 }
 
-// vpx, aom have yuv420p_bt601_studio by default
-// h26x have nv12_bt601_studio by default
-message ColorAbility {
-  bool yuv420p_bt601_full = 1;
-  bool yuv444p_bt601_studio = 2;
-}
-
-message ColorAbilities {
-  ColorAbility vp8 = 1;
-  ColorAbility vp9 = 2;
-  ColorAbility av1 = 3;
-  ColorAbility h264 = 4;
-  ColorAbility h265 = 5;
-}
-
 message SupportedEncoding {
   bool h264 = 1;
   bool h265 = 2;
   bool vp8 = 3;
  bool av1 = 4;
-  CodecAbility i444 = 5; // deprecated, use color_abilities instead
-  ColorAbilities color_abilities = 6;
+  CodecAbility i444 = 5;
 }
 
 message PeerInfo {
@@ -579,7 +563,6 @@ message SupportedDecoding {
   int32 ability_av1 = 6;
   CodecAbility i444 = 7;
   Chroma prefer_chroma = 8;
-  ColorAbilities color_abilities = 9;
 }
 
 message OptionMessage {
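
Note: with the range/colorspace split (ColorAbility) gone, the enum variants go back to plain chroma-subsampling names, I420 and I444. Rough plane-size arithmetic behind the two variants, as a self-contained Rust sketch (8-bit planes, stride padding ignored): I420 keeps U and V at quarter resolution (12 bits per pixel), I444 keeps all three planes full size (24 bits per pixel).

fn frame_bytes(w: usize, h: usize, i444: bool) -> usize {
    if i444 {
        w * h * 3 // Y, U, V each w*h
    } else {
        w * h + 2 * (w / 2) * (h / 2) // full Y plus quarter-size U and V
    }
}

fn main() {
    assert_eq!(frame_bytes(1920, 1080, false), 1920 * 1080 * 3 / 2);
    assert_eq!(frame_bytes(1920, 1080, true), 1920 * 1080 * 3);
}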

@@ -5,9 +5,9 @@ use hbb_common::{
 };
 use scrap::{
     aom::{AomDecoder, AomEncoder, AomEncoderConfig},
-    codec::{EncoderApi, EncoderCfg, ExtraEncoderCfg, Quality as Q},
-    convert_to_yuv, Capturer, ColorRange, Display, TraitCapturer, VpxDecoder, VpxDecoderConfig,
-    VpxEncoder, VpxEncoderConfig,
+    codec::{EncoderApi, EncoderCfg, Quality as Q},
+    convert_to_yuv, Capturer, Display, TraitCapturer, VpxDecoder, VpxDecoderConfig, VpxEncoder,
+    VpxEncoderConfig,
     VpxVideoCodecId::{self, *},
     STRIDE_ALIGN,
 };
@@ -110,15 +110,7 @@ fn test_vpx(
         codec: codec_id,
         keyframe_interval: None,
     });
-    let extra = ExtraEncoderCfg {
-        pixfmt: if i444 {
-            scrap::Pixfmt::YUV444P
-        } else {
-            scrap::Pixfmt::YUV420P
-        },
-        range: ColorRange::Studio,
-    };
-    let mut encoder = VpxEncoder::new(config, extra).unwrap();
+    let mut encoder = VpxEncoder::new(config, i444).unwrap();
     let mut vpxs = vec![];
     let start = Instant::now();
     let mut size = 0;
@@ -130,7 +122,7 @@ fn test_vpx(
        match c.frame(std::time::Duration::from_millis(30)) {
            Ok(frame) => {
                let tmp_timer = Instant::now();
-                convert_to_yuv(&frame, encoder.yuvfmt(), &mut yuv, &mut mid_data).unwrap();
+                convert_to_yuv(&frame, encoder.yuvfmt(), &mut yuv, &mut mid_data);
                for ref frame in encoder
                    .encode(start.elapsed().as_millis() as _, &yuv, STRIDE_ALIGN)
                    .unwrap()
@@ -195,15 +187,7 @@ fn test_av1(
         quality,
         keyframe_interval: None,
     });
-    let extra = ExtraEncoderCfg {
-        pixfmt: if i444 {
-            scrap::Pixfmt::YUV444P
-        } else {
-            scrap::Pixfmt::YUV420P
-        },
-        range: ColorRange::Studio,
-    };
-    let mut encoder = AomEncoder::new(config, extra).unwrap();
+    let mut encoder = AomEncoder::new(config, i444).unwrap();
     let start = Instant::now();
     let mut size = 0;
     let mut av1s: Vec<Vec<u8>> = vec![];
@@ -215,7 +199,7 @@ fn test_av1(
        match c.frame(std::time::Duration::from_millis(30)) {
            Ok(frame) => {
                let tmp_timer = Instant::now();
-                convert_to_yuv(&frame, encoder.yuvfmt(), &mut yuv, &mut mid_data).unwrap();
+                convert_to_yuv(&frame, encoder.yuvfmt(), &mut yuv, &mut mid_data);
                for ref frame in encoder
                    .encode(start.elapsed().as_millis() as _, &yuv, STRIDE_ALIGN)
                    .unwrap()
@@ -290,10 +274,6 @@ mod hw {
         yuv_count: usize,
         h26xs: &mut Vec<Vec<u8>>,
     ) {
-        let extra = ExtraEncoderCfg {
-            pixfmt: scrap::Pixfmt::NV12,
-            range: ColorRange::Studio,
-        };
         let mut encoder = HwEncoder::new(
             EncoderCfg::HW(HwEncoderConfig {
                 name: info.name.clone(),
@@ -302,7 +282,7 @@ mod hw {
                 quality,
                 keyframe_interval: None,
             }),
-            extra,
+            false,
         )
         .unwrap();
         let mut size = 0;
@@ -315,7 +295,7 @@ mod hw {
            match c.frame(std::time::Duration::from_millis(30)) {
                Ok(frame) => {
                    let tmp_timer = Instant::now();
-                    convert_to_yuv(&frame, encoder.yuvfmt(), &mut yuv, &mut mid_data).unwrap();
+                    convert_to_yuv(&frame, encoder.yuvfmt(), &mut yuv, &mut mid_data);
                    for ref frame in encoder.encode(&yuv).unwrap() {
                        size += frame.data.len();
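
Note: the benchmark's measurement pattern survives the revert: one timer across the whole run, one per-frame timer bracketing convert + encode. The skeleton of that pattern, extracted as a runnable sketch:

use std::time::{Duration, Instant};

fn main() {
    let start = Instant::now();
    let mut encode_time = Duration::ZERO;
    for _ in 0..3 {
        let tmp_timer = Instant::now();
        // ... convert_to_yuv + encode would run here ...
        encode_time += tmp_timer.elapsed();
    }
    println!("total {:?}, spent in encode {:?}", start.elapsed(), encode_time);
}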

@@ -13,12 +13,12 @@ use std::time::{Duration, Instant};
 use std::{io, thread};
 
 use docopt::Docopt;
-use scrap::codec::{EncoderApi, EncoderCfg, ExtraEncoderCfg, Quality as Q};
+use scrap::codec::{EncoderApi, EncoderCfg, Quality as Q};
 use webm::mux;
 use webm::mux::Track;
 
 use scrap::{convert_to_yuv, vpxcodec as vpx_encode};
-use scrap::{Capturer, ColorRange, Display, TraitCapturer, STRIDE_ALIGN};
+use scrap::{Capturer, Display, TraitCapturer, STRIDE_ALIGN};
 
 const USAGE: &'static str = "
 Simple WebM screen capture.
@@ -110,10 +110,6 @@ fn main() -> io::Result<()> {
         Quality::Balanced => Q::Balanced,
         Quality::Low => Q::Low,
     };
-    let extra = ExtraEncoderCfg {
-        pixfmt: scrap::Pixfmt::YUV420P,
-        range: ColorRange::Studio,
-    };
     let mut vpx = vpx_encode::VpxEncoder::new(
         EncoderCfg::VPX(vpx_encode::VpxEncoderConfig {
             width,
@@ -122,7 +118,7 @@ fn main() -> io::Result<()> {
             codec: vpx_codec,
             keyframe_interval: None,
         }),
-        extra,
+        false,
     )
     .unwrap();
@@ -156,7 +152,7 @@ fn main() -> io::Result<()> {
        if let Ok(frame) = c.frame(Duration::from_millis(0)) {
            let ms = time.as_secs() * 1000 + time.subsec_millis() as u64;
-            convert_to_yuv(&frame, vpx.yuvfmt(), &mut yuv, &mut mid_data).unwrap();
+            convert_to_yuv(&frame, vpx.yuvfmt(), &mut yuv, &mut mid_data);
            for frame in vpx.encode(ms as i64, &yuv, STRIDE_ALIGN).unwrap() {
                vt.add_frame(frame.data, frame.pts as u64 * 1_000_000, frame.key);
            }
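
Note: the example stamps frames in milliseconds while the WebM muxer takes nanoseconds, hence the two conversions visible above. Written out as a sketch:

use std::time::Duration;

fn elapsed_ms(time: Duration) -> u64 {
    time.as_secs() * 1000 + time.subsec_millis() as u64
}

fn main() {
    let t = Duration::from_millis(1503);
    assert_eq!(elapsed_ms(t), 1503);
    assert_eq!(elapsed_ms(t) * 1_000_000, 1_503_000_000); // ns, for the add_frame call above
}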

@@ -6,10 +6,10 @@
 include!(concat!(env!("OUT_DIR"), "/aom_ffi.rs"));
 
-use crate::codec::{base_bitrate, codec_thread_num, ExtraEncoderCfg, Quality};
+use crate::codec::{base_bitrate, codec_thread_num, Quality};
 use crate::{codec::EncoderApi, EncodeFrame, STRIDE_ALIGN};
 use crate::{common::GoogleImage, generate_call_macro, generate_call_ptr_macro, Error, Result};
-use crate::{ColorRange, EncodeYuvFormat, Pixfmt};
+use crate::{EncodeYuvFormat, Pixfmt};
 use hbb_common::{
     anyhow::{anyhow, Context},
     bytes::Bytes,
@@ -53,7 +53,7 @@ pub struct AomEncoder {
     ctx: aom_codec_ctx_t,
     width: usize,
     height: usize,
-    extra: ExtraEncoderCfg,
+    i444: bool,
     yuvfmt: EncodeYuvFormat,
 }
@@ -98,7 +98,7 @@ mod webrtc {
     pub fn enc_cfg(
         i: *const aom_codec_iface,
         cfg: AomEncoderConfig,
-        extra: &ExtraEncoderCfg,
+        i444: bool,
     ) -> ResultType<aom_codec_enc_cfg> {
         let mut c = unsafe { std::mem::MaybeUninit::zeroed().assume_init() };
         call_aom!(aom_codec_enc_config_default(i, &mut c, kUsageProfile));
@@ -144,20 +144,12 @@ mod webrtc {
         c.g_lag_in_frames = kLagInFrames; // No look ahead when lag equals 0.
         // https://aomedia.googlesource.com/aom/+/refs/tags/v3.6.0/av1/common/enums.h#82
-        c.g_profile = if extra.pixfmt == Pixfmt::YUV444P {
-            1
-        } else {
-            0
-        };
+        c.g_profile = if i444 { 1 } else { 0 };
         Ok(c)
     }
 
-    pub fn set_controls(
-        ctx: *mut aom_codec_ctx_t,
-        cfg: &aom_codec_enc_cfg,
-        extra: ExtraEncoderCfg,
-    ) -> ResultType<()> {
+    pub fn set_controls(ctx: *mut aom_codec_ctx_t, cfg: &aom_codec_enc_cfg) -> ResultType<()> {
         use aom_tune_content::*;
         use aome_enc_control_id::*;
         macro_rules! call_ctl {
@@ -219,45 +211,20 @@ mod webrtc {
         call_ctl!(ctx, AV1E_SET_ENABLE_SMOOTH_INTERINTRA, 0);
         call_ctl!(ctx, AV1E_SET_ENABLE_TX64, 0);
         call_ctl!(ctx, AV1E_SET_MAX_REFERENCE_FRAMES, 3);
-        // https://github.com/chromium/chromium/blob/327564a4861822c816d35395dfb54d7e5039e6ea/media/video/av1_video_encoder.cc#L662
-        call_ctl!(
-            ctx,
-            AV1E_SET_COLOR_RANGE,
-            if extra.range == ColorRange::Full {
-                aom_color_range::AOM_CR_FULL_RANGE
-            } else {
-                aom_color_range::AOM_CR_STUDIO_RANGE
-            }
-        );
-        call_ctl!(
-            ctx,
-            AV1E_SET_COLOR_PRIMARIES,
-            aom_color_primaries::AOM_CICP_CP_BT_601
-        );
-        call_ctl!(
-            ctx,
-            AV1E_SET_TRANSFER_CHARACTERISTICS,
-            aom_transfer_characteristics::AOM_CICP_TC_BT_601
-        );
-        call_ctl!(
-            ctx,
-            AV1E_SET_MATRIX_COEFFICIENTS,
-            aom_matrix_coefficients::AOM_CICP_MC_BT_601
-        );
         Ok(())
     }
 }
 
 impl EncoderApi for AomEncoder {
-    fn new(cfg: crate::codec::EncoderCfg, extra: ExtraEncoderCfg) -> ResultType<Self>
+    fn new(cfg: crate::codec::EncoderCfg, i444: bool) -> ResultType<Self>
     where
         Self: Sized,
     {
         match cfg {
             crate::codec::EncoderCfg::AOM(config) => {
                 let i = call_aom_ptr!(aom_codec_av1_cx());
-                let c = webrtc::enc_cfg(i, config, &extra)?;
+                let c = webrtc::enc_cfg(i, config, i444)?;
                 let mut ctx = Default::default();
                 // Flag options: AOM_CODEC_USE_PSNR and AOM_CODEC_USE_HIGHBITDEPTH
@@ -269,13 +236,13 @@ impl EncoderApi for AomEncoder {
                     flags,
                     AOM_ENCODER_ABI_VERSION as _
                 ));
-                webrtc::set_controls(&mut ctx, &c, extra)?;
+                webrtc::set_controls(&mut ctx, &c)?;
                 Ok(Self {
                     ctx,
                     width: config.width as _,
                     height: config.height as _,
-                    extra,
-                    yuvfmt: Self::get_yuvfmt(config.width, config.height, &extra),
+                    i444,
+                    yuvfmt: Self::get_yuvfmt(config.width, config.height, i444),
                 })
             }
             _ => Err(anyhow!("encoder type mismatch")),
@@ -324,13 +291,16 @@ impl EncoderApi for AomEncoder {
 impl AomEncoder {
     pub fn encode(&mut self, pts: i64, data: &[u8], stride_align: usize) -> Result<EncodeFrames> {
-        let (fmt, bpp) = match self.extra.pixfmt {
-            Pixfmt::YUV444P => (aom_img_fmt::AOM_IMG_FMT_I444, 24),
-            _ => (aom_img_fmt::AOM_IMG_FMT_I420, 12),
-        };
+        let bpp = if self.i444 { 24 } else { 12 };
         if data.len() < self.width * self.height * bpp / 8 {
             return Err(Error::FailedCall("len not enough".to_string()));
         }
+        let fmt = if self.i444 {
+            aom_img_fmt::AOM_IMG_FMT_I444
+        } else {
+            aom_img_fmt::AOM_IMG_FMT_I420
+        };
         let mut image = Default::default();
         call_aom_ptr!(aom_img_wrap(
             &mut image,
@@ -405,11 +375,12 @@ impl AomEncoder {
         (q_min, q_max)
     }
 
-    fn get_yuvfmt(width: u32, height: u32, extra: &ExtraEncoderCfg) -> EncodeYuvFormat {
+    fn get_yuvfmt(width: u32, height: u32, i444: bool) -> EncodeYuvFormat {
         let mut img = Default::default();
-        let fmt = match extra.pixfmt {
-            Pixfmt::YUV444P => aom_img_fmt::AOM_IMG_FMT_I444,
-            _ => aom_img_fmt::AOM_IMG_FMT_I420,
+        let fmt = if i444 {
+            aom_img_fmt::AOM_IMG_FMT_I444
+        } else {
+            aom_img_fmt::AOM_IMG_FMT_I420
         };
         unsafe {
             aom_img_wrap(
@@ -421,9 +392,9 @@ impl AomEncoder {
                 0x1 as _,
             );
         }
+        let pixfmt = if i444 { Pixfmt::I444 } else { Pixfmt::I420 };
         EncodeYuvFormat {
-            pixfmt: extra.pixfmt,
-            range: extra.range,
+            pixfmt,
             w: img.w as _,
             h: img.h as _,
             stride: img.stride.map(|s| s as usize).to_vec(),
@@ -599,22 +570,8 @@ impl GoogleImage for Image {
     fn chroma(&self) -> Chroma {
         match self.inner().fmt {
-            aom_img_fmt::AOM_IMG_FMT_I444 => Chroma::C444,
-            _ => Chroma::C420,
-        }
-    }
-
-    fn pixfmt(&self) -> Pixfmt {
-        match self.inner().fmt {
-            aom_img_fmt::AOM_IMG_FMT_I444 => Pixfmt::YUV444P,
-            _ => Pixfmt::YUV420P,
-        }
-    }
-
-    fn range(&self) -> ColorRange {
-        match self.inner().range {
-            aom_color_range::AOM_CR_STUDIO_RANGE => ColorRange::Studio,
-            aom_color_range::AOM_CR_FULL_RANGE => ColorRange::Full,
+            aom_img_fmt::AOM_IMG_FMT_I444 => Chroma::I444,
+            _ => Chroma::I420,
         }
     }
 }
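
Note: encode() now derives both the minimum buffer size and the aom image format from the single i444 flag: 12 bits per pixel for I420, 24 for I444. The guard, as a standalone sketch with a worked size:

fn check_len(data_len: usize, width: usize, height: usize, i444: bool) -> Result<(), String> {
    let bpp = if i444 { 24 } else { 12 };
    if data_len < width * height * bpp / 8 {
        return Err("len not enough".to_string());
    }
    Ok(())
}

fn main() {
    // A 640x480 I420 frame needs 640 * 480 * 12 / 8 = 460_800 bytes.
    assert!(check_len(460_800, 640, 480, false).is_ok());
    assert!(check_len(460_799, 640, 480, false).is_err());
    // The same frame in I444 needs twice that.
    assert!(check_len(921_600, 640, 480, true).is_ok());
}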

@@ -14,7 +14,7 @@ use crate::{
     aom::{self, AomDecoder, AomEncoder, AomEncoderConfig},
     common::GoogleImage,
     vpxcodec::{self, VpxDecoder, VpxDecoderConfig, VpxEncoder, VpxEncoderConfig, VpxVideoCodecId},
-    CodecName, ColorRange, EncodeYuvFormat, ImageRgb, Pixfmt,
+    CodecName, EncodeYuvFormat, ImageRgb,
 };
@@ -23,8 +23,8 @@ use hbb_common::{
     config::PeerConfig,
     log,
     message_proto::{
-        supported_decoding::PreferCodec, video_frame, Chroma, CodecAbility, ColorAbilities,
-        ColorAbility, EncodedVideoFrames, SupportedDecoding, SupportedEncoding, VideoFrame,
+        supported_decoding::PreferCodec, video_frame, Chroma, CodecAbility, EncodedVideoFrames,
+        SupportedDecoding, SupportedEncoding, VideoFrame,
     },
     sysinfo::System,
     tokio::time::Instant,
@@ -55,14 +55,8 @@ pub enum EncoderCfg {
     HW(HwEncoderConfig),
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub struct ExtraEncoderCfg {
-    pub pixfmt: Pixfmt,
-    pub range: ColorRange,
-}
-
 pub trait EncoderApi {
-    fn new(cfg: EncoderCfg, extra: ExtraEncoderCfg) -> ResultType<Self>
+    fn new(cfg: EncoderCfg, i444: bool) -> ResultType<Self>
     where
         Self: Sized;
@@ -113,18 +107,18 @@ pub enum EncodingUpdate {
 }
 
 impl Encoder {
-    pub fn new(config: EncoderCfg, extra: ExtraEncoderCfg) -> ResultType<Encoder> {
-        log::info!("new encoder: {config:?}, extra: {extra:?}");
+    pub fn new(config: EncoderCfg, i444: bool) -> ResultType<Encoder> {
+        log::info!("new encoder: {config:?}, i444: {i444}");
         match config {
             EncoderCfg::VPX(_) => Ok(Encoder {
-                codec: Box::new(VpxEncoder::new(config, extra)?),
+                codec: Box::new(VpxEncoder::new(config, i444)?),
             }),
             EncoderCfg::AOM(_) => Ok(Encoder {
-                codec: Box::new(AomEncoder::new(config, extra)?),
+                codec: Box::new(AomEncoder::new(config, i444)?),
             }),
             #[cfg(feature = "hwcodec")]
-            EncoderCfg::HW(_) => match HwEncoder::new(config, extra) {
+            EncoderCfg::HW(_) => match HwEncoder::new(config, i444) {
                 Ok(hw) => Ok(Encoder {
                     codec: Box::new(hw),
                 }),
@@ -244,22 +238,6 @@ impl Encoder {
                 ..Default::default()
             })
             .into(),
-            color_abilities: Some(ColorAbilities {
-                vp9: Some(ColorAbility {
-                    yuv420p_bt601_full: true,
-                    yuv444p_bt601_studio: true,
-                    ..Default::default()
-                })
-                .into(),
-                av1: Some(ColorAbility {
-                    yuv420p_bt601_full: true,
-                    yuv444p_bt601_studio: true,
-                    ..Default::default()
-                })
-                .into(),
-                ..Default::default()
-            })
-            .into(),
             ..Default::default()
         };
         #[cfg(feature = "hwcodec")]
@@ -271,54 +249,20 @@ impl Encoder {
         encoding
     }
 
-    pub fn extra(config: &EncoderCfg) -> ExtraEncoderCfg {
+    pub fn use_i444(config: &EncoderCfg) -> bool {
         let decodings = PEER_DECODINGS.lock().unwrap().clone();
         let prefer_i444 = decodings
             .iter()
-            .any(|d| d.1.prefer_chroma == Chroma::C444.into());
-        let (pixfmt, range) = match config {
+            .all(|d| d.1.prefer_chroma == Chroma::I444.into());
+        let i444_useable = match config {
             EncoderCfg::VPX(vpx) => match vpx.codec {
-                VpxVideoCodecId::VP8 => (Pixfmt::YUV420P, ColorRange::Studio),
-                VpxVideoCodecId::VP9 => {
-                    if prefer_i444
-                        && decodings.len() > 0
-                        && decodings
-                            .iter()
-                            .all(|d| d.1.color_abilities.vp9.yuv444p_bt601_studio)
-                    {
-                        (Pixfmt::YUV444P, ColorRange::Studio)
-                    } else if decodings.len() > 0
-                        && decodings
-                            .iter()
-                            .all(|d| d.1.color_abilities.vp9.yuv420p_bt601_full)
-                    {
-                        (Pixfmt::YUV420P, ColorRange::Full)
-                    } else {
-                        (Pixfmt::YUV420P, ColorRange::Studio)
-                    }
-                }
+                VpxVideoCodecId::VP8 => false,
+                VpxVideoCodecId::VP9 => decodings.iter().all(|d| d.1.i444.vp9),
             },
-            EncoderCfg::AOM(_) => {
-                if prefer_i444
-                    && decodings.len() > 0
-                    && decodings
-                        .iter()
-                        .all(|d| d.1.color_abilities.av1.yuv444p_bt601_studio)
-                {
-                    (Pixfmt::YUV444P, ColorRange::Studio)
-                } else if decodings.len() > 0
-                    && decodings
-                        .iter()
-                        .all(|d| d.1.color_abilities.av1.yuv420p_bt601_full)
-                {
-                    (Pixfmt::YUV420P, ColorRange::Full)
-                } else {
-                    (Pixfmt::YUV420P, ColorRange::Studio)
-                }
-            }
-            EncoderCfg::HW(_) => (Pixfmt::NV12, ColorRange::Studio),
+            EncoderCfg::AOM(_) => decodings.iter().all(|d| d.1.i444.av1),
+            EncoderCfg::HW(_) => false,
         };
-        ExtraEncoderCfg { pixfmt, range }
+        prefer_i444 && i444_useable && !decodings.is_empty()
     }
 }
@@ -331,7 +275,12 @@ impl Decoder {
             ability_vp8: 1,
             ability_vp9: 1,
             ability_av1: 1,
-            color_abilities: Some(Self::color_abilities()).into(),
+            i444: Some(CodecAbility {
+                vp9: true,
+                av1: true,
+                ..Default::default()
+            })
+            .into(),
             prefer: prefer.into(),
             prefer_chroma: prefer_chroma.into(),
             ..Default::default()
@@ -422,7 +371,7 @@ impl Decoder {
             }
             #[cfg(feature = "hwcodec")]
             video_frame::Union::H264s(h264s) => {
-                *chroma = Some(Chroma::C420);
+                *chroma = Some(Chroma::I420);
                 if let Some(decoder) = &mut self.hw.h264 {
                     Decoder::handle_hw_video_frame(decoder, h264s, rgb, &mut self.i420)
                 } else {
@@ -431,7 +380,7 @@ impl Decoder {
             }
             #[cfg(feature = "hwcodec")]
             video_frame::Union::H265s(h265s) => {
-                *chroma = Some(Chroma::C420);
+                *chroma = Some(Chroma::I420);
                 if let Some(decoder) = &mut self.hw.h265 {
                     Decoder::handle_hw_video_frame(decoder, h265s, rgb, &mut self.i420)
                 } else {
@@ -440,7 +389,7 @@ impl Decoder {
             }
             #[cfg(feature = "mediacodec")]
             video_frame::Union::H264s(h264s) => {
-                *chroma = Some(Chroma::C420);
+                *chroma = Some(Chroma::I420);
                 if let Some(decoder) = &mut self.media_codec.h264 {
                     Decoder::handle_mediacodec_video_frame(decoder, h264s, rgb)
                 } else {
@@ -449,7 +398,7 @@ impl Decoder {
             }
             #[cfg(feature = "mediacodec")]
             video_frame::Union::H265s(h265s) => {
-                *chroma = Some(Chroma::C420);
+                *chroma = Some(Chroma::I420);
                 if let Some(decoder) = &mut self.media_codec.h265 {
                     Decoder::handle_mediacodec_video_frame(decoder, h265s, rgb)
                 } else {
@@ -551,7 +500,7 @@ impl Decoder {
     fn preference(id: Option<&str>) -> (PreferCodec, Chroma) {
         let id = id.unwrap_or_default();
         if id.is_empty() {
-            return (PreferCodec::Auto, Chroma::C420);
+            return (PreferCodec::Auto, Chroma::I420);
         }
         let options = PeerConfig::load(id).options;
         let codec = options
@@ -571,30 +520,12 @@ impl Decoder {
             PreferCodec::Auto
         };
         let chroma = if options.get("i444") == Some(&"Y".to_string()) {
-            Chroma::C444
+            Chroma::I444
         } else {
-            Chroma::C420
+            Chroma::I420
         };
         (codec, chroma)
     }
-
-    fn color_abilities() -> ColorAbilities {
-        ColorAbilities {
-            vp9: Some(ColorAbility {
-                yuv420p_bt601_full: true,
-                yuv444p_bt601_studio: true,
-                ..Default::default()
-            })
-            .into(),
-            av1: Some(ColorAbility {
-                yuv420p_bt601_full: true,
-                yuv444p_bt601_studio: true,
-                ..Default::default()
-            })
-            .into(),
-            ..Default::default()
-        }
-    }
 }
 
 #[cfg(any(feature = "hwcodec", feature = "mediacodec"))]
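
Note: the restored negotiation is all-or-nothing: 4:4:4 is used only when every connected peer both prefers it and advertises i444 support for the active codec; VP8 and hardware codecs never qualify. A dependency-free sketch of that decision (the types here are stand-ins, not RustDesk's protobuf structs):

#[allow(dead_code)]
#[derive(Clone, Copy)]
enum Codec { Vp8, Vp9, Av1, Hw }

struct PeerDecoding {
    prefers_i444: bool,
    i444_vp9: bool,
    i444_av1: bool,
}

fn use_i444(codec: Codec, decodings: &[PeerDecoding]) -> bool {
    let prefer = decodings.iter().all(|d| d.prefers_i444);
    let useable = match codec {
        Codec::Vp8 | Codec::Hw => false,
        Codec::Vp9 => decodings.iter().all(|d| d.i444_vp9),
        Codec::Av1 => decodings.iter().all(|d| d.i444_av1),
    };
    prefer && useable && !decodings.is_empty()
}

fn main() {
    let peers = [PeerDecoding { prefers_i444: true, i444_vp9: true, i444_av1: false }];
    assert!(use_i444(Codec::Vp9, &peers));
    assert!(!use_i444(Codec::Av1, &peers));
    assert!(!use_i444(Codec::Vp9, &[])); // no peers: stay 4:2:0
}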

@@ -8,7 +8,7 @@ include!(concat!(env!("OUT_DIR"), "/yuv_ffi.rs"));
 
 #[cfg(not(target_os = "ios"))]
 use crate::Frame;
-use crate::{generate_call_macro, ColorRange, EncodeYuvFormat, Pixfmt, TraitFrame};
+use crate::{generate_call_macro, EncodeYuvFormat, TraitFrame};
 use hbb_common::{bail, log, ResultType};
 
 generate_call_macro!(call_yuv, false);
@@ -212,7 +212,7 @@ pub fn convert_to_yuv(
             dst_fmt.h
         );
     }
-    if src_pixfmt == Pixfmt::BGRA || src_pixfmt == Pixfmt::RGBA {
+    if src_pixfmt == crate::Pixfmt::BGRA || src_pixfmt == crate::Pixfmt::RGBA {
         if src.len() < src_stride[0] * src_height {
             bail!(
                 "wrong src len, {} < {} * {}",
@@ -222,35 +222,22 @@ pub fn convert_to_yuv(
            );
        }
    }
-    let align = |x: usize| (x + 63) / 64 * 64;
+    let align = |x:usize| {
+        (x + 63) / 64 * 64
+    };
 
     match (src_pixfmt, dst_fmt.pixfmt) {
-        (Pixfmt::BGRA, Pixfmt::YUV420P) | (Pixfmt::RGBA, Pixfmt::YUV420P) => {
+        (crate::Pixfmt::BGRA, crate::Pixfmt::I420) | (crate::Pixfmt::RGBA, crate::Pixfmt::I420) => {
             let dst_stride_y = dst_fmt.stride[0];
             let dst_stride_uv = dst_fmt.stride[1];
             dst.resize(dst_fmt.h * dst_stride_y * 2, 0); // waste some memory to ensure memory safety
             let dst_y = dst.as_mut_ptr();
             let dst_u = dst[dst_fmt.u..].as_mut_ptr();
             let dst_v = dst[dst_fmt.v..].as_mut_ptr();
-            let mut src = src;
-            let f = match (src_pixfmt, dst_fmt.pixfmt, dst_fmt.range) {
-                (Pixfmt::BGRA, Pixfmt::YUV420P, ColorRange::Studio) => ARGBToI420,
-                (Pixfmt::RGBA, Pixfmt::YUV420P, ColorRange::Studio) => ABGRToI420,
-                (Pixfmt::BGRA, Pixfmt::YUV420P, ColorRange::Full) => ARGBToJ420,
-                (Pixfmt::RGBA, Pixfmt::YUV420P, ColorRange::Full) => {
-                    mid_data.resize(src.len(), 0);
-                    call_yuv!(ABGRToARGB(
-                        src.as_ptr(),
-                        src_stride[0] as _,
-                        mid_data.as_mut_ptr(),
-                        src_stride[0] as _,
-                        src_width as _,
-                        src_height as _,
-                    ));
-                    src = mid_data;
-                    ARGBToJ420
-                }
-                _ => bail!("unreachable"),
+            let f = if src_pixfmt == crate::Pixfmt::BGRA {
+                ARGBToI420
+            } else {
+                ABGRToI420
             };
             call_yuv!(f(
                 src.as_ptr(),
@@ -265,7 +252,7 @@ pub fn convert_to_yuv(
                 src_height as _,
             ));
         }
-        (Pixfmt::BGRA, Pixfmt::NV12) | (Pixfmt::RGBA, Pixfmt::NV12) => {
+        (crate::Pixfmt::BGRA, crate::Pixfmt::NV12) | (crate::Pixfmt::RGBA, crate::Pixfmt::NV12) => {
             let dst_stride_y = dst_fmt.stride[0];
             let dst_stride_uv = dst_fmt.stride[1];
             dst.resize(
@@ -274,7 +261,7 @@ pub fn convert_to_yuv(
             );
             let dst_y = dst.as_mut_ptr();
             let dst_uv = dst[dst_fmt.u..].as_mut_ptr();
-            let f = if src_pixfmt == Pixfmt::BGRA {
+            let f = if src_pixfmt == crate::Pixfmt::BGRA {
                 ARGBToNV12
             } else {
                 ABGRToNV12
@@ -290,19 +277,18 @@ pub fn convert_to_yuv(
                 src_height as _,
             ));
         }
-        (Pixfmt::BGRA, Pixfmt::YUV444P) | (Pixfmt::RGBA, Pixfmt::YUV444P) => {
+        (crate::Pixfmt::BGRA, crate::Pixfmt::I444) | (crate::Pixfmt::RGBA, crate::Pixfmt::I444) => {
             let dst_stride_y = dst_fmt.stride[0];
             let dst_stride_u = dst_fmt.stride[1];
             let dst_stride_v = dst_fmt.stride[2];
             dst.resize(
-                align(dst_fmt.h)
-                    * (align(dst_stride_y) + align(dst_stride_u) + align(dst_stride_v)),
+                align(dst_fmt.h) * (align(dst_stride_y) + align(dst_stride_u) + align(dst_stride_v)),
                 0,
             );
             let dst_y = dst.as_mut_ptr();
             let dst_u = dst[dst_fmt.u..].as_mut_ptr();
             let dst_v = dst[dst_fmt.v..].as_mut_ptr();
-            let src = if src_pixfmt == Pixfmt::BGRA {
+            let src = if src_pixfmt == crate::Pixfmt::BGRA {
                 src
             } else {
                 mid_data.resize(src.len(), 0);
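
Note: the destination buffer for I444 is over-allocated from 64-byte-aligned plane sizes (the comment in the I420 arm explains why: waste some memory to ensure memory safety around the libyuv writes). The restored align closure rounds up to the next multiple of 64; standalone:

fn align64(x: usize) -> usize {
    (x + 63) / 64 * 64
}

fn main() {
    assert_eq!(align64(0), 0);
    assert_eq!(align64(1), 64);
    assert_eq!(align64(64), 64);
    assert_eq!(align64(65), 128);
}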

@@ -1,5 +1,5 @@
 use crate::{
-    codec::{base_bitrate, codec_thread_num, EncoderApi, EncoderCfg, ExtraEncoderCfg},
+    codec::{base_bitrate, codec_thread_num, EncoderApi, EncoderCfg},
     hw, ImageFormat, ImageRgb, Pixfmt, HW_STRIDE_ALIGN,
 };
 use hbb_common::{
@@ -36,11 +36,10 @@ pub struct HwEncoder {
     width: u32,
     height: u32,
     bitrate: u32, //kbs
-    extra: ExtraEncoderCfg,
 }
 
 impl EncoderApi for HwEncoder {
-    fn new(cfg: EncoderCfg, extra: ExtraEncoderCfg) -> ResultType<Self>
+    fn new(cfg: EncoderCfg, _i444: bool) -> ResultType<Self>
     where
         Self: Sized,
     {
@@ -83,7 +82,6 @@ impl EncoderApi for HwEncoder {
                 width: ctx.width as _,
                 height: ctx.height as _,
                 bitrate,
-                extra,
             }),
             Err(_) => Err(anyhow!(format!("Failed to create encoder"))),
         }
@@ -122,7 +120,7 @@ impl EncoderApi for HwEncoder {
         let pixfmt = if self.pixfmt == AVPixelFormat::AV_PIX_FMT_NV12 {
             Pixfmt::NV12
         } else {
-            Pixfmt::YUV420P
+            Pixfmt::I420
         };
         let stride = self
             .encoder
@@ -133,7 +131,6 @@ impl EncoderApi for HwEncoder {
             .collect();
         crate::EncodeYuvFormat {
             pixfmt,
-            range: self.extra.range,
             w: self.encoder.ctx.width as _,
             h: self.encoder.ctx.height as _,
             stride,
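
Note: hardware encoders keep NV12, which is semi-planar: a full-resolution Y plane followed by a single interleaved UV plane at quarter chroma resolution, still 12 bits per pixel overall. Tightly packed plane offsets, as a sketch (real buffers use the encoder-reported strides instead of w):

fn nv12_offsets(w: usize, h: usize) -> (usize, usize) {
    let y_offset = 0;
    let uv_offset = w * h; // interleaved UV starts right after Y
    (y_offset, uv_offset)
}

fn main() {
    let (y, uv) = nv12_offsets(1920, 1080);
    assert_eq!(y, 0);
    assert_eq!(uv, 1920 * 1080);
    // total size is Y + UV/2, i.e. the same 12 bpp as I420
    assert_eq!(uv + 1920 * 1080 / 2, 1920 * 1080 * 3 / 2);
}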

@@ -50,7 +50,7 @@ pub mod record;
 mod vpx;
 
 #[repr(usize)]
-#[derive(Debug, Copy, Clone)]
+#[derive(Copy, Clone)]
 pub enum ImageFormat {
     Raw,
     ABGR,
@@ -124,21 +124,14 @@ pub trait TraitFrame {
 pub enum Pixfmt {
     BGRA,
     RGBA,
-    YUV420P,
+    I420,
     NV12,
-    YUV444P,
-}
-
-#[derive(Debug, PartialEq, Eq, Clone, Copy)]
-pub enum ColorRange {
-    Studio,
-    Full,
+    I444,
 }
 
 #[derive(Debug, Clone)]
 pub struct EncodeYuvFormat {
     pub pixfmt: Pixfmt,
-    pub range: ColorRange,
     pub w: usize,
     pub h: usize,
     pub stride: Vec<usize>,
@@ -299,8 +292,6 @@ pub trait GoogleImage {
     fn stride(&self) -> Vec<i32>;
     fn planes(&self) -> Vec<*mut u8>;
     fn chroma(&self) -> Chroma;
-    fn pixfmt(&self) -> Pixfmt;
-    fn range(&self) -> ColorRange;
     fn get_bytes_per_row(w: usize, fmt: ImageFormat, stride: usize) -> usize {
         let bytes_per_pixel = match fmt {
             ImageFormat::Raw => 3,
@@ -319,13 +310,9 @@ pub trait GoogleImage {
         let stride = self.stride();
         let planes = self.planes();
         unsafe {
-            match (self.pixfmt(), rgb.fmt()) {
-                (Pixfmt::YUV420P, ImageFormat::Raw) => {
-                    let f = match self.range() {
-                        ColorRange::Studio => super::I420ToRAW,
-                        ColorRange::Full => super::J420ToRAW,
-                    };
-                    f(
+            match (self.chroma(), rgb.fmt()) {
+                (Chroma::I420, ImageFormat::Raw) => {
+                    super::I420ToRAW(
                         planes[0],
                         stride[0],
                         planes[1],
@@ -338,12 +325,8 @@ pub trait GoogleImage {
                         self.height() as _,
                     );
                 }
-                (Pixfmt::YUV420P, ImageFormat::ARGB) => {
-                    let f = match self.range() {
-                        ColorRange::Studio => super::I420ToARGB,
-                        ColorRange::Full => super::J420ToARGB,
-                    };
-                    f(
+                (Chroma::I420, ImageFormat::ARGB) => {
+                    super::I420ToARGB(
                         planes[0],
                         stride[0],
                         planes[1],
@@ -356,12 +339,8 @@ pub trait GoogleImage {
                         self.height() as _,
                     );
                 }
-                (Pixfmt::YUV420P, ImageFormat::ABGR) => {
-                    let f = match self.range() {
-                        ColorRange::Studio => super::I420ToABGR,
-                        ColorRange::Full => super::J420ToABGR,
-                    };
-                    f(
+                (Chroma::I420, ImageFormat::ABGR) => {
+                    super::I420ToABGR(
                         planes[0],
                         stride[0],
                         planes[1],
@@ -374,12 +353,8 @@ pub trait GoogleImage {
                         self.height() as _,
                     );
                 }
-                (Pixfmt::YUV444P, ImageFormat::ARGB) => {
-                    let f = match self.range() {
-                        ColorRange::Studio => super::I444ToARGB,
-                        ColorRange::Full => super::J444ToARGB,
-                    };
-                    f(
+                (Chroma::I444, ImageFormat::ARGB) => {
+                    super::I444ToARGB(
                         planes[0],
                         stride[0],
                         planes[1],
@@ -392,12 +367,8 @@ pub trait GoogleImage {
                         self.height() as _,
                     );
                 }
-                (Pixfmt::YUV444P, ImageFormat::ABGR) => {
-                    let f = match self.range() {
-                        ColorRange::Studio => super::I444ToABGR,
-                        ColorRange::Full => super::J444ToABGR,
-                    };
-                    f(
+                (Chroma::I444, ImageFormat::ABGR) => {
+                    super::I444ToABGR(
                         planes[0],
                         stride[0],
                         planes[1],
@@ -411,7 +382,7 @@ pub trait GoogleImage {
                 );
             }
             // (Chroma::I444, ImageFormat::Raw), new version libyuv have I444ToRAW
-            _ => log::error!("{:?} -> {:?} is not supported", self.pixfmt(), rgb.fmt()),
+            _ => log::error!("unsupported pixfmt: {:?}", self.chroma()),
         }
     }
 }
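
Note: with range() and pixfmt() gone, to_rgb() keys the libyuv converter off (chroma, output format) alone. The selection table, sketched without FFI; the string names stand in for the actual libyuv symbols called above:

#[derive(Debug, Clone, Copy)]
enum Chroma { I420, I444 }

#[derive(Debug, Clone, Copy)]
enum ImageFormat { Raw, ARGB, ABGR }

fn converter(chroma: Chroma, fmt: ImageFormat) -> Option<&'static str> {
    match (chroma, fmt) {
        (Chroma::I420, ImageFormat::Raw) => Some("I420ToRAW"),
        (Chroma::I420, ImageFormat::ARGB) => Some("I420ToARGB"),
        (Chroma::I420, ImageFormat::ABGR) => Some("I420ToABGR"),
        (Chroma::I444, ImageFormat::ARGB) => Some("I444ToARGB"),
        (Chroma::I444, ImageFormat::ABGR) => Some("I444ToABGR"),
        // (I444, Raw) needs a newer libyuv with I444ToRAW, per the comment above
        _ => None,
    }
}

fn main() {
    assert_eq!(converter(Chroma::I444, ImageFormat::ABGR), Some("I444ToABGR"));
    assert!(converter(Chroma::I444, ImageFormat::Raw).is_none());
}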

@@ -7,8 +7,8 @@ use hbb_common::log;
 use hbb_common::message_proto::{Chroma, EncodedVideoFrame, EncodedVideoFrames, VideoFrame};
 use hbb_common::ResultType;
 
-use crate::codec::{base_bitrate, codec_thread_num, EncoderApi, ExtraEncoderCfg, Quality};
-use crate::{ColorRange, EncodeYuvFormat, GoogleImage, Pixfmt, STRIDE_ALIGN};
+use crate::codec::{base_bitrate, codec_thread_num, EncoderApi, Quality};
+use crate::{EncodeYuvFormat, GoogleImage, Pixfmt, STRIDE_ALIGN};
 
 use super::vpx::{vp8e_enc_control_id::*, vpx_codec_err_t::*, *};
 use crate::{generate_call_macro, generate_call_ptr_macro, Error, Result};
@@ -39,7 +39,7 @@ pub struct VpxEncoder {
     width: usize,
     height: usize,
     id: VpxVideoCodecId,
-    extra: ExtraEncoderCfg,
+    i444: bool,
     yuvfmt: EncodeYuvFormat,
 }
@@ -48,7 +48,7 @@ pub struct VpxDecoder {
 }
 
 impl EncoderApi for VpxEncoder {
-    fn new(cfg: crate::codec::EncoderCfg, extra: ExtraEncoderCfg) -> ResultType<Self>
+    fn new(cfg: crate::codec::EncoderCfg, i444: bool) -> ResultType<Self>
     where
         Self: Sized,
     {
@@ -102,8 +102,7 @@ impl EncoderApi for VpxEncoder {
         }
         // https://chromium.googlesource.com/webm/libvpx/+/refs/heads/main/vp9/common/vp9_enums.h#29
         // https://chromium.googlesource.com/webm/libvpx/+/refs/heads/main/vp8/vp8_cx_iface.c#282
-        c.g_profile =
-            if extra.pixfmt == Pixfmt::YUV444P && config.codec == VpxVideoCodecId::VP9 {
+        c.g_profile = if i444 && config.codec == VpxVideoCodecId::VP9 {
             1
         } else {
             0
@@ -165,22 +164,6 @@ impl EncoderApi for VpxEncoder {
                 VP9E_SET_TILE_COLUMNS as _,
                 4 as c_int
             ));
-            call_vpx!(vpx_codec_control_(
-                &mut ctx,
-                VP9E_SET_COLOR_RANGE as _,
-                if extra.range == ColorRange::Full {
-                    vpx_color_range::VPX_CR_FULL_RANGE
-                } else {
-                    vpx_color_range::VPX_CR_STUDIO_RANGE
-                }
-            ));
-            call_vpx!(vpx_codec_control_(
-                &mut ctx,
-                VP9E_SET_COLOR_SPACE as _,
-                vpx_color_space::VPX_CS_BT_601
-            ));
         } else if config.codec == VpxVideoCodecId::VP8 {
             // https://github.com/webmproject/libvpx/blob/972149cafeb71d6f08df89e91a0130d6a38c4b15/vpx/vp8cx.h#L172
             // https://groups.google.com/a/webmproject.org/g/webm-discuss/c/DJhSrmfQ61M
@@ -192,8 +175,8 @@ impl EncoderApi for VpxEncoder {
             width: config.width as _,
             height: config.height as _,
             id: config.codec,
-            extra,
-            yuvfmt: Self::get_yuvfmt(config.width, config.height, &extra),
+            i444,
+            yuvfmt: Self::get_yuvfmt(config.width, config.height, i444),
         })
     }
     _ => Err(anyhow!("encoder type mismatch")),
@@ -247,13 +230,16 @@ impl EncoderApi for VpxEncoder {
 impl VpxEncoder {
     pub fn encode(&mut self, pts: i64, data: &[u8], stride_align: usize) -> Result<EncodeFrames> {
-        let (fmt, bpp) = match self.extra.pixfmt {
-            Pixfmt::YUV444P => (vpx_img_fmt::VPX_IMG_FMT_I444, 24),
-            _ => (vpx_img_fmt::VPX_IMG_FMT_I420, 12),
-        };
+        let bpp = if self.i444 { 24 } else { 12 };
         if data.len() < self.width * self.height * bpp / 8 {
             return Err(Error::FailedCall("len not enough".to_string()));
         }
+        let fmt = if self.i444 {
+            vpx_img_fmt::VPX_IMG_FMT_I444
+        } else {
+            vpx_img_fmt::VPX_IMG_FMT_I420
+        };
         let mut image = Default::default();
         call_vpx_ptr!(vpx_img_wrap(
             &mut image,
@@ -351,11 +337,12 @@ impl VpxEncoder {
         (q_min, q_max)
     }
 
-    fn get_yuvfmt(width: u32, height: u32, extra: &ExtraEncoderCfg) -> EncodeYuvFormat {
+    fn get_yuvfmt(width: u32, height: u32, i444: bool) -> EncodeYuvFormat {
         let mut img = Default::default();
-        let fmt = match extra.pixfmt {
-            Pixfmt::YUV444P => vpx_img_fmt::VPX_IMG_FMT_I444,
-            _ => vpx_img_fmt::VPX_IMG_FMT_I420,
+        let fmt = if i444 {
+            vpx_img_fmt::VPX_IMG_FMT_I444
+        } else {
+            vpx_img_fmt::VPX_IMG_FMT_I420
         };
         unsafe {
             vpx_img_wrap(
@@ -367,9 +354,9 @@ impl VpxEncoder {
                 0x1 as _,
             );
         }
+        let pixfmt = if i444 { Pixfmt::I444 } else { Pixfmt::I420 };
         EncodeYuvFormat {
-            pixfmt: extra.pixfmt,
-            range: extra.range,
+            pixfmt,
             w: img.w as _,
             h: img.h as _,
             stride: img.stride.map(|s| s as usize).to_vec(),
@@ -594,22 +581,8 @@ impl GoogleImage for Image {
     fn chroma(&self) -> Chroma {
         match self.inner().fmt {
-            vpx_img_fmt::VPX_IMG_FMT_I444 => Chroma::C444,
-            _ => Chroma::C420,
-        }
-    }
-
-    fn pixfmt(&self) -> Pixfmt {
-        match self.inner().fmt {
-            vpx_img_fmt::VPX_IMG_FMT_I444 => Pixfmt::YUV444P,
-            _ => Pixfmt::YUV420P,
-        }
-    }
-
-    fn range(&self) -> ColorRange {
-        match self.inner().range {
-            vpx_color_range::VPX_CR_STUDIO_RANGE => ColorRange::Studio,
-            vpx_color_range::VPX_CR_FULL_RANGE => ColorRange::Full,
+            vpx_img_fmt::VPX_IMG_FMT_I444 => Chroma::I444,
+            _ => Chroma::I420,
        }
    }
 }
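
Note: the g_profile line encodes a VP9 spec fact: profile 0 is 8-bit 4:2:0 only, while profile 1 adds 8-bit 4:4:4 (and 4:2:2); VP8 has no 4:4:4 profile at all, which is why the flag is ignored for it. As a tiny sketch:

// Returns the VP9 profile for the requested subsampling (8-bit depth assumed).
fn vp9_profile(i444: bool) -> u32 {
    if i444 { 1 } else { 0 }
}

fn main() {
    assert_eq!(vp9_profile(false), 0); // 4:2:0
    assert_eq!(vp9_profile(true), 1); // 4:4:4
}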

@@ -252,8 +252,8 @@ impl<T: InvokeUiSession> Remote<T> {
             }).collect::<HashMap<usize, i32>>();
         let chroma = self.chroma.read().unwrap().clone();
         let chroma = match chroma {
-            Some(Chroma::C444) => "4:4:4",
-            Some(Chroma::C420) => "4:2:0",
+            Some(Chroma::I444) => "4:4:4",
+            Some(Chroma::I420) => "4:2:0",
             None => "-",
         };
         let chroma = Some(chroma.to_string());

@@ -1416,15 +1416,6 @@ pub fn session_change_prefer_codec(session_id: SessionID) {
     }
 }
 
-pub fn session_is_codec_support_444(session_id: SessionID, codec: String) -> SyncReturn<bool> {
-    let res = if let Some(session) = sessions::get_session_by_session_id(&session_id) {
-        session.is_codec_support_444(codec)
-    } else {
-        false
-    };
-    SyncReturn(res)
-}
-
 pub fn session_on_waiting_for_image_dialog_show(session_id: SessionID) {
     super::flutter::session_on_waiting_for_image_dialog_show(session_id);
 }

@@ -400,8 +400,8 @@ fn run(vs: VideoService) -> ResultType<()> {
     let encoder_cfg = get_encoder_config(&c, quality, last_recording);
     let mut encoder;
-    let extra = Encoder::extra(&encoder_cfg);
-    match Encoder::new(encoder_cfg.clone(), extra) {
+    let use_i444 = Encoder::use_i444(&encoder_cfg);
+    match Encoder::new(encoder_cfg.clone(), use_i444) {
         Ok(x) => encoder = x,
         Err(err) => bail!("Failed to create encoder: {}", err),
     }
@@ -456,7 +456,7 @@ fn run(vs: VideoService) -> ResultType<()> {
         if last_portable_service_running != crate::portable_service::client::running() {
             bail!("SWITCH");
         }
-        if Encoder::extra(&encoder_cfg) != extra {
+        if Encoder::use_i444(&encoder_cfg) != use_i444 {
             bail!("SWITCH");
         }
         check_privacy_mode_changed(&sp, c.privacy_mode_id)?;
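
Note: use_i444 is sampled once when the encoder is built, then re-evaluated every loop iteration; any change bails with "SWITCH" so the outer service loop rebuilds the encoder with the new setting. The control flow, as a self-contained sketch (capture_loop is a hypothetical stand-in for run()):

fn capture_loop(mut check_i444: impl FnMut() -> bool, frames: usize) -> Result<(), String> {
    let use_i444 = check_i444(); // sampled at encoder-creation time
    for _ in 0..frames {
        if check_i444() != use_i444 {
            return Err("SWITCH".to_string()); // caller recreates the encoder
        }
        // ... capture, convert, encode, send ...
    }
    Ok(())
}

fn main() {
    let mut calls = 0u32;
    let result = capture_loop(
        || {
            calls += 1;
            calls > 2 // peers start preferring 4:4:4 partway through
        },
        5,
    );
    assert_eq!(result, Err("SWITCH".to_string()));
}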

@@ -1230,17 +1230,6 @@ impl<T: InvokeUiSession> Session<T> {
     pub fn close_voice_call(&self) {
         self.send(Data::CloseVoiceCall);
     }
-
-    pub fn is_codec_support_444(&self, codec: String) -> bool {
-        let codec = codec.to_uppercase();
-        let encoding = self.lc.read().unwrap().supported_encoding.clone();
-        // decoding support following
-        match codec.as_str() {
-            "VP9" => encoding.color_abilities.vp9.yuv444p_bt601_studio,
-            "AV1" => encoding.color_abilities.av1.yuv444p_bt601_studio,
-            _ => false,
-        }
-    }
 }
 
 pub trait InvokeUiSession: Send + Sync + Clone + 'static + Sized + Default {