aom encode/decode

Signed-off-by: 21pages <pages21@163.com>
21pages 2023-05-08 20:35:24 +08:00
parent a3f3bb4751
commit e482dc3e2b
15 changed files with 780 additions and 34 deletions

View File

@ -280,15 +280,18 @@ Future<List<TRadioMenu<String>>> toolbarCodec(
try {
final Map codecsJson = jsonDecode(alternativeCodecs);
final vp8 = codecsJson['vp8'] ?? false;
final av1 = codecsJson['av1'] ?? false;
final h264 = codecsJson['h264'] ?? false;
final h265 = codecsJson['h265'] ?? false;
codecs.add(vp8);
codecs.add(av1);
codecs.add(h264);
codecs.add(h265);
} catch (e) {
debugPrint("Show Codec Preference err=$e");
}
final visible = codecs.length == 3 && (codecs[0] || codecs[1] || codecs[2]);
final visible =
codecs.length == 4 && (codecs[0] || codecs[1] || codecs[2] || codecs[3]);
if (!visible) return [];
onChanged(String? value) async {
if (value == null) return;
@ -307,10 +310,11 @@ Future<List<TRadioMenu<String>>> toolbarCodec(
return [
radio('Auto', 'auto', true),
if (isDesktop || codecs[0]) radio('VP8', 'vp8', codecs[0]),
if (codecs[0]) radio('VP8', 'vp8', codecs[0]),
radio('VP9', 'vp9', true),
if (isDesktop || codecs[1]) radio('H264', 'h264', codecs[1]),
if (isDesktop || codecs[2]) radio('H265', 'h265', codecs[2]),
if (codecs[1]) radio('AV1', 'av1', codecs[1]),
if (codecs[2]) radio('H264', 'h264', codecs[2]),
if (codecs[3]) radio('H265', 'h265', codecs[3]),
];
}

View File

@ -1322,6 +1322,11 @@ class _DisplayState extends State<_Display> {
groupValue: groupValue,
label: 'VP9',
onChanged: onChanged),
_Radio(context,
value: 'av1',
groupValue: groupValue,
label: 'AV1',
onChanged: onChanged),
...hwRadios,
]);
}

View File

@ -25,6 +25,7 @@ message VideoFrame {
EncodedVideoFrames h264s = 10;
EncodedVideoFrames h265s = 11;
EncodedVideoFrames vp8s = 12;
EncodedVideoFrames av1s = 13;
}
}
@ -85,6 +86,7 @@ message SupportedEncoding {
bool h264 = 1;
bool h265 = 2;
bool vp8 = 3;
bool av1 = 4;
}
message PeerInfo {
@ -477,6 +479,7 @@ message SupportedDecoding {
H264 = 2;
H265 = 3;
VP8 = 4;
AV1 = 5;
}
int32 ability_vp9 = 1;
@ -484,6 +487,7 @@ message SupportedDecoding {
int32 ability_h265 = 3;
PreferCodec prefer = 4;
int32 ability_vp8 = 5;
int32 ability_av1 = 6;
}
message OptionMessage {
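The protobuf additions above give AV1 its own slots in VideoFrame, SupportedEncoding, SupportedDecoding and PreferCodec. A minimal sketch of how the generated Rust types might be filled in, assuming the usual protobuf-rs field and enum accessors (not part of the commit; the identifiers below just mirror the hunks above):

let mut decoding = SupportedDecoding::default();
decoding.ability_av1 = 1;                  // new field: int32 ability_av1 = 6
decoding.prefer = PreferCodec::AV1.into(); // new enum value: AV1 = 5

let mut encoding = SupportedEncoding::default();
encoding.av1 = true;                       // new field: bool av1 = 4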

View File

@ -1399,7 +1399,9 @@ impl UserDefaultConfig {
"view_style" => self.get_string(key, "original", vec!["adaptive"]),
"scroll_style" => self.get_string(key, "scrollauto", vec!["scrollbar"]),
"image_quality" => self.get_string(key, "balanced", vec!["best", "low", "custom"]),
"codec-preference" => self.get_string(key, "auto", vec!["vp8", "vp9", "h264", "h265"]),
"codec-preference" => {
self.get_string(key, "auto", vec!["vp8", "vp9", "av1", "h264", "h265"])
}
"custom_image_quality" => self.get_double_string(key, 50.0, 10.0, 100.0),
"custom-fps" => self.get_double_string(key, 30.0, 5.0, 120.0),
_ => self

View File

@ -1,6 +1,7 @@
use docopt::Docopt;
use hbb_common::env_logger::{init_from_env, Env, DEFAULT_FILTER_ENV};
use scrap::{
aom::{AomDecoder, AomDecoderConfig, AomEncoder, AomEncoderConfig},
codec::{EncoderApi, EncoderCfg},
Capturer, Display, TraitCapturer, VpxDecoder, VpxDecoderConfig, VpxEncoder, VpxEncoderConfig,
VpxVideoCodecId::{self, *},
@ -51,6 +52,7 @@ fn main() {
width, height, bitrate_k, args.flag_hw_pixfmt
);
[VP8, VP9].map(|c| test_vpx(c, &yuvs, width, height, bitrate_k, yuv_count));
test_av1(&yuvs, width, height, bitrate_k, yuv_count);
#[cfg(feature = "hwcodec")]
{
use hwcodec::AVPixelFormat;
@ -105,34 +107,29 @@ fn test_vpx(
num_threads: (num_cpus::get() / 2) as _,
});
let mut encoder = VpxEncoder::new(config).unwrap();
let start = Instant::now();
for yuv in yuvs {
let _ = encoder
.encode(start.elapsed().as_millis() as _, yuv, STRIDE_ALIGN)
.unwrap();
let _ = encoder.flush().unwrap();
}
println!(
"{:?} encode: {:?}",
codec_id,
start.elapsed() / yuv_count as _
);
// prepare data separately
let mut vpxs = vec![];
let start = Instant::now();
let mut size = 0;
for yuv in yuvs {
for ref frame in encoder
.encode(start.elapsed().as_millis() as _, yuv, STRIDE_ALIGN)
.unwrap()
{
size += frame.data.len();
vpxs.push(frame.data.to_vec());
}
for ref frame in encoder.flush().unwrap() {
size += frame.data.len();
vpxs.push(frame.data.to_vec());
}
}
assert_eq!(vpxs.len(), yuv_count);
println!(
"{:?} encode: {:?}, {} byte",
codec_id,
start.elapsed() / yuv_count as _,
size / yuv_count
);
let mut decoder = VpxDecoder::new(VpxDecoderConfig {
codec: codec_id,
@ -151,6 +148,43 @@ fn test_vpx(
);
}
fn test_av1(yuvs: &Vec<Vec<u8>>, width: usize, height: usize, bitrate_k: usize, yuv_count: usize) {
let config = EncoderCfg::AOM(AomEncoderConfig {
width: width as _,
height: height as _,
bitrate: bitrate_k as _,
});
let mut encoder = AomEncoder::new(config).unwrap();
let start = Instant::now();
let mut size = 0;
let mut av1s = vec![];
for yuv in yuvs {
for ref frame in encoder
.encode(start.elapsed().as_millis() as _, yuv, STRIDE_ALIGN)
.unwrap()
{
size += frame.data.len();
av1s.push(frame.data.to_vec());
}
}
assert_eq!(av1s.len(), yuv_count);
println!(
"AV1 encode: {:?}, {} byte",
start.elapsed() / yuv_count as _,
size / yuv_count
);
let mut decoder = AomDecoder::new(AomDecoderConfig {
num_threads: (num_cpus::get() / 2) as _,
})
.unwrap();
let start = Instant::now();
for av1 in av1s {
let _ = decoder.decode(&av1);
let _ = decoder.flush();
}
println!("AV1 decode: {:?}", start.elapsed() / yuv_count as _);
}
#[cfg(feature = "hwcodec")]
mod hw {
use super::*;
@ -221,14 +255,19 @@ mod hw {
ctx.name = info.name;
let mut encoder = Encoder::new(ctx.clone()).unwrap();
let start = Instant::now();
let mut size = 0;
for yuv in yuvs {
let _ = encoder.encode(yuv).unwrap();
let frames = encoder.encode(yuv).unwrap();
for frame in frames {
size += frame.data.len();
}
}
println!(
"{}{}: {:?}",
"{}{}: {:?}, {} byte",
if best { "*" } else { "" },
ctx.name,
start.elapsed() / yuvs.len() as _
start.elapsed() / yuvs.len() as _,
size / yuvs.len(),
);
}

View File

@ -5,4 +5,6 @@
#include <aom/aom_external_partition.h>
#include <aom/aom_frame_buffer.h>
#include <aom/aom_encoder.h>
#include <aom/aom_decoder.h>
#include <aom/aomcx.h>
#include <aom/aomdx.h>

View File

@ -0,0 +1,625 @@
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(non_upper_case_globals)]
#![allow(improper_ctypes)]
#![allow(dead_code)]
include!(concat!(env!("OUT_DIR"), "/aom_ffi.rs"));
use crate::{codec::EncoderApi, EncodeFrame, ImageFormat, ImageRgb, STRIDE_ALIGN};
use hbb_common::{
anyhow::{anyhow, Context},
bytes::Bytes,
message_proto::{EncodedVideoFrame, EncodedVideoFrames, Message, VideoFrame},
ResultType,
};
use std::{ptr, slice};
impl Default for aom_codec_enc_cfg_t {
fn default() -> Self {
unsafe { std::mem::zeroed() }
}
}
impl Default for aom_codec_ctx_t {
fn default() -> Self {
unsafe { std::mem::zeroed() }
}
}
impl Default for aom_image_t {
fn default() -> Self {
unsafe { std::mem::zeroed() }
}
}
#[derive(Debug)]
pub enum Error {
FailedCall(String),
BadPtr(String),
}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::result::Result<(), std::fmt::Error> {
write!(f, "{:?}", self)
}
}
impl std::error::Error for Error {}
pub type Result<T> = std::result::Result<T, Error>;
macro_rules! call_aom {
($x:expr) => {{
let result = unsafe { $x }; // original expression
let result_int = unsafe { std::mem::transmute::<_, i32>(result) };
if result_int != 0 {
return Err(Error::FailedCall(format!(
"errcode={} {}:{}:{}:{}",
result_int,
module_path!(),
file!(),
line!(),
column!()
))
.into());
}
result
}};
}
macro_rules! call_aom_ptr {
($x:expr) => {{
let result = unsafe { $x }; // original expression
let result_int = unsafe { std::mem::transmute::<_, isize>(result) };
if result_int == 0 {
return Err(Error::BadPtr(format!(
"errcode={} {}:{}:{}:{}",
result_int,
module_path!(),
file!(),
line!(),
column!()
))
.into());
}
result
}};
}
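// Illustrative usage of the two wrappers above (editor's annotation, not part of this
// commit): call_aom! converts a nonzero aom_codec_err_t into Error::FailedCall and
// call_aom_ptr! converts a null pointer into Error::BadPtr, both tagged with the
// module/file/line/column of the call site, so later FFI calls can be written as:
//     let iface = call_aom_ptr!(aom_codec_av1_cx());
//     call_aom!(aom_codec_enc_config_default(iface, &mut c, AOM_USAGE_REALTIME));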
#[derive(Clone, Copy, Debug)]
pub struct AomEncoderConfig {
pub width: u32,
pub height: u32,
pub bitrate: u32,
}
pub struct AomEncoder {
ctx: aom_codec_ctx_t,
width: usize,
height: usize,
}
// https://webrtc.googlesource.com/src/+/refs/heads/main/modules/video_coding/codecs/av1/libaom_av1_encoder.cc
mod webrtc {
use super::*;
const kQpMin: u32 = 10;
const kUsageProfile: u32 = AOM_USAGE_REALTIME;
const kMinQindex: u32 = 145; // Min qindex threshold for QP scaling.
const kMaxQindex: u32 = 205; // Max qindex threshold for QP scaling.
const kBitDepth: u32 = 8;
const kLagInFrames: u32 = 0; // No look ahead.
const kRtpTicksPerSecond: i32 = 90000;
const kMinimumFrameRate: f64 = 1.0;
const kQpMax: u32 = 25; // TODO: webrtc uses a dynamic value, no more than 63
fn number_of_threads(width: u32, height: u32, number_of_cores: usize) -> u32 {
// Keep the number of encoder threads equal to the possible number of
// column/row tiles, which is (1, 2, 4, 8). See comments below for
// AV1E_SET_TILE_COLUMNS/ROWS.
if width * height >= 640 * 360 && number_of_cores > 4 {
return 4;
} else if width * height >= 320 * 180 && number_of_cores > 2 {
return 2;
} else {
// Use 2 threads for low res on ARM.
#[cfg(any(target_arch = "arm", target_arch = "aarch64", target_os = "android"))]
if (width * height >= 320 * 180 && number_of_cores > 2) {
return 2;
}
// 1 thread less than VGA.
return 1;
}
}
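// Illustrative expectations for number_of_threads above (editor's annotation, not part
// of this commit):
//     1920x1080 on 8 cores -> 4 threads
//     640x360   on 4 cores -> 2 threads
//     320x180   on 2 cores -> 1 thread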
// Only positive speeds, range for real-time coding currently is: 6 - 8.
// Lower means slower/better quality, higher means fastest/lower quality.
fn get_cpu_speed(width: u32, height: u32) -> u32 {
// aux_config_ = nullptr, kComplexityHigh
if width * height <= 320 * 180 {
8
} else if width * height <= 640 * 360 {
9
} else {
10
}
}
fn get_super_block_size(width: u32, height: u32, threads: u32) -> aom_superblock_size_t {
use aom_superblock_size::*;
let resolution = width * height;
if threads >= 4 && resolution >= 960 * 540 && resolution < 1920 * 1080 {
AOM_SUPERBLOCK_SIZE_64X64
} else {
AOM_SUPERBLOCK_SIZE_DYNAMIC
}
}
pub fn enc_cfg(
i: *const aom_codec_iface,
cfg: AomEncoderConfig,
) -> ResultType<aom_codec_enc_cfg> {
let mut c = unsafe { std::mem::MaybeUninit::zeroed().assume_init() };
call_aom!(aom_codec_enc_config_default(i, &mut c, kUsageProfile));
// Overwrite default config with input encoder settings & RTC-relevant values.
c.g_w = cfg.width;
c.g_h = cfg.height;
c.g_threads = number_of_threads(cfg.width, cfg.height, num_cpus::get());
c.g_timebase.num = 1;
c.g_timebase.den = kRtpTicksPerSecond;
c.rc_target_bitrate = cfg.bitrate; // kilobits/sec.
c.g_input_bit_depth = kBitDepth;
c.kf_mode = aom_kf_mode::AOM_KF_DISABLED;
c.rc_min_quantizer = kQpMin;
c.rc_max_quantizer = kQpMax;
c.rc_undershoot_pct = 50;
c.rc_overshoot_pct = 50;
c.rc_buf_initial_sz = 600;
c.rc_buf_optimal_sz = 600;
c.rc_buf_sz = 1000;
c.g_usage = kUsageProfile;
c.g_error_resilient = 0;
// Low-latency settings.
c.rc_end_usage = aom_rc_mode::AOM_CBR; // Constant Bit Rate (CBR) mode
c.g_pass = aom_enc_pass::AOM_RC_ONE_PASS; // One-pass rate control
c.g_lag_in_frames = kLagInFrames; // No look ahead when lag equals 0.
Ok(c)
}
pub fn set_controls(ctx: *mut aom_codec_ctx_t, cfg: &aom_codec_enc_cfg) -> ResultType<()> {
use aom_tune_content::*;
use aome_enc_control_id::*;
macro_rules! call_ctl {
($ctx:expr, $av1e:expr, $arg:expr) => {{
call_aom!(aom_codec_control($ctx, $av1e as i32, $arg));
}};
}
call_ctl!(ctx, AOME_SET_CPUUSED, get_cpu_speed(cfg.g_w, cfg.g_h));
call_ctl!(ctx, AV1E_SET_ENABLE_CDEF, 1);
call_ctl!(ctx, AV1E_SET_ENABLE_TPL_MODEL, 0);
call_ctl!(ctx, AV1E_SET_DELTAQ_MODE, 0);
call_ctl!(ctx, AV1E_SET_ENABLE_ORDER_HINT, 0);
call_ctl!(ctx, AV1E_SET_AQ_MODE, 3);
call_ctl!(ctx, AOME_SET_MAX_INTRA_BITRATE_PCT, 300);
call_ctl!(ctx, AV1E_SET_COEFF_COST_UPD_FREQ, 3);
call_ctl!(ctx, AV1E_SET_MODE_COST_UPD_FREQ, 3);
call_ctl!(ctx, AV1E_SET_MV_COST_UPD_FREQ, 3);
// kScreensharing
call_ctl!(ctx, AV1E_SET_TUNE_CONTENT, AOM_CONTENT_SCREEN);
call_ctl!(ctx, AV1E_SET_ENABLE_PALETTE, 1);
let tile_set = if cfg.g_threads == 4 && cfg.g_w == 640 && (cfg.g_h == 360 || cfg.g_h == 480)
{
AV1E_SET_TILE_ROWS
} else {
AV1E_SET_TILE_COLUMNS
};
call_ctl!(ctx, tile_set, (cfg.g_threads as f64 * 1.0f64).log2().ceil());
call_ctl!(ctx, AV1E_SET_ROW_MT, 1);
call_ctl!(ctx, AV1E_SET_ENABLE_OBMC, 0);
call_ctl!(ctx, AV1E_SET_NOISE_SENSITIVITY, 0);
call_ctl!(ctx, AV1E_SET_ENABLE_WARPED_MOTION, 0);
call_ctl!(ctx, AV1E_SET_ENABLE_GLOBAL_MOTION, 0);
call_ctl!(ctx, AV1E_SET_ENABLE_REF_FRAME_MVS, 0);
call_ctl!(
ctx,
AV1E_SET_SUPERBLOCK_SIZE,
get_super_block_size(cfg.g_w, cfg.g_h, cfg.g_threads)
);
call_ctl!(ctx, AV1E_SET_ENABLE_CFL_INTRA, 0);
call_ctl!(ctx, AV1E_SET_ENABLE_SMOOTH_INTRA, 0);
call_ctl!(ctx, AV1E_SET_ENABLE_ANGLE_DELTA, 0);
call_ctl!(ctx, AV1E_SET_ENABLE_FILTER_INTRA, 0);
call_ctl!(ctx, AV1E_SET_INTRA_DEFAULT_TX_ONLY, 1);
call_ctl!(ctx, AV1E_SET_DISABLE_TRELLIS_QUANT, 1);
call_ctl!(ctx, AV1E_SET_ENABLE_DIST_WTD_COMP, 0);
call_ctl!(ctx, AV1E_SET_ENABLE_DIFF_WTD_COMP, 0);
call_ctl!(ctx, AV1E_SET_ENABLE_DUAL_FILTER, 0);
call_ctl!(ctx, AV1E_SET_ENABLE_INTERINTRA_COMP, 0);
call_ctl!(ctx, AV1E_SET_ENABLE_INTERINTRA_WEDGE, 0);
call_ctl!(ctx, AV1E_SET_ENABLE_INTRA_EDGE_FILTER, 0);
call_ctl!(ctx, AV1E_SET_ENABLE_INTRABC, 0);
call_ctl!(ctx, AV1E_SET_ENABLE_MASKED_COMP, 0);
call_ctl!(ctx, AV1E_SET_ENABLE_PAETH_INTRA, 0);
call_ctl!(ctx, AV1E_SET_ENABLE_QM, 0);
call_ctl!(ctx, AV1E_SET_ENABLE_RECT_PARTITIONS, 0);
call_ctl!(ctx, AV1E_SET_ENABLE_RESTORATION, 0);
call_ctl!(ctx, AV1E_SET_ENABLE_SMOOTH_INTERINTRA, 0);
call_ctl!(ctx, AV1E_SET_ENABLE_TX64, 0);
call_ctl!(ctx, AV1E_SET_MAX_REFERENCE_FRAMES, 3);
Ok(())
}
}
impl EncoderApi for AomEncoder {
fn new(cfg: crate::codec::EncoderCfg) -> ResultType<Self>
where
Self: Sized,
{
match cfg {
crate::codec::EncoderCfg::AOM(config) => {
let i = call_aom_ptr!(aom_codec_av1_cx());
let c = webrtc::enc_cfg(i, config)?;
let mut ctx = Default::default();
// Flag options: AOM_CODEC_USE_PSNR and AOM_CODEC_USE_HIGHBITDEPTH
let flags: aom_codec_flags_t = 0;
call_aom!(aom_codec_enc_init_ver(
&mut ctx,
i,
&c,
flags,
AOM_ENCODER_ABI_VERSION as _
));
webrtc::set_controls(&mut ctx, &c)?;
Ok(Self {
ctx,
width: config.width as _,
height: config.height as _,
})
}
_ => Err(anyhow!("encoder type mismatch")),
}
}
fn encode_to_message(&mut self, frame: &[u8], ms: i64) -> ResultType<Message> {
let mut frames = Vec::new();
for ref frame in self
.encode(ms, frame, STRIDE_ALIGN)
.with_context(|| "Failed to encode")?
{
frames.push(Self::create_frame(frame));
}
if frames.len() > 0 {
Ok(Self::create_msg(frames))
} else {
Err(anyhow!("no valid frame"))
}
}
fn use_yuv(&self) -> bool {
true
}
fn set_bitrate(&mut self, bitrate: u32) -> ResultType<()> {
let mut new_enc_cfg = unsafe { *self.ctx.config.enc.to_owned() };
new_enc_cfg.rc_target_bitrate = bitrate;
call_aom!(aom_codec_enc_config_set(&mut self.ctx, &new_enc_cfg));
return Ok(());
}
}
impl AomEncoder {
pub fn encode(&mut self, pts: i64, data: &[u8], stride_align: usize) -> Result<EncodeFrames> {
if 2 * data.len() < 3 * self.width * self.height {
return Err(Error::FailedCall("len not enough".to_string()));
}
let mut image = Default::default();
call_aom_ptr!(aom_img_wrap(
&mut image,
aom_img_fmt::AOM_IMG_FMT_I420,
self.width as _,
self.height as _,
stride_align as _,
data.as_ptr() as _,
));
call_aom!(aom_codec_encode(
&mut self.ctx,
&image,
pts as _,
1, // Duration
0, // Flags
));
Ok(EncodeFrames {
ctx: &mut self.ctx,
iter: ptr::null(),
})
}
#[inline]
pub fn create_msg(frames: Vec<EncodedVideoFrame>) -> Message {
let mut msg_out = Message::new();
let mut vf = VideoFrame::new();
let av1s = EncodedVideoFrames {
frames: frames.into(),
..Default::default()
};
vf.set_av1s(av1s);
msg_out.set_video_frame(vf);
msg_out
}
#[inline]
fn create_frame(frame: &EncodeFrame) -> EncodedVideoFrame {
EncodedVideoFrame {
data: Bytes::from(frame.data.to_vec()),
key: frame.key,
pts: frame.pts,
..Default::default()
}
}
}
impl Drop for AomEncoder {
fn drop(&mut self) {
unsafe {
let result = aom_codec_destroy(&mut self.ctx);
if result != aom_codec_err_t::AOM_CODEC_OK {
panic!("failed to destroy aom codec");
}
}
}
}
pub struct EncodeFrames<'a> {
ctx: &'a mut aom_codec_ctx_t,
iter: aom_codec_iter_t,
}
impl<'a> Iterator for EncodeFrames<'a> {
type Item = EncodeFrame<'a>;
fn next(&mut self) -> Option<Self::Item> {
loop {
unsafe {
let pkt = aom_codec_get_cx_data(self.ctx, &mut self.iter);
if pkt.is_null() {
return None;
} else if (*pkt).kind == aom_codec_cx_pkt_kind::AOM_CODEC_CX_FRAME_PKT {
let f = &(*pkt).data.frame;
return Some(Self::Item {
data: slice::from_raw_parts(f.buf as _, f.sz as _),
key: (f.flags & AOM_FRAME_IS_KEY) != 0,
pts: f.pts,
});
} else {
// Ignore the packet.
}
}
}
}
}
pub struct AomDecoderConfig {
pub num_threads: u32,
}
pub struct AomDecoder {
ctx: aom_codec_ctx_t,
}
impl AomDecoder {
pub fn new(cfg: AomDecoderConfig) -> Result<Self> {
let i = call_aom_ptr!(aom_codec_av1_dx());
let mut ctx = Default::default();
let cfg = aom_codec_dec_cfg_t {
threads: if cfg.num_threads == 0 {
num_cpus::get() as _
} else {
cfg.num_threads
},
w: 0,
h: 0,
allow_lowbitdepth: 1,
};
call_aom!(aom_codec_dec_init_ver(
&mut ctx,
i,
&cfg,
0,
AOM_DECODER_ABI_VERSION as _,
));
Ok(Self { ctx })
}
pub fn decode(&mut self, data: &[u8]) -> Result<DecodeFrames> {
call_aom!(aom_codec_decode(
&mut self.ctx,
data.as_ptr(),
data.len() as _,
ptr::null_mut(),
));
Ok(DecodeFrames {
ctx: &mut self.ctx,
iter: ptr::null(),
})
}
/// Notify the decoder to return any pending frame
pub fn flush(&mut self) -> Result<DecodeFrames> {
call_aom!(aom_codec_decode(
&mut self.ctx,
ptr::null(),
0,
ptr::null_mut(),
));
Ok(DecodeFrames {
ctx: &mut self.ctx,
iter: ptr::null(),
})
}
}
impl Drop for AomDecoder {
fn drop(&mut self) {
unsafe {
let result = aom_codec_destroy(&mut self.ctx);
if result != aom_codec_err_t::AOM_CODEC_OK {
panic!("failed to destroy aom codec");
}
}
}
}
pub struct DecodeFrames<'a> {
ctx: &'a mut aom_codec_ctx_t,
iter: aom_codec_iter_t,
}
impl<'a> Iterator for DecodeFrames<'a> {
type Item = Image;
fn next(&mut self) -> Option<Self::Item> {
let img = unsafe { aom_codec_get_frame(self.ctx, &mut self.iter) };
if img.is_null() {
return None;
} else {
return Some(Image(img));
}
}
}
pub struct Image(*mut aom_image_t);
impl Image {
#[inline]
pub fn new() -> Self {
Self(std::ptr::null_mut())
}
#[inline]
pub fn is_null(&self) -> bool {
self.0.is_null()
}
#[inline]
pub fn width(&self) -> usize {
self.inner().d_w as _
}
#[inline]
pub fn height(&self) -> usize {
self.inner().d_h as _
}
#[inline]
pub fn format(&self) -> aom_img_fmt_t {
self.inner().fmt
}
#[inline]
pub fn inner(&self) -> &aom_image_t {
unsafe { &*self.0 }
}
#[inline]
pub fn stride(&self, iplane: usize) -> i32 {
self.inner().stride[iplane]
}
#[inline]
pub fn get_bytes_per_row(w: usize, fmt: ImageFormat, stride: usize) -> usize {
let bytes_per_pixel = match fmt {
ImageFormat::Raw => 3,
ImageFormat::ARGB | ImageFormat::ABGR => 4,
};
// https://github.com/lemenkov/libyuv/blob/6900494d90ae095d44405cd4cc3f346971fa69c9/source/convert_argb.cc#L128
// https://github.com/lemenkov/libyuv/blob/6900494d90ae095d44405cd4cc3f346971fa69c9/source/convert_argb.cc#L129
(w * bytes_per_pixel + stride - 1) & !(stride - 1)
}
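// Worked example for the rounding in get_bytes_per_row above (editor's annotation, not
// part of this commit): with fmt = ImageFormat::ARGB (4 bytes per pixel), w = 1366 and
// stride = 64, a row needs 1366 * 4 = 5464 bytes, and (5464 + 63) & !63 rounds that up
// to 5504, the next multiple of 64.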
// rgb [in/out] fmt and stride must be set in ImageRgb
pub fn to(&self, rgb: &mut ImageRgb) {
rgb.w = self.width();
rgb.h = self.height();
let bytes_per_row = Self::get_bytes_per_row(rgb.w, rgb.fmt(), rgb.stride());
rgb.raw.resize(rgb.h * bytes_per_row, 0);
let img = self.inner();
unsafe {
match rgb.fmt() {
ImageFormat::Raw => {
super::I420ToRAW(
img.planes[0],
img.stride[0],
img.planes[1],
img.stride[1],
img.planes[2],
img.stride[2],
rgb.raw.as_mut_ptr(),
bytes_per_row as _,
self.width() as _,
self.height() as _,
);
}
ImageFormat::ARGB => {
super::I420ToARGB(
img.planes[0],
img.stride[0],
img.planes[1],
img.stride[1],
img.planes[2],
img.stride[2],
rgb.raw.as_mut_ptr(),
bytes_per_row as _,
self.width() as _,
self.height() as _,
);
}
ImageFormat::ABGR => {
super::I420ToABGR(
img.planes[0],
img.stride[0],
img.planes[1],
img.stride[1],
img.planes[2],
img.stride[2],
rgb.raw.as_mut_ptr(),
bytes_per_row as _,
self.width() as _,
self.height() as _,
);
}
}
}
}
#[inline]
pub fn data(&self) -> (&[u8], &[u8], &[u8]) {
unsafe {
let img = self.inner();
let h = (img.d_h as usize + 1) & !1;
let n = img.stride[0] as usize * h;
let y = slice::from_raw_parts(img.planes[0], n);
let n = img.stride[1] as usize * (h >> 1);
let u = slice::from_raw_parts(img.planes[1], n);
let v = slice::from_raw_parts(img.planes[2], n);
(y, u, v)
}
}
}
impl Drop for Image {
fn drop(&mut self) {
if !self.0.is_null() {
unsafe { aom_img_free(self.0) };
}
}
}
unsafe impl Send for aom_codec_ctx_t {}

View File

@ -10,7 +10,11 @@ use crate::hwcodec::*;
use crate::mediacodec::{
MediaCodecDecoder, MediaCodecDecoders, H264_DECODER_SUPPORT, H265_DECODER_SUPPORT,
};
use crate::{vpxcodec::*, CodecName, ImageRgb};
use crate::{
aom::{self, AomDecoder, AomDecoderConfig, AomEncoder, AomEncoderConfig},
vpxcodec::{self, VpxDecoder, VpxDecoderConfig, VpxEncoder, VpxEncoderConfig, VpxVideoCodecId},
CodecName, ImageRgb,
};
#[cfg(not(any(target_os = "android", target_os = "ios")))]
use hbb_common::sysinfo::{System, SystemExt};
@ -43,6 +47,7 @@ pub struct HwEncoderConfig {
#[derive(Debug, Clone)]
pub enum EncoderCfg {
VPX(VpxEncoderConfig),
AOM(AomEncoderConfig),
HW(HwEncoderConfig),
}
@ -79,6 +84,7 @@ impl DerefMut for Encoder {
pub struct Decoder {
vp8: VpxDecoder,
vp9: VpxDecoder,
av1: AomDecoder,
#[cfg(feature = "hwcodec")]
hw: HwDecoders,
#[cfg(feature = "hwcodec")]
@ -101,6 +107,9 @@ impl Encoder {
EncoderCfg::VPX(_) => Ok(Encoder {
codec: Box::new(VpxEncoder::new(config)?),
}),
EncoderCfg::AOM(_) => Ok(Encoder {
codec: Box::new(AomEncoder::new(config)?),
}),
#[cfg(feature = "hwcodec")]
EncoderCfg::HW(_) => match HwEncoder::new(config) {
@ -139,6 +148,7 @@ impl Encoder {
}
let vp8_useable = decodings.len() > 0 && decodings.iter().all(|(_, s)| s.ability_vp8 > 0);
let av1_useable = decodings.len() > 0 && decodings.iter().all(|(_, s)| s.ability_av1 > 0);
#[allow(unused_mut)]
let mut h264_name = None;
#[allow(unused_mut)]
@ -167,6 +177,7 @@ impl Encoder {
.filter(|(_, s)| {
s.prefer == PreferCodec::VP9.into()
|| s.prefer == PreferCodec::VP8.into() && vp8_useable
|| s.prefer == PreferCodec::AV1.into() && av1_useable
|| s.prefer == PreferCodec::H264.into() && h264_name.is_some()
|| s.prefer == PreferCodec::H265.into() && h265_name.is_some()
})
@ -187,6 +198,7 @@ impl Encoder {
match preference {
PreferCodec::VP8 => *name = CodecName::VP8,
PreferCodec::VP9 => *name = CodecName::VP9,
PreferCodec::AV1 => *name = CodecName::AV1,
PreferCodec::H264 => *name = h264_name.map_or(auto_codec, |c| CodecName::H264(c)),
PreferCodec::H265 => *name = h265_name.map_or(auto_codec, |c| CodecName::H265(c)),
PreferCodec::Auto => *name = auto_codec,
@ -209,6 +221,7 @@ impl Encoder {
#[allow(unused_mut)]
let mut encoding = SupportedEncoding {
vp8: true,
av1: true,
..Default::default()
};
#[cfg(feature = "hwcodec")]
@ -227,6 +240,7 @@ impl Decoder {
let mut decoding = SupportedDecoding {
ability_vp8: 1,
ability_vp9: 1,
ability_av1: 1,
prefer: id_for_perfer
.map_or(PreferCodec::Auto, |id| Self::codec_preference(id))
.into(),
@ -267,9 +281,14 @@ impl Decoder {
num_threads: (num_cpus::get() / 2) as _,
})
.unwrap();
let av1 = AomDecoder::new(AomDecoderConfig {
num_threads: (num_cpus::get() / 2) as _,
})
.unwrap();
Decoder {
vp8,
vp9,
av1,
#[cfg(feature = "hwcodec")]
hw: if enable_hwcodec_option() {
HwDecoder::new_decoders()
@ -300,6 +319,9 @@ impl Decoder {
video_frame::Union::Vp9s(vp9s) => {
Decoder::handle_vpxs_video_frame(&mut self.vp9, vp9s, rgb)
}
video_frame::Union::Av1s(av1s) => {
Decoder::handle_av1s_video_frame(&mut self.av1, av1s, rgb)
}
#[cfg(feature = "hwcodec")]
video_frame::Union::H264s(h264s) => {
if let Some(decoder) = &mut self.hw.h264 {
@ -342,7 +364,7 @@ impl Decoder {
vpxs: &EncodedVideoFrames,
rgb: &mut ImageRgb,
) -> ResultType<bool> {
let mut last_frame = Image::new();
let mut last_frame = vpxcodec::Image::new();
for vpx in vpxs.frames.iter() {
for frame in decoder.decode(&vpx.data)? {
drop(last_frame);
@ -361,6 +383,31 @@ impl Decoder {
}
}
// rgb [in/out] fmt and stride must be set in ImageRgb
fn handle_av1s_video_frame(
decoder: &mut AomDecoder,
av1s: &EncodedVideoFrames,
rgb: &mut ImageRgb,
) -> ResultType<bool> {
let mut last_frame = aom::Image::new();
for av1 in av1s.frames.iter() {
for frame in decoder.decode(&av1.data)? {
drop(last_frame);
last_frame = frame;
}
}
for frame in decoder.flush()? {
drop(last_frame);
last_frame = frame;
}
if last_frame.is_null() {
Ok(false)
} else {
last_frame.to(rgb);
Ok(true)
}
}
// rgb [in/out] fmt and stride must be set in ImageRgb
#[cfg(feature = "hwcodec")]
fn handle_hw_video_frame(
@ -404,6 +451,8 @@ impl Decoder {
PreferCodec::VP8
} else if codec == "vp9" {
PreferCodec::VP9
} else if codec == "av1" {
PreferCodec::AV1
} else if codec == "h264" {
PreferCodec::H264
} else if codec == "h265" {

View File

@ -41,6 +41,7 @@ pub use self::convert::*;
pub const STRIDE_ALIGN: usize = 64; // commonly used in libvpx vpx_img_alloc caller
pub const HW_STRIDE_ALIGN: usize = 0; // recommended by av_frame_get_buffer
pub mod aom;
pub mod record;
mod vpx;
@ -132,6 +133,7 @@ pub fn is_cursor_embedded() -> bool {
pub enum CodecName {
VP8,
VP9,
AV1,
H264(String),
H265(String),
}
@ -140,6 +142,7 @@ pub enum CodecName {
pub enum CodecFormat {
VP8,
VP9,
AV1,
H264,
H265,
Unknown,
@ -150,6 +153,7 @@ impl From<&VideoFrame> for CodecFormat {
match it.union {
Some(video_frame::Union::Vp8s(_)) => CodecFormat::VP8,
Some(video_frame::Union::Vp9s(_)) => CodecFormat::VP9,
Some(video_frame::Union::Av1s(_)) => CodecFormat::AV1,
Some(video_frame::Union::H264s(_)) => CodecFormat::H264,
Some(video_frame::Union::H265s(_)) => CodecFormat::H265,
_ => CodecFormat::Unknown,
@ -162,6 +166,7 @@ impl From<&CodecName> for CodecFormat {
match value {
CodecName::VP8 => Self::VP8,
CodecName::VP9 => Self::VP9,
CodecName::AV1 => Self::AV1,
CodecName::H264(_) => Self::H264,
CodecName::H265(_) => Self::H265,
}
@ -173,6 +178,7 @@ impl ToString for CodecFormat {
match self {
CodecFormat::VP8 => "VP8".into(),
CodecFormat::VP9 => "VP9".into(),
CodecFormat::AV1 => "AV1".into(),
CodecFormat::H264 => "H264".into(),
CodecFormat::H265 => "H265".into(),
CodecFormat::Unknown => "Unknown".into(),

View File

@ -844,7 +844,7 @@ impl<T: InvokeUiSession> Remote<T> {
use video_frame::Union::*;
match &vf.union {
Some(vf) => match vf {
Vp8s(f) | Vp9s(f) | H264s(f) | H265s(f) => f.frames.iter().any(|e| e.key),
Vp8s(f) | Vp9s(f) | Av1s(f) | H264s(f) | H265s(f) => f.frames.iter().any(|e| e.key),
_ => false,
},
None => false,

View File

@ -1100,8 +1100,8 @@ pub fn session_send_note(id: String, note: String) {
pub fn session_alternative_codecs(id: String) -> String {
if let Some(session) = SESSIONS.read().unwrap().get(&id) {
let (vp8, h264, h265) = session.alternative_codecs();
let msg = HashMap::from([("vp8", vp8), ("h264", h264), ("h265", h265)]);
let (vp8, av1, h264, h265) = session.alternative_codecs();
let msg = HashMap::from([("vp8", vp8), ("av1", av1), ("h264", h264), ("h265", h265)]);
serde_json::ser::to_string(&msg).unwrap_or("".to_owned())
} else {
String::new()

View File

@ -35,6 +35,7 @@ use hbb_common::{
#[cfg(not(windows))]
use scrap::Capturer;
use scrap::{
aom::AomEncoderConfig,
codec::{Encoder, EncoderCfg, HwEncoderConfig},
record::{Recorder, RecorderContext},
vpxcodec::{VpxEncoderConfig, VpxVideoCodecId},
@ -549,6 +550,11 @@ fn run(sp: GenericService) -> ResultType<()> {
num_threads: (num_cpus::get() / 2) as _,
})
}
scrap::CodecName::AV1 => EncoderCfg::AOM(AomEncoderConfig {
width: c.width as _,
height: c.height as _,
bitrate: bitrate as _,
}),
};
let mut encoder;
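Putting the pieces together, a minimal sketch of how the AV1 branch above feeds the shared encoder path, assuming Encoder::new takes an EncoderCfg as in codec.rs and that frame holds a captured I420 buffer (not part of the commit; names and constants are illustrative):

use scrap::{aom::AomEncoderConfig, codec::{Encoder, EncoderCfg}};

let cfg = EncoderCfg::AOM(AomEncoderConfig {
    width: 1920,
    height: 1080,
    bitrate: 2000, // kilobits/sec, becomes rc_target_bitrate
});
let mut encoder = Encoder::new(cfg)?;            // dispatches to AomEncoder::new
let msg = encoder.encode_to_message(&frame, 0)?; // VideoFrame carrying av1s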

View File

@ -162,7 +162,7 @@ class Header: Reactor.Component {
function renderDisplayPop() {
var codecs = handler.alternative_codecs();
var show_codec = codecs[0] || codecs[1] || codecs[2];
var show_codec = codecs[0] || codecs[1] || codecs[2] || codecs[3];
var cursor_embedded = false;
if ((pi.displays || []).length > 0) {
@ -188,8 +188,9 @@ class Header: Reactor.Component {
<li #auto type="codec-preference"><span>{svg_checkmark}</span>Auto</li>
{codecs[0] ? <li #vp8 type="codec-preference"><span>{svg_checkmark}</span>VP8</li> : ""}
<li #vp9 type="codec-preference"><span>{svg_checkmark}</span>VP9</li>
{codecs[1] ? <li #h264 type="codec-preference"><span>{svg_checkmark}</span>H264</li> : ""}
{codecs[2] ? <li #h265 type="codec-preference"><span>{svg_checkmark}</span>H265</li> : ""}
{codecs[1] ? <li #av1 type="codec-preference"><span>{svg_checkmark}</span>AV1</li> : ""}
{codecs[2] ? <li #h264 type="codec-preference"><span>{svg_checkmark}</span>H264</li> : ""}
{codecs[3] ? <li #h265 type="codec-preference"><span>{svg_checkmark}</span>H265</li> : ""}
</div> : ""}
<div .separator />
{!cursor_embedded && <li #show-remote-cursor .toggle-option><span>{svg_checkmark}</span>{translate('Show remote cursor')}</li>}

View File

@ -520,9 +520,10 @@ impl SciterSession {
}
fn alternative_codecs(&self) -> Value {
let (vp8, h264, h265) = self.0.alternative_codecs();
let (vp8, av1, h264, h265) = self.0.alternative_codecs();
let mut v = Value::array(0);
v.push(vp8);
v.push(av1);
v.push(h264);
v.push(h265);
v

View File

@ -225,16 +225,18 @@ impl<T: InvokeUiSession> Session<T> {
true
}
pub fn alternative_codecs(&self) -> (bool, bool, bool) {
pub fn alternative_codecs(&self) -> (bool, bool, bool, bool) {
let decoder = scrap::codec::Decoder::supported_decodings(None);
let mut vp8 = decoder.ability_vp8 > 0;
let mut av1 = decoder.ability_av1 > 0;
let mut h264 = decoder.ability_h264 > 0;
let mut h265 = decoder.ability_h265 > 0;
let enc = &self.lc.read().unwrap().supported_encoding;
vp8 = vp8 && enc.vp8;
av1 = av1 && enc.av1;
h264 = h264 && enc.h264;
h265 = h265 && enc.h265;
(vp8, h264, h265)
(vp8, av1, h264, h265)
}
pub fn change_prefer_codec(&self) {