media: Implement Player for ohos backend (#43208)

Implement the `Player` trait in the ohos backend using HarmonyOS MediaKit's [AVPlayer](https://developer.huawei.com/consumer/en/doc/harmonyos-references/capi-avplayer-h).

The modular design of `VideoSink`, `InnerPlayer`, and `MediaSource` is
taken from the GStreamer backend.
Only HarmonyOS SDK API 21 and newer are supported, because
`OH_AVPlayer_SetDataSource` only started to be exposed in API 21.

Testing: N/A, as there are no platform-specific tasks.
Fixes: N/A; with this change we can play video on HarmonyOS phones using `<video>`.

---------

Signed-off-by: rayguo17 <rayguo17@gmail.com>
This commit is contained in:
TIN TUN AUNG
2026-04-08 16:48:10 +08:00
committed by GitHub
parent 1b336760ae
commit 280d984d3b
15 changed files with 1660 additions and 125 deletions

70
Cargo.lock generated
View File

@@ -3521,6 +3521,8 @@ version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d"
dependencies = [
"allocator-api2",
"equivalent",
"foldhash 0.2.0",
]
@@ -4874,6 +4876,15 @@ dependencies = [
"imgref",
]
[[package]]
name = "lru"
version = "0.16.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1dc47f592c06f33f8e3aea9591776ec7c9f9e4124778ff8a3c3b87159f7e593"
dependencies = [
"hashbrown 0.16.0",
]
[[package]]
name = "mach2"
version = "0.6.0"
@@ -5869,6 +5880,15 @@ version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "060ca76c500c8ffde25a89724d3476d4faca3b55fa3fe02cd8a3607e95b0861d"
[[package]]
name = "ohos-media-sys"
version = "0.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6384d6e3befdaaec0206772e05a6b9222188e8ce023f6a164ca96e1091ef2e87"
dependencies = [
"ohos-sys-opaque-types",
]
[[package]]
name = "ohos-sys-opaque-types"
version = "0.1.9"
@@ -5899,6 +5919,15 @@ dependencies = [
"ohos-sys-opaque-types",
]
[[package]]
name = "ohos-window-sys"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f801f5de727bd01cfdcd6e9b0de9d6d3e674e0ac73ea7ee202c0bcd75fc1daf7"
dependencies = [
"ohos-sys-opaque-types",
]
[[package]]
name = "once_cell"
version = "1.21.4"
@@ -6712,6 +6741,12 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca45419789ae5a7899559e9512e58ca889e41f04f1f2445e9f4b290ceccd1d08"
[[package]]
name = "rangemap"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "973443cf09a9c8656b574a866ab68dfa19f0867d0340648c7d2f6a71b8a8ea68"
[[package]]
name = "rav1e"
version = "0.7.1"
@@ -7450,6 +7485,7 @@ dependencies = [
"servo-media",
"servo-media-dummy",
"servo-media-gstreamer",
"servo-media-ohos",
"servo-media-thread",
"servo-net",
"servo-net-traits",
@@ -8204,6 +8240,31 @@ dependencies = [
"servo-media-player",
]
[[package]]
name = "servo-media-ohos"
version = "0.1.0"
dependencies = [
"crossbeam-channel",
"ipc-channel",
"libc",
"log",
"lru",
"mime",
"ohos-media-sys",
"ohos-sys-opaque-types",
"ohos-window-sys",
"once_cell",
"rangemap",
"serde_json",
"servo-media",
"servo-media-audio",
"servo-media-player",
"servo-media-streams",
"servo-media-traits",
"servo-media-webrtc",
"yuv",
]
[[package]]
name = "servo-media-player"
version = "0.1.0"
@@ -11932,6 +11993,15 @@ dependencies = [
"synstructure",
]
[[package]]
name = "yuv"
version = "0.8.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47d3a7e2cda3061858987ee2fb028f61695f5ee13f9490d75be6c3900df9a4ea"
dependencies = [
"num-traits",
]
[[package]]
name = "zbus"
version = "5.14.0"

View File

@@ -278,6 +278,7 @@ servo-media-gstreamer = { version = "0.1.0", path = "components/media/backends/g
servo-media-gstreamer-render = { version = "0.1.0", path = "components/media/backends/gstreamer/render" }
servo-media-gstreamer-render-android = { version = "0.1.0", path = "components/media/backends/gstreamer/render-android" }
servo-media-gstreamer-render-unix = { version = "0.1.0", path = "components/media/backends/gstreamer/render-unix" }
servo-media-ohos = { version = "0.1.0", path = "components/media/backends/ohos" }
servo-media-player = { version = "0.1.0", path = "components/media/player" }
servo-media-streams = { version = "0.1.0", path = "components/media/streams" }
servo-media-traits = { version = "0.1.0", path = "components/media/traits" }

View File

@@ -24,3 +24,14 @@ mime = "0.3.13"
once_cell = "1.18.0"
log = "0.4"
ohos-media-sys = { version = "0.0.5", features = ["api-21"] }
ohos-window-sys = { version = "0.1.3", features = ["api-13"] }
ohos-sys-opaque-types = { version = "0.1.7" }
ipc-channel = { workspace = true }
crossbeam-channel = { workspace = true }
lru = "0.16.3"
rangemap = "1.6.0"
libc = "0.2"
yuv = "0.8.11"
[build-dependencies]
serde_json.workspace = true

View File

@@ -0,0 +1,41 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::path;
// build.rs
// build.rs
//
// Detects the HarmonyOS SDK API level at build time and emits one
// `sdk_api_N` cfg flag for every supported API version up to the detected
// one, so source files can gate code on e.g. `#[cfg(sdk_api_21)]`.
fn main() {
    // Declare every cfg we may emit so rustc's `--check-cfg` does not warn.
    println!("cargo:rustc-check-cfg=cfg(sdk_api_21)");
    println!("cargo:rustc-check-cfg=cfg(sdk_api_22)");
    println!("cargo:rustc-check-cfg=cfg(sdk_api_23)");
    let target_env = std::env::var("CARGO_CFG_TARGET_ENV").unwrap();
    if target_env != "ohos" {
        // Not building for OpenHarmony; nothing to detect.
        return;
    }
    let sdk_path_name = std::env::var("OHOS_SDK_NATIVE").expect("OHOS_SDK_NATIVE must be set");
    let sdk_path = path::PathBuf::from(sdk_path_name);
    // The SDK ships a metadata file describing its API level.
    let meta_file_path = sdk_path.join("oh-uni-package.json");
    let meta_info = serde_json::from_str::<serde_json::Value>(
        &std::fs::read_to_string(&meta_file_path).expect("Failed to read oh-uni-package.json"),
    )
    .expect("Failed to parse oh-uni-package.json");
    let api_version_str = meta_info
        .get("apiVersion")
        .expect("Unable to find apiVersion in oh-uni-package.json")
        .as_str()
        .expect("apiVersion should be a string");
    let api_version = api_version_str
        .parse::<u32>()
        .expect("apiVersion should be a valid integer");
    // The media backend requires at least API 21 (`OH_AVPlayer_SetDataSource`
    // is first exposed there). Emit one cfg per version from 21 up to the
    // detected level so code can target "API N and above".
    let low_api_version = 21;
    if api_version >= low_api_version {
        for version in low_api_version..=api_version {
            println!("cargo:rustc-cfg=sdk_api_{}", version);
        }
    }
    println!("cargo:warning=Detected API version: {}", api_version);
    // Re-run when the SDK location changes, and also when the SDK metadata
    // file itself changes: an in-place SDK upgrade would otherwise not
    // trigger a rebuild and leave stale `sdk_api_*` cfgs behind.
    println!("cargo:rerun-if-env-changed=OHOS_SDK_NATIVE");
    println!("cargo:rerun-if-changed={}", meta_file_path.display());
}

View File

@@ -2,28 +2,30 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::{
collections::HashMap,
sync::{
atomic::AtomicUsize,
mpsc::{self, Sender},
Arc, Mutex, Weak,
},
thread,
};
use std::collections::HashMap;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::mpsc::{self, Sender};
use std::sync::{Arc, Mutex, Weak};
use std::thread;
use log::warn;
use log::{debug, warn};
use mime::Mime;
use servo_media::player::StreamType;
use servo_media::{
Backend, BackendInit, BackendMsg, ClientContextId, MediaInstance, SupportsMediaType,
Backend, BackendInit, BackendMsg, ClientContextId, MediaInstance, MediaInstanceError,
SupportsMediaType,
};
use crate::{player::OhosAVPlayer, registry_scanner::OHOS_REGISTRY_SCANNER};
use crate::player::OhosAvPlayer;
use crate::registry_scanner::OHOS_REGISTRY_SCANNER;
mod ohos_media;
mod player;
mod registry_scanner;
type MediaInstanceMap = HashMap<ClientContextId, Vec<(usize, Weak<Mutex<dyn MediaInstance>>)>>;
pub struct OhosBackend {
instances: Arc<Mutex<HashMap<ClientContextId, Vec<(usize, Weak<Mutex<dyn MediaInstance>>)>>>>,
instances: Arc<Mutex<MediaInstanceMap>>,
next_instance_id: AtomicUsize,
backend_chan: Arc<Mutex<Sender<BackendMsg>>>,
}
@@ -32,7 +34,7 @@ impl OhosBackend {
fn media_instance_action(
&self,
id: &ClientContextId,
cb: &dyn Fn(&dyn MediaInstance) -> Result<(), ()>,
cb: &dyn Fn(&dyn MediaInstance) -> Result<(), MediaInstanceError>,
) {
let mut instances = self.instances.lock().unwrap();
match instances.get_mut(id) {
@@ -48,16 +50,14 @@ impl OhosBackend {
}),
None => {
warn!("Trying to exec media action on an unknown client context");
}
},
}
}
}
impl BackendInit for OhosBackend {
fn init() -> Box<dyn Backend> {
let instances: Arc<
Mutex<HashMap<ClientContextId, Vec<(usize, Weak<Mutex<dyn MediaInstance>>)>>>,
> = Arc::new(Mutex::new(HashMap::new()));
let instances: Arc<Mutex<MediaInstanceMap>> = Arc::new(Mutex::new(HashMap::new()));
let instances_ = instances.clone();
let (backend_chan, recvr) = mpsc::channel();
@@ -65,7 +65,11 @@ impl BackendInit for OhosBackend {
.name("OhosBackend ShutdownThread".to_owned())
.spawn(move || {
match recvr.recv().unwrap() {
BackendMsg::Shutdown { context, id, tx_ack } => {
BackendMsg::Shutdown {
context,
id,
tx_ack,
} => {
let mut map = instances_.lock().unwrap();
if let Some(vec) = map.get_mut(&context) {
vec.retain(|m| m.0 != id);
@@ -74,15 +78,15 @@ impl BackendInit for OhosBackend {
}
}
let _ = tx_ack.send(());
}
},
};
})
.unwrap();
return Box::new(OhosBackend {
Box::new(OhosBackend {
next_instance_id: AtomicUsize::new(0),
instances,
backend_chan: Arc::new(Mutex::new(backend_chan)),
});
})
}
}
@@ -92,7 +96,7 @@ impl BackendInit for OhosBackend {
impl Backend for OhosBackend {
fn create_player(
&self,
id: &servo_media::ClientContextId,
context_id: &servo_media::ClientContextId,
stream_type: servo_media_player::StreamType,
sender: servo_media_player::ipc_channel::ipc::IpcSender<servo_media_player::PlayerEvent>,
video_renderer: Option<
@@ -101,9 +105,33 @@ impl Backend for OhosBackend {
audio_renderer: Option<
std::sync::Arc<std::sync::Mutex<dyn servo_media_player::audio::AudioRenderer>>,
>,
gl_context: Box<dyn servo_media_player::context::PlayerGLContext>,
_gl_context: Box<dyn servo_media_player::context::PlayerGLContext>,
) -> std::sync::Arc<std::sync::Mutex<dyn servo_media_player::Player>> {
Arc::new(Mutex::new(OhosAVPlayer::new()))
// TODO: Choose different Player Impl depends on stream_type
match stream_type {
StreamType::Stream => {
todo!("Stream Type currently not supported!")
},
StreamType::Seekable => (),
}
if let Some(_audio_renderer) = audio_renderer {
warn!("Audio Rendering Currently Not Supported!");
}
let player_id = self.next_instance_id.fetch_add(1, Ordering::Relaxed);
debug!("Creating Player in OhosBackend");
let mut player = OhosAvPlayer::new(
player_id,
*context_id,
sender,
video_renderer,
self.backend_chan.clone(),
);
player.setup_info_event();
player.setup_data_source();
Arc::new(Mutex::new(player))
}
fn create_audiostream(&self) -> servo_media_streams::MediaStreamId {
@@ -120,7 +148,7 @@ impl Backend for OhosBackend {
fn create_stream_and_socket(
&self,
ty: servo_media_streams::MediaStreamType,
_ty: servo_media_streams::MediaStreamType,
) -> (
Box<dyn servo_media_streams::MediaSocket>,
servo_media_streams::MediaStreamId,
@@ -130,22 +158,22 @@ impl Backend for OhosBackend {
fn create_audioinput_stream(
&self,
set: servo_media_streams::capture::MediaTrackConstraintSet,
_set: servo_media_streams::capture::MediaTrackConstraintSet,
) -> Option<servo_media_streams::MediaStreamId> {
todo!()
}
fn create_videoinput_stream(
&self,
set: servo_media_streams::capture::MediaTrackConstraintSet,
_set: servo_media_streams::capture::MediaTrackConstraintSet,
) -> Option<servo_media_streams::MediaStreamId> {
todo!()
}
fn create_audio_context(
&self,
id: &servo_media::ClientContextId,
options: servo_media_audio::context::AudioContextOptions,
_id: &servo_media::ClientContextId,
_options: servo_media_audio::context::AudioContextOptions,
) -> Result<
std::sync::Arc<std::sync::Mutex<servo_media_audio::context::AudioContext>>,
servo_media_audio::sink::AudioSinkError,
@@ -155,7 +183,7 @@ impl Backend for OhosBackend {
fn create_webrtc(
&self,
signaller: Box<dyn servo_media_webrtc::WebRtcSignaller>,
_signaller: Box<dyn servo_media_webrtc::WebRtcSignaller>,
) -> servo_media_webrtc::WebRtcController {
todo!()
}

View File

@@ -0,0 +1,431 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::ffi::c_void;
use libc::pollfd;
use log::{debug, warn};
use ohos_media_sys::avformat::OH_AVFormat;
use ohos_media_sys::avplayer::{
OH_AVPlayer_Create, OH_AVPlayer_Pause, OH_AVPlayer_Play, OH_AVPlayer_Prepare,
OH_AVPlayer_Release, OH_AVPlayer_Seek, OH_AVPlayer_SetOnInfoCallback,
OH_AVPlayer_SetPlaybackSpeed, OH_AVPlayer_SetVideoSurface, OH_AVPlayer_SetVolume,
OH_AVPlayer_Stop,
};
use ohos_media_sys::avplayer_base::{
AVPlaybackSpeed, AVPlayerOnInfoType, AVPlayerSeekMode, AVPlayerState, OH_AVPlayer,
};
use ohos_sys_opaque_types::{OH_NativeImage, OHNativeWindow, OHNativeWindowBuffer};
use ohos_window_sys::native_buffer::native_buffer::OH_NativeBuffer_Usage;
use ohos_window_sys::native_image::{
OH_ConsumerSurface_Create, OH_ConsumerSurface_SetDefaultUsage,
OH_NativeImage_AcquireNativeWindow, OH_NativeImage_AcquireNativeWindowBuffer,
OH_NativeImage_Destroy, OH_NativeImage_ReleaseNativeWindowBuffer,
OH_NativeImage_SetOnFrameAvailableListener, OH_OnFrameAvailableListener,
};
use ohos_window_sys::native_window::{
OH_NativeWindow_DestroyNativeWindow, OH_NativeWindow_GetBufferHandleFromNative,
OH_NativeWindow_NativeObjectReference, OH_NativeWindow_NativeObjectUnreference,
};
#[cfg(not(sdk_api_21))]
use crate::ohos_media::dummy_source::MediaSourceWrapper;
#[cfg(sdk_api_21)]
use crate::ohos_media::source::MediaSourceWrapper;
/// Metadata for one decoded video frame, copied out of the native
/// `BufferHandle` in `OhosPlayer::acquire_buffer`. Must be returned via
/// `OhosPlayer::release_buffer` so the underlying window buffer is
/// released back to the consumer surface.
/// NOTE(review): `#[repr(C)]` suggests this is handed across an FFI
/// boundary elsewhere — confirm before reordering fields.
#[repr(C)]
#[derive(Debug)]
pub struct FrameInfo {
    pub fd: i32,        // buffer file descriptor (BufferHandle.fd)
    pub width: i32,     // frame width in pixels
    pub height: i32,    // frame height in pixels
    pub stride: i32,    // row stride (BufferHandle.stride); units per native docs
    pub size: i32,      // total buffer size in bytes
    pub format: i32,    // native pixel-format code (BufferHandle.format)
    pub vir_addr: *mut u8, // CPU-visible mapped address (BufferHandle.virAddr)
    // Native window buffer this frame came from; needed by release_buffer.
    native_window_buffer: *mut OHNativeWindowBuffer,
    // Sync fence fd that acquire_buffer polled before exposing the frame.
    fence_fd: i32,
}
/// Owner of a native MediaKit `OH_AVPlayer` plus the consumer surface
/// (native image) used to pull decoded frames back for rendering.
pub struct OhosPlayer {
    // Consumer surface producing decoded frames; None until
    // setup_window_buffer_listener has run.
    native_image: Option<*mut OH_NativeImage>,
    // Native player handle, created in `new`, released in Drop.
    ohos_av_player: *mut OH_AVPlayer,
    // Pull-based data source feeding the player; None until set_source.
    media_data_source: Option<MediaSourceWrapper>,
    // Raw pointer to the boxed OnInfo callback leaked with Box::into_raw
    // (reclaimed in Drop).
    event_info_callback_closure: Option<*mut Box<dyn Fn(AVPlayerOnInfoType, *mut OH_AVFormat)>>,
    // Raw pointer to the boxed frame-available listener (reclaimed in Drop).
    frame_available_callback_closure: Option<*mut Box<dyn Fn()>>,
    // Window acquired from the consumer surface, handed to the player as
    // its video surface.
    native_window: Option<*mut OHNativeWindow>,
    // One-shot latches for the staged initialization sequence.
    has_set_source_size: bool,
    has_set_window: bool,
    // Last volume applied to the player (0.0 when muted).
    volume: f64,
    // Last playback rate requested (before snapping to supported speeds).
    playback_rate: f64,
    // Player state as last reported through set_state.
    state: AVPlayerState,
}
impl OhosPlayer {
    /// Allocate a fresh native `OH_AVPlayer`. The surface, data source and
    /// callbacks are attached later through the staged initialization
    /// (see `initialize_check_state_action`).
    pub fn new() -> Self {
        debug!("Creating OHOS Player!");
        OhosPlayer {
            native_image: None,
            ohos_av_player: unsafe { OH_AVPlayer_Create() },
            media_data_source: None,
            event_info_callback_closure: None,
            frame_available_callback_closure: None,
            native_window: None,
            has_set_source_size: false,
            has_set_window: false,
            volume: 1.0,
            playback_rate: 1.0,
            state: AVPlayerState::AV_IDLE,
        }
    }

    /// Record the state reported by the native layer and run any pending
    /// one-shot initialization action it may unlock.
    pub fn set_state(&mut self, state: AVPlayerState) {
        self.state = state;
        self.initialize_check_state_action();
    }

    /// This function would try to run some action during initialize phase
    /// each action should only be run once.
    /// Should try to check whether to run each action when
    /// 1. state changed
    /// 2. after running external initialize step.
    ///    e.g. setup_window_buffer_listener
    pub fn initialize_check_state_action(&mut self) {
        // Attach the video surface exactly once, and only when the player
        // has reached INITIALIZED *and* the native window already exists.
        if self.state == AVPlayerState::AV_INITIALIZED &&
            self.native_window.is_some() &&
            !self.has_set_window
        {
            self.has_set_window = true;
            self.set_window_to_player();
            self.prepare(); // only prepare after setting window.
        }
    }

    /// Hand the consumer-surface window to the player as its video surface.
    fn set_window_to_player(&mut self) {
        let Some(native_window) = self.native_window else {
            warn!("Setting window to player, but Native Window not initialized!");
            return;
        };
        unsafe {
            OH_AVPlayer_SetVideoSurface(self.ohos_av_player, native_window);
        }
    }

    /// The first step of initialization process, after this avplayer will
    /// become initialized. Kickstart the initialize process.
    fn setup_data_source(&mut self) {
        let Some(ref mut source) = self.media_data_source else {
            warn!("Error Source not initialized!");
            return;
        };
        debug!("Setting up data source");
        source.set_data_src(self.ohos_av_player);
    }

    /// Apply `volume` (same value to both channels) and remember it.
    pub fn set_volume(&mut self, volume: f64) {
        unsafe {
            OH_AVPlayer_SetVolume(self.ohos_av_player, volume as f32, volume as f32);
        }
        self.volume = volume;
    }

    /// Last volume applied through set_volume / set_mute.
    pub fn volume(&self) -> f64 {
        self.volume
    }

    /// Store the media source; it is registered with the player later in
    /// setup_data_source (after the input size is known).
    pub fn set_source(&mut self, source: MediaSourceWrapper) {
        // Todo: Should think of better way to change the way the data is given.
        self.media_data_source = Some(source);
    }

    /// Forward end-of-stream to the data source, if any.
    pub fn end_of_stream(&self) {
        if let Some(source) = &self.media_data_source {
            source.end_of_stream();
        }
    }

    /// Forward fetched media bytes to the data source, if any.
    pub fn push_data(&self, data: Vec<u8>) {
        if let Some(inner_source) = &self.media_data_source {
            inner_source.push_data(data);
        }
    }

    pub fn play(&self) {
        unsafe {
            debug!("OH_AVPlayer_Play!");
            OH_AVPlayer_Play(self.ohos_av_player);
        }
    }

    /// Mute by forcing volume to 0, unmute by restoring full volume.
    /// NOTE(review): unmuting always resets to 1.0, discarding any volume
    /// previously set via set_volume — confirm this is intended.
    pub fn set_mute(&mut self, mute: bool) {
        debug!("OH_AVPlayer Set mute: {}", mute);
        let volume = match mute {
            true => 0.,
            false => 1.,
        };
        unsafe {
            OH_AVPlayer_SetVolume(self.ohos_av_player, volume, volume);
        }
        self.volume = volume as f64;
    }

    /// Muted is modeled as "volume exactly 0".
    pub fn muted(&self) -> bool {
        self.volume == 0.0
    }

    /// Seek to `second` using the closest-frame seek mode.
    /// NOTE(review): the unit passed to OH_AVPlayer_Seek looks like it may
    /// be milliseconds in the native API — confirm against MediaKit docs.
    pub fn seek(&self, second: i32) {
        unsafe {
            log::info!("OH_AVPlayer_Seek! :{}", second);
            OH_AVPlayer_Seek(
                self.ohos_av_player,
                second,
                AVPlayerSeekMode::AV_SEEK_CLOSEST,
            );
        }
    }

    /// Remember the requested rate and snap it onto the discrete playback
    /// speeds MediaKit supports.
    pub fn set_rate(&mut self, rate: f64) {
        self.playback_rate = rate;
        // Round toward 1x: for rates >= 1 round down, for rates < 1 round up.
        let speed = if rate >= 1.0 {
            match rate {
                3.0.. => AVPlaybackSpeed::AV_SPEED_FORWARD_3_00_X,
                2.0.. => AVPlaybackSpeed::AV_SPEED_FORWARD_2_00_X,
                1.75.. => AVPlaybackSpeed::AV_SPEED_FORWARD_1_75_X,
                1.5.. => AVPlaybackSpeed::AV_SPEED_FORWARD_1_50_X,
                1.25.. => AVPlaybackSpeed::AV_SPEED_FORWARD_1_25_X,
                _ => AVPlaybackSpeed::AV_SPEED_FORWARD_1_00_X,
            }
        } else {
            match rate {
                // Non-positive rates fall back to normal speed.
                ..=0.0 => AVPlaybackSpeed::AV_SPEED_FORWARD_1_00_X,
                ..=0.125 => AVPlaybackSpeed::AV_SPEED_FORWARD_0_125_X,
                ..=0.25 => AVPlaybackSpeed::AV_SPEED_FORWARD_0_25_X,
                ..=0.5 => AVPlaybackSpeed::AV_SPEED_FORWARD_0_50_X,
                ..=0.75 => AVPlaybackSpeed::AV_SPEED_FORWARD_0_75_X,
                _ => AVPlaybackSpeed::AV_SPEED_FORWARD_1_00_X,
            }
        };
        unsafe {
            OH_AVPlayer_SetPlaybackSpeed(self.ohos_av_player, speed);
        }
    }

    /// The rate as requested by the caller (not the snapped native speed).
    pub fn playback_rate(&self) -> f64 {
        self.playback_rate
    }

    pub fn pause(&self) {
        unsafe {
            debug!("OH_AVPlayer_Pause!");
            OH_AVPlayer_Pause(self.ohos_av_player);
        }
    }

    pub fn stop(&self) {
        unsafe {
            debug!("OH_AVPlayer_Stop!");
            OH_AVPlayer_Stop(self.ohos_av_player);
        }
    }

    pub fn prepare(&mut self) {
        unsafe {
            debug!("OH_AVPlayer Prepare Called!");
            OH_AVPlayer_Prepare(self.ohos_av_player);
        }
    }

    // For AVPlayer only call SetSource after SetInputSize to avoid being recognized as live stream.
    pub fn set_input_size(&mut self, size: u64) {
        if let Some(inner_source) = &mut self.media_data_source {
            debug!("Setting up data source size: {}", size);
            inner_source.set_input_size(size as usize);
            // Only set once when first time initialized
            if !self.has_set_source_size {
                debug!("Setup data Source");
                self.setup_data_source();
                self.has_set_source_size = true;
            }
        }
    }

    /// Register `f` as the player's OnInfo callback. The closure is double
    /// boxed, leaked with Box::into_raw and passed as the C user_data; the
    /// raw pointer is kept so Drop can reclaim it.
    pub fn connect_info_event_callback<F>(&mut self, f: F)
    where
        F: Fn(AVPlayerOnInfoType, *mut OH_AVFormat) + Send + 'static,
    {
        debug!("Trying to connect info event callback");
        // C-ABI trampoline: recovers the boxed Rust closure from user_data.
        extern "C" fn on_info_event(
            _player: *mut OH_AVPlayer,
            into_type: AVPlayerOnInfoType,
            info_body: *mut OH_AVFormat,
            user_data: *mut c_void,
        ) {
            assert!(
                !user_data.is_null(),
                "on_info_event: user_data must not be null"
            );
            let f = unsafe {
                &*(user_data as *const Box<dyn Fn(AVPlayerOnInfoType, *mut OH_AVFormat)>)
            };
            f(into_type, info_body);
        }
        let f: Box<dyn Fn(AVPlayerOnInfoType, *mut OH_AVFormat)> = Box::new(f);
        let f: Box<Box<dyn Fn(AVPlayerOnInfoType, *mut OH_AVFormat)>> = Box::new(f);
        let raw_ptr_f = unsafe {
            let raw_ptr_f = Box::into_raw(f);
            let ret = OH_AVPlayer_SetOnInfoCallback(
                self.ohos_av_player,
                Some(on_info_event),
                raw_ptr_f as *mut c_void,
            );
            debug!("OH AVPlayer Set INFO Callback: {:?}", ret);
            raw_ptr_f
        };
        self.event_info_callback_closure = Some(raw_ptr_f);
    }

    /// External Initialization step.
    /// Creates the consumer surface, registers `f` as its frame-available
    /// listener (leaked like the info callback; reclaimed in Drop) and
    /// acquires the native window that will be handed to the player.
    pub fn setup_window_buffer_listener<F: Fn() + Send + 'static>(&mut self, f: F) {
        let f: Box<dyn Fn()> = Box::new(f);
        let f: Box<Box<dyn Fn()>> = Box::new(f);
        (
            self.native_image,
            self.frame_available_callback_closure,
            self.native_window,
        ) = unsafe {
            let native_image = OH_ConsumerSurface_Create();
            debug!("Native image created :{:p}", native_image);
            // CPU_READ usage lets us later map the buffer and read pixels.
            let ret = OH_ConsumerSurface_SetDefaultUsage(
                native_image,
                OH_NativeBuffer_Usage::NATIVEBUFFER_USAGE_CPU_READ.0 as u64,
            );
            debug!("Set consumer surface default usage: {}", ret);
            // C-ABI trampoline for the frame-available listener.
            extern "C" fn frame_available_cb(context: *mut c_void) {
                assert!(
                    !context.is_null(),
                    "frame_available_cb: context must not be null"
                );
                let f = unsafe { &*(context as *mut Box<dyn Fn()>) };
                f();
            }
            let raw_ptr_f = Box::into_raw(f);
            let listener = OH_OnFrameAvailableListener {
                context: raw_ptr_f as *mut c_void,
                onFrameAvailable: Some(frame_available_cb),
            };
            let res = OH_NativeImage_SetOnFrameAvailableListener(native_image, listener);
            debug!("Native Image Set On Frame Available Listener done: {}", res);
            let native_window = OH_NativeImage_AcquireNativeWindow(native_image);
            debug!(
                "Native window acquired from native window {:p}",
                native_window
            );
            (Some(native_image), Some(raw_ptr_f), Some(native_window))
        };
        // The window may be the last missing piece of initialization.
        self.initialize_check_state_action();
    }

    /// Should pair with release_buffer.
    /// Acquires the next decoded frame from the consumer surface, waits on
    /// its sync fence (up to 3s) and snapshots the buffer-handle metadata.
    pub fn acquire_buffer(&self) -> Option<FrameInfo> {
        let native_image = self.native_image?;
        let mut native_window_buffer = std::ptr::null_mut();
        let mut fence_fd = 0;
        let ret = unsafe {
            OH_NativeImage_AcquireNativeWindowBuffer(
                native_image,
                &mut native_window_buffer,
                &mut fence_fd,
            )
        };
        if ret != 0 || native_window_buffer.is_null() {
            warn!("Failed to acquire native window buffer: ret={}", ret);
            return None;
        }
        debug!("Fence fd: {}", fence_fd);
        // A valid fence must be signalled before the buffer's contents are
        // safe to read; poll with a 3s timeout.
        if fence_fd != 0 && fence_fd != -1 {
            let mut pollfds = pollfd {
                fd: fence_fd,
                events: libc::POLLIN,
                revents: 0,
            };
            let ret = unsafe { libc::poll(&mut pollfds, 1, 3000) };
            if ret <= 0 {
                warn!("Pulling timeout or failed");
                return None;
            }
        }
        debug!("Taking object refernce!");
        // Hold a native reference while we read the buffer handle fields.
        let ret =
            unsafe { OH_NativeWindow_NativeObjectReference(native_window_buffer as *mut c_void) };
        if ret != 0 {
            warn!("Native Window Buffer Reference Failed!");
        }
        let frame_info = unsafe {
            let buffer_handle = OH_NativeWindow_GetBufferHandleFromNative(native_window_buffer);
            FrameInfo {
                fd: (*buffer_handle).fd,
                width: (*buffer_handle).width,
                height: (*buffer_handle).height,
                stride: (*buffer_handle).stride,
                size: (*buffer_handle).size,
                format: (*buffer_handle).format,
                vir_addr: (*buffer_handle).virAddr as *mut u8,
                native_window_buffer,
                fence_fd,
            }
        };
        let ret =
            unsafe { OH_NativeWindow_NativeObjectUnreference(native_window_buffer as *mut c_void) };
        if ret != 0 {
            warn!("Native Window Buffer Unreference failed!");
        }
        // FIXME(ray): Potential memory copying.
        Some(frame_info)
    }

    /// Should pair with acquire_buffer.
    /// Returns the window buffer to the consumer surface (-1 = no fence).
    pub fn release_buffer(&self, frame_info: FrameInfo) {
        let native_image = self.native_image.expect("native image should not be empty");
        unsafe {
            let ret = OH_NativeImage_ReleaseNativeWindowBuffer(
                native_image,
                frame_info.native_window_buffer,
                -1,
            );
            debug!("Release native window buffer ret: {}", ret);
        }
    }
}
impl Drop for OhosPlayer {
    /// Tear down the native player and reclaim the leaked callback boxes.
    ///
    /// Ordering matters: the native objects must be destroyed *before* the
    /// closure boxes are freed. `OH_AVPlayer_Release` and the consumer
    /// surface can still invoke the registered info / frame-available
    /// callbacks while shutting down, and those callbacks dereference the
    /// raw closure pointers — freeing the boxes first would be a
    /// use-after-free.
    fn drop(&mut self) {
        unsafe {
            debug!("Releasing AVPlayer because drop is called!");
            OH_AVPlayer_Release(self.ohos_av_player);
            // Destroying the native image detaches the frame listener.
            if let Some(mut native_image) = self.native_image {
                OH_NativeImage_Destroy(&mut native_image);
            }
            if let Some(native_window) = self.native_window {
                OH_NativeWindow_DestroyNativeWindow(native_window);
            }
            // No native code can call back into us anymore; reclaim the
            // boxes that were leaked with Box::into_raw at registration.
            if let Some(closure) = self.frame_available_callback_closure {
                drop(Box::from_raw(closure));
            }
            if let Some(closure) = self.event_info_callback_closure {
                drop(Box::from_raw(closure));
            }
        }
    }
}
// SAFETY: OhosPlayer holds raw pointers (player handle, native image/window,
// leaked closure boxes) that are only touched through its methods.
// NOTE(review): this relies on the MediaKit AVPlayer APIs being callable
// from any thread — confirm the native library's threading guarantees.
unsafe impl Send for OhosPlayer {}
unsafe impl Sync for OhosPlayer {}

View File

@@ -0,0 +1,36 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::ohos_media::source_builder::MediaSourceBuilder;
/// No-op stand-in for the real `MediaSourceWrapper` (see
/// `crate::ohos_media::source`), compiled when the SDK does not expose
/// `OH_AVPlayer_SetDataSource` (API < 21). It keeps the rest of the
/// backend compiling; every operation silently does nothing.
#[derive(Default)]
pub struct MediaSourceWrapper {}

// Single inherent impl: the original split this across two impl blocks
// for no reason; merged for consistency with the real source module.
impl MediaSourceWrapper {
    /// Create an (empty) dummy source.
    pub fn new() -> Self {
        Self {}
    }

    /// Mirror of the real source's builder entry point: a builder with no
    /// callbacks configured.
    pub fn builder() -> MediaSourceBuilder {
        MediaSourceBuilder {
            enough_data: None,
            seek_data: None,
        }
    }

    /// No-op: the dummy source ignores the total media size.
    pub fn set_input_size(&self, _size: usize) {}

    /// No-op: nothing is buffered, so end-of-stream is meaningless here.
    pub fn end_of_stream(&self) {}

    /// No-op: incoming media bytes are discarded. Returns `true`
    /// ("more data welcome") to match the real source's signature, which
    /// returns `false` only when its cache is full.
    pub fn push_data(&self, _data: Vec<u8>) -> bool {
        true
    }

    /// No-op: no data source is ever attached to the player.
    /// NOTE(review): the real variant takes `&mut self` for
    /// `set_input_size` — consider aligning receivers across the two
    /// cfg-selected implementations.
    pub fn set_data_src(&mut self, _av_player: *mut ohos_media_sys::avplayer_base::OH_AVPlayer) {}
}

View File

@@ -0,0 +1,9 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
// Wrapper around the MediaKit AVPlayer and the consumer-surface frame pipeline.
pub mod avplayer;
// `OH_AVPlayer_SetDataSource` is only exposed from SDK API 21 onwards, so
// older SDKs get a no-op MediaSourceWrapper with the same interface.
#[cfg(not(sdk_api_21))]
pub mod dummy_source;
#[cfg(sdk_api_21)]
pub mod source;
pub mod source_builder;

View File

@@ -0,0 +1,250 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use core::slice;
use std::sync::{Arc, Mutex};
use log::debug;
use ohos_media_sys::avbuffer::OH_AVBuffer;
use ohos_media_sys::avcodec_base::OH_AVDataSourceExt;
use ohos_media_sys::avplayer::OH_AVPlayer_SetDataSource;
use ohos_media_sys::avplayer_base::OH_AVPlayer;
use crate::ohos_media::source_builder::MediaSourceBuilder;
// Capacity of the in-memory playback cache served to the AVPlayer.
const DEFAULT_CACHE_SIZE: usize = 8 * 1024 * 1024; // 8MB

/// Bridges Servo's push-based media fetching to the AVPlayer's pull-based
/// `OH_AVDataSourceExt` callback API.
pub struct MediaSourceWrapper {
    // C-side descriptor registered with the player (total size + readAt).
    pub(crate) data_src: ohos_media_sys::avcodec_base::OH_AVDataSourceExt,
    // Total size of the media resource in bytes; 0 until set_input_size.
    total_media_source_size: usize,
    // Shared with the readAt closure: the AVPlayer client thread reads
    // while the script thread pushes, hence the Mutex.
    playback_buffer: Arc<Mutex<PlaybackBuffer>>,
    // Raw pointer to the leaked, double-boxed readAt closure; passed to C
    // as user_data and reclaimed in Drop.
    closure_handle: *mut Box<dyn Fn(*mut u8, u32, i64) -> i32>,
}
impl MediaSourceWrapper {
    /// Entry point for configuring a source: a builder with no callbacks
    /// set yet (see `MediaSourceBuilder::build`).
    pub fn builder() -> MediaSourceBuilder {
        MediaSourceBuilder {
            enough_data: None,
            seek_data: None,
        }
    }
}
// AVPlayer itself already has a short internal buffer, so we only need to
// schedule a fetch when we lack data for a requested location.
impl MediaSourceWrapper {
    /// Build a wrapper whose C `readAt` callback serves bytes from a shared
    /// [`PlaybackBuffer`], requesting an upstream seek when the player asks
    /// for data outside the cached range.
    pub fn new(source_cb: MediaSourceBuilder) -> Self {
        let playback_buffer = Arc::new(Mutex::new(PlaybackBuffer::new(source_cb.enough_data)));
        let playback_buffer_clone = playback_buffer.clone();
        // Invoked (via the extern "C" trampoline below) whenever the player
        // wants `length` bytes at absolute media offset `pos`.
        let read_at_closure = move |buffer: *mut u8, length: u32, pos: i64| -> i32 {
            log::debug!(
                "Inside Read At Closure: {:p}, length: {}, pos: {}",
                buffer,
                length,
                pos
            );
            // SAFETY: the AVPlayer guarantees `buffer` points at `length`
            // writable bytes for the duration of this callback.
            let buffer = unsafe { slice::from_raw_parts_mut(buffer, length as usize) };
            let (read_bytes, seek_pos) = {
                let mut playback_buffer_lock = playback_buffer_clone.lock().unwrap();
                playback_buffer_lock.read_data(buffer, pos)
            };
            // The playback_buffer lock must be released before calling the seek
            // closure, which blocks on IPC with the script thread. Holding the
            // lock here would deadlock if the script thread is simultaneously
            // trying to push_data (which also acquires this lock).
            if let Some(seek_pos) = seek_pos {
                if let Some(seek_closure) = &source_cb.seek_data {
                    seek_closure(seek_pos);
                }
            }
            read_bytes
        };
        let box_closure: Box<dyn Fn(*mut u8, u32, i64) -> i32> = Box::new(read_at_closure);
        // Double boxing is needed because we need to convert the closure into a raw pointer to pass to C,
        // but Rust does not allow us to directly convert a Box<dyn Fn> into a raw pointer, we need to first box it
        // and then convert the box into a raw pointer.
        let double_box_closure = Box::new(box_closure);
        // C-ABI trampoline: recovers the boxed Rust closure from user_data
        // and forwards the AVBuffer's raw data address to it.
        extern "C" fn oh_avdatasource_read_at_callback(
            data: *mut OH_AVBuffer,
            length: i32,
            pos: i64,
            user_data: *mut std::ffi::c_void,
        ) -> i32 {
            assert!(
                !user_data.is_null(),
                "oh_avdatasource_read_at_callback: user_data must not be null"
            );
            let f = unsafe { &*(user_data as *mut Box<dyn Fn(*mut u8, u32, i64) -> i32>) };
            let buffer_addr = unsafe { ohos_media_sys::avbuffer::OH_AVBuffer_GetAddr(data) };
            f(buffer_addr, length as u32, pos)
        }
        // size starts at 0 and is filled in by set_input_size before the
        // source is registered with the player (see set_data_src).
        let data_src = OH_AVDataSourceExt {
            size: 0,
            readAt: Some(oh_avdatasource_read_at_callback),
        };
        let raw_ptr_f = Box::into_raw(double_box_closure);
        Self {
            data_src,
            total_media_source_size: 0,
            playback_buffer,
            closure_handle: raw_ptr_f,
        }
    }

    /// Record the total media size (only the first non-zero call takes
    /// effect) and mark any pending seek as done — set_input_size is called
    /// when a (re)fetch starts, so data for the new position is on its way.
    /// NOTE(review): confirm the caller indeed re-invokes this after every
    /// seek-triggered refetch.
    pub fn set_input_size(&mut self, size: usize) {
        log::debug!("Setting input size to {}", size);
        if self.total_media_source_size == 0 {
            self.total_media_source_size = size;
            self.data_src.size = size as i64;
        }
        self.playback_buffer.lock().unwrap().notify_seek_done();
    }

    /// Append fetched bytes to the playback cache.
    /// Returns false when the cache is full ("enough data" for now).
    pub fn push_data(&self, data: Vec<u8>) -> bool {
        let mut playback_buffer_lock = self.playback_buffer.lock().unwrap();
        playback_buffer_lock.push_buffer(data)
    }

    /// Signal that no further data will arrive for the current request.
    pub fn end_of_stream(&self) {
        self.playback_buffer.lock().unwrap().end_of_stream();
    }

    /// Register this source with the given player. After this call the
    /// player holds `closure_handle`, so the wrapper must outlive the
    /// player's use of the data source.
    pub fn set_data_src(&mut self, av_player: *mut OH_AVPlayer) {
        unsafe {
            OH_AVPlayer_SetDataSource(
                av_player,
                &mut self.data_src as *mut OH_AVDataSourceExt,
                self.closure_handle as *mut std::ffi::c_void,
            );
        }
    }
}
impl Drop for MediaSourceWrapper {
    fn drop(&mut self) {
        // SAFETY: closure_handle was produced by Box::into_raw in `new` and
        // is reclaimed exactly once, here.
        // NOTE(review): the AVPlayer was handed this pointer as readAt
        // user_data in set_data_src; the player must not invoke readAt
        // after the wrapper is dropped or it is a use-after-free — confirm
        // the teardown order guarantees this.
        unsafe {
            let box_closure = Box::from_raw(self.closure_handle);
            drop(box_closure);
        }
    }
}
/// In-memory cache mediating between two threads:
/// 1. the AVPlayer client thread, which pulls bytes via `read_data`, and
/// 2. the script thread, which pushes fetched bytes via `push_buffer`.
pub struct PlaybackBuffer {
    // Invoked when the cache fills up and upstream fetching should pause.
    enough_data_closure: Option<Box<dyn Fn() + Send + Sync>>,
    // Absolute media offset of buffer[0].
    buffer_data_head: i64,
    // True while an upstream fetch is expected to deliver more data.
    has_active_request: bool,
    // Absolute offset one past the last byte handed to the player.
    last_read_end: i64,
    // True between requesting an upstream seek and `notify_seek_done`.
    is_seeking: bool,
    // The cached bytes; capacity fixed at DEFAULT_CACHE_SIZE.
    buffer: Vec<u8>,
}

impl PlaybackBuffer {
    /// Create an empty cache. `enough_data_closure` (if any) is called
    /// whenever the cache rejects a push because it is full.
    pub fn new(enough_data_closure: Option<Box<dyn Fn() + Send + Sync>>) -> Self {
        Self {
            enough_data_closure,
            buffer_data_head: 0,
            has_active_request: false,
            is_seeking: false,
            last_read_end: 0,
            buffer: Vec::with_capacity(DEFAULT_CACHE_SIZE),
        }
    }

    /// Mark an in-flight seek as completed; reads may resume.
    pub fn notify_seek_done(&mut self) {
        self.is_seeking = false;
    }

    /// Copy cached bytes at absolute offset `pos` into `dest_slice`.
    ///
    /// Returns `(bytes_copied, Some(seek_target))` when the requested
    /// position lies outside what the cache can ever serve, meaning the
    /// upstream fetch must restart at `seek_target`.
    pub fn read_data(&mut self, dest_slice: &mut [u8], pos: i64) -> (i32, Option<u64>) {
        // While a seek is pending the cache contents are meaningless.
        if self.is_seeking {
            debug!(
                "Currently seeking, cannot read data at position {}, buffer head is at {}, buffer len is {} has_active_request: {}",
                pos,
                self.buffer_data_head,
                self.buffer.len(),
                self.has_active_request
            );
            return (0, None);
        }
        let offset_into_buffer = pos - self.buffer_data_head;
        let bytes_after_pos = self.buffer.len() as i64 - offset_into_buffer;
        // A seek is required when `pos` is before the cached range, or past
        // its end with no in-flight fetch that could still fill the gap.
        let behind_cache = offset_into_buffer < 0;
        let unreachable_ahead =
            pos >= self.buffer_data_head + self.buffer.capacity() as i64;
        let past_cache = bytes_after_pos <= 0 && (!self.has_active_request || unreachable_ahead);
        if behind_cache || past_cache {
            debug!(
                "We don't have data at position {}, buffer head is at {}, buffer len is {} has_active_request: {}",
                pos,
                self.buffer_data_head,
                self.buffer.len(),
                self.has_active_request
            );
            // Reset the cache to start at the new position and ask the
            // caller to seek upstream.
            self.buffer.clear();
            self.buffer_data_head = pos;
            self.has_active_request = true;
            self.is_seeking = true;
            return (0, Some(pos as u64));
        }
        let copy_len = bytes_after_pos.clamp(0, dest_slice.len() as i64) as usize;
        if copy_len == 0 {
            debug!(
                "No available data to read at position {}, buffer head is at {}, buffer len is {} has_active_request: {}",
                pos,
                self.buffer_data_head,
                self.buffer.len(),
                self.has_active_request
            );
            return (0, None);
        }
        let start = offset_into_buffer as usize;
        dest_slice[..copy_len].copy_from_slice(&self.buffer[start..start + copy_len]);
        self.last_read_end = pos + copy_len as i64;
        (copy_len as i32, None)
    }

    /// Append fetched bytes to the cache.
    /// Returns false once the cache is full, i.e. upstream has delivered
    /// enough data and should pause fetching.
    pub fn push_buffer(&mut self, data: Vec<u8>) -> bool {
        // Between the cache being reset for a new seek position and the old
        // fetch being cancelled, stale bytes from the previous fetch may
        // still arrive; appending them would corrupt the cache (they would
        // be treated as starting at the new seek position), so they are
        // silently discarded while a seek is pending.
        if self.is_seeking {
            return true;
        }
        if self.buffer.len() + data.len() > self.buffer.capacity() {
            debug!(
                "Buffer is full, cannot push more data,current head: {}, current len: {}, incoming data len: {}, capacity: {}",
                self.buffer_data_head,
                self.buffer.len(),
                data.len(),
                self.buffer.capacity()
            );
            self.has_active_request = false;
            if let Some(notify_enough) = self.enough_data_closure.as_ref() {
                notify_enough();
            }
            return false;
        }
        self.buffer.extend_from_slice(&data);
        true
    }

    /// Upstream signalled end of stream: no further data will arrive.
    pub fn end_of_stream(&mut self) {
        self.has_active_request = false;
    }
}

View File

@@ -0,0 +1,40 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#[cfg(not(sdk_api_21))]
use crate::ohos_media::dummy_source::MediaSourceWrapper;
#[cfg(sdk_api_21)]
use crate::ohos_media::source::MediaSourceWrapper;
/// Callback invoked when the player needs data starting at a new byte offset;
/// returns whether the seek request was accepted.
type SeekDataClosure = Box<dyn Fn(u64) -> bool + Send + Sync>;
/// Collects the callbacks a `MediaSourceWrapper` needs before it is built.
pub struct MediaSourceBuilder {
    // Invoked when the internal buffer is full and the producer should back off.
    pub enough_data: Option<Box<dyn Fn() + Send + Sync>>,
    // Invoked when playback needs data from a new byte position.
    pub seek_data: Option<SeekDataClosure>,
}
impl MediaSourceBuilder {
    /// Register the callback fired when the source has buffered enough data
    /// and the producer should stop pushing for now.
    ///
    /// Note: the callback is boxed and never cloned, so no `Clone` bound is
    /// required (the previous bound needlessly restricted callers).
    pub fn set_enough_data<F: Fn() + Send + Sync + 'static>(mut self, callback: F) -> Self {
        self.enough_data = Some(Box::new(callback));
        self
    }
    /// Register the callback fired when playback needs data starting at a new
    /// byte offset. The callback returns whether the seek was accepted.
    pub fn set_seek_data<F: Fn(u64) -> bool + Send + Sync + 'static>(
        mut self,
        callback: F,
    ) -> Self {
        self.seek_data = Some(Box::new(callback));
        self
    }
    /// Consume the builder and produce the platform media source. On SDK API
    /// levels without `OH_AVPlayer_SetDataSource` (< 21) a dummy source is
    /// built instead and the callbacks are unused.
    pub fn build(self) -> MediaSourceWrapper {
        #[cfg(not(sdk_api_21))]
        {
            MediaSourceWrapper::new()
        }
        #[cfg(sdk_api_21)]
        {
            MediaSourceWrapper::new(self)
        }
    }
}

View File

@@ -2,129 +2,729 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use servo_media::MediaInstance;
use servo_media_player::Player;
use std::cell::Cell;
use std::ops::Range;
use std::sync::{Arc, Mutex, mpsc};
use std::time;
pub struct OhosAVPlayer {}
use crossbeam_channel::Sender;
use ipc_channel::ipc::{IpcReceiver, channel};
use log::{debug, error, warn};
use ohos_media_sys::avformat::{
OH_AVFormat, OH_AVFormat_GetFloatValue, OH_AVFormat_GetIntValue, OH_AVFormat_GetLongValue,
};
use ohos_media_sys::avplayer_base::{
AVPlayerOnInfoType, AVPlayerState, OH_PLAYER_BUFFERING_TYPE, OH_PLAYER_BUFFERING_VALUE,
OH_PLAYER_CURRENT_POSITION, OH_PLAYER_DURATION, OH_PLAYER_IS_LIVE_STREAM,
OH_PLAYER_SEEK_POSITION, OH_PLAYER_STATE, OH_PLAYER_STATE_CHANGE_REASON,
OH_PLAYER_VIDEO_HEIGHT, OH_PLAYER_VIDEO_WIDTH, OH_PLAYER_VOLUME,
};
use servo_media::{BackendMsg, ClientContextId, MediaInstance, MediaInstanceError};
use servo_media_player::metadata::Metadata;
use servo_media_player::video::{self, Buffer, VideoFrame, VideoFrameData};
use servo_media_player::{PlaybackState, Player, PlayerEvent, SeekLock, SeekLockMsg};
use yuv::yuv_nv12_to_bgra;
impl OhosAVPlayer {
pub fn new() -> OhosAVPlayer {
OhosAVPlayer {}
use crate::ohos_media::avplayer::OhosPlayer as OhosPlayerInner;
#[cfg(not(sdk_api_21))]
use crate::ohos_media::dummy_source::MediaSourceWrapper;
#[cfg(sdk_api_21)]
use crate::ohos_media::source::MediaSourceWrapper;
// Height of decoded video frame from AVPlayer is padded to multiples of this value by the codec.
// https://developer.huawei.com/consumer/cn/doc/harmonyos-guides/video-decoding
const FRAME_HEIGHT_MULTIPLE: i32 = 32;
/// This is used to fill the gap between internal AVPlayer state and Player State exposed to Media Element.
pub struct StateManager {
    // State as last reported by the native AVPlayer info callback.
    pub internal_state: InternalState,
    // Play/pause intent as exposed to the media element.
    pub player_state: PlayerState,
}
/// Raw AVPlayer state machine position.
pub struct InternalState {
    pub state: AVPlayerState,
}
/// State visible through the `Player` trait.
pub struct PlayerState {
    pub paused: bool,
}
impl StateManager {
    /// Start idle (AVPlayer's initial state) and paused (media elements do
    /// not autoplay by default).
    pub fn new() -> Self {
        StateManager {
            internal_state: InternalState {
                state: AVPlayerState::AV_IDLE,
            },
            player_state: PlayerState { paused: true },
        }
    }
}
impl MediaInstance for OhosAVPlayer {
/// `Player` implementation backed by HarmonyOS MediaKit's AVPlayer.
pub struct OhosAvPlayer {
    // Backend-assigned instance id, reported via `MediaInstance::get_id`.
    id: usize,
    // Owning client context; used when notifying the backend on drop.
    context_id: ClientContextId,
    // Wrapper around the native AVPlayer handle and its FFI calls.
    player_inner: Arc<Mutex<OhosPlayerInner>>,
    // IPC channel delivering `PlayerEvent`s to the media element.
    event_sender: Arc<Mutex<ipc_channel::ipc::IpcSender<servo_media::player::PlayerEvent>>>,
    // Present only when a frame renderer was supplied (video playback).
    video_sink: Option<Arc<Mutex<VideoSink>>>,
    // Channel back to the servo-media backend (shutdown notifications).
    backend_chan: Arc<Mutex<mpsc::Sender<BackendMsg>>>,
    // Last metadata snapshot; updated from AVPlayer info callbacks.
    last_metadata: Arc<Mutex<Cell<Metadata>>>,
    // Bridges AVPlayer's internal state and the paused flag callers see.
    state_manager: Arc<Mutex<StateManager>>,
}
// Procedure for setting up AVPlayer, state change condition:
// 1. Create AVPlayer
// 2. Setup AVPlayer InfoCallback (this should be the first step, so that we can listen to state change)
// 3. Setup AVPlayer Media source.
// 4. wait for AVplayer into Initialized State, setup VideoSurface.
// 5. AVPlayer Prepare()
// 6. wait for ready for prepare, in the meantime, avplayer will try to read data from media source.
// 7. player ready to play.
impl OhosAvPlayer {
    /// Create a player wired to the servo-media event channel.
    /// `video_renderer` is optional: audio-only playback needs no sink.
    pub fn new(
        id: usize,
        context_id: ClientContextId,
        sender: ipc_channel::ipc::IpcSender<servo_media::player::PlayerEvent>,
        video_renderer: Option<
            std::sync::Arc<std::sync::Mutex<dyn servo_media::player::video::VideoFrameRenderer>>,
        >,
        backend_chan: Arc<Mutex<mpsc::Sender<BackendMsg>>>,
    ) -> OhosAvPlayer {
        let player_inner = Arc::new(Mutex::new(OhosPlayerInner::new()));
        let event_sender = Arc::new(Mutex::new(sender));
        // `video_renderer` is moved into the map; no clone needed.
        let video_sink = video_renderer.map(|v| {
            Arc::new(Mutex::new(VideoSink::new(
                v,
                player_inner.clone(),
                event_sender.clone(),
            )))
        });
        OhosAvPlayer {
            id,
            context_id,
            player_inner,
            event_sender,
            video_sink,
            backend_chan,
            // Placeholder metadata; real values arrive via AVPlayer info
            // callbacks (duration / resolution / live-stream updates).
            last_metadata: Arc::new(Mutex::new(Cell::new(Metadata {
                duration: None,
                width: 0,
                height: 0,
                format: String::new(),
                is_seekable: false,
                is_live: false,
                video_tracks: vec![],
                audio_tracks: vec![],
                title: None,
            }))),
            state_manager: Arc::new(Mutex::new(StateManager::new())),
        }
    }
    /// Install the AVPlayer info callback. This must run before the media
    /// source is attached so no state transition is missed.
    pub fn setup_info_event(&mut self) {
        let sender_clone = self.event_sender.clone();
        let player_inner_clone = self.player_inner.clone();
        let state_manager_clone = self.state_manager.clone();
        let video_sink_clone = self.video_sink.clone();
        let metadata_clone = self.last_metadata.clone();
        // Invoked from the native AVPlayer callback thread with an OH_AVFormat
        // whose keys depend on `info_type`.
        let event_info_closure =
            move |info_type: AVPlayerOnInfoType, info_body: *mut OH_AVFormat| {
                debug!(
                    "Info Type received!:{:?}, address: {:p}",
                    info_type, info_body
                );
                match info_type {
                    AVPlayerOnInfoType::AV_INFO_TYPE_STATE_CHANGE => {
                        let mut state_change_reason = -1;
                        let mut state = -1;
                        unsafe {
                            OH_AVFormat_GetIntValue(info_body, OH_PLAYER_STATE, &mut state);
                            OH_AVFormat_GetIntValue(
                                info_body,
                                OH_PLAYER_STATE_CHANGE_REASON,
                                &mut state_change_reason,
                            );
                        }
                        let av_player_state = AVPlayerState(state as u32);
                        debug!(
                            "AV Player State Change: {:?}, state change reason: {}",
                            av_player_state, state_change_reason
                        );
                        state_manager_clone.lock().unwrap().internal_state.state = av_player_state;
                        // Map AVPlayer states onto PlayerEvents for the element.
                        match av_player_state {
                            AVPlayerState::AV_INITIALIZED => {
                                debug!("Setup Video Sink");
                                if let Some(ref video_sink_clone) = video_sink_clone {
                                    video_sink_clone.lock().unwrap().setup(); // TODO: Hide internal state machine
                                }
                            },
                            AVPlayerState::AV_PREPARED => {
                                let _ = sender_clone
                                    .lock()
                                    .unwrap()
                                    .send(PlayerEvent::StateChanged(PlaybackState::Paused));
                            },
                            AVPlayerState::AV_PLAYING => {
                                let sender_clone_guard = sender_clone.lock().unwrap();
                                let _ = sender_clone_guard
                                    .send(PlayerEvent::StateChanged(PlaybackState::Playing));
                            },
                            AVPlayerState::AV_PAUSED => {
                                let _ = sender_clone
                                    .lock()
                                    .unwrap()
                                    .send(PlayerEvent::StateChanged(PlaybackState::Paused));
                            },
                            AVPlayerState::AV_STOPPED => {
                                let _ = sender_clone
                                    .lock()
                                    .unwrap()
                                    .send(PlayerEvent::StateChanged(PlaybackState::Stopped));
                            },
                            AVPlayerState::AV_COMPLETED => {
                                let _ = sender_clone.lock().unwrap().send(PlayerEvent::EndOfStream);
                            },
                            _ => {
                                warn!("Unhandled State: {:?}", av_player_state);
                            },
                        }
                        player_inner_clone
                            .lock()
                            .unwrap()
                            .set_state(av_player_state);
                    },
                    AVPlayerOnInfoType::AV_INFO_TYPE_RESOLUTION_CHANGE => {
                        let mut width = -1;
                        let mut height = -1;
                        unsafe {
                            OH_AVFormat_GetIntValue(info_body, OH_PLAYER_VIDEO_WIDTH, &mut width);
                            OH_AVFormat_GetIntValue(info_body, OH_PLAYER_VIDEO_HEIGHT, &mut height);
                        }
                        // Todo fix the metadata update logic, we should only report metadata once during initialization.
                        let mut last_metadata = metadata_clone.lock().unwrap();
                        last_metadata.get_mut().height = height as u32;
                        last_metadata.get_mut().width = width as u32;
                        let meta_data_clone_clone = last_metadata.get_mut().clone();
                        let _ = sender_clone
                            .lock()
                            .unwrap()
                            .send(PlayerEvent::MetadataUpdated(meta_data_clone_clone));
                        debug!("Resolution get: width: {}, height: {}", width, height);
                    },
                    AVPlayerOnInfoType::AV_INFO_TYPE_IS_LIVE_STREAM => {
                        let mut value = -1;
                        unsafe {
                            OH_AVFormat_GetIntValue(
                                info_body,
                                OH_PLAYER_IS_LIVE_STREAM,
                                &mut value,
                            );
                        }
                        // Live streams are not seekable; everything else is.
                        let mut last_metadata = metadata_clone.lock().unwrap();
                        let last_metadata_mut = last_metadata.get_mut();
                        (last_metadata_mut.is_live, last_metadata_mut.is_seekable) = match value {
                            1 => (true, false),
                            _ => (false, true),
                        };
                        debug!("AVPlayer is live stream: {}. which is not supported", value);
                    },
                    AVPlayerOnInfoType::AV_INFO_TYPE_DURATION_UPDATE => {
                        let mut duration: i64 = -1;
                        unsafe {
                            OH_AVFormat_GetLongValue(info_body, OH_PLAYER_DURATION, &mut duration);
                        }
                        let duration = time::Duration::from_millis(duration as u64);
                        // Lock once: write the duration and snapshot the
                        // metadata for the update event in one critical section.
                        let mut last_metadata = metadata_clone.lock().unwrap();
                        last_metadata.get_mut().duration = Some(duration);
                        let metadata_clone_clone = last_metadata.get_mut().clone();
                        let _ = sender_clone
                            .lock()
                            .unwrap()
                            .send(PlayerEvent::MetadataUpdated(metadata_clone_clone));
                        debug!("DURATION UPDATE: {:?}", duration);
                    },
                    AVPlayerOnInfoType::AV_INFO_TYPE_BUFFERING_UPDATE => {
                        let mut buffer_type = -1;
                        let mut buffer_value = -1;
                        unsafe {
                            OH_AVFormat_GetIntValue(
                                info_body,
                                OH_PLAYER_BUFFERING_TYPE,
                                &mut buffer_type,
                            );
                            OH_AVFormat_GetIntValue(
                                info_body,
                                OH_PLAYER_BUFFERING_VALUE,
                                &mut buffer_value,
                            );
                        }
                        debug!("Buffering update: {}, value: {}", buffer_type, buffer_value);
                    },
                    AVPlayerOnInfoType::AV_INFO_TYPE_VOLUME_CHANGE => {
                        let mut volume = 0.0;
                        unsafe {
                            OH_AVFormat_GetFloatValue(info_body, OH_PLAYER_VOLUME, &mut volume);
                        }
                        debug!("Player Volume Change: {}", volume);
                    },
                    AVPlayerOnInfoType::AV_INFO_TYPE_POSITION_UPDATE => {
                        let mut position = -1;
                        unsafe {
                            OH_AVFormat_GetIntValue(
                                info_body,
                                OH_PLAYER_CURRENT_POSITION,
                                &mut position,
                            );
                        }
                        // AVPlayer reports milliseconds; PlayerEvent uses seconds.
                        let _ = sender_clone
                            .lock()
                            .unwrap()
                            .send(PlayerEvent::PositionChanged(position as f64 / 1000.0));
                    },
                    AVPlayerOnInfoType::AV_INFO_TYPE_SEEKDONE => {
                        let mut position = -1;
                        unsafe {
                            OH_AVFormat_GetIntValue(
                                info_body,
                                OH_PLAYER_SEEK_POSITION,
                                &mut position,
                            );
                        }
                        // Milliseconds -> seconds, as above.
                        let _ = sender_clone
                            .lock()
                            .unwrap()
                            .send(PlayerEvent::SeekDone(position as f64 / 1000.0));
                    },
                    _ => {
                        warn!("Unhandled info type: {:?}", info_type);
                    },
                }
            };
        self.player_inner
            .lock()
            .unwrap()
            .connect_info_event_callback(event_info_closure);
    }
    /// Attach the pull-based media source. Seek requests from AVPlayer are
    /// forwarded to the media element as `SeekData` events and acknowledged
    /// through an IPC `SeekLock` round-trip before data flows again.
    pub fn setup_data_source(&mut self) {
        let sender_clone = self.event_sender.clone();
        let sender_clone_clone = self.event_sender.clone();
        let seek_channel = Arc::new(Mutex::new(SeekChannel::new()));
        let seekdata_send_closure = move |pos: u64| {
            let _ = sender_clone.lock().unwrap().send(PlayerEvent::SeekData(
                pos,
                seek_channel.lock().unwrap().sender(),
            ));
            // Block until the element confirms the seek, then ack so it can proceed.
            let (ret, ack_channel) = seek_channel.lock().unwrap().wait();
            let _ = ack_channel.send(());
            debug!("Seek Initiated! :{}", pos);
            let _ = sender_clone.lock().unwrap().send(PlayerEvent::NeedData);
            ret
        };
        let source = MediaSourceWrapper::builder()
            .set_enough_data(move || {
                let _ = sender_clone_clone
                    .lock()
                    .unwrap()
                    .send(PlayerEvent::EnoughData);
            })
            .set_seek_data(seekdata_send_closure)
            .build();
        self.player_inner.lock().unwrap().set_source(source);
        // To kickstart the first need data event.
        let _ = self
            .event_sender
            .lock()
            .unwrap()
            .send(PlayerEvent::NeedData);
    }
}
/// Pairs a cloneable `SeekLock` handle with the receiving end used to block
/// until the seek consumer replies.
struct SeekChannel {
    lock: SeekLock,
    receiver: IpcReceiver<SeekLockMsg>,
}
impl SeekChannel {
    /// Build a fresh IPC channel and wrap its sending half in a `SeekLock`.
    fn new() -> Self {
        let (tx, rx) = channel::<SeekLockMsg>().expect("Couldn't create IPC channel");
        let lock = SeekLock { lock_channel: tx };
        SeekChannel {
            lock,
            receiver: rx,
        }
    }
    /// Hand out a clone of the lock handle to ship alongside a seek event.
    fn sender(&self) -> SeekLock {
        self.lock.clone()
    }
    /// Block until the seek consumer replies on the channel.
    fn wait(&self) -> SeekLockMsg {
        self.receiver.recv().unwrap()
    }
}
impl Drop for OhosAvPlayer {
    /// Tell the backend this player instance is going away. The ack channel
    /// exists only to satisfy the message shape; we never wait on it.
    fn drop(&mut self) {
        debug!("Ohos Dropping");
        let (tx_ack, _rx_ack) = std::sync::mpsc::channel::<()>();
        let shutdown = BackendMsg::Shutdown {
            context: self.context_id,
            id: self.id,
            tx_ack,
        };
        let _ = self.backend_chan.lock().unwrap().send(shutdown);
    }
}
impl MediaInstance for OhosAvPlayer {
fn get_id(&self) -> usize {
todo!()
self.id
}
fn mute(&self, val: bool) -> Result<(), ()> {
todo!()
fn mute(&self, val: bool) -> Result<(), MediaInstanceError> {
self.set_mute(val).map_err(|_| MediaInstanceError)
}
fn suspend(&self) -> Result<(), ()> {
todo!()
fn suspend(&self) -> Result<(), MediaInstanceError> {
self.pause().map_err(|_| MediaInstanceError)
}
fn resume(&self) -> Result<(), ()> {
todo!()
fn resume(&self) -> Result<(), MediaInstanceError> {
self.play().map_err(|_| MediaInstanceError)
}
}
impl Player for OhosAVPlayer {
fn play(&self) -> Result<(), servo_media_player::PlayerError> {
todo!()
// TODO: Connect Error.
impl Player for OhosAvPlayer {
fn play(&self) -> Result<(), servo_media::player::PlayerError> {
debug!("Start playing ohos player");
self.state_manager.lock().unwrap().player_state.paused = false;
self.player_inner.lock().unwrap().play();
Ok(())
}
fn pause(&self) -> Result<(), servo_media_player::PlayerError> {
todo!()
fn pause(&self) -> Result<(), servo_media::player::PlayerError> {
self.state_manager.lock().unwrap().player_state.paused = true;
self.player_inner.lock().unwrap().pause();
Ok(())
}
fn paused(&self) -> bool {
todo!()
fn stop(&self) -> Result<(), servo_media::player::PlayerError> {
self.player_inner.lock().unwrap().stop();
Ok(())
}
fn seek(&self, time: f64) -> Result<(), servo_media::player::PlayerError> {
log::error!("Seeking to {} seconds", time);
self.player_inner
.lock()
.unwrap()
.seek((time * 1000.0) as i32);
let state_manger_lock = self.state_manager.lock().unwrap();
if !state_manger_lock.player_state.paused &&
state_manger_lock.internal_state.state == AVPlayerState::AV_COMPLETED
{
self.player_inner.lock().unwrap().play();
}
Ok(())
}
fn seekable(&self) -> Vec<std::ops::Range<f64>> {
if let Some(duration) = self.last_metadata.lock().unwrap().get_mut().duration {
return vec![Range {
start: 0.0,
end: duration.as_secs_f64(),
}];
}
self.buffered()
}
fn set_mute(&self, val: bool) -> Result<(), servo_media::player::PlayerError> {
self.player_inner.lock().unwrap().set_mute(val);
Ok(())
}
fn set_volume(&self, value: f64) -> Result<(), servo_media::player::PlayerError> {
self.player_inner.lock().unwrap().set_volume(value);
Ok(())
}
fn set_input_size(&self, size: u64) -> Result<(), servo_media::player::PlayerError> {
debug!("SetInputSize: {}", size);
self.player_inner.lock().unwrap().set_input_size(size);
Ok(())
}
fn set_playback_rate(
&self,
playback_rate: f64,
) -> Result<(), servo_media::player::PlayerError> {
self.player_inner.lock().unwrap().set_rate(playback_rate);
Ok(())
}
fn push_data(&self, data: Vec<u8>) -> Result<(), servo_media::player::PlayerError> {
self.player_inner.lock().unwrap().push_data(data);
Ok(())
}
fn end_of_stream(&self) -> Result<(), servo_media::player::PlayerError> {
debug!("Player: Current Request End of Stream reached!");
self.player_inner.lock().unwrap().end_of_stream();
Ok(())
}
fn buffered(&self) -> Vec<std::ops::Range<f64>> {
vec![]
}
fn set_stream(
&self,
_stream: &servo_media::streams::MediaStreamId,
_only_stream: bool,
) -> Result<(), servo_media::player::PlayerError> {
Ok(())
}
fn render_use_gl(&self) -> bool {
warn!("Render use gl not supported!");
false
}
fn set_audio_track(
&self,
_stream_index: i32,
_enabled: bool,
) -> Result<(), servo_media::player::PlayerError> {
Ok(())
}
fn set_video_track(
&self,
_stream_index: i32,
_enabled: bool,
) -> Result<(), servo_media::player::PlayerError> {
Ok(())
}
fn can_resume(&self) -> bool {
todo!()
}
fn stop(&self) -> Result<(), servo_media_player::PlayerError> {
todo!()
}
fn seek(&self, time: f64) -> Result<(), servo_media_player::PlayerError> {
todo!()
}
fn seekable(&self) -> Vec<std::ops::Range<f64>> {
todo!()
}
fn set_mute(&self, muted: bool) -> Result<(), servo_media_player::PlayerError> {
todo!()
fn paused(&self) -> bool {
self.state_manager.lock().unwrap().player_state.paused
}
fn muted(&self) -> bool {
todo!()
}
fn set_volume(&self, volume: f64) -> Result<(), servo_media_player::PlayerError> {
todo!()
self.player_inner.lock().unwrap().muted()
}
fn volume(&self) -> f64 {
todo!()
}
fn set_input_size(&self, size: u64) -> Result<(), servo_media_player::PlayerError> {
todo!()
}
fn set_playback_rate(&self, playback_rate: f64) -> Result<(), servo_media_player::PlayerError> {
todo!()
self.player_inner.lock().unwrap().volume()
}
fn playback_rate(&self) -> f64 {
todo!()
}
fn push_data(&self, data: Vec<u8>) -> Result<(), servo_media_player::PlayerError> {
todo!()
}
fn end_of_stream(&self) -> Result<(), servo_media_player::PlayerError> {
todo!()
}
fn buffered(&self) -> Vec<std::ops::Range<f64>> {
todo!()
}
fn set_stream(
&self,
stream: &servo_media_streams::MediaStreamId,
only_stream: bool,
) -> Result<(), servo_media_player::PlayerError> {
todo!()
}
fn render_use_gl(&self) -> bool {
todo!()
}
fn set_audio_track(
&self,
stream_index: i32,
enabled: bool,
) -> Result<(), servo_media_player::PlayerError> {
todo!()
}
fn set_video_track(
&self,
stream_index: i32,
enabled: bool,
) -> Result<(), servo_media_player::PlayerError> {
todo!()
self.player_inner.lock().unwrap().playback_rate()
}
}
/// Used when acquiring the decoded Video Frame,
/// and upload it to Media Frame Renderer in media element.
struct VideoSink {
    // Destination for converted BGRA frames.
    video_render:
        std::sync::Arc<std::sync::Mutex<dyn servo_media::player::video::VideoFrameRenderer>>,
    // Source of decoded frames (acquire/release buffer FFI calls).
    player_inner: Arc<Mutex<OhosPlayerInner>>,
    // Used to notify the element after each rendered frame.
    event_sender: Arc<Mutex<ipc_channel::ipc::IpcSender<servo_media::player::PlayerEvent>>>,
    // Channel to the render worker thread; `None` until `setup` runs.
    thread_send_chan: Cell<Option<Sender<RenderMsg>>>,
}
/// Messages driving the render worker thread.
pub enum RenderMsg {
    // Shut the worker loop down (sent from `Drop`).
    Terminate,
    // A decoded frame is ready to be acquired from the native window buffer.
    FrameAvailable,
}
impl VideoSink {
    /// Store the renderer, player handle and event channel; no thread is
    /// started until `setup` is called.
    pub fn new(
        video_render: std::sync::Arc<
            std::sync::Mutex<dyn servo_media::player::video::VideoFrameRenderer>,
        >,
        player_inner: Arc<Mutex<OhosPlayerInner>>,
        event_sender: Arc<Mutex<ipc_channel::ipc::IpcSender<servo_media::player::PlayerEvent>>>,
    ) -> Self {
        VideoSink {
            video_render,
            player_inner,
            event_sender,
            thread_send_chan: Cell::new(None),
        }
    }
    // For VideoSink, Need to think of better way to retrieve data.
    /// Register a frame-available listener on the native window buffer and
    /// spawn a worker thread that, per frame: acquires the buffer, converts
    /// NV12 -> BGRA on the CPU, hands the result to the renderer, releases
    /// the buffer, and notifies the element.
    pub fn setup(&self) {
        let (sender, receiver) = crossbeam_channel::unbounded::<RenderMsg>();
        let sender_clone = sender.clone();
        // Keep one sender so Drop can ask the worker to terminate.
        self.thread_send_chan.set(Some(sender));
        let event_sender_clone = self.event_sender.clone();
        let player_inner_clone = self.player_inner.clone();
        let renderer_clone = self.video_render.clone();
        // Called from the native listener; just wakes the worker thread.
        let frame_available_closure = move || {
            let res = sender_clone.send(RenderMsg::FrameAvailable);
            if res.is_err() {
                debug!("Failed to send frame available: {:?}", res.err());
            }
        };
        self.player_inner
            .lock()
            .unwrap()
            .setup_window_buffer_listener(frame_available_closure);
        std::thread::Builder::new()
            .name("Media Worker Thread".to_owned())
            .spawn(move || {
                loop {
                    let Ok(msg) = receiver.recv() else {
                        debug!("error receiving message");
                        break;
                    };
                    match msg {
                        RenderMsg::Terminate => {
                            break;
                        },
                        RenderMsg::FrameAvailable => {
                            // Nothing acquired (e.g. spurious wakeup): wait for the next signal.
                            let frame_info = match player_inner_clone.lock().unwrap().acquire_buffer() {
                                Some(frame_info) => frame_info,
                                None => continue,
                            };
                            debug!(
                                "fd: {}, width: {}, height: {}, stride: {}, size: {}, format: {}, virt addr: {:p}",
                                frame_info.fd,
                                frame_info.width,
                                frame_info.height,
                                frame_info.stride,
                                frame_info.size,
                                frame_info.format,
                                frame_info.vir_addr
                            );
                            // The codec pads frame height up to a multiple of
                            // FRAME_HEIGHT_MULTIPLE; plane sizes must use the
                            // padded (coded) height, not the display height.
                            let coded_height = ((frame_info.height + FRAME_HEIGHT_MULTIPLE - 1) / FRAME_HEIGHT_MULTIPLE) * FRAME_HEIGHT_MULTIPLE;
                            let y_plane_size = (frame_info.stride * coded_height) as usize;
                            let uv_plane_size = (frame_info.stride * coded_height / 2) as usize;
                            let total_needed = y_plane_size + uv_plane_size;
                            // Validate before building slices: the unsafe
                            // from_raw_parts below relies on these bounds.
                            if total_needed > frame_info.size as usize || frame_info.vir_addr.is_null() {
                                error!(
                                    "Buffer too small or null: needed {} bytes (y={}, uv={}), have {} bytes, vir_addr null={}",
                                    total_needed, y_plane_size, uv_plane_size, frame_info.size, frame_info.vir_addr.is_null()
                                );
                                player_inner_clone
                                    .lock()
                                    .unwrap()
                                    .release_buffer(frame_info);
                                continue;
                            }
                            // SAFETY: vir_addr is non-null and the buffer holds at
                            // least y_plane_size + uv_plane_size bytes (checked above);
                            // the slices live only until release_buffer below.
                            // NOTE(review): assumes the buffer layout is NV12
                            // (Y plane then interleaved UV); frame_info.format is
                            // logged but not checked — TODO confirm.
                            let bi_planar_image = yuv::YuvBiPlanarImage {
                                y_plane: unsafe {
                                    std::slice::from_raw_parts(
                                        frame_info.vir_addr as *const u8,
                                        y_plane_size,
                                    )
                                },
                                uv_plane: unsafe {
                                    std::slice::from_raw_parts(
                                        (frame_info.vir_addr as usize + y_plane_size) as *const u8,
                                        uv_plane_size,
                                    )
                                },
                                width: frame_info.width as u32,
                                height: frame_info.height as u32,
                                y_stride: frame_info.stride as u32,
                                uv_stride: frame_info.stride as u32,
                            };
                            // Destination uses display dimensions, 4 bytes/pixel.
                            let mut bgra = vec![0u8; (frame_info.width * frame_info.height * 4) as usize];
                            // Conversion from yuv to bgra8
                            let Ok(_) = yuv_nv12_to_bgra(
                                &bi_planar_image,
                                &mut bgra,
                                frame_info.width as u32 *4,
                                yuv::YuvRange::Full,
                                yuv::YuvStandardMatrix::Bt709,
                                yuv::YuvConversionMode::Balanced
                            )else{
                                error!("Failed to convert YUV to BGRA");
                                player_inner_clone
                                    .lock()
                                    .unwrap()
                                    .release_buffer(frame_info);
                                continue;
                            };
                            let Some(frame) = VideoFrame::new(
                                frame_info.width,
                                frame_info.height,
                                Arc::new(OhosBuffer::new(bgra)),
                            ) else {
                                error!("Failed to create VideoFrame");
                                player_inner_clone
                                    .lock()
                                    .unwrap()
                                    .release_buffer(frame_info);
                                continue;
                            };
                            renderer_clone.lock().expect(
                                "Failed to acquire video renderer lock"
                            ).render(frame);
                            // Pixels were copied into `bgra`; the native buffer
                            // can be returned to the codec now.
                            player_inner_clone
                                .lock()
                                .unwrap()
                                .release_buffer(frame_info);
                            match event_sender_clone
                                .lock()
                                .unwrap()
                                .send(PlayerEvent::VideoFrameUpdated)
                            {
                                Ok(()) => {},
                                Err(e) => {
                                    warn!("Send PlayerEvent::VideoFrameUpdated Error: {}", e);
                                },
                            };
                        },
                    }
                }
            })
            .unwrap();
    }
}
impl Drop for VideoSink {
    /// Ask the render worker thread to shut down, if one was ever started.
    fn drop(&mut self) {
        match self.thread_send_chan.get_mut() {
            Some(chan) => {
                let _ = chan.send(RenderMsg::Terminate);
            },
            None => {},
        }
    }
}
/// BGRA pixel data handed to the frame renderer.
///
/// The pixels are stored behind an `Arc` so `to_vec` can hand out a shared
/// reference instead of copying the whole frame on every call.
struct OhosBuffer {
    data: Arc<Vec<u8>>,
}
impl OhosBuffer {
    /// Wrap a freshly converted BGRA buffer. Signature unchanged: callers
    /// still pass an owned `Vec<u8>`.
    pub fn new(data: Vec<u8>) -> OhosBuffer {
        OhosBuffer {
            data: Arc::new(data),
        }
    }
}
impl Buffer for OhosBuffer {
    fn to_vec(&self) -> Option<video::VideoFrameData> {
        // O(1) refcount bump instead of cloning the pixel buffer.
        Some(VideoFrameData::Raw(Arc::clone(&self.data)))
    }
}

View File

@@ -6,8 +6,7 @@ use std::collections::HashMap;
use once_cell::sync::Lazy;
/// Lazily constructed global registry scanner for supported media types.
// `Lazy::new` takes the constructor directly; no closure wrapper needed.
pub static OHOS_REGISTRY_SCANNER: Lazy<OhosRegistryScanner> = Lazy::new(OhosRegistryScanner::new);
// Should be a combination of mime/codecs
// If the type we are matching only contain mime, then we only match the container.

View File

@@ -116,6 +116,11 @@ static MEDIA_CONTROL_CSS: &str = include_str!("../../resources/media-controls.cs
/// A JS file to control the media controls.
static MEDIA_CONTROL_JS: &str = include_str!("../../resources/media-controls.js");
/// The media engine may report a seek-done position that differs slightly from the
/// requested position (e.g. snapping to the nearest keyframe), so we use a threshold
/// instead of strict equality. (Unit: seconds.)
const SEEK_POSITION_THRESHOLD: f64 = 0.5;
#[derive(MallocSizeOf, PartialEq)]
enum FrameStatus {
Locked,
@@ -2737,7 +2742,8 @@ impl HTMLMediaElement {
fn playback_seek_done(&self, position: f64) {
// If the seek was initiated by script or by the user agent itself continue with the
// following steps, otherwise abort.
if !self.seeking.get() || position != self.current_seek_position.get() {
let delta = (position - self.current_seek_position.get()).abs();
if !self.seeking.get() || delta > SEEK_POSITION_THRESHOLD {
return;
}

View File

@@ -159,6 +159,9 @@ gaol = "0.2.1"
[target.'cfg(target_os = "windows")'.dependencies]
webxr = { workspace = true, features = ["glwindow", "headless", "openxr-api"] }
[target.'cfg(target_env = "ohos")'.dependencies]
servo-media-ohos = { workspace = true }
[dev-dependencies]
accesskit_consumer = "0.35.0"
http = { workspace = true }

View File

@@ -121,7 +121,17 @@ mod media_platform {
}
}
#[cfg(not(feature = "media-gstreamer"))]
#[cfg(all(not(feature = "media-gstreamer"), target_env = "ohos"))]
mod media_platform {
use servo_media_ohos::OhosBackend;
use super::ServoMedia;
pub fn init() {
ServoMedia::init::<OhosBackend>();
}
}
#[cfg(all(not(feature = "media-gstreamer"), not(target_env = "ohos")))]
mod media_platform {
use super::ServoMedia;
pub fn init() {