mirror of
https://github.com/servo/servo
synced 2026-04-25 17:15:48 +02:00
deps: Merge the servo/media repository (#42369)
This change merges http://github.com/servo/media into this repository. It is only used by Servo and version upgrades are complicated by having two repositories. In addition, this avoids the need to refer to individual commit hashes in the Servo `Cargo.toml`. The hope is that merging these two repositories will lead to better code organization / simplification like we have seen with the WebXR support. Initiailly, the idea was that this media support could be shared with the wider Rust ecosystem, but I think that hasn't worked out as planned due to the fact that it is difficult to use the various media packaes outside of the Servo project and the fact that no one seems to be doing this. Some changes were made when importing the code: - The second commit in this PR addresses new clippy warnings from the imported code. - GStreamer Packages are no longer renamed in the media code, so that they are named the same as they are currently in Servo. - Some examples are not ported as they require being run interactively and depend on older version of important libraries like winit. Having these dependencies in the core part of Servo isn't very convenient. Removed examples: - `audio_decoder.rs`: This is meant to be run interactively with a file so isn't very useful for testing. - Depended on winit GUI (`player` subdirectory): - `media_element_source_node.rs` - `play_media_stream.rs` - `simple_player.rs` - `muted_player.rs` - `siple_webrtc.rs`: Depended on `webrtc` library: Testing: This is covered by existing tests. In addition, the job which runs the media examples is added to the unit test workflow. --------- Signed-off-by: Martin Robinson <mrobinson@igalia.com>
This commit is contained in:
3
.github/workflows/linux-wpt.yml
vendored
3
.github/workflows/linux-wpt.yml
vendored
@@ -101,6 +101,9 @@ jobs:
|
||||
sudo apt install -qy --no-install-recommends mesa-vulkan-drivers fonts-noto-cjk fonts-dejavu-extra
|
||||
# FIXME #35029
|
||||
sudo apt purge -y fonts-droid-fallback
|
||||
sudo apt install -y jackd2 libjack-jackd2-0
|
||||
- name: Start Dummy Audio Device
|
||||
run: jackd -d dummy &
|
||||
- name: Sync from upstream WPT
|
||||
if: ${{ inputs.wpt-sync-from-upstream }}
|
||||
run: |
|
||||
|
||||
66
.github/workflows/linux.yml
vendored
66
.github/workflows/linux.yml
vendored
@@ -191,7 +191,7 @@ jobs:
|
||||
flags: unittests,unittests-linux,unittests-linux-${{ inputs.profile }}
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
- name: Devtools tests
|
||||
if: ${{ false && inputs.unit-tests }} # FIXME #39273
|
||||
if: ${{ false && inputs.unit-tests }} # FIXME #39273
|
||||
run: ./mach test-devtools --profile ${{ inputs.profile }}
|
||||
- name: Archive build timing
|
||||
uses: actions/upload-artifact@v6
|
||||
@@ -231,7 +231,7 @@ jobs:
|
||||
if: ${{ inputs.bencher && inputs.profile != 'debug' && github.event_name != 'workflow_dispatch' && github.event_name != 'merge_group' }}
|
||||
uses: ./.github/workflows/bencher.yml
|
||||
with:
|
||||
target: 'linux'
|
||||
target: "linux"
|
||||
profile: ${{ inputs.profile }}
|
||||
compressed-file-path: ${{ inputs.profile }}-binary-linux/servo-tech-demo.tar.gz
|
||||
binary-path: servo/servo
|
||||
@@ -368,3 +368,65 @@ jobs:
|
||||
files: codecov.json,support/crown/codecov.json
|
||||
fail_ci_if_error: true
|
||||
flags: unittests
|
||||
|
||||
media-examples:
|
||||
name: Media Examples
|
||||
if: ${{ inputs.unit-tests }}
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
if: github.event_name != 'pull_request_target'
|
||||
with:
|
||||
fetch-depth: 1
|
||||
- name: Install Dependencies
|
||||
run: |
|
||||
sudo bash -c 'apt-add-repository -y https://mirrors.kernel.org/ubuntu'
|
||||
sudo apt update
|
||||
sudo apt install -y \
|
||||
gstreamer1.0-libav \
|
||||
gstreamer1.0-plugins-bad \
|
||||
gstreamer1.0-plugins-base \
|
||||
gstreamer1.0-plugins-good \
|
||||
gstreamer1.0-plugins-ugly \
|
||||
gstreamer1.0-tools \
|
||||
libasound2-plugins \
|
||||
libfaad2 \
|
||||
libffi7 \
|
||||
libfftw3-single3 \
|
||||
libges-1.0-dev \
|
||||
libgstreamer-plugins-bad1.0-dev\
|
||||
libgstreamer-plugins-base1.0-dev \
|
||||
libgstreamer1.0-dev \
|
||||
libgstrtspserver-1.0-dev \
|
||||
libjack-jackd2-0 \
|
||||
libmms0 \
|
||||
libmpg123-0 \
|
||||
libopus0 \
|
||||
liborc-0.4-0 \
|
||||
liborc-0.4-dev \
|
||||
libpulsedsp \
|
||||
libsamplerate0 \
|
||||
libspeexdsp1 \
|
||||
libtdb1 \
|
||||
libtheora0 \
|
||||
libtwolame0 \
|
||||
libwayland-egl1-mesa \
|
||||
libwebrtc-audio-processing1 \
|
||||
pulseaudio \
|
||||
pulseaudio-utils \
|
||||
webp
|
||||
- name: Determine MSRV
|
||||
id: msrv
|
||||
uses: ./.github/actions/parse_msrv
|
||||
- name: Install MSRV
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: ${{ steps.msrv.outputs.rust_version }}
|
||||
- name: Virtual Audio Devices
|
||||
run: |
|
||||
jackd -d dummy &
|
||||
pulseaudio --start
|
||||
gst-inspect-1.0 | grep Total
|
||||
- name: Run Examples
|
||||
run: |
|
||||
ls components/media/examples/examples/*.rs | xargs -I{} basename {} .rs | grep -v params_connect | RUST_BACKTRACE=1 GST_DEBUG=3 xargs -I{} cargo run -p media-examples --example {}
|
||||
|
||||
108
Cargo.lock
generated
108
Cargo.lock
generated
@@ -3583,6 +3583,56 @@ dependencies = [
|
||||
"system-deps 7.0.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gstreamer-gl-wayland"
|
||||
version = "0.24.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7e7c882c6de202d985302f531ee7f907e1f1a89b11caa1ba8a153764dc6c650c"
|
||||
dependencies = [
|
||||
"glib",
|
||||
"gstreamer",
|
||||
"gstreamer-gl",
|
||||
"gstreamer-gl-wayland-sys",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gstreamer-gl-wayland-sys"
|
||||
version = "0.24.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "be8dffafb60f97705cb2395139df2002fa2730fa75d4efa04ed1ece1a014d464"
|
||||
dependencies = [
|
||||
"glib-sys",
|
||||
"gstreamer-gl-sys",
|
||||
"libc",
|
||||
"system-deps 7.0.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gstreamer-gl-x11"
|
||||
version = "0.24.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2a5cbe1aaf44cf6af4beef95390f3be8fdf919059a26991c922dcb0200c21535"
|
||||
dependencies = [
|
||||
"glib",
|
||||
"gstreamer",
|
||||
"gstreamer-gl",
|
||||
"gstreamer-gl-x11-sys",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gstreamer-gl-x11-sys"
|
||||
version = "0.24.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "be689b7d327607d11336964e4dc0f5da658b3a4d0373f0e267995edbe0e0454c"
|
||||
dependencies = [
|
||||
"glib-sys",
|
||||
"gstreamer-gl-sys",
|
||||
"libc",
|
||||
"system-deps 7.0.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gstreamer-play"
|
||||
version = "0.24.4"
|
||||
@@ -5349,6 +5399,18 @@ dependencies = [
|
||||
"webrender_api",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "media-examples"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"euclid",
|
||||
"rand 0.9.2",
|
||||
"serde",
|
||||
"servo-media",
|
||||
"servo-media-auto",
|
||||
"servo-media-dummy",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "2.7.6"
|
||||
@@ -8238,8 +8300,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "servo-media"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/servo/media?rev=f384dbc4ff8b5c6f8db2c763306cbe2281d66391#f384dbc4ff8b5c6f8db2c763306cbe2281d66391"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"servo-media-audio",
|
||||
@@ -8251,8 +8312,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "servo-media-audio"
|
||||
version = "0.2.0"
|
||||
source = "git+https://github.com/servo/media?rev=f384dbc4ff8b5c6f8db2c763306cbe2281d66391#f384dbc4ff8b5c6f8db2c763306cbe2281d66391"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"byte-slice-cast",
|
||||
"euclid",
|
||||
@@ -8270,10 +8330,17 @@ dependencies = [
|
||||
"speexdsp-resampler",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "servo-media-auto"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"servo-media-dummy",
|
||||
"servo-media-gstreamer",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "servo-media-derive"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/servo/media?rev=f384dbc4ff8b5c6f8db2c763306cbe2281d66391#f384dbc4ff8b5c6f8db2c763306cbe2281d66391"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -8282,8 +8349,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "servo-media-dummy"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/servo/media?rev=f384dbc4ff8b5c6f8db2c763306cbe2281d66391#f384dbc4ff8b5c6f8db2c763306cbe2281d66391"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"ipc-channel",
|
||||
"servo-media",
|
||||
@@ -8296,8 +8362,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "servo-media-gstreamer"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/servo/media?rev=f384dbc4ff8b5c6f8db2c763306cbe2281d66391#f384dbc4ff8b5c6f8db2c763306cbe2281d66391"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"byte-slice-cast",
|
||||
"glib",
|
||||
@@ -8329,8 +8394,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "servo-media-gstreamer-render"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/servo/media?rev=f384dbc4ff8b5c6f8db2c763306cbe2281d66391#f384dbc4ff8b5c6f8db2c763306cbe2281d66391"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"gstreamer",
|
||||
"gstreamer-video",
|
||||
@@ -8339,8 +8403,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "servo-media-gstreamer-render-android"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/servo/media?rev=f384dbc4ff8b5c6f8db2c763306cbe2281d66391#f384dbc4ff8b5c6f8db2c763306cbe2281d66391"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"glib",
|
||||
"gstreamer",
|
||||
@@ -8353,13 +8416,14 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "servo-media-gstreamer-render-unix"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/servo/media?rev=f384dbc4ff8b5c6f8db2c763306cbe2281d66391#f384dbc4ff8b5c6f8db2c763306cbe2281d66391"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"glib",
|
||||
"gstreamer",
|
||||
"gstreamer-gl",
|
||||
"gstreamer-gl-egl",
|
||||
"gstreamer-gl-wayland",
|
||||
"gstreamer-gl-x11",
|
||||
"gstreamer-video",
|
||||
"servo-media-gstreamer-render",
|
||||
"servo-media-player",
|
||||
@@ -8367,8 +8431,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "servo-media-player"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/servo/media?rev=f384dbc4ff8b5c6f8db2c763306cbe2281d66391#f384dbc4ff8b5c6f8db2c763306cbe2281d66391"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"ipc-channel",
|
||||
"serde",
|
||||
@@ -8379,21 +8442,18 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "servo-media-streams"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/servo/media?rev=f384dbc4ff8b5c6f8db2c763306cbe2281d66391#f384dbc4ff8b5c6f8db2c763306cbe2281d66391"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"uuid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "servo-media-traits"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/servo/media?rev=f384dbc4ff8b5c6f8db2c763306cbe2281d66391#f384dbc4ff8b5c6f8db2c763306cbe2281d66391"
|
||||
version = "0.0.1"
|
||||
|
||||
[[package]]
|
||||
name = "servo-media-webrtc"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/servo/media?rev=f384dbc4ff8b5c6f8db2c763306cbe2281d66391#f384dbc4ff8b5c6f8db2c763306cbe2281d66391"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"log",
|
||||
"servo-media-streams",
|
||||
|
||||
17
Cargo.toml
17
Cargo.toml
@@ -1,6 +1,7 @@
|
||||
[workspace]
|
||||
resolver = "2"
|
||||
members = [
|
||||
"components/media/examples",
|
||||
"components/xpath",
|
||||
"ports/servoshell",
|
||||
"tests/unit/*",
|
||||
@@ -74,13 +75,23 @@ flate2 = "1.1"
|
||||
fonts_traits = { path = "components/shared/fonts" }
|
||||
freetype-sys = "0.20"
|
||||
gleam = "0.15"
|
||||
glib = "0.21"
|
||||
glib-sys = "0.21"
|
||||
glow = "0.16.0"
|
||||
gstreamer = { version = "0.24", features = ["v1_18"] }
|
||||
gstreamer-app = "0.24"
|
||||
gstreamer-audio = "0.24"
|
||||
gstreamer-base = "0.24"
|
||||
gstreamer-gl = "0.24"
|
||||
gstreamer-gl-egl = "0.24"
|
||||
gstreamer-gl-sys = "0.24"
|
||||
gstreamer-gl-wayland = "0.24"
|
||||
gstreamer-gl-x11 = "0.24"
|
||||
gstreamer-play = "0.24"
|
||||
gstreamer-sdp = "0.24"
|
||||
gstreamer-sys = "0.24"
|
||||
gstreamer-video = "0.24"
|
||||
gstreamer-webrtc = { version = "0.24", features = ["v1_18"] }
|
||||
harfbuzz-sys = "0.6.1"
|
||||
headers = "0.4"
|
||||
hitrace = "0.1.6"
|
||||
@@ -162,9 +173,9 @@ serde = "1.0.228"
|
||||
serde_bytes = "0.11"
|
||||
serde_core = "1.0.226"
|
||||
serde_json = "1.0"
|
||||
servo-media = { git = "https://github.com/servo/media", rev = "f384dbc4ff8b5c6f8db2c763306cbe2281d66391" }
|
||||
servo-media-dummy = { git = "https://github.com/servo/media", rev = "f384dbc4ff8b5c6f8db2c763306cbe2281d66391" }
|
||||
servo-media-gstreamer = { git = "https://github.com/servo/media", rev = "f384dbc4ff8b5c6f8db2c763306cbe2281d66391" }
|
||||
servo-media = { path = "components/media/servo-media" }
|
||||
servo-media-dummy = { path = "components/media/backends/dummy" }
|
||||
servo-media-gstreamer = { path = "components/media/backends/gstreamer" }
|
||||
servo-tracing = { path = "components/servo_tracing" }
|
||||
servo_arc = { git = "https://github.com/servo/stylo", rev = "3b46f3ef27eb14ca276b4bf3ff041a6c43d93827" }
|
||||
sha1 = "0.10"
|
||||
|
||||
@@ -42,7 +42,7 @@ ipc-channel = { workspace = true }
|
||||
keyboard-types = { workspace = true }
|
||||
layout_api = { workspace = true }
|
||||
log = { workspace = true }
|
||||
media = { path = "../media" }
|
||||
media = { path = "../media/media-thread" }
|
||||
net = { path = "../net" }
|
||||
net_traits = { workspace = true }
|
||||
paint_api = { workspace = true }
|
||||
|
||||
35
components/media/audio/Cargo.toml
Normal file
35
components/media/audio/Cargo.toml
Normal file
@@ -0,0 +1,35 @@
|
||||
[package]
|
||||
name = "servo-media-audio"
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
license.workspace = true
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
rust-version.workspace = true
|
||||
|
||||
[lib]
|
||||
name = "servo_media_audio"
|
||||
path = "lib.rs"
|
||||
|
||||
[dependencies]
|
||||
euclid = "0.22"
|
||||
log = "0.4"
|
||||
serde_derive = "1.0.66"
|
||||
serde = "1.0.66"
|
||||
servo-media-derive = { path = "../servo-media-derive" }
|
||||
servo-media-player = { path = "../player" }
|
||||
servo-media-traits = { path = "../traits" }
|
||||
servo-media-streams = { path = "../streams" }
|
||||
smallvec = "1"
|
||||
speexdsp-resampler = "0.1.0"
|
||||
num-complex = "0.2.4"
|
||||
|
||||
[dependencies.petgraph]
|
||||
version = "0.4.12"
|
||||
features = ["stable_graph"]
|
||||
|
||||
[dependencies.byte-slice-cast]
|
||||
version = "1"
|
||||
|
||||
[dependencies.num-traits]
|
||||
version = "0.2"
|
||||
283
components/media/audio/analyser_node.rs
Normal file
283
components/media/audio/analyser_node.rs
Normal file
@@ -0,0 +1,283 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::cmp;
|
||||
use std::f32::consts::PI;
|
||||
|
||||
use crate::block::{Block, Chunk, FRAMES_PER_BLOCK_USIZE};
|
||||
use crate::node::{AudioNodeEngine, AudioNodeType, BlockInfo, ChannelInfo, ChannelInterpretation};
|
||||
|
||||
#[derive(AudioNodeCommon)]
|
||||
pub(crate) struct AnalyserNode {
|
||||
channel_info: ChannelInfo,
|
||||
callback: Box<dyn FnMut(Block) + Send>,
|
||||
}
|
||||
|
||||
impl AnalyserNode {
|
||||
pub fn new(callback: Box<dyn FnMut(Block) + Send>, channel_info: ChannelInfo) -> Self {
|
||||
Self {
|
||||
callback,
|
||||
channel_info,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioNodeEngine for AnalyserNode {
|
||||
fn node_type(&self) -> AudioNodeType {
|
||||
AudioNodeType::AnalyserNode
|
||||
}
|
||||
|
||||
fn process(&mut self, inputs: Chunk, _: &BlockInfo) -> Chunk {
|
||||
debug_assert!(inputs.len() == 1);
|
||||
|
||||
let mut push = inputs.blocks[0].clone();
|
||||
push.mix(1, ChannelInterpretation::Speakers);
|
||||
|
||||
(self.callback)(push);
|
||||
|
||||
// analyser node doesn't modify the inputs
|
||||
inputs
|
||||
}
|
||||
}
|
||||
|
||||
/// From <https://webaudio.github.io/web-audio-api/#dom-analysernode-fftsize>
|
||||
pub const MAX_FFT_SIZE: usize = 32768;
|
||||
pub const MAX_BLOCK_COUNT: usize = MAX_FFT_SIZE / FRAMES_PER_BLOCK_USIZE;
|
||||
|
||||
/// The actual analysis is done on the DOM side. We provide
|
||||
/// the actual base functionality in this struct, so the DOM
|
||||
/// just has to do basic shimming
|
||||
pub struct AnalysisEngine {
|
||||
/// The number of past sample-frames to consider in the FFT
|
||||
fft_size: usize,
|
||||
smoothing_constant: f64,
|
||||
min_decibels: f64,
|
||||
max_decibels: f64,
|
||||
/// This is a ring buffer containing the last MAX_FFT_SIZE
|
||||
/// sample-frames
|
||||
data: Box<[f32; MAX_FFT_SIZE]>,
|
||||
/// The index of the current block
|
||||
current_block: usize,
|
||||
/// Have we computed the FFT already?
|
||||
fft_computed: bool,
|
||||
/// Cached blackman window data
|
||||
blackman_windows: Vec<f32>,
|
||||
/// The smoothed FFT data (in frequency domain)
|
||||
smoothed_fft_data: Vec<f32>,
|
||||
/// The computed FFT data, in decibels
|
||||
computed_fft_data: Vec<f32>,
|
||||
/// The windowed time domain data
|
||||
/// Used during FFT computation
|
||||
windowed: Vec<f32>,
|
||||
}
|
||||
|
||||
impl AnalysisEngine {
|
||||
pub fn new(
|
||||
fft_size: usize,
|
||||
smoothing_constant: f64,
|
||||
min_decibels: f64,
|
||||
max_decibels: f64,
|
||||
) -> Self {
|
||||
debug_assert!((32..=32768).contains(&fft_size));
|
||||
// must be a power of two
|
||||
debug_assert!(fft_size & (fft_size - 1) == 0);
|
||||
debug_assert!((0. ..=1.).contains(&smoothing_constant));
|
||||
debug_assert!(max_decibels > min_decibels);
|
||||
Self {
|
||||
fft_size,
|
||||
smoothing_constant,
|
||||
min_decibels,
|
||||
max_decibels,
|
||||
data: Box::new([0.; MAX_FFT_SIZE]),
|
||||
current_block: MAX_BLOCK_COUNT - 1,
|
||||
fft_computed: false,
|
||||
blackman_windows: Vec::with_capacity(fft_size),
|
||||
computed_fft_data: Vec::with_capacity(fft_size / 2),
|
||||
smoothed_fft_data: Vec::with_capacity(fft_size / 2),
|
||||
windowed: Vec::with_capacity(fft_size),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_fft_size(&mut self, fft_size: usize) {
|
||||
debug_assert!((32..=32768).contains(&fft_size));
|
||||
// must be a power of two
|
||||
debug_assert!(fft_size & (fft_size - 1) == 0);
|
||||
self.fft_size = fft_size;
|
||||
self.fft_computed = false;
|
||||
}
|
||||
|
||||
pub fn get_fft_size(&self) -> usize {
|
||||
self.fft_size
|
||||
}
|
||||
|
||||
pub fn set_smoothing_constant(&mut self, smoothing_constant: f64) {
|
||||
debug_assert!((0. ..=1.).contains(&smoothing_constant));
|
||||
self.smoothing_constant = smoothing_constant;
|
||||
self.fft_computed = false;
|
||||
}
|
||||
|
||||
pub fn get_smoothing_constant(&self) -> f64 {
|
||||
self.smoothing_constant
|
||||
}
|
||||
|
||||
pub fn set_min_decibels(&mut self, min_decibels: f64) {
|
||||
debug_assert!(min_decibels < self.max_decibels);
|
||||
self.min_decibels = min_decibels;
|
||||
}
|
||||
|
||||
pub fn get_min_decibels(&self) -> f64 {
|
||||
self.min_decibels
|
||||
}
|
||||
|
||||
pub fn set_max_decibels(&mut self, max_decibels: f64) {
|
||||
debug_assert!(self.min_decibels < max_decibels);
|
||||
self.max_decibels = max_decibels;
|
||||
}
|
||||
|
||||
pub fn get_max_decibels(&self) -> f64 {
|
||||
self.max_decibels
|
||||
}
|
||||
|
||||
fn advance(&mut self) {
|
||||
self.current_block += 1;
|
||||
if self.current_block >= MAX_BLOCK_COUNT {
|
||||
self.current_block = 0;
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the data of the current block
|
||||
fn curent_block_mut(&mut self) -> &mut [f32] {
|
||||
let index = FRAMES_PER_BLOCK_USIZE * self.current_block;
|
||||
&mut self.data[index..(index + FRAMES_PER_BLOCK_USIZE)]
|
||||
}
|
||||
|
||||
/// Given an index from 0 to fft_size, convert it into an index into
|
||||
/// the backing array
|
||||
fn convert_index(&self, index: usize) -> usize {
|
||||
let offset = self.fft_size - index;
|
||||
let last_element = (1 + self.current_block) * FRAMES_PER_BLOCK_USIZE - 1;
|
||||
if offset > last_element {
|
||||
MAX_FFT_SIZE - offset + last_element
|
||||
} else {
|
||||
last_element - offset
|
||||
}
|
||||
}
|
||||
|
||||
/// Given an index into the backing array, increment it
|
||||
fn advance_index(&self, index: &mut usize) {
|
||||
*index += 1;
|
||||
if *index >= MAX_FFT_SIZE {
|
||||
*index = 0;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn push(&mut self, mut block: Block) {
|
||||
debug_assert!(block.chan_count() == 1);
|
||||
self.advance();
|
||||
if !block.is_silence() {
|
||||
self.curent_block_mut().copy_from_slice(block.data_mut());
|
||||
}
|
||||
self.fft_computed = false;
|
||||
}
|
||||
|
||||
/// <https://webaudio.github.io/web-audio-api/#blackman-window>
|
||||
fn compute_blackman_windows(&mut self) {
|
||||
if self.blackman_windows.len() == self.fft_size {
|
||||
return;
|
||||
}
|
||||
const ALPHA: f32 = 0.16;
|
||||
const ALPHA_0: f32 = (1. - ALPHA) / 2.;
|
||||
const ALPHA_1: f32 = 1. / 2.;
|
||||
const ALPHA_2: f32 = ALPHA / 2.;
|
||||
self.blackman_windows.resize(self.fft_size, 0.);
|
||||
let coeff = PI * 2. / self.fft_size as f32;
|
||||
for n in 0..self.fft_size {
|
||||
self.blackman_windows[n] = ALPHA_0 - ALPHA_1 * (coeff * n as f32).cos() +
|
||||
ALPHA_2 * (2. * coeff * n as f32).cos();
|
||||
}
|
||||
}
|
||||
|
||||
fn apply_blackman_window(&mut self) {
|
||||
self.compute_blackman_windows();
|
||||
self.windowed.resize(self.fft_size, 0.);
|
||||
|
||||
let mut data_idx = self.convert_index(0);
|
||||
for n in 0..self.fft_size {
|
||||
self.windowed[n] = self.blackman_windows[n] * self.data[data_idx];
|
||||
self.advance_index(&mut data_idx);
|
||||
}
|
||||
}
|
||||
|
||||
fn compute_fft(&mut self) {
|
||||
if self.fft_computed {
|
||||
return;
|
||||
}
|
||||
self.fft_computed = true;
|
||||
self.apply_blackman_window();
|
||||
self.computed_fft_data.resize(self.fft_size / 2, 0.);
|
||||
self.smoothed_fft_data.resize(self.fft_size / 2, 0.);
|
||||
|
||||
for k in 0..(self.fft_size / 2) {
|
||||
let mut sum_real = 0.;
|
||||
let mut sum_imaginary = 0.;
|
||||
let factor = -2. * PI * k as f32 / self.fft_size as f32;
|
||||
for n in 0..(self.fft_size) {
|
||||
sum_real += self.windowed[n] * (factor * n as f32).cos();
|
||||
sum_imaginary += self.windowed[n] * (factor * n as f32).sin();
|
||||
}
|
||||
let sum_real = sum_real / self.fft_size as f32;
|
||||
let sum_imaginary = sum_imaginary / self.fft_size as f32;
|
||||
let magnitude = (sum_real * sum_real + sum_imaginary * sum_imaginary).sqrt();
|
||||
self.smoothed_fft_data[k] = (self.smoothing_constant * self.smoothed_fft_data[k] as f64 +
|
||||
(1. - self.smoothing_constant) * magnitude as f64)
|
||||
as f32;
|
||||
self.computed_fft_data[k] = 20. * self.smoothed_fft_data[k].log(10.);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fill_time_domain_data(&self, dest: &mut [f32]) {
|
||||
let mut data_idx = self.convert_index(0);
|
||||
let end = cmp::min(self.fft_size, dest.len());
|
||||
for entry in &mut dest[0..end] {
|
||||
*entry = self.data[data_idx];
|
||||
self.advance_index(&mut data_idx);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fill_byte_time_domain_data(&self, dest: &mut [u8]) {
|
||||
let mut data_idx = self.convert_index(0);
|
||||
let end = cmp::min(self.fft_size, dest.len());
|
||||
for entry in &mut dest[0..end] {
|
||||
let result = 128. * (1. + self.data[data_idx]);
|
||||
*entry = clamp_255(result);
|
||||
self.advance_index(&mut data_idx)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fill_frequency_data(&mut self, dest: &mut [f32]) {
|
||||
self.compute_fft();
|
||||
let len = cmp::min(dest.len(), self.computed_fft_data.len());
|
||||
dest[0..len].copy_from_slice(&self.computed_fft_data[0..len]);
|
||||
}
|
||||
|
||||
pub fn fill_byte_frequency_data(&mut self, dest: &mut [u8]) {
|
||||
self.compute_fft();
|
||||
let len = cmp::min(dest.len(), self.computed_fft_data.len());
|
||||
let ratio = 255. / (self.max_decibels - self.min_decibels);
|
||||
for (index, freq) in dest[0..len].iter_mut().enumerate() {
|
||||
let result = ratio * (self.computed_fft_data[index] as f64 - self.min_decibels);
|
||||
*freq = clamp_255(result as f32);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn clamp_255(val: f32) -> u8 {
|
||||
if val > 255. {
|
||||
255
|
||||
} else if val < 0. {
|
||||
0
|
||||
} else {
|
||||
val as u8
|
||||
}
|
||||
}
|
||||
414
components/media/audio/biquad_filter_node.rs
Normal file
414
components/media/audio/biquad_filter_node.rs
Normal file
@@ -0,0 +1,414 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::f64::consts::{PI, SQRT_2};
|
||||
|
||||
use smallvec::SmallVec;
|
||||
|
||||
use crate::block::{Chunk, Tick};
|
||||
use crate::node::{AudioNodeEngine, AudioNodeMessage, AudioNodeType, BlockInfo, ChannelInfo};
|
||||
use crate::param::{Param, ParamType};
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub struct BiquadFilterNodeOptions {
|
||||
pub filter: FilterType,
|
||||
pub frequency: f32,
|
||||
pub detune: f32,
|
||||
pub q: f32,
|
||||
pub gain: f32,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub enum FilterType {
|
||||
LowPass,
|
||||
HighPass,
|
||||
BandPass,
|
||||
LowShelf,
|
||||
HighShelf,
|
||||
Peaking,
|
||||
Notch,
|
||||
AllPass,
|
||||
}
|
||||
|
||||
impl Default for BiquadFilterNodeOptions {
|
||||
fn default() -> Self {
|
||||
BiquadFilterNodeOptions {
|
||||
filter: FilterType::LowPass,
|
||||
frequency: 350.,
|
||||
detune: 0.,
|
||||
q: 1.,
|
||||
gain: 0.,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub enum BiquadFilterNodeMessage {
|
||||
SetFilterType(FilterType),
|
||||
}
|
||||
|
||||
/// The last two input and output values, per-channel
|
||||
// Default sets all fields to zero
|
||||
#[derive(Default, Copy, Clone, PartialEq)]
|
||||
struct BiquadState {
|
||||
/// The input value from last frame
|
||||
x1: f64,
|
||||
/// The input value from two frames ago
|
||||
x2: f64,
|
||||
/// The output value from last frame
|
||||
y1: f64,
|
||||
/// The output value from two frames ago
|
||||
y2: f64,
|
||||
}
|
||||
|
||||
impl BiquadState {
|
||||
/// Update with new input/output values from this frame
|
||||
fn update(&mut self, x: f64, y: f64) {
|
||||
self.x2 = self.x1;
|
||||
self.x1 = x;
|
||||
self.y2 = self.y1;
|
||||
self.y1 = y;
|
||||
}
|
||||
}
|
||||
|
||||
/// <https://webaudio.github.io/web-audio-api/#biquadfilternode>
|
||||
#[derive(AudioNodeCommon)]
|
||||
pub(crate) struct BiquadFilterNode {
|
||||
channel_info: ChannelInfo,
|
||||
filter: FilterType,
|
||||
frequency: Param,
|
||||
detune: Param,
|
||||
q: Param,
|
||||
gain: Param,
|
||||
/// The computed filter parameter b0
|
||||
/// This is actually b0 / a0, we pre-divide
|
||||
/// for efficiency
|
||||
b0: f64,
|
||||
/// The computed filter parameter b1
|
||||
/// This is actually b1 / a0, we pre-divide
|
||||
/// for efficiency
|
||||
b1: f64,
|
||||
/// The computed filter parameter b2
|
||||
/// This is actually b2 / a0, we pre-divide
|
||||
/// for efficiency
|
||||
b2: f64,
|
||||
/// The computed filter parameter a1
|
||||
/// This is actually a1 / a0, we pre-divide
|
||||
/// for efficiency
|
||||
a1: f64,
|
||||
/// The computed filter parameter a2
|
||||
/// This is actually a2 / a0, we pre-divide
|
||||
/// for efficiency
|
||||
a2: f64,
|
||||
/// Stored filter state, this contains the last two
|
||||
/// frames of input and output values for every
|
||||
/// channel
|
||||
state: SmallVec<[BiquadState; 2]>,
|
||||
}
|
||||
|
||||
impl BiquadFilterNode {
|
||||
pub fn new(
|
||||
options: BiquadFilterNodeOptions,
|
||||
channel_info: ChannelInfo,
|
||||
sample_rate: f32,
|
||||
) -> Self {
|
||||
let mut ret = Self {
|
||||
channel_info,
|
||||
filter: options.filter,
|
||||
frequency: Param::new(options.frequency),
|
||||
gain: Param::new(options.gain),
|
||||
q: Param::new(options.q),
|
||||
detune: Param::new(options.detune),
|
||||
b0: 0.,
|
||||
b1: 0.,
|
||||
b2: 0.,
|
||||
a1: 0.,
|
||||
a2: 0.,
|
||||
state: SmallVec::new(),
|
||||
};
|
||||
ret.update_coefficients(sample_rate);
|
||||
ret
|
||||
}
|
||||
|
||||
pub fn update_parameters(&mut self, info: &BlockInfo, tick: Tick) -> bool {
|
||||
let mut changed = self.frequency.update(info, tick);
|
||||
changed |= self.detune.update(info, tick);
|
||||
changed |= self.q.update(info, tick);
|
||||
changed |= self.gain.update(info, tick);
|
||||
|
||||
if changed {
|
||||
self.update_coefficients(info.sample_rate);
|
||||
}
|
||||
changed
|
||||
}
|
||||
|
||||
/// Set to the constant z-transform y[n] = b0 * x[n]
|
||||
fn constant_z_transform(&mut self, b0: f64) {
|
||||
self.b0 = b0;
|
||||
self.b1 = 0.;
|
||||
self.b2 = 0.;
|
||||
self.a1 = 0.;
|
||||
self.a2 = 0.;
|
||||
}
|
||||
|
||||
/// Update the coefficients a1, a2, b0, b1, b2, given the sample_rate
|
||||
///
|
||||
/// See <https://webaudio.github.io/web-audio-api/#filters-characteristics>
|
||||
fn update_coefficients(&mut self, fs: f32) {
|
||||
let g: f64 = self.gain.value().into();
|
||||
let q: f64 = self.q.value().into();
|
||||
let freq: f64 = self.frequency.value().into();
|
||||
let f0: f64 = freq * (2.0_f64).powf(self.detune.value() as f64 / 1200.);
|
||||
let fs: f64 = fs.into();
|
||||
// clamp to nominal range
|
||||
// https://webaudio.github.io/web-audio-api/#biquadfilternode
|
||||
let f0 = if f0 > fs / 2. || !f0.is_finite() {
|
||||
fs / 2.
|
||||
} else if f0 < 0. {
|
||||
0.
|
||||
} else {
|
||||
f0
|
||||
};
|
||||
|
||||
let normalized = f0 / fs;
|
||||
let a = 10.0_f64.powf(g / 40.);
|
||||
|
||||
// the boundary values sometimes need limits to
|
||||
// be taken
|
||||
match self.filter {
|
||||
FilterType::LowPass => {
|
||||
if normalized == 1. {
|
||||
self.constant_z_transform(1.);
|
||||
return;
|
||||
} else if normalized == 0. {
|
||||
self.constant_z_transform(0.);
|
||||
return;
|
||||
}
|
||||
},
|
||||
FilterType::HighPass => {
|
||||
if normalized == 1. {
|
||||
self.constant_z_transform(0.);
|
||||
return;
|
||||
} else if normalized == 0. {
|
||||
self.constant_z_transform(1.);
|
||||
return;
|
||||
}
|
||||
},
|
||||
FilterType::LowShelf => {
|
||||
if normalized == 1. {
|
||||
self.constant_z_transform(a * a);
|
||||
return;
|
||||
} else if normalized == 0. {
|
||||
self.constant_z_transform(1.);
|
||||
return;
|
||||
}
|
||||
},
|
||||
FilterType::HighShelf => {
|
||||
if normalized == 1. {
|
||||
self.constant_z_transform(1.);
|
||||
return;
|
||||
} else if normalized == 0. {
|
||||
self.constant_z_transform(a * a);
|
||||
return;
|
||||
}
|
||||
},
|
||||
FilterType::Peaking => {
|
||||
if normalized == 0. || normalized == 1. {
|
||||
self.constant_z_transform(1.);
|
||||
return;
|
||||
} else if q <= 0. {
|
||||
self.constant_z_transform(a * a);
|
||||
return;
|
||||
}
|
||||
},
|
||||
FilterType::AllPass => {
|
||||
if normalized == 0. || normalized == 1. {
|
||||
self.constant_z_transform(1.);
|
||||
return;
|
||||
} else if q <= 0. {
|
||||
self.constant_z_transform(-1.);
|
||||
return;
|
||||
}
|
||||
},
|
||||
FilterType::Notch => {
|
||||
if normalized == 0. || normalized == 1. {
|
||||
self.constant_z_transform(1.);
|
||||
return;
|
||||
} else if q <= 0. {
|
||||
self.constant_z_transform(0.);
|
||||
return;
|
||||
}
|
||||
},
|
||||
FilterType::BandPass => {
|
||||
if normalized == 0. || normalized == 1. {
|
||||
self.constant_z_transform(0.);
|
||||
return;
|
||||
} else if q <= 0. {
|
||||
self.constant_z_transform(1.);
|
||||
return;
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
let omega0 = 2. * PI * normalized;
|
||||
let sin_omega = omega0.sin();
|
||||
let cos_omega = omega0.cos();
|
||||
let alpha_q = sin_omega / (2. * q);
|
||||
let alpha_q_db = sin_omega / (2. * 10.0_f64.powf(q / 20.));
|
||||
let alpha_s = sin_omega / SQRT_2;
|
||||
|
||||
// we predivide by a0
|
||||
let a0;
|
||||
|
||||
match self.filter {
|
||||
FilterType::LowPass => {
|
||||
self.b0 = (1. - cos_omega) / 2.;
|
||||
self.b1 = 1. - cos_omega;
|
||||
self.b2 = self.b1 / 2.;
|
||||
a0 = 1. + alpha_q_db;
|
||||
self.a1 = -2. * cos_omega;
|
||||
self.a2 = 1. - alpha_q_db;
|
||||
},
|
||||
FilterType::HighPass => {
|
||||
self.b0 = (1. + cos_omega) / 2.;
|
||||
self.b1 = -(1. + cos_omega);
|
||||
self.b2 = -self.b1 / 2.;
|
||||
a0 = 1. + alpha_q_db;
|
||||
self.a1 = -2. * cos_omega;
|
||||
self.a2 = 1. - alpha_q_db;
|
||||
},
|
||||
FilterType::BandPass => {
|
||||
self.b0 = alpha_q;
|
||||
self.b1 = 0.;
|
||||
self.b2 = -alpha_q;
|
||||
a0 = 1. + alpha_q;
|
||||
self.a1 = -2. * cos_omega;
|
||||
self.a2 = 1. - alpha_q;
|
||||
},
|
||||
FilterType::Notch => {
|
||||
self.b0 = 1.;
|
||||
self.b1 = -2. * cos_omega;
|
||||
self.b2 = 1.;
|
||||
a0 = 1. + alpha_q;
|
||||
self.a1 = -2. * cos_omega;
|
||||
self.a2 = 1. - alpha_q;
|
||||
},
|
||||
FilterType::AllPass => {
|
||||
self.b0 = 1. - alpha_q;
|
||||
self.b1 = -2. * cos_omega;
|
||||
self.b2 = 1. + alpha_q;
|
||||
a0 = 1. + alpha_q;
|
||||
self.a1 = -2. * cos_omega;
|
||||
self.a2 = 1. - alpha_q;
|
||||
},
|
||||
FilterType::Peaking => {
|
||||
self.b0 = 1. + alpha_q * a;
|
||||
self.b1 = -2. * cos_omega;
|
||||
self.b2 = 1. - alpha_q * a;
|
||||
a0 = 1. + alpha_q / a;
|
||||
self.a1 = -2. * cos_omega;
|
||||
self.a2 = 1. - alpha_q / a;
|
||||
},
|
||||
FilterType::LowShelf => {
|
||||
let alpha_rt_a = 2. * alpha_s * a.sqrt();
|
||||
self.b0 = a * ((a + 1.) - (a - 1.) * cos_omega + alpha_rt_a);
|
||||
self.b1 = 2. * a * ((a - 1.) - (a + 1.) * cos_omega);
|
||||
self.b2 = a * ((a + 1.) - (a - 1.) * cos_omega - alpha_rt_a);
|
||||
a0 = (a + 1.) + (a - 1.) * cos_omega + alpha_rt_a;
|
||||
self.a1 = -2. * ((a - 1.) + (a + 1.) * cos_omega);
|
||||
self.a2 = (a + 1.) + (a - 1.) * cos_omega - alpha_rt_a;
|
||||
},
|
||||
FilterType::HighShelf => {
|
||||
let alpha_rt_a = 2. * alpha_s * a.sqrt();
|
||||
self.b0 = a * ((a + 1.) + (a - 1.) * cos_omega + alpha_rt_a);
|
||||
self.b1 = -2. * a * ((a - 1.) + (a + 1.) * cos_omega);
|
||||
self.b2 = a * ((a + 1.) + (a - 1.) * cos_omega - alpha_rt_a);
|
||||
a0 = (a + 1.) - (a - 1.) * cos_omega + alpha_rt_a;
|
||||
self.a1 = 2. * ((a - 1.) - (a + 1.) * cos_omega);
|
||||
self.a2 = (a + 1.) - (a - 1.) * cos_omega - alpha_rt_a;
|
||||
},
|
||||
}
|
||||
self.b0 /= a0;
|
||||
self.b1 /= a0;
|
||||
self.b2 /= a0;
|
||||
self.a1 /= a0;
|
||||
self.a2 /= a0;
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioNodeEngine for BiquadFilterNode {
    fn node_type(&self) -> AudioNodeType {
        AudioNodeType::BiquadFilterNode
    }

    /// Run the biquad difference equation over the single input block.
    ///
    /// Per-channel filter state lives in `self.state`; the coefficients
    /// (b0, b1, b2, a1, a2) are refreshed from the AudioParams on every
    /// frame via `update_parameters`.
    fn process(&mut self, mut inputs: Chunk, info: &BlockInfo) -> Chunk {
        debug_assert!(inputs.len() == 1);
        // One filter-state entry per channel; new channels start zeroed.
        self.state
            .resize(inputs.blocks[0].chan_count() as usize, Default::default());
        self.update_parameters(info, Tick(0));

        // XXXManishearth this node has tail time, so even if the block is silence
        // we must still compute things on it. However, it is possible to become
        // a dumb passthrough as long as we reach a quiescent state
        //
        // see https://dxr.mozilla.org/mozilla-central/rev/87a95e1b7ec691bef7b938e722fe1b01cce68664/dom/media/webaudio/blink/Biquad.cpp#81-91

        let repeat_or_silence = inputs.blocks[0].is_silence() || inputs.blocks[0].is_repeat();

        if repeat_or_silence && !self.state.iter().all(|s| *s == self.state[0]) {
            // In case our input is repeat/silence but our states are not identical, we must
            // explicitly duplicate, since mutate_with will otherwise only operate
            // on the first channel, ignoring the states of the later ones
            inputs.blocks[0].explicit_repeat();
        } else {
            // In case the states are identical, just make any silence explicit,
            // since mutate_with can't handle silent blocks
            inputs.blocks[0].explicit_silence();
        }

        {
            let mut iter = inputs.blocks[0].iter();
            while let Some(mut frame) = iter.next() {
                self.update_parameters(info, frame.tick());
                frame.mutate_with(|sample, chan| {
                    let state = &mut self.state[chan as usize];
                    // Direct Form I biquad:
                    // y[n] = b0*x[n] + b1*x[n-1] + b2*x[n-2] - a1*y[n-1] - a2*y[n-2]
                    // (computed in f64 to limit accumulated rounding error)
                    let x0 = *sample as f64;
                    let y0 = self.b0 * x0 + self.b1 * state.x1 + self.b2 * state.x2 -
                        self.a1 * state.y1 -
                        self.a2 * state.y2;
                    *sample = y0 as f32;
                    state.update(x0, y0);
                });
            }
        }

        if inputs.blocks[0].is_repeat() {
            // A repeat block was processed through channel 0's state only;
            // propagate that state to every channel so they stay in sync.
            let state = self.state[0];
            self.state.iter_mut().for_each(|s| *s = state);
        }

        inputs
    }

    fn get_param(&mut self, id: ParamType) -> &mut Param {
        match id {
            ParamType::Frequency => &mut self.frequency,
            ParamType::Detune => &mut self.detune,
            ParamType::Q => &mut self.q,
            ParamType::Gain => &mut self.gain,
            _ => panic!("Unknown param {:?} for BiquadFilterNode", id),
        }
    }

    /// Handle node-specific control messages; a filter-type change forces
    /// an immediate coefficient recomputation.
    fn message_specific(&mut self, message: AudioNodeMessage, sample_rate: f32) {
        if let AudioNodeMessage::BiquadFilterNode(m) = message {
            match m {
                BiquadFilterNodeMessage::SetFilterType(f) => {
                    self.filter = f;
                    self.update_coefficients(sample_rate);
                },
            }
        }
    }
}
|
||||
663
components/media/audio/block.rs
Normal file
663
components/media/audio/block.rs
Normal file
@@ -0,0 +1,663 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::f32::consts::SQRT_2;
|
||||
use std::mem;
|
||||
use std::ops::*;
|
||||
|
||||
use byte_slice_cast::*;
|
||||
use euclid::default::Vector3D;
|
||||
use smallvec::{SmallVec, smallvec};
|
||||
|
||||
use crate::graph::{PortIndex, PortKind};
|
||||
use crate::node::ChannelInterpretation;
|
||||
|
||||
// defined by spec
// https://webaudio.github.io/web-audio-api/#render-quantum
pub const FRAMES_PER_BLOCK: Tick = Tick(128);
/// `FRAMES_PER_BLOCK` as a `usize`, for sizing and indexing sample buffers.
pub const FRAMES_PER_BLOCK_USIZE: usize = FRAMES_PER_BLOCK.0 as usize;

/// A tick, i.e. the time taken for a single frame
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug)]
pub struct Tick(pub u64);
|
||||
|
||||
/// A collection of blocks received as input by a node
/// or outputted by a node.
///
/// This will usually be a single block.
///
/// Some nodes have multiple inputs or outputs, which is
/// where this becomes useful. Source nodes have an input
/// of an empty chunk.
pub struct Chunk {
    /// One block per port; usually a single element.
    pub blocks: SmallVec<[Block; 1]>,
}
|
||||
|
||||
impl Default for Chunk {
|
||||
fn default() -> Self {
|
||||
Chunk {
|
||||
blocks: SmallVec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Chunk {
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.blocks.is_empty()
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.blocks.len()
|
||||
}
|
||||
|
||||
pub fn explicit_silence() -> Self {
|
||||
let mut block = Block::default();
|
||||
block.explicit_silence();
|
||||
let blocks = smallvec![block];
|
||||
Self { blocks }
|
||||
}
|
||||
}
|
||||
|
||||
/// We render audio in blocks of size FRAMES_PER_BLOCK
///
/// A single block may contain multiple channels
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct Block {
    /// The number of channels in this block
    channels: u8,
    /// This is an optimization which means that the buffer is representing multiple channels with the
    /// same content at once. Happens when audio is upmixed or when a source like
    /// an oscillator node has multiple channel outputs
    repeat: bool,
    /// If this vector is empty, it is a shorthand for "silence"
    /// It is possible to obtain an explicitly silent buffer via .explicit_silence()
    ///
    /// This must be of length channels * FRAMES_PER_BLOCK, unless `repeat` is true,
    /// in which case it will be of length FRAMES_PER_BLOCK
    ///
    /// Channels are stored planar (not interleaved): channel `c` occupies
    /// the contiguous range `c * FRAMES_PER_BLOCK .. (c + 1) * FRAMES_PER_BLOCK`.
    buffer: Vec<f32>,
}
|
||||
|
||||
impl Default for Block {
|
||||
fn default() -> Self {
|
||||
Block {
|
||||
channels: 1,
|
||||
repeat: false,
|
||||
buffer: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Block {
|
||||
/// Empty block with no channels, for pushing
|
||||
/// new channels to.
|
||||
///
|
||||
/// Must be used with push_chan
|
||||
pub fn empty() -> Self {
|
||||
Block {
|
||||
channels: 0,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn for_channels_explicit(channels: u8) -> Self {
|
||||
Block {
|
||||
channels,
|
||||
repeat: false,
|
||||
buffer: vec![0.; FRAMES_PER_BLOCK_USIZE * channels as usize],
|
||||
}
|
||||
}
|
||||
|
||||
/// This provides the entire buffer as a mutable slice of u8
|
||||
pub fn as_mut_byte_slice(&mut self) -> &mut [u8] {
|
||||
self.data_mut().as_mut_byte_slice()
|
||||
}
|
||||
|
||||
pub fn for_vec(buffer: Vec<f32>) -> Self {
|
||||
assert!(buffer.len() % FRAMES_PER_BLOCK_USIZE == 0);
|
||||
Block {
|
||||
channels: (buffer.len() / FRAMES_PER_BLOCK_USIZE) as u8,
|
||||
repeat: false,
|
||||
buffer,
|
||||
}
|
||||
}
|
||||
|
||||
/// Zero-gain sum with another buffer
|
||||
///
|
||||
/// Used after mixing multiple inputs to a single port
|
||||
pub fn sum(mut self, mut other: Self) -> Self {
|
||||
if self.is_silence() {
|
||||
other
|
||||
} else if other.is_silence() {
|
||||
self
|
||||
} else {
|
||||
debug_assert_eq!(self.channels, other.channels);
|
||||
if self.repeat ^ other.repeat {
|
||||
self.explicit_repeat();
|
||||
other.explicit_repeat();
|
||||
}
|
||||
debug_assert_eq!(self.buffer.len(), other.buffer.len());
|
||||
for (a, b) in self.buffer.iter_mut().zip(other.buffer.iter()) {
|
||||
*a += b
|
||||
}
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// If this is in "silence" mode without a buffer, allocate a silent buffer
|
||||
pub fn explicit_silence(&mut self) {
|
||||
if self.buffer.is_empty() {
|
||||
self.buffer.resize(FRAMES_PER_BLOCK_USIZE, 0.);
|
||||
self.repeat = true;
|
||||
}
|
||||
}
|
||||
|
||||
/// This provides the entire buffer as a mutable slice of f32
|
||||
pub fn data_mut(&mut self) -> &mut [f32] {
|
||||
self.explicit_silence();
|
||||
&mut self.buffer
|
||||
}
|
||||
|
||||
pub fn explicit_repeat(&mut self) {
|
||||
if self.repeat {
|
||||
debug_assert!(self.buffer.len() == FRAMES_PER_BLOCK_USIZE);
|
||||
if self.channels > 1 {
|
||||
let mut new = Vec::with_capacity(FRAMES_PER_BLOCK_USIZE * self.channels as usize);
|
||||
for _ in 0..self.channels {
|
||||
new.extend(&self.buffer)
|
||||
}
|
||||
|
||||
self.buffer = new;
|
||||
}
|
||||
self.repeat = false;
|
||||
} else if self.is_silence() {
|
||||
self.buffer
|
||||
.resize(FRAMES_PER_BLOCK_USIZE * self.channels as usize, 0.);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn data_chan_mut(&mut self, chan: u8) -> &mut [f32] {
|
||||
self.explicit_repeat();
|
||||
let start = chan as usize * FRAMES_PER_BLOCK_USIZE;
|
||||
&mut self.buffer[start..start + FRAMES_PER_BLOCK_USIZE]
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn data_chan(&self, chan: u8) -> &[f32] {
|
||||
debug_assert!(
|
||||
!self.is_silence(),
|
||||
"data_chan doesn't work with silent buffers"
|
||||
);
|
||||
let offset = if self.repeat {
|
||||
0
|
||||
} else {
|
||||
chan as usize * FRAMES_PER_BLOCK_USIZE
|
||||
};
|
||||
&self.buffer[offset..offset + FRAMES_PER_BLOCK_USIZE]
|
||||
}
|
||||
|
||||
pub fn take(&mut self) -> Block {
|
||||
let new = Block {
|
||||
channels: self.channels,
|
||||
..Default::default()
|
||||
};
|
||||
mem::replace(self, new)
|
||||
}
|
||||
|
||||
pub fn chan_count(&self) -> u8 {
|
||||
self.channels
|
||||
}
|
||||
|
||||
pub fn iter(&mut self) -> FrameIterator<'_> {
|
||||
FrameIterator::new(self)
|
||||
}
|
||||
|
||||
pub fn is_silence(&self) -> bool {
|
||||
self.buffer.is_empty()
|
||||
}
|
||||
|
||||
pub fn is_repeat(&self) -> bool {
|
||||
self.repeat
|
||||
}
|
||||
|
||||
pub fn data_chan_frame(&self, frame: usize, chan: u8) -> f32 {
|
||||
if self.is_silence() {
|
||||
0.
|
||||
} else {
|
||||
self.data_chan(chan)[frame]
|
||||
}
|
||||
}
|
||||
|
||||
pub fn push_chan(&mut self, data: &[f32]) {
|
||||
assert!(!self.repeat);
|
||||
assert!(!self.is_silence() || self.channels == 0);
|
||||
assert!(data.len() == FRAMES_PER_BLOCK_USIZE);
|
||||
self.buffer.extend(data);
|
||||
self.channels += 1;
|
||||
}
|
||||
|
||||
/// upmix/downmix the channels if necessary
|
||||
///
|
||||
/// Currently only supports upmixing from 1
|
||||
pub fn mix(&mut self, channels: u8, interpretation: ChannelInterpretation) {
|
||||
// If we're not changing the number of channels, we
|
||||
// don't actually need to mix
|
||||
if self.channels == channels {
|
||||
return;
|
||||
}
|
||||
|
||||
// Silent buffers stay silent
|
||||
if self.is_silence() {
|
||||
self.channels = channels;
|
||||
return;
|
||||
}
|
||||
|
||||
if interpretation == ChannelInterpretation::Discrete {
|
||||
// discrete downmixes by truncation, upmixes by adding
|
||||
// silent channels
|
||||
|
||||
// If we're discrete, have a repeat, and are downmixing,
|
||||
// just truncate by changing the channel value
|
||||
if self.repeat && self.channels > channels {
|
||||
self.channels = channels;
|
||||
} else {
|
||||
// otherwise resize the buffer, silent-filling when necessary
|
||||
self.resize_silence(channels);
|
||||
}
|
||||
} else {
|
||||
// For speakers, we have to do special things based on the
|
||||
// interpretation of the channels for each kind of speakers
|
||||
|
||||
// The layout of each speaker kind is:
|
||||
//
|
||||
// - Mono: [The mono channel]
|
||||
// - Stereo: [L, R]
|
||||
// - Quad: [L, R, SL, SR]
|
||||
// - 5.1: [L, R, C, LFE, SL, SR]
|
||||
|
||||
match (self.channels, channels) {
|
||||
// Upmixing
|
||||
// https://webaudio.github.io/web-audio-api/#UpMix-sub
|
||||
|
||||
// mono
|
||||
(1, 2) => {
|
||||
// output.{L, R} = input
|
||||
self.repeat(2);
|
||||
},
|
||||
(1, 4) => {
|
||||
// output.{L, R} = input
|
||||
self.repeat(2);
|
||||
// output.{SL, SR} = 0
|
||||
self.resize_silence(4);
|
||||
},
|
||||
(1, 6) => {
|
||||
let mut v = Vec::with_capacity(channels as usize * FRAMES_PER_BLOCK_USIZE);
|
||||
// output.{L, R} = 0
|
||||
v.resize(2 * FRAMES_PER_BLOCK_USIZE, 0.);
|
||||
// output.C = input
|
||||
v.extend(&self.buffer);
|
||||
self.buffer = v;
|
||||
// output.{LFE, SL, SR} = 0
|
||||
self.resize_silence(6);
|
||||
},
|
||||
|
||||
// stereo
|
||||
(2, 4) | (2, 6) => {
|
||||
// output.{L, R} = input.{L, R}
|
||||
// (5.1) output.{C, LFE} = 0
|
||||
// output.{SL, SR} = 0
|
||||
self.resize_silence(channels);
|
||||
},
|
||||
|
||||
// quad
|
||||
(4, 6) => {
|
||||
// we can avoid this and instead calculate offsets
|
||||
// based off whether or not this is `repeat`, but
|
||||
// a `repeat` quad block should be rare
|
||||
self.explicit_repeat();
|
||||
|
||||
let mut v = Vec::with_capacity(6 * FRAMES_PER_BLOCK_USIZE);
|
||||
// output.{L, R} = input.{L, R}
|
||||
v.extend(&self.buffer[0..2 * FRAMES_PER_BLOCK_USIZE]);
|
||||
// output.{C, LFE} = 0
|
||||
v.resize(4 * FRAMES_PER_BLOCK_USIZE, 0.);
|
||||
// output.{SL, R} = input.{SL, SR}
|
||||
v.extend(&self.buffer[2 * FRAMES_PER_BLOCK_USIZE..]);
|
||||
self.buffer = v;
|
||||
self.channels = channels;
|
||||
},
|
||||
|
||||
// Downmixing
|
||||
// https://webaudio.github.io/web-audio-api/#down-mix
|
||||
|
||||
// mono
|
||||
(2, 1) => {
|
||||
let mut v = Vec::with_capacity(FRAMES_PER_BLOCK_USIZE);
|
||||
for frame in 0..FRAMES_PER_BLOCK_USIZE {
|
||||
// output = 0.5 * (input.L + input.R);
|
||||
let o =
|
||||
0.5 * (self.data_chan_frame(frame, 0) + self.data_chan_frame(frame, 1));
|
||||
v.push(o);
|
||||
}
|
||||
self.buffer = v;
|
||||
self.channels = 1;
|
||||
self.repeat = false;
|
||||
},
|
||||
(4, 1) => {
|
||||
let mut v = Vec::with_capacity(FRAMES_PER_BLOCK_USIZE);
|
||||
for frame in 0..FRAMES_PER_BLOCK_USIZE {
|
||||
// output = 0.5 * (input.L + input.R + input.SL + input.SR);
|
||||
let o = 0.25 *
|
||||
(self.data_chan_frame(frame, 0) +
|
||||
self.data_chan_frame(frame, 1) +
|
||||
self.data_chan_frame(frame, 2) +
|
||||
self.data_chan_frame(frame, 3));
|
||||
v.push(o);
|
||||
}
|
||||
self.buffer = v;
|
||||
self.channels = 1;
|
||||
self.repeat = false;
|
||||
},
|
||||
(6, 1) => {
|
||||
let mut v = Vec::with_capacity(FRAMES_PER_BLOCK_USIZE);
|
||||
for frame in 0..FRAMES_PER_BLOCK_USIZE {
|
||||
// output = sqrt(0.5) * (input.L + input.R) + input.C + 0.5 * (input.SL + input.SR)
|
||||
let o =
|
||||
// sqrt(0.5) * (input.L + input.R)
|
||||
SQRT_2 * (self.data_chan_frame(frame, 0) +
|
||||
self.data_chan_frame(frame, 1)) +
|
||||
// input.C
|
||||
self.data_chan_frame(frame, 2) +
|
||||
// (ignore LFE)
|
||||
// + 0 * self.buffer[frame + 3 * FRAMES_PER_BLOCK_USIZE]
|
||||
// 0.5 * (input.SL + input.SR)
|
||||
0.5 * (self.data_chan_frame(frame, 4) +
|
||||
self.data_chan_frame(frame, 5));
|
||||
v.push(o);
|
||||
}
|
||||
self.buffer = v;
|
||||
self.channels = 1;
|
||||
self.repeat = false;
|
||||
},
|
||||
|
||||
// stereo
|
||||
(4, 2) => {
|
||||
let mut v = Vec::with_capacity(2 * FRAMES_PER_BLOCK_USIZE);
|
||||
v.resize(2 * FRAMES_PER_BLOCK_USIZE, 0.);
|
||||
for frame in 0..FRAMES_PER_BLOCK_USIZE {
|
||||
// output.L = 0.5 * (input.L + input.SL)
|
||||
v[frame] =
|
||||
0.5 * (self.data_chan_frame(frame, 0) + self.data_chan_frame(frame, 2));
|
||||
// output.R = 0.5 * (input.R + input.SR)
|
||||
v[frame + FRAMES_PER_BLOCK_USIZE] =
|
||||
0.5 * (self.data_chan_frame(frame, 1) + self.data_chan_frame(frame, 3));
|
||||
}
|
||||
self.buffer = v;
|
||||
self.channels = 2;
|
||||
self.repeat = false;
|
||||
},
|
||||
(6, 2) => {
|
||||
let mut v = Vec::with_capacity(2 * FRAMES_PER_BLOCK_USIZE);
|
||||
v.resize(2 * FRAMES_PER_BLOCK_USIZE, 0.);
|
||||
for frame in 0..FRAMES_PER_BLOCK_USIZE {
|
||||
// output.L = L + sqrt(0.5) * (input.C + input.SL)
|
||||
v[frame] = self.data_chan_frame(frame, 0) +
|
||||
SQRT_2 *
|
||||
(self.data_chan_frame(frame, 2) +
|
||||
self.data_chan_frame(frame, 4));
|
||||
// output.R = R + sqrt(0.5) * (input.C + input.SR)
|
||||
v[frame + FRAMES_PER_BLOCK_USIZE] = self.data_chan_frame(frame, 1) +
|
||||
SQRT_2 *
|
||||
(self.data_chan_frame(frame, 2) +
|
||||
self.data_chan_frame(frame, 5));
|
||||
}
|
||||
self.buffer = v;
|
||||
self.channels = 2;
|
||||
self.repeat = false;
|
||||
},
|
||||
|
||||
// quad
|
||||
(6, 4) => {
|
||||
let mut v = Vec::with_capacity(6 * FRAMES_PER_BLOCK_USIZE);
|
||||
v.resize(6 * FRAMES_PER_BLOCK_USIZE, 0.);
|
||||
for frame in 0..FRAMES_PER_BLOCK_USIZE {
|
||||
// output.L = L + sqrt(0.5) * input.C
|
||||
v[frame] = self.data_chan_frame(frame, 0) +
|
||||
SQRT_2 * self.data_chan_frame(frame, 2);
|
||||
// output.R = R + sqrt(0.5) * input.C
|
||||
v[frame + FRAMES_PER_BLOCK_USIZE] = self.data_chan_frame(frame, 1) +
|
||||
SQRT_2 * self.data_chan_frame(frame, 2);
|
||||
// output.SL = input.SL
|
||||
v[frame + 2 * FRAMES_PER_BLOCK_USIZE] = self.data_chan_frame(frame, 4);
|
||||
// output.SR = input.SR
|
||||
v[frame + 3 * FRAMES_PER_BLOCK_USIZE] = self.data_chan_frame(frame, 5);
|
||||
}
|
||||
self.buffer = v;
|
||||
self.channels = 4;
|
||||
self.repeat = false;
|
||||
},
|
||||
|
||||
// If it's not a known kind of speaker configuration, treat as
|
||||
// discrete
|
||||
_ => {
|
||||
self.mix(channels, ChannelInterpretation::Discrete);
|
||||
},
|
||||
}
|
||||
debug_assert!(self.channels == channels);
|
||||
}
|
||||
}
|
||||
|
||||
/// Resize to add or remove channels, fill extra channels with silence
|
||||
pub fn resize_silence(&mut self, channels: u8) {
|
||||
self.explicit_repeat();
|
||||
self.buffer
|
||||
.resize(FRAMES_PER_BLOCK_USIZE * channels as usize, 0.);
|
||||
self.channels = channels;
|
||||
}
|
||||
|
||||
/// Take a single-channel block and repeat the
|
||||
/// channel
|
||||
pub fn repeat(&mut self, channels: u8) {
|
||||
debug_assert!(self.channels == 1);
|
||||
self.channels = channels;
|
||||
if !self.is_silence() {
|
||||
self.repeat = true;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn interleave(&mut self) -> Vec<f32> {
|
||||
self.explicit_repeat();
|
||||
let mut vec = Vec::with_capacity(self.buffer.len());
|
||||
// FIXME this isn't too efficient
|
||||
vec.resize(self.buffer.len(), 0.);
|
||||
for frame in 0..FRAMES_PER_BLOCK_USIZE {
|
||||
let channels = self.channels as usize;
|
||||
for chan in 0..channels {
|
||||
vec[frame * channels + chan] = self.buffer[chan * FRAMES_PER_BLOCK_USIZE + frame]
|
||||
}
|
||||
}
|
||||
vec
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.buffer.is_empty()
|
||||
}
|
||||
|
||||
/// Get the position, forward, and up vectors for a given
|
||||
/// AudioListener-produced block
|
||||
pub fn listener_data(&self, frame: Tick) -> (Vector3D<f32>, Vector3D<f32>, Vector3D<f32>) {
|
||||
let frame = frame.0 as usize;
|
||||
(
|
||||
Vector3D::new(
|
||||
self.data_chan_frame(frame, 0),
|
||||
self.data_chan_frame(frame, 1),
|
||||
self.data_chan_frame(frame, 2),
|
||||
),
|
||||
Vector3D::new(
|
||||
self.data_chan_frame(frame, 3),
|
||||
self.data_chan_frame(frame, 4),
|
||||
self.data_chan_frame(frame, 5),
|
||||
),
|
||||
Vector3D::new(
|
||||
self.data_chan_frame(frame, 6),
|
||||
self.data_chan_frame(frame, 7),
|
||||
self.data_chan_frame(frame, 8),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator over frames in a block
pub struct FrameIterator<'a> {
    // Next frame to hand out; runs 0..FRAMES_PER_BLOCK.
    frame: Tick,
    block: &'a mut Block,
}

impl<'a> FrameIterator<'a> {
    #[inline]
    pub fn new(block: &'a mut Block) -> Self {
        FrameIterator {
            frame: Tick(0),
            block,
        }
    }

    /// Advance the iterator
    ///
    /// We can't implement Iterator since it doesn't support
    /// streaming iterators, but we can call `while let Some(frame) = iter.next()`
    /// here
    #[inline]
    pub fn next<'b>(&'b mut self) -> Option<FrameRef<'b>> {
        let curr = self.frame;
        if curr < FRAMES_PER_BLOCK {
            self.frame.advance();
            // The FrameRef reborrows the block for 'b, so only one frame
            // can be alive at a time (the "streaming iterator" property).
            Some(FrameRef {
                frame: curr,
                block: self.block,
            })
        } else {
            None
        }
    }
}
|
||||
|
||||
/// A reference to a frame
pub struct FrameRef<'a> {
    // Index of this frame within the block.
    frame: Tick,
    block: &'a mut Block,
}

impl<'a> FrameRef<'a> {
    /// The tick (frame index within the block) this reference points at.
    #[inline]
    pub fn tick(&self) -> Tick {
        self.frame
    }

    /// Given a block and a function `f`, mutate the frame through all channels with `f`
    ///
    /// Use this when you plan to do the same operation for each channel.
    /// (Helpers for the other cases will eventually exist)
    ///
    /// Block must not be silence
    ///
    /// The second parameter to f is the channel number, 0 in case of a repeat()
    #[inline]
    pub fn mutate_with<F>(&mut self, mut f: F)
    where
        F: FnMut(&mut f32, u8),
    {
        debug_assert!(
            !self.block.is_silence(),
            "mutate_frame_with should not be called with a silenced block, \
             call .explicit_silence() if you wish to use this"
        );
        if self.block.repeat {
            // A repeat block stores one shared channel; mutate it once.
            f(&mut self.block.buffer[self.frame.0 as usize], 0)
        } else {
            for chan in 0..self.block.channels {
                f(
                    &mut self.block.buffer
                        [chan as usize * FRAMES_PER_BLOCK_USIZE + self.frame.0 as usize],
                    chan,
                )
            }
        }
    }
}
|
||||
|
||||
// operator impls

// Indexing a chunk by an output/input port picks the corresponding block;
// param ports carry no block, so indexing with one is a logic error.
impl<T: PortKind> IndexMut<PortIndex<T>> for Chunk {
    fn index_mut(&mut self, i: PortIndex<T>) -> &mut Block {
        if let PortIndex::Port(i) = i {
            &mut self.blocks[i as usize]
        } else {
            panic!("attempted to index chunk with param")
        }
    }
}

impl<T: PortKind> Index<PortIndex<T>> for Chunk {
    type Output = Block;
    fn index(&self, i: PortIndex<T>) -> &Block {
        if let PortIndex::Port(i) = i {
            &self.blocks[i as usize]
        } else {
            panic!("attempted to index chunk with param")
        }
    }
}

// Tick arithmetic: Tick + Tick / Tick - Tick delegate to the u64 forms below.
impl Add<Tick> for Tick {
    type Output = Tick;
    fn add(self, other: Tick) -> Self {
        self + other.0
    }
}

impl AddAssign for Tick {
    fn add_assign(&mut self, other: Tick) {
        *self = *self + other
    }
}

impl Sub<Tick> for Tick {
    type Output = Tick;
    fn sub(self, other: Tick) -> Self {
        self - other.0
    }
}

impl Add<u64> for Tick {
    type Output = Tick;
    fn add(self, other: u64) -> Self {
        Tick(self.0 + other)
    }
}

// NOTE(review): plain `-` on the inner u64, so subtracting past zero
// panics in debug and wraps in release — callers must not underflow.
impl Sub<u64> for Tick {
    type Output = Tick;
    fn sub(self, other: u64) -> Self {
        Tick(self.0 - other)
    }
}

// Dividing a tick count by a sample rate yields a time in seconds.
impl Div<f64> for Tick {
    type Output = f64;
    fn div(self, other: f64) -> f64 {
        self.0 as f64 / other
    }
}
|
||||
|
||||
impl Tick {
    pub const FRAMES_PER_BLOCK: Tick = FRAMES_PER_BLOCK;
    // Tolerance subtracted before ceil() so that a time falling a hair past
    // an exact frame boundary (from floating-point error) still maps to
    // that boundary instead of the next frame.
    const EPSILON: f64 = 1e-7;

    /// Convert a time in seconds to a tick count at the given sample rate,
    /// rounding up to the frame that contains it.
    pub fn from_time(time: f64, rate: f32) -> Tick {
        Tick((time * rate as f64 - Tick::EPSILON).ceil() as u64)
    }

    /// Move to the next frame.
    pub fn advance(&mut self) {
        self.0 += 1;
    }
}
|
||||
439
components/media/audio/buffer_source_node.rs
Normal file
439
components/media/audio/buffer_source_node.rs
Normal file
@@ -0,0 +1,439 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use crate::block::{Block, Chunk, FRAMES_PER_BLOCK, Tick};
|
||||
use crate::node::{
|
||||
AudioNodeEngine, AudioNodeType, AudioScheduledSourceNodeMessage, BlockInfo, ChannelInfo,
|
||||
OnEndedCallback, ShouldPlay,
|
||||
};
|
||||
use crate::param::{Param, ParamType};
|
||||
|
||||
/// Control messages directed to AudioBufferSourceNodes.
#[derive(Debug, Clone)]
pub enum AudioBufferSourceNodeMessage {
    /// Set the data block holding the audio sample data to be played.
    SetBuffer(Option<AudioBuffer>),
    /// Enable or disable looping.
    SetLoopEnabled(bool),
    /// Set the loop end position, in seconds.
    SetLoopEnd(f64),
    /// Set the loop start position, in seconds.
    SetLoopStart(f64),
    /// Set start parameters (when, offset, duration).
    SetStartParams(f64, Option<f64>, Option<f64>),
}
|
||||
|
||||
/// This specifies options for constructing an AudioBufferSourceNode.
#[derive(Debug, Clone)]
pub struct AudioBufferSourceNodeOptions {
    /// The audio asset to be played.
    pub buffer: Option<AudioBuffer>,
    /// The initial value for the detune AudioParam.
    pub detune: f32,
    /// The initial value for the loop_enabled attribute.
    pub loop_enabled: bool,
    /// The initial value for the loop_end attribute.
    pub loop_end: Option<f64>,
    /// The initial value for the loop_start attribute.
    pub loop_start: Option<f64>,
    /// The initial value for the playback_rate AudioParam.
    pub playback_rate: f32,
}
|
||||
|
||||
impl Default for AudioBufferSourceNodeOptions {
|
||||
fn default() -> Self {
|
||||
AudioBufferSourceNodeOptions {
|
||||
buffer: None,
|
||||
detune: 0.,
|
||||
loop_enabled: false,
|
||||
loop_end: None,
|
||||
loop_start: None,
|
||||
playback_rate: 1.,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// AudioBufferSourceNode engine.
/// <https://webaudio.github.io/web-audio-api/#AudioBufferSourceNode>
#[derive(AudioScheduledSourceNode, AudioNodeCommon)]
#[allow(dead_code)]
pub(crate) struct AudioBufferSourceNode {
    channel_info: ChannelInfo,
    /// A data block holding the audio sample data to be played.
    buffer: Option<AudioBuffer>,
    /// How many more buffer-frames to output. See buffer_pos for clarification.
    buffer_duration: f64,
    /// "Index" of the next buffer frame to play. "Index" is in quotes because
    /// this variable maps to a playhead position (the offset in seconds can be
    /// obtained by dividing by self.buffer.sample_rate), and therefore has
    /// subsample accuracy; a fractional "index" means interpolation is needed.
    buffer_pos: f64,
    /// AudioParam to modulate the speed at which is rendered the audio stream.
    detune: Param,
    /// Whether we need to compute offsets from scratch.
    initialized_pos: bool,
    /// Indicates if the region of audio data designated by loopStart and loopEnd
    /// should be played continuously in a loop.
    loop_enabled: bool,
    /// A playhead position where looping should end if the loop_enabled
    /// attribute is true.
    loop_end: Option<f64>,
    /// A playhead position where looping should begin if the loop_enabled
    /// attribute is true.
    loop_start: Option<f64>,
    /// The speed at which to render the audio stream. Can be negative if the
    /// audio is to be played backwards. With a negative playback_rate, looping
    /// jumps from loop_start to loop_end instead of the other way around.
    playback_rate: Param,
    /// Time at which the source should start playing.
    start_at: Option<Tick>,
    /// Offset parameter passed to Start().
    start_offset: Option<f64>,
    /// Duration parameter passed to Start().
    start_duration: Option<f64>,
    /// The same as start_at, but with subsample accuracy.
    /// FIXME: AudioScheduledSourceNode should use this as well.
    start_when: f64,
    /// Time at which the source should stop playing.
    stop_at: Option<Tick>,
    /// The ended event callback.
    pub onended_callback: Option<OnEndedCallback>,
}
|
||||
|
||||
impl AudioBufferSourceNode {
    /// Build the engine-side node from its construction options.
    /// Playback state (buffer_pos, start/stop times) begins unset; the
    /// actual offsets are computed lazily on the first processed block.
    pub fn new(options: AudioBufferSourceNodeOptions, channel_info: ChannelInfo) -> Self {
        Self {
            channel_info,
            buffer: options.buffer,
            buffer_pos: 0.,
            detune: Param::new_krate(options.detune),
            initialized_pos: false,
            loop_enabled: options.loop_enabled,
            loop_end: options.loop_end,
            loop_start: options.loop_start,
            playback_rate: Param::new_krate(options.playback_rate),
            // Infinite until Start()'s duration (if any) is applied.
            buffer_duration: f64::INFINITY,
            start_at: None,
            start_offset: None,
            start_duration: None,
            start_when: 0.,
            stop_at: None,
            onended_callback: None,
        }
    }

    /// Apply a control message from the DOM side; the second argument
    /// (the sample rate) is unused by this node's messages.
    pub fn handle_message(&mut self, message: AudioBufferSourceNodeMessage, _: f32) {
        match message {
            AudioBufferSourceNodeMessage::SetBuffer(buffer) => {
                self.buffer = buffer;
            },
            // XXX(collares): To fully support dynamically updating loop bounds,
            // Must truncate self.buffer_pos if it is now outside the loop.
            AudioBufferSourceNodeMessage::SetLoopEnabled(loop_enabled) => {
                self.loop_enabled = loop_enabled
            },
            AudioBufferSourceNodeMessage::SetLoopEnd(loop_end) => self.loop_end = Some(loop_end),
            AudioBufferSourceNodeMessage::SetLoopStart(loop_start) => {
                self.loop_start = Some(loop_start)
            },
            AudioBufferSourceNodeMessage::SetStartParams(when, offset, duration) => {
                self.start_when = when;
                self.start_offset = offset;
                self.start_duration = duration;
            },
        }
    }
}
|
||||
|
||||
impl AudioNodeEngine for AudioBufferSourceNode {
    fn node_type(&self) -> AudioNodeType {
        AudioNodeType::AudioBufferSourceNode
    }

    // Source nodes take no inputs; their output comes from the buffer.
    fn input_count(&self) -> u32 {
        0
    }

    /// Renders one quantum of audio from the buffer.
    ///
    /// Produces silence when there is no buffer or the node is not scheduled
    /// to play. Otherwise copies (fast path) or interpolates (slow path)
    /// samples into a fresh block, honoring loop bounds, playback rate,
    /// detune, and the start offset/duration budget.
    fn process(&mut self, mut inputs: Chunk, info: &BlockInfo) -> Chunk {
        debug_assert!(inputs.is_empty());

        if self.buffer.is_none() {
            inputs.blocks.push(Default::default());
            return inputs;
        }

        // Frame range [start_at, stop_at) within this quantum that we must fill.
        let (start_at, stop_at) = match self.should_play_at(info.frame) {
            ShouldPlay::No => {
                inputs.blocks.push(Default::default());
                return inputs;
            },
            ShouldPlay::Between(start, end) => (start.0 as usize, end.0 as usize),
        };

        let buffer = self.buffer.as_ref().unwrap();

        // Loop bounds in buffer-sample units; default to the whole buffer.
        let (mut actual_loop_start, mut actual_loop_end) = (0., buffer.len() as f64);
        if self.loop_enabled {
            let loop_start = self.loop_start.unwrap_or(0.);
            let loop_end = self.loop_end.unwrap_or(0.);

            if loop_start >= 0. && loop_end > loop_start {
                actual_loop_start = loop_start * (buffer.sample_rate as f64);
                actual_loop_end = loop_end * (buffer.sample_rate as f64);
            }
        }

        // https://webaudio.github.io/web-audio-api/#computedplaybackrate
        self.playback_rate.update(info, Tick(0));
        self.detune.update(info, Tick(0));
        // computed_playback_rate can be negative or zero.
        let computed_playback_rate =
            self.playback_rate.value() as f64 * (2.0_f64).powf(self.detune.value() as f64 / 1200.);
        let forward = computed_playback_rate >= 0.;

        if !self.initialized_pos {
            self.initialized_pos = true;

            // Apply the offset and duration parameters passed to start. We handle
            // this here because the buffer may be set after Start() gets called, so
            // this might be the first time we know the buffer's sample rate.
            if let Some(start_offset) = self.start_offset {
                self.buffer_pos = start_offset * (buffer.sample_rate as f64);
                // Clamp the starting position into [0, buffer.len()].
                if self.buffer_pos < 0. {
                    self.buffer_pos = 0.
                } else if self.buffer_pos > buffer.len() as f64 {
                    self.buffer_pos = buffer.len() as f64;
                }
            }

            if self.loop_enabled {
                if forward && self.buffer_pos >= actual_loop_end {
                    self.buffer_pos = actual_loop_start;
                }
                // https://github.com/WebAudio/web-audio-api/issues/2031
                if !forward && self.buffer_pos < actual_loop_start {
                    self.buffer_pos = actual_loop_end;
                }
            }

            if let Some(start_duration) = self.start_duration {
                self.buffer_duration = start_duration * (buffer.sample_rate as f64);
            }

            // start_when can be subsample accurate. Correct buffer_pos.
            //
            // XXX(collares): What happens to "start_when" if the buffer gets
            // set after Start()?
            // XXX(collares): Need a better way to distingush between Start()
            // being called with "when" in the past (in which case "when" must
            // be ignored) and Start() being called with "when" in the future.
            // This can now make a difference if "when" shouldn't be ignored
            // but falls after the last frame of the previous quantum.
            if self.start_when > info.time - 1. / info.sample_rate as f64 {
                let first_time = info.time + start_at as f64 / info.sample_rate as f64;
                if self.start_when <= first_time {
                    let subsample_offset = (first_time - self.start_when) *
                        (buffer.sample_rate as f64) *
                        computed_playback_rate;
                    self.buffer_pos += subsample_offset;
                    self.buffer_duration -= subsample_offset.abs();
                }
            }
        }

        // Buffer samples consumed per output tick (accounts for rate mismatch).
        let mut buffer_offset_per_tick =
            computed_playback_rate * (buffer.sample_rate as f64 / info.sample_rate as f64);

        // WebAudio §1.9.5: "Setting the loop attribute to true causes playback of
        // the region of the buffer defined by the endpoints loopStart and loopEnd
        // to continue indefinitely, once any part of the looped region has been
        // played. While loop remains true, looped playback will continue until one
        // of the following occurs:
        // * stop() is called,
        // * the scheduled stop time has been reached,
        // * the duration has been exceeded, if start() was called with a duration value."
        // Even with extreme playback rates we must stay inside the loop body, so wrap
        // the per-tick delta instead of bailing.
        if self.loop_enabled && actual_loop_end > actual_loop_start {
            let loop_length = actual_loop_end - actual_loop_start;
            if loop_length > 0. {
                let step = buffer_offset_per_tick.abs();
                if step >= loop_length {
                    let mut wrapped = step.rem_euclid(loop_length);
                    if wrapped == 0. {
                        wrapped = loop_length;
                    }
                    // Preserve the playback direction while shrinking the step.
                    buffer_offset_per_tick = wrapped.copysign(buffer_offset_per_tick);
                }
            }
        }

        // We will output at most this many frames (fewer if we run out of data).
        let frames_to_output = stop_at - start_at;

        // Fast path for the case where we can just copy FRAMES_PER_BLOCK
        // frames straight from the buffer.
        if frames_to_output == FRAMES_PER_BLOCK.0 as usize &&
            forward &&
            buffer_offset_per_tick == 1. &&
            self.buffer_pos.trunc() == self.buffer_pos &&
            self.buffer_pos + (FRAMES_PER_BLOCK.0 as f64) <= actual_loop_end &&
            FRAMES_PER_BLOCK.0 as f64 <= self.buffer_duration
        {
            let mut block = Block::empty();
            let pos = self.buffer_pos as usize;

            for chan in 0..buffer.chans() {
                block.push_chan(&buffer.buffers[chan as usize][pos..(pos + frames_to_output)]);
            }

            inputs.blocks.push(block);
            self.buffer_pos += FRAMES_PER_BLOCK.0 as f64;
            self.buffer_duration -= FRAMES_PER_BLOCK.0 as f64;
        } else {
            // Slow path, with interpolation.
            let mut block = Block::default();
            block.repeat(buffer.chans());
            block.explicit_repeat();

            debug_assert!(buffer.chans() > 0);

            for chan in 0..buffer.chans() {
                let data = block.data_chan_mut(chan);
                // Restrict writes to the [start_at, start_at + frames_to_output)
                // window of this quantum.
                let (_, data) = data.split_at_mut(start_at);
                let (data, _) = data.split_at_mut(frames_to_output);

                // Each channel replays the same positions, so track them locally.
                let mut pos = self.buffer_pos;
                let mut duration = self.buffer_duration;

                for sample in data {
                    if duration <= 0. {
                        break;
                    }

                    if self.loop_enabled {
                        if forward && pos >= actual_loop_end {
                            pos -= actual_loop_end - actual_loop_start;
                        } else if !forward && pos < actual_loop_start {
                            pos += actual_loop_end - actual_loop_start;
                        }
                    } else if pos < 0. || pos >= buffer.len() as f64 {
                        break;
                    }

                    *sample = buffer.interpolate(chan, pos);
                    pos += buffer_offset_per_tick;
                    duration -= buffer_offset_per_tick.abs();
                }

                // This is the last channel, update parameters.
                if chan == buffer.chans() - 1 {
                    self.buffer_pos = pos;
                    self.buffer_duration = duration;
                }
            }

            inputs.blocks.push(block);
        }

        // Fire the ended callback when we ran off the buffer (non-looping)
        // or exhausted the duration budget.
        if !self.loop_enabled && (self.buffer_pos < 0. || self.buffer_pos >= buffer.len() as f64) ||
            self.buffer_duration <= 0.
        {
            self.maybe_trigger_onended_callback();
        }

        inputs
    }

    fn get_param(&mut self, id: ParamType) -> &mut Param {
        match id {
            ParamType::PlaybackRate => &mut self.playback_rate,
            ParamType::Detune => &mut self.detune,
            _ => panic!("Unknown param {:?} for AudioBufferSourceNode", id),
        }
    }

    make_message_handler!(
        AudioBufferSourceNode: handle_message,
        AudioScheduledSourceNode: handle_source_node_message
    );
}
|
||||
|
||||
/// A multichannel buffer of `f32` PCM samples plus its sample rate.
#[derive(Debug, Clone)]
pub struct AudioBuffer {
    /// Per-channel sample storage.
    /// Invariant: all buffers must be of the same length.
    pub buffers: Vec<Vec<f32>>,
    /// Sample rate of the stored audio, in Hz.
    pub sample_rate: f32,
}

impl AudioBuffer {
    /// Creates a silent buffer with `chan` channels of `len` samples each.
    ///
    /// # Panics
    /// Panics if `chan` is zero (the type's methods index channel 0).
    pub fn new(chan: u8, len: usize, sample_rate: f32) -> Self {
        assert!(chan > 0);
        AudioBuffer {
            // vec![value; n] allocates and fills in one step — clearer than
            // the previous with_capacity + resize(clone) dance, identical result.
            buffers: vec![vec![0.; len]; chan as usize],
            sample_rate,
        }
    }

    /// Wraps per-channel sample vectors into a buffer.
    ///
    /// # Panics
    /// Panics if the channels are not all the same length (the invariant
    /// `len()`/`interpolate()` rely on). An empty `buffers` is accepted here
    /// but will panic later in `len()`.
    pub fn from_buffers(buffers: Vec<Vec<f32>>, sample_rate: f32) -> Self {
        for buf in &buffers {
            assert_eq!(buf.len(), buffers[0].len())
        }

        Self {
            buffers,
            sample_rate,
        }
    }

    /// Convenience constructor for a single-channel (mono) buffer.
    pub fn from_buffer(buffer: Vec<f32>, sample_rate: f32) -> Self {
        AudioBuffer::from_buffers(vec![buffer], sample_rate)
    }

    /// Number of sample-frames per channel.
    pub fn len(&self) -> usize {
        self.buffers[0].len()
    }

    /// True when the buffer holds zero sample-frames.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Number of channels.
    pub fn chans(&self) -> u8 {
        self.buffers.len() as u8
    }

    /// Returns the sample of channel `chan` at (fractional) position `pos`,
    /// linearly interpolated between the two neighboring samples. Past the
    /// last sample it linearly extrapolates from the last two samples when
    /// possible, otherwise returns the last sample unchanged.
    ///
    /// `pos` must satisfy `0 <= pos < len()` (checked in debug builds only).
    ///
    // XXX(collares): There are better fast interpolation algorithms.
    // Firefox uses (via Speex's resampler) the algorithm described in
    // https://ccrma.stanford.edu/~jos/resample/resample.pdf
    // There are Rust bindings: https://github.com/rust-av/speexdsp-rs
    pub fn interpolate(&self, chan: u8, pos: f64) -> f32 {
        debug_assert!(pos >= 0. && pos < self.len() as f64);

        let prev = pos.floor() as usize;
        let offset = pos - pos.floor();
        match self.buffers[chan as usize].get(prev + 1) {
            Some(next_sample) => {
                // Standard linear interpolation between prev and next.
                ((1. - offset) * (self.buffers[chan as usize][prev] as f64) +
                    offset * (*next_sample as f64)) as f32
            },
            _ => {
                // linear extrapolation of two prev samples if there are two
                if prev > 0 {
                    ((1. + offset) * (self.buffers[chan as usize][prev] as f64) -
                        offset * (self.buffers[chan as usize][prev - 1] as f64))
                        as f32
                } else {
                    self.buffers[chan as usize][prev]
                }
            },
        }
    }

    /// Mutable access to one channel's samples.
    pub fn data_chan_mut(&mut self, chan: u8) -> &mut [f32] {
        &mut self.buffers[chan as usize]
    }
}
|
||||
119
components/media/audio/channel_node.rs
Normal file
119
components/media/audio/channel_node.rs
Normal file
@@ -0,0 +1,119 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use crate::block::{Block, Chunk, FRAMES_PER_BLOCK_USIZE};
|
||||
use crate::node::{
|
||||
AudioNodeEngine, AudioNodeType, BlockInfo, ChannelCountMode, ChannelInfo, ChannelInterpretation,
|
||||
};
|
||||
|
||||
/// Construction options shared by the channel merger/splitter nodes.
#[derive(Copy, Clone, Debug)]
pub struct ChannelNodeOptions {
    /// Number of channels the node merges (one input per channel).
    pub channels: u8,
}
|
||||
|
||||
/// Engine-side state for a ChannelMergerNode: combines its inputs into a
/// single multichannel output (see the `AudioNodeEngine` impl).
#[derive(AudioNodeCommon)]
pub(crate) struct ChannelMergerNode {
    channel_info: ChannelInfo,
    /// Number of inputs/output channels; fixed at construction time.
    channels: u8,
}
|
||||
|
||||
impl ChannelMergerNode {
|
||||
pub fn new(params: ChannelNodeOptions, channel_info: ChannelInfo) -> Self {
|
||||
ChannelMergerNode {
|
||||
channel_info,
|
||||
channels: params.channels,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioNodeEngine for ChannelMergerNode {
    fn node_type(&self) -> AudioNodeType {
        AudioNodeType::ChannelMergerNode
    }

    /// Copies each input block into one channel of a single output block.
    fn process(&mut self, mut inputs: Chunk, _: &BlockInfo) -> Chunk {
        debug_assert!(inputs.len() == self.channels as usize);

        let mut block = Block::default();
        block.repeat(self.channels);
        block.explicit_repeat();

        // The block stores channels contiguously, FRAMES_PER_BLOCK_USIZE
        // samples apiece; channel i receives input block i verbatim.
        for (i, channel) in block
            .data_mut()
            .chunks_mut(FRAMES_PER_BLOCK_USIZE)
            .enumerate()
        {
            channel.copy_from_slice(inputs.blocks[i].data_mut())
        }

        // Replace the N mono inputs with the single merged block.
        inputs.blocks.clear();
        inputs.blocks.push(block);
        inputs
    }

    fn input_count(&self) -> u32 {
        self.channels as u32
    }

    // Channel configuration of a merger is fixed by the spec.
    fn set_channel_count_mode(&mut self, _: ChannelCountMode) {
        panic!("channel merger nodes cannot have their mode changed");
    }

    fn set_channel_count(&mut self, _: u8) {
        panic!("channel merger nodes cannot have their channel count changed");
    }
}
|
||||
|
||||
/// Engine-side state for a ChannelSplitterNode: fans out each channel of its
/// single input to a separate mono output (see the `AudioNodeEngine` impl).
#[derive(AudioNodeCommon)]
pub(crate) struct ChannelSplitterNode {
    channel_info: ChannelInfo,
}
|
||||
|
||||
impl ChannelSplitterNode {
|
||||
pub fn new(channel_info: ChannelInfo) -> Self {
|
||||
ChannelSplitterNode { channel_info }
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioNodeEngine for ChannelSplitterNode {
    fn node_type(&self) -> AudioNodeType {
        AudioNodeType::ChannelSplitterNode
    }

    /// Splits the single multichannel input into one mono block per channel.
    fn process(&mut self, mut inputs: Chunk, _: &BlockInfo) -> Chunk {
        debug_assert!(inputs.len() == 1);

        let original = inputs.blocks.pop().unwrap();

        if original.is_silence() {
            // Cheap path: silent input yields one default (silent) block
            // per channel without copying any sample data.
            inputs
                .blocks
                .resize(original.chan_count() as usize, Block::default())
        } else {
            for chan in 0..original.chan_count() {
                let mut block = Block::empty();
                block.push_chan(original.data_chan(chan));
                inputs.blocks.push(block);
            }
        }

        inputs
    }

    fn output_count(&self) -> u32 {
        self.channel_count() as u32
    }

    // Channel configuration of a splitter is fixed by the spec.
    fn set_channel_count_mode(&mut self, _: ChannelCountMode) {
        panic!("channel splitter nodes cannot have their mode changed");
    }

    fn set_channel_interpretation(&mut self, _: ChannelInterpretation) {
        panic!("channel splitter nodes cannot have their channel interpretation changed");
    }

    fn set_channel_count(&mut self, _: u8) {
        panic!("channel splitter nodes cannot have their channel count changed");
    }
}
|
||||
96
components/media/audio/constant_source_node.rs
Normal file
96
components/media/audio/constant_source_node.rs
Normal file
@@ -0,0 +1,96 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use crate::block::{Chunk, Tick};
|
||||
use crate::node::{
|
||||
AudioNodeEngine, AudioNodeType, AudioScheduledSourceNodeMessage, BlockInfo, ChannelInfo,
|
||||
OnEndedCallback, ShouldPlay,
|
||||
};
|
||||
use crate::param::{Param, ParamType};
|
||||
|
||||
/// Construction options for a ConstantSourceNode.
#[derive(Copy, Clone, Debug)]
pub struct ConstantSourceNodeOptions {
    /// Initial value of the node's `offset` parameter.
    pub offset: f32,
}

impl Default for ConstantSourceNodeOptions {
    /// Unity offset, matching the Web Audio default for `offset`.
    fn default() -> Self {
        Self { offset: 1. }
    }
}
|
||||
|
||||
/// Engine-side state for a ConstantSourceNode: outputs the (automatable)
/// `offset` value while scheduled to play.
#[derive(AudioScheduledSourceNode, AudioNodeCommon)]
pub(crate) struct ConstantSourceNode {
    channel_info: ChannelInfo,
    /// Automatable parameter holding the constant output value.
    offset: Param,
    /// Tick at which playback should begin, once scheduled.
    start_at: Option<Tick>,
    /// Tick at which playback should stop, if scheduled.
    stop_at: Option<Tick>,
    /// Callback fired when playback ends.
    onended_callback: Option<OnEndedCallback>,
}
|
||||
|
||||
impl ConstantSourceNode {
|
||||
pub fn new(options: ConstantSourceNodeOptions, channel_info: ChannelInfo) -> Self {
|
||||
Self {
|
||||
channel_info,
|
||||
offset: Param::new(options.offset),
|
||||
start_at: None,
|
||||
stop_at: None,
|
||||
onended_callback: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn update_parameters(&mut self, info: &BlockInfo, tick: Tick) -> bool {
|
||||
self.offset.update(info, tick)
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioNodeEngine for ConstantSourceNode {
    fn node_type(&self) -> AudioNodeType {
        AudioNodeType::ConstantSourceNode
    }

    /// Fills the frames inside the scheduled play window with the current
    /// `offset` value; frames outside the window stay silent.
    fn process(&mut self, mut inputs: Chunk, info: &BlockInfo) -> Chunk {
        debug_assert!(inputs.is_empty());

        inputs.blocks.push(Default::default());

        let (start_at, stop_at) = match self.should_play_at(info.frame) {
            ShouldPlay::No => {
                return inputs;
            },
            ShouldPlay::Between(start, end) => (start, end),
        };

        {
            // Materialize the block so individual frames can be written.
            inputs.blocks[0].explicit_silence();

            let mut iter = inputs.blocks[0].iter();
            // Cache the value; re-read only when automation reports a change.
            let mut offset = self.offset.value();
            while let Some(mut frame) = iter.next() {
                let tick = frame.tick();
                if tick < start_at {
                    continue;
                } else if tick > stop_at {
                    break;
                }
                if self.update_parameters(info, frame.tick()) {
                    offset = self.offset.value();
                }
                frame.mutate_with(|sample, _| *sample = offset);
            }
        }
        inputs
    }

    // Source nodes take no inputs.
    fn input_count(&self) -> u32 {
        0
    }

    fn get_param(&mut self, id: ParamType) -> &mut Param {
        match id {
            ParamType::Offset => &mut self.offset,
            _ => panic!("Unknown param {:?} for the offset", id),
        }
    }
    make_message_handler!(AudioScheduledSourceNode: handle_source_node_message);
}
|
||||
362
components/media/audio/context.rs
Normal file
362
components/media/audio/context.rs
Normal file
@@ -0,0 +1,362 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::cell::Cell;
|
||||
use std::sync::mpsc::{self, Sender};
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::thread::Builder;
|
||||
|
||||
use servo_media_traits::{BackendMsg, ClientContextId, MediaInstance, MediaInstanceError};
|
||||
|
||||
use crate::AudioBackend;
|
||||
use crate::decoder::{AudioDecoder, AudioDecoderCallbacks, AudioDecoderOptions};
|
||||
use crate::graph::{AudioGraph, InputPort, NodeId, OutputPort, PortId};
|
||||
use crate::node::{AudioNodeInit, AudioNodeMessage, ChannelInfo};
|
||||
use crate::render_thread::{AudioRenderThread, AudioRenderThreadMsg, SinkEosCallback};
|
||||
use crate::sink::AudioSinkError;
|
||||
|
||||
/// Describes the state of the audio context on the control thread.
///
/// NOTE(review): the variant names appear to mirror the Web Audio API's
/// `AudioContextState` values — confirm against the DOM-side usage.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ProcessingState {
    /// The audio context is suspended (context time is not proceeding,
    /// audio hardware may be powered down/released).
    Suspended,
    /// Audio is being processed.
    Running,
    /// The audio context has been released, and can no longer be used
    /// to process audio.
    Closed,
}
|
||||
|
||||
/// Result of a context state-change request (see `make_state_change!`):
/// `Some(())` when the transition was applied, `None` otherwise.
pub type StateChangeResult = Option<()>;
|
||||
|
||||
/// Identify the type of playback, which affects tradeoffs between audio output
/// and power consumption.
#[derive(Copy, Clone)]
pub enum LatencyCategory {
    /// Balance audio output latency and power consumption.
    Balanced,
    /// Provide the lowest audio output latency possible without glitching.
    Interactive,
    /// Prioritize sustained playback without interruption over audio output latency.
    /// Lowest power consumption.
    Playback,
}
|
||||
|
||||
/// User-specified options for a real time audio context.
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct RealTimeAudioContextOptions {
|
||||
/// Number of samples that will play in one second, measured in Hz.
|
||||
pub sample_rate: f32,
|
||||
/// Type of playback.
|
||||
pub latency_hint: LatencyCategory,
|
||||
}
|
||||
|
||||
impl Default for RealTimeAudioContextOptions {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
sample_rate: 44100.,
|
||||
latency_hint: LatencyCategory::Interactive,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// User-specified options for an offline audio context.
#[derive(Copy, Clone)]
pub struct OfflineAudioContextOptions {
    /// The number of channels for this offline audio context.
    pub channels: u8,
    /// The length of the rendered audio buffer in sample-frames.
    pub length: usize,
    /// Number of samples that will be rendered in one second, measured in Hz.
    pub sample_rate: f32,
}

impl Default for OfflineAudioContextOptions {
    /// A zero-length mono context at 44.1 kHz.
    fn default() -> Self {
        OfflineAudioContextOptions {
            channels: 1,
            length: 0,
            sample_rate: 44100.,
        }
    }
}
|
||||
|
||||
/// Lets real-time options be passed wherever `AudioContextOptions` is expected.
impl From<RealTimeAudioContextOptions> for AudioContextOptions {
    fn from(options: RealTimeAudioContextOptions) -> Self {
        AudioContextOptions::RealTimeAudioContext(options)
    }
}
|
||||
|
||||
/// Lets offline options be passed wherever `AudioContextOptions` is expected.
impl From<OfflineAudioContextOptions> for AudioContextOptions {
    fn from(options: OfflineAudioContextOptions) -> Self {
        AudioContextOptions::OfflineAudioContext(options)
    }
}
|
||||
|
||||
/// User-specified options for a real time or offline audio context.
#[derive(Copy, Clone)]
pub enum AudioContextOptions {
    /// Options for a context rendering to an audio sink in real time.
    RealTimeAudioContext(RealTimeAudioContextOptions),
    /// Options for a context rendering into an in-memory buffer.
    OfflineAudioContext(OfflineAudioContextOptions),
}

impl Default for AudioContextOptions {
    /// Defaults to a real-time context with default real-time options.
    fn default() -> Self {
        AudioContextOptions::RealTimeAudioContext(Default::default())
    }
}
|
||||
|
||||
/// Representation of an audio context on the control thread.
pub struct AudioContext {
    /// Media instance ID.
    id: usize,
    /// Client context ID.
    client_context_id: ClientContextId,
    /// Owner backend communication channel.
    backend_chan: Arc<Mutex<Sender<BackendMsg>>>,
    /// Rendering thread communication channel.
    sender: Sender<AudioRenderThreadMsg>,
    /// State of the audio context on the control thread.
    state: Cell<ProcessingState>,
    /// Number of samples that will be played in one second.
    sample_rate: f32,
    /// The identifier of an AudioDestinationNode with a single input
    /// representing the final destination for all audio.
    dest_node: NodeId,
    /// The identifier of the graph's listener node, exposed via `listener()`.
    listener: NodeId,
    /// Factory producing a fresh backend decoder for each
    /// `decode_audio_data` call (runs on a dedicated decoder thread).
    make_decoder: Arc<dyn Fn() -> Box<dyn AudioDecoder> + Sync + Send>,
}
|
||||
|
||||
/// Opaque error produced by audio context operations.
#[derive(Debug)]
pub struct AudioContextError;

impl std::fmt::Display for AudioContextError {
    fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        formatter.write_str("AudioContextError")
    }
}

impl std::error::Error for AudioContextError {}
|
||||
|
||||
impl AudioContext {
    /// Constructs a new audio context.
    ///
    /// Spawns the render thread for backend `B`, waits for its
    /// initialization result, and returns the control-thread handle.
    /// The context starts in the `Suspended` state.
    pub fn new<B: AudioBackend>(
        id: usize,
        client_context_id: &ClientContextId,
        backend_chan: Arc<Mutex<Sender<BackendMsg>>>,
        options: AudioContextOptions,
    ) -> Result<Self, AudioSinkError> {
        // Real-time contexts are rendered in stereo; offline contexts use
        // the caller-requested channel count.
        let (sample_rate, channels) = match options {
            AudioContextOptions::RealTimeAudioContext(ref options) => (options.sample_rate, 2),
            AudioContextOptions::OfflineAudioContext(ref options) => {
                (options.sample_rate, options.channels)
            },
        };

        let (sender, receiver) = mpsc::channel();
        let sender_ = sender.clone();
        let graph = AudioGraph::new(channels);
        // Record the well-known node ids before the graph moves to the
        // render thread.
        let dest_node = graph.dest_id();
        let listener = graph.listener_id();

        // init channel reports whether the render thread (and its sink)
        // came up successfully.
        let (init_sender, init_receiver) = mpsc::channel();
        Builder::new()
            .name("AudioRenderThread".to_owned())
            .spawn(move || {
                AudioRenderThread::start::<B>(
                    receiver,
                    sender_,
                    sample_rate,
                    graph,
                    options,
                    init_sender,
                )
            })
            .expect("Failed to spawn AudioRenderThread");

        // Propagate a sink creation failure to the caller via `?`.
        init_receiver
            .recv()
            .expect("Failed to receive result from AudioRenderThread")?;
        Ok(Self {
            id,
            client_context_id: *client_context_id,
            backend_chan,
            sender,
            state: Cell::new(ProcessingState::Suspended),
            sample_rate,
            dest_node,
            listener,
            make_decoder: Arc::new(|| B::make_decoder()),
        })
    }

    /// Current control-thread view of the processing state.
    pub fn state(&self) -> ProcessingState {
        self.state.get()
    }

    /// Identifier of the graph's destination node.
    pub fn dest_node(&self) -> NodeId {
        self.dest_node
    }

    /// Identifier of the graph's listener node.
    pub fn listener(&self) -> NodeId {
        self.listener
    }

    /// Queries the render thread for the current context time (blocking).
    pub fn current_time(&self) -> f64 {
        let (tx, rx) = mpsc::channel();
        let _ = self.sender.send(AudioRenderThreadMsg::GetCurrentTime(tx));
        rx.recv().unwrap()
    }

    /// Asks the render thread to create a node; returns its id, or `None`
    /// if the render thread is gone.
    pub fn create_node(&self, node_type: AudioNodeInit, ch: ChannelInfo) -> Option<NodeId> {
        let (tx, rx) = mpsc::channel();
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::CreateNode(node_type, tx, ch));
        rx.recv().ok()
    }

    // Resume audio processing.
    make_state_change!(resume, Running, Resume);

    // Suspend audio processing.
    make_state_change!(suspend, Suspended, Suspend);

    // Stop audio processing and close render thread.
    make_state_change!(close, Closed, Close);

    /// Forwards a message to a node owned by the render thread.
    pub fn message_node(&self, id: NodeId, msg: AudioNodeMessage) {
        let _ = self.sender.send(AudioRenderThreadMsg::MessageNode(id, msg));
    }

    /// Connects a node output port to a node input port.
    pub fn connect_ports(&self, from: PortId<OutputPort>, to: PortId<InputPort>) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::ConnectPorts(from, to));
    }

    /// Disconnects every outgoing connection of `node`.
    pub fn disconnect_all_from(&self, node: NodeId) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectAllFrom(node));
    }

    /// Disconnect all outgoing connections from a node's output
    ///
    /// <https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-output>
    pub fn disconnect_output(&self, out: PortId<OutputPort>) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectOutput(out));
    }

    /// Disconnect connections from a node to another node
    ///
    /// <https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationnode>
    pub fn disconnect_between(&self, from: NodeId, to: NodeId) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectBetween(from, to));
    }

    /// Disconnect connections from a node to another node's input
    ///
    /// <https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationparam>
    pub fn disconnect_to(&self, from: NodeId, to: PortId<InputPort>) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectTo(from, to));
    }

    /// Disconnect all outgoing connections from a node's output to another node
    ///
    /// <https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationnode-output>
    pub fn disconnect_output_between(&self, out: PortId<OutputPort>, to: NodeId) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectOutputBetween(out, to));
    }

    /// Disconnect all outgoing connections from a node's output to another node's input
    ///
    /// <https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationnode-output-input>
    pub fn disconnect_output_between_to(&self, out: PortId<OutputPort>, inp: PortId<InputPort>) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectOutputBetweenTo(out, inp));
    }

    /// Asynchronously decodes the audio file data contained in the given
    /// buffer.
    ///
    /// Decoding runs on a dedicated thread; results are reported through
    /// `callbacks`. The decoder resamples to this context's sample rate.
    pub fn decode_audio_data(&self, data: Vec<u8>, callbacks: AudioDecoderCallbacks) {
        let options = AudioDecoderOptions {
            sample_rate: self.sample_rate,
        };
        let make_decoder = self.make_decoder.clone();
        Builder::new()
            .name("AudioDecoder".to_owned())
            .spawn(move || {
                let audio_decoder = make_decoder();

                audio_decoder.decode(data, callbacks, Some(options));
            })
            .unwrap();
    }

    /// Registers a callback fired when the sink reaches end-of-stream.
    pub fn set_eos_callback(&self, callback: SinkEosCallback) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::SetSinkEosCallback(callback));
    }

    // Internal helper backing `MediaInstance::mute`.
    fn set_mute(&self, val: bool) {
        let _ = self.sender.send(AudioRenderThreadMsg::SetMute(val));
    }
}
}
|
||||
|
||||
impl Drop for AudioContext {
    /// Closes the render thread and unregisters this instance from the
    /// owning backend, blocking until the backend acknowledges.
    fn drop(&mut self) {
        // The reply receiver is dropped immediately: we only need the render
        // thread to observe Close, not to hear back from it.
        let (tx, _) = mpsc::channel();
        let _ = self.sender.send(AudioRenderThreadMsg::Close(tx));

        // Ask the backend to unregister this instance and wait for ACK
        let (tx_ack, rx_ack) = mpsc::channel();
        let _ = self
            .backend_chan
            .lock()
            .unwrap()
            .send(BackendMsg::Shutdown {
                context: self.client_context_id,
                id: self.id,
                tx_ack,
            });
        let _ = rx_ack.recv();
    }
}
|
||||
|
||||
impl MediaInstance for AudioContext {
    fn get_id(&self) -> usize {
        self.id
    }

    /// Mutes/unmutes the context's output (fire-and-forget; always succeeds).
    fn mute(&self, val: bool) -> Result<(), MediaInstanceError> {
        self.set_mute(val);
        Ok(())
    }

    /// Suspends rendering without updating the control-thread `state` cell;
    /// errors only if the render thread channel is closed.
    fn suspend(&self) -> Result<(), MediaInstanceError> {
        // Reply receiver is intentionally dropped; the send itself is the action.
        let (tx, _) = mpsc::channel();
        self.sender
            .send(AudioRenderThreadMsg::Suspend(tx))
            .map_err(|_| MediaInstanceError)
    }

    /// Resumes rendering; same fire-and-forget contract as `suspend`.
    fn resume(&self) -> Result<(), MediaInstanceError> {
        let (tx, _) = mpsc::channel();
        self.sender
            .send(AudioRenderThreadMsg::Resume(tx))
            .map_err(|_| MediaInstanceError)
    }
}
|
||||
128
components/media/audio/decoder.rs
Normal file
128
components/media/audio/decoder.rs
Normal file
@@ -0,0 +1,128 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::sync::Mutex;
|
||||
|
||||
/// Reasons audio decoding can fail, reported via `AudioDecoderCallbacks::error`.
#[derive(Debug, PartialEq)]
pub enum AudioDecoderError {
    /// Backend specific error.
    Backend(String),
    /// Could not read the audio buffer content.
    BufferReadFailed,
    /// The media trying to be decoded has an invalid format.
    InvalidMediaFormat,
    /// An invalid sample was found while decoding the audio.
    InvalidSample,
    /// Could not move to a different state.
    StateChangeFailed,
}
|
||||
|
||||
// Invoked at most once, when decoding reaches end-of-stream.
type AudioDecoderEosCallback = Box<dyn FnOnce() + Send + 'static>;
// Invoked at most once, with the failure reason.
type AudioDecoderErrorCallback = Box<dyn FnOnce(AudioDecoderError) + Send + 'static>;
// Invoked repeatedly with (decoded samples, channel number) as data arrives.
type AudioDecoderProgressCallback = Box<dyn Fn(Box<dyn AsRef<[f32]>>, u32) + Send + Sync + 'static>;
// Invoked at most once, with the stream's channel count.
type AudioDecoderReadyCallback = Box<dyn FnOnce(u32) + Send + 'static>;
|
||||
|
||||
/// Set of callbacks through which an `AudioDecoder` reports results.
///
/// One-shot callbacks (`eos`, `error`, `ready`) are stored behind a `Mutex`
/// so they can be `take()`n and consumed through `&self` from the decoder
/// thread; `progress` may fire many times and so is a plain `Fn`.
pub struct AudioDecoderCallbacks {
    pub eos: Mutex<Option<AudioDecoderEosCallback>>,
    pub error: Mutex<Option<AudioDecoderErrorCallback>>,
    pub progress: Option<AudioDecoderProgressCallback>,
    pub ready: Mutex<Option<AudioDecoderReadyCallback>>,
}
|
||||
|
||||
impl AudioDecoderCallbacks {
|
||||
pub fn eos(&self) {
|
||||
if let Some(callback) = self.eos.lock().unwrap().take() {
|
||||
callback();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn error(&self, error: AudioDecoderError) {
|
||||
if let Some(callback) = self.error.lock().unwrap().take() {
|
||||
callback(error);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn progress(&self, buffer: Box<dyn AsRef<[f32]>>, channel: u32) {
|
||||
if let Some(callback) = self.progress.as_ref() {
|
||||
callback(buffer, channel);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn ready(&self, channels: u32) {
|
||||
if let Some(callback) = self.ready.lock().unwrap().take() {
|
||||
callback(channels);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Fluent builder for `AudioDecoderCallbacks`; every callback is optional.
#[derive(Default)]
pub struct AudioDecoderCallbacksBuilder {
    eos: Option<AudioDecoderEosCallback>,
    error: Option<AudioDecoderErrorCallback>,
    progress: Option<AudioDecoderProgressCallback>,
    ready: Option<AudioDecoderReadyCallback>,
}
|
||||
|
||||
impl AudioDecoderCallbacksBuilder {
|
||||
pub fn eos<F: FnOnce() + Send + 'static>(self, eos: F) -> Self {
|
||||
Self {
|
||||
eos: Some(Box::new(eos)),
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
||||
pub fn error<F: FnOnce(AudioDecoderError) + Send + 'static>(self, error: F) -> Self {
|
||||
Self {
|
||||
error: Some(Box::new(error)),
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
||||
pub fn progress<F: Fn(Box<dyn AsRef<[f32]>>, u32) + Send + Sync + 'static>(
|
||||
self,
|
||||
progress: F,
|
||||
) -> Self {
|
||||
Self {
|
||||
progress: Some(Box::new(progress)),
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
||||
pub fn ready<F: FnOnce(u32) + Send + 'static>(self, ready: F) -> Self {
|
||||
Self {
|
||||
ready: Some(Box::new(ready)),
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build(self) -> AudioDecoderCallbacks {
|
||||
AudioDecoderCallbacks {
|
||||
eos: Mutex::new(self.eos),
|
||||
error: Mutex::new(self.error),
|
||||
progress: self.progress,
|
||||
ready: Mutex::new(self.ready),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Options controlling audio decoding.
pub struct AudioDecoderOptions {
    /// Target sample rate of the decoded output, in Hz.
    pub sample_rate: f32,
}

impl Default for AudioDecoderOptions {
    /// Defaults to 44.1 kHz, the CD-audio standard rate.
    fn default() -> Self {
        Self {
            sample_rate: 44100.,
        }
    }
}
|
||||
|
||||
/// A backend-provided audio decoder.
pub trait AudioDecoder {
    /// Decode the encoded media bytes in `data`, reporting results and
    /// failures through `callbacks`. `options` may override decoding
    /// defaults (e.g. the output sample rate); `None` uses the defaults.
    fn decode(
        &self,
        data: Vec<u8>,
        callbacks: AudioDecoderCallbacks,
        options: Option<AudioDecoderOptions>,
    );
}
|
||||
==== new file: components/media/audio/destination_node.rs (44 lines) ====
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use crate::block::Chunk;
|
||||
use crate::node::{AudioNodeEngine, AudioNodeType, BlockInfo, ChannelCountMode, ChannelInfo};
|
||||
|
||||
#[derive(AudioNodeCommon)]
|
||||
pub(crate) struct DestinationNode {
|
||||
channel_info: ChannelInfo,
|
||||
chunk: Option<Chunk>,
|
||||
}
|
||||
|
||||
impl DestinationNode {
|
||||
pub fn new(channel_count: u8) -> Self {
|
||||
DestinationNode {
|
||||
channel_info: ChannelInfo {
|
||||
mode: ChannelCountMode::Explicit,
|
||||
count: channel_count,
|
||||
..Default::default()
|
||||
},
|
||||
chunk: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioNodeEngine for DestinationNode {
|
||||
fn node_type(&self) -> AudioNodeType {
|
||||
AudioNodeType::DestinationNode
|
||||
}
|
||||
|
||||
fn process(&mut self, inputs: Chunk, _: &BlockInfo) -> Chunk {
|
||||
self.chunk = Some(inputs);
|
||||
Chunk::default()
|
||||
}
|
||||
|
||||
fn destination_data(&mut self) -> Option<Chunk> {
|
||||
self.chunk.take()
|
||||
}
|
||||
|
||||
fn output_count(&self) -> u32 {
|
||||
0
|
||||
}
|
||||
}
|
||||
==== new file: components/media/audio/gain_node.rs (71 lines) ====
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use crate::block::{Chunk, Tick};
|
||||
use crate::node::{AudioNodeEngine, AudioNodeType, BlockInfo, ChannelInfo};
|
||||
use crate::param::{Param, ParamType};
|
||||
|
||||
/// Options for constructing a gain node.
#[derive(Copy, Clone, Debug)]
pub struct GainNodeOptions {
    /// Multiplier applied to every sample.
    pub gain: f32,
}

impl Default for GainNodeOptions {
    /// Unity gain: audio passes through unchanged.
    fn default() -> Self {
        Self { gain: 1. }
    }
}
|
||||
|
||||
#[derive(AudioNodeCommon)]
|
||||
pub(crate) struct GainNode {
|
||||
channel_info: ChannelInfo,
|
||||
gain: Param,
|
||||
}
|
||||
|
||||
impl GainNode {
|
||||
pub fn new(options: GainNodeOptions, channel_info: ChannelInfo) -> Self {
|
||||
Self {
|
||||
channel_info,
|
||||
gain: Param::new(options.gain),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn update_parameters(&mut self, info: &BlockInfo, tick: Tick) -> bool {
|
||||
self.gain.update(info, tick)
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioNodeEngine for GainNode {
    fn node_type(&self) -> AudioNodeType {
        AudioNodeType::GainNode
    }

    /// Multiply every sample of the single input block by the current gain
    /// value, tracking per-frame automation of the gain param.
    fn process(&mut self, mut inputs: Chunk, info: &BlockInfo) -> Chunk {
        debug_assert!(inputs.len() == 1);

        // Silence scaled by anything is still silence; skip the work.
        if inputs.blocks[0].is_silence() {
            return inputs;
        }

        // Inner scope bounds the mutable borrow of the block so `inputs`
        // can be returned afterwards.
        {
            let mut iter = inputs.blocks[0].iter();
            let mut gain = self.gain.value();

            while let Some(mut frame) = iter.next() {
                // Only re-read the param when automation changed it.
                if self.update_parameters(info, frame.tick()) {
                    gain = self.gain.value();
                }
                frame.mutate_with(|sample, _| *sample *= gain);
            }
        }
        inputs
    }

    fn get_param(&mut self, id: ParamType) -> &mut Param {
        match id {
            ParamType::Gain => &mut self.gain,
            // GainNode only exposes the `gain` param; anything else is a
            // caller bug.
            _ => panic!("Unknown param {:?} for GainNode", id),
        }
    }
}
|
||||
==== new file: components/media/audio/graph.rs (535 lines) ====
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::cell::{RefCell, RefMut};
|
||||
use std::{cmp, fmt, hash};
|
||||
|
||||
use petgraph::Direction;
|
||||
use petgraph::graph::DefaultIx;
|
||||
use petgraph::stable_graph::{NodeIndex, StableGraph};
|
||||
use petgraph::visit::{DfsPostOrder, EdgeRef, Reversed};
|
||||
use smallvec::SmallVec;
|
||||
|
||||
use crate::block::{Block, Chunk};
|
||||
use crate::destination_node::DestinationNode;
|
||||
use crate::listener::AudioListenerNode;
|
||||
use crate::node::{AudioNodeEngine, BlockInfo, ChannelCountMode, ChannelInterpretation};
|
||||
use crate::param::ParamType;
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash, Debug)]
/// A unique identifier for nodes in the graph. Stable
/// under graph mutation.
pub struct NodeId(NodeIndex<DefaultIx>);

impl NodeId {
    /// Address input port `port` of this node.
    pub fn input(self, port: u32) -> PortId<InputPort> {
        PortId(self, PortIndex::Port(port))
    }
    /// Address the AudioParam `param` of this node (params are modeled as
    /// extra input ports).
    pub fn param(self, param: ParamType) -> PortId<InputPort> {
        PortId(self, PortIndex::Param(param))
    }
    /// Address output port `port` of this node.
    pub fn output(self, port: u32) -> PortId<OutputPort> {
        PortId(self, PortIndex::Port(port))
    }
    /// Address the hidden input used for the implicit listener-to-params
    /// connection (see `PortIndex::Listener`).
    pub(crate) fn listener(self) -> PortId<InputPort> {
        PortId(self, PortIndex::Listener(()))
    }
}
|
||||
|
||||
/// A zero-indexed "port" for a node. Most nodes have one
/// input and one output port, but some may have more.
/// For example, a channel splitter node will have one output
/// port for each channel.
///
/// These are essentially indices into the Chunks
///
/// Kind is a zero sized type and is useful for distinguishing
/// between input and output ports (which may otherwise share indices)
#[derive(Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash, Debug)]
pub enum PortIndex<Kind: PortKind> {
    /// An ordinary numbered port.
    Port(u32),
    /// An AudioParam addressed as a port (only inhabited on the input side).
    Param(Kind::ParamId),
    /// special variant only used for the implicit connection
    /// from listeners to params
    Listener(Kind::Listener),
}

impl<Kind: PortKind> PortId<Kind> {
    /// The node this port belongs to.
    pub fn node(&self) -> NodeId {
        self.0
    }
}

/// Distinguishes the input side from the output side at the type level;
/// the associated types select which `PortIndex` variants are inhabited
/// for each side.
pub trait PortKind {
    type ParamId: Copy + Eq + PartialEq + Ord + PartialOrd + hash::Hash + fmt::Debug;
    type Listener: Copy + Eq + PartialEq + Ord + PartialOrd + hash::Hash + fmt::Debug;
}

/// An identifier for a port.
#[derive(Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash, Debug)]
pub struct PortId<Kind: PortKind>(NodeId, PortIndex<Kind>);
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug)]
/// Marker type for denoting that the port is an input port
/// of the node it is connected to
pub struct InputPort;
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug)]
/// Marker type for denoting that the port is an output port
/// of the node it is connected to
pub struct OutputPort;

impl PortKind for InputPort {
    // Inputs may be addressed by AudioParam and may carry the implicit
    // listener connection (whose payload is just `()`).
    type ParamId = ParamType;
    type Listener = ();
}

/// An uninhabited type: no value of it can ever be constructed.
#[derive(Debug, Hash, PartialOrd, Ord, PartialEq, Eq, Copy, Clone)]
pub enum Void {}

impl PortKind for OutputPort {
    // Params are only a feature of input ports. By using an empty type here
    // we ensure that the PortIndex enum has zero overhead for outputs,
    // taking up no extra discriminant space and eliminating PortIndex::Param
    // branches entirely from the compiled code
    type ParamId = Void;
    type Listener = Void;
}
|
||||
|
||||
/// The graph of audio processing nodes for one audio context.
pub struct AudioGraph {
    /// The underlying petgraph graph. `StableGraph` keeps node indices
    /// valid across removals, which `NodeId` relies on.
    graph: StableGraph<Node, Edge>,
    /// The primary destination (output) node.
    dest_id: NodeId,
    /// Every terminator node processing must reach: `dest_id` plus any
    /// extras registered via `add_extra_dest`.
    dests: Vec<NodeId>,
    /// The graph's single AudioListener.
    listener_id: NodeId,
}

/// A vertex of the graph: an audio node engine in a `RefCell` so it can
/// be mutated during traversal while the graph itself is only shared-borrowed.
pub(crate) struct Node {
    node: RefCell<Box<dyn AudioNodeEngine>>,
}

/// An edge in the graph
///
/// This connects one or more pair of ports between two
/// nodes, each connection represented by a `Connection`.
/// WebAudio allows for multiple connections to/from the same port
/// however it does not allow for duplicate connections between pairs
/// of ports
pub(crate) struct Edge {
    connections: SmallVec<[Connection; 1]>,
}
|
||||
|
||||
impl Edge {
|
||||
/// Find if there are connections between two given ports, return the index
|
||||
fn has_between(
|
||||
&self,
|
||||
output_idx: PortIndex<OutputPort>,
|
||||
input_idx: PortIndex<InputPort>,
|
||||
) -> bool {
|
||||
self.connections
|
||||
.iter()
|
||||
.any(|e| e.input_idx == input_idx && e.output_idx == output_idx)
|
||||
}
|
||||
|
||||
fn remove_by_output(&mut self, output_idx: PortIndex<OutputPort>) {
|
||||
self.connections.retain(|i| i.output_idx != output_idx)
|
||||
}
|
||||
|
||||
fn remove_by_input(&mut self, input_idx: PortIndex<InputPort>) {
|
||||
self.connections.retain(|i| i.input_idx != input_idx)
|
||||
}
|
||||
|
||||
fn remove_by_pair(
|
||||
&mut self,
|
||||
output_idx: PortIndex<OutputPort>,
|
||||
input_idx: PortIndex<InputPort>,
|
||||
) {
|
||||
self.connections
|
||||
.retain(|i| i.output_idx != output_idx || i.input_idx != input_idx)
|
||||
}
|
||||
}
|
||||
|
||||
/// A single connection between ports
struct Connection {
    /// The index of the port on the input node
    /// This is actually the /output/ of this edge
    input_idx: PortIndex<InputPort>,
    /// The index of the port on the output node
    /// This is actually the /input/ of this edge
    output_idx: PortIndex<OutputPort>,
    /// When the from node finishes processing, it will push
    /// its data into this cache for the input node to read
    /// (`RefCell` because edges are reached through shared borrows
    /// during traversal)
    cache: RefCell<Option<Block>>,
}
|
||||
|
||||
impl AudioGraph {
    /// Build a graph pre-populated with its destination node (using
    /// `channel_count` output channels) and its AudioListener.
    pub fn new(channel_count: u8) -> Self {
        let mut graph = StableGraph::new();
        let dest_id =
            NodeId(graph.add_node(Node::new(Box::new(DestinationNode::new(channel_count)))));
        let listener_id = NodeId(graph.add_node(Node::new(Box::new(AudioListenerNode::new()))));
        AudioGraph {
            graph,
            dest_id,
            dests: vec![dest_id],
            listener_id,
        }
    }

    /// Create a node, obtain its id
    pub(crate) fn add_node(&mut self, node: Box<dyn AudioNodeEngine>) -> NodeId {
        NodeId(self.graph.add_node(Node::new(node)))
    }

    /// Connect an output port to an input port
    ///
    /// The edge goes *from* the output port *to* the input port, connecting two nodes
    pub fn add_edge(&mut self, out: PortId<OutputPort>, inp: PortId<InputPort>) {
        // Reuse an existing edge between the two nodes if there is one;
        // duplicate port-pair connections are silently ignored.
        let edge = self
            .graph
            .edges(out.node().0)
            .find(|e| e.target() == inp.node().0)
            .map(|e| e.id());
        if let Some(e) = edge {
            // .find(|e| e.weight().has_between(out.1, inp.1));
            let w = self
                .graph
                .edge_weight_mut(e)
                .expect("This edge is known to exist");
            if w.has_between(out.1, inp.1) {
                return;
            }
            w.connections.push(Connection::new(inp.1, out.1))
        } else {
            // add a new edge
            self.graph
                .add_edge(out.node().0, inp.node().0, Edge::new(inp.1, out.1));
        }
    }

    /// Disconnect all outgoing connections from a node
    ///
    /// <https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect>
    pub fn disconnect_all_from(&mut self, node: NodeId) {
        // Collect first: edge removal invalidates the iterator.
        let edges = self.graph.edges(node.0).map(|e| e.id()).collect::<Vec<_>>();
        for edge in edges {
            self.graph.remove_edge(edge);
        }
    }

    /// Disconnect all outgoing connections from a node's output
    ///
    /// <https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-output>
    pub fn disconnect_output(&mut self, out: PortId<OutputPort>) {
        let candidates: Vec<_> = self
            .graph
            .edges(out.node().0)
            .map(|e| (e.id(), e.target()))
            .collect();
        // Remove each edge, strip the matching connections, and re-add
        // the edge if other port pairs still use it.
        for (edge, to) in candidates {
            let mut e = self
                .graph
                .remove_edge(edge)
                .expect("Edge index is known to exist");
            e.remove_by_output(out.1);
            if !e.connections.is_empty() {
                self.graph.add_edge(out.node().0, to, e);
            }
        }
    }

    /// Disconnect connections from a node to another node
    ///
    /// <https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationnode>
    pub fn disconnect_between(&mut self, from: NodeId, to: NodeId) {
        let edge = self
            .graph
            .edges(from.0)
            .find(|e| e.target() == to.0)
            .map(|e| e.id());
        if let Some(i) = edge {
            self.graph.remove_edge(i);
        }
    }

    /// Disconnect all outgoing connections from a node's output to another node
    ///
    /// <https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationnode-output>
    pub fn disconnect_output_between(&mut self, out: PortId<OutputPort>, to: NodeId) {
        let edge = self
            .graph
            .edges(out.node().0)
            .find(|e| e.target() == to.0)
            .map(|e| e.id());
        if let Some(edge) = edge {
            let mut e = self
                .graph
                .remove_edge(edge)
                .expect("Edge index is known to exist");
            e.remove_by_output(out.1);
            // Re-add the edge if connections from other ports remain.
            if !e.connections.is_empty() {
                self.graph.add_edge(out.node().0, to.0, e);
            }
        }
    }

    /// Disconnect all outgoing connections from a node to another node's input
    ///
    /// Only used in WebAudio for disconnecting audio params
    ///
    /// <https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationparam>
    pub fn disconnect_to(&mut self, node: NodeId, inp: PortId<InputPort>) {
        let edge = self
            .graph
            .edges(node.0)
            .find(|e| e.target() == inp.node().0)
            .map(|e| e.id());
        if let Some(edge) = edge {
            let mut e = self
                .graph
                .remove_edge(edge)
                .expect("Edge index is known to exist");
            e.remove_by_input(inp.1);
            if !e.connections.is_empty() {
                self.graph.add_edge(node.0, inp.node().0, e);
            }
        }
    }

    /// Disconnect all outgoing connections from a node's output to another node's input
    ///
    /// <https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationnode-output-input>
    /// <https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationparam-output>
    pub fn disconnect_output_between_to(
        &mut self,
        out: PortId<OutputPort>,
        inp: PortId<InputPort>,
    ) {
        let edge = self
            .graph
            .edges(out.node().0)
            .find(|e| e.target() == inp.node().0)
            .map(|e| e.id());
        if let Some(edge) = edge {
            let mut e = self
                .graph
                .remove_edge(edge)
                .expect("Edge index is known to exist");
            e.remove_by_pair(out.1, inp.1);
            if !e.connections.is_empty() {
                self.graph.add_edge(out.node().0, inp.node().0, e);
            }
        }
    }

    /// Get the id of the destination node in this graph
    ///
    /// All graphs have a destination node, with one input port
    pub fn dest_id(&self) -> NodeId {
        self.dest_id
    }

    /// Add additional terminator nodes
    pub fn add_extra_dest(&mut self, dest: NodeId) {
        self.dests.push(dest);
    }

    /// Get the id of the AudioListener in this graph
    ///
    /// All graphs have a single listener, with no ports (but nine AudioParams)
    ///
    /// N.B. The listener actually has a single output port containing
    /// its position data for the block, however this should
    /// not be exposed to the DOM.
    pub fn listener_id(&self) -> NodeId {
        self.listener_id
    }

    /// For a given block, process all the data on this graph
    pub fn process(&mut self, info: &BlockInfo) -> Chunk {
        // DFS post order: Children are processed before their parent,
        // which is exactly what we need since the parent depends on the
        // children's output
        //
        // This will only visit each node once
        let reversed = Reversed(&self.graph);

        // Scratch space reused across nodes: per-input-port block lists,
        // and per-output-port consumer counts.
        let mut blocks: SmallVec<[SmallVec<[Block; 1]>; 1]> = SmallVec::new();
        let mut output_counts: SmallVec<[u32; 1]> = SmallVec::new();

        let mut visit = DfsPostOrder::empty(reversed);

        // Walk backwards from every terminator so all reachable nodes run.
        for dest in &self.dests {
            visit.move_to(dest.0);

            while let Some(ix) = visit.next(reversed) {
                let mut curr = self.graph[ix].node.borrow_mut();

                let mut chunk = Chunk::default();
                chunk
                    .blocks
                    .resize(curr.input_count() as usize, Default::default());

                // if we have inputs, collect all the computed blocks
                // and construct a Chunk

                // set up scratch space to store all the blocks
                blocks.clear();
                blocks.resize(curr.input_count() as usize, Default::default());

                let mode = curr.channel_count_mode();
                let count = curr.channel_count();
                let interpretation = curr.channel_interpretation();

                // all edges to this node are from its dependencies
                for edge in self.graph.edges_directed(ix, Direction::Incoming) {
                    let edge = edge.weight();
                    for connection in &edge.connections {
                        // Post-order guarantees the producer already ran
                        // and filled the cache.
                        let mut block = connection
                            .cache
                            .borrow_mut()
                            .take()
                            .expect("Cache should have been filled from traversal");

                        match connection.input_idx {
                            PortIndex::Port(idx) => {
                                blocks[idx as usize].push(block);
                            },
                            PortIndex::Param(param) => {
                                // param inputs are downmixed to mono
                                // https://webaudio.github.io/web-audio-api/#dom-audionode-connect-destinationparam-output
                                block.mix(1, ChannelInterpretation::Speakers);
                                curr.get_param(param).add_block(block)
                            },
                            PortIndex::Listener(_) => curr.set_listenerdata(block),
                        }
                    }
                }

                // Mix each input port's collected blocks down to a single
                // block according to the node's channel-count mode.
                // (The loop variable shadows the outer scratch vec.)
                for (i, mut blocks) in blocks.drain(..).enumerate() {
                    if blocks.is_empty() {
                        if mode == ChannelCountMode::Explicit {
                            // It's silence, but mix it anyway
                            chunk.blocks[i].mix(count, interpretation);
                        }
                    } else if blocks.len() == 1 {
                        chunk.blocks[i] = blocks.pop().expect("`blocks` had length 1");
                        match mode {
                            ChannelCountMode::Explicit => {
                                chunk.blocks[i].mix(count, interpretation);
                            },
                            ChannelCountMode::ClampedMax => {
                                if chunk.blocks[i].chan_count() > count {
                                    chunk.blocks[i].mix(count, interpretation);
                                }
                            },
                            // It's one channel, it maxes itself
                            ChannelCountMode::Max => (),
                        }
                    } else {
                        let mix_count = match mode {
                            ChannelCountMode::Explicit => count,
                            _ => {
                                let mut max = 0; // max channel count
                                for block in &blocks {
                                    max = cmp::max(max, block.chan_count());
                                }
                                if mode == ChannelCountMode::ClampedMax {
                                    max = cmp::min(max, count);
                                }
                                max
                            },
                        };
                        // Up/down-mix every contributing block to the same
                        // channel count, then sum them.
                        let block = blocks.into_iter().fold(Block::default(), |acc, mut block| {
                            block.mix(mix_count, interpretation);
                            acc.sum(block)
                        });
                        chunk.blocks[i] = block;
                    }
                }

                // actually run the node engine
                let mut out = curr.process(chunk, info);

                assert_eq!(out.len(), curr.output_count() as usize);
                if curr.output_count() == 0 {
                    continue;
                }

                // Count how many output connections fan out from each port
                // This is so that we don't have to needlessly clone audio buffers
                //
                // If this is inefficient, we can instead maintain this data
                // cached on the node
                output_counts.clear();
                output_counts.resize(curr.output_count() as usize, 0);
                for edge in self.graph.edges(ix) {
                    let edge = edge.weight();
                    for conn in &edge.connections {
                        if let PortIndex::Port(idx) = conn.output_idx {
                            output_counts[idx as usize] += 1;
                        } else {
                            // Output ports are always PortIndex::Port: the
                            // Param/Listener variants are uninhabited (Void).
                            unreachable!()
                        }
                    }
                }

                // all the edges from this node go to nodes which depend on it,
                // i.e. the nodes it outputs to. Store the blocks for retrieval.
                for edge in self.graph.edges(ix) {
                    let edge = edge.weight();
                    for conn in &edge.connections {
                        if let PortIndex::Port(idx) = conn.output_idx {
                            output_counts[idx as usize] -= 1;
                            // if there are no consumers left after this, take the data
                            let block = if output_counts[idx as usize] == 0 {
                                out[conn.output_idx].take()
                            } else {
                                out[conn.output_idx].clone()
                            };
                            *conn.cache.borrow_mut() = Some(block);
                        } else {
                            unreachable!()
                        }
                    }
                }
            }
        }
        // The destination node stores its output on itself, extract it.
        self.graph[self.dest_id.0]
            .node
            .borrow_mut()
            .destination_data()
            .expect("Destination node should have data cached")
    }

    /// Obtain a mutable reference to a node
    pub(crate) fn node_mut(&self, ix: NodeId) -> RefMut<'_, Box<dyn AudioNodeEngine>> {
        self.graph[ix.0].node.borrow_mut()
    }
}
|
||||
|
||||
impl Node {
|
||||
pub fn new(node: Box<dyn AudioNodeEngine>) -> Self {
|
||||
Node {
|
||||
node: RefCell::new(node),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Edge {
|
||||
pub fn new(input_idx: PortIndex<InputPort>, output_idx: PortIndex<OutputPort>) -> Self {
|
||||
Edge {
|
||||
connections: SmallVec::from_buf([Connection::new(input_idx, output_idx)]),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Connection {
|
||||
pub fn new(input_idx: PortIndex<InputPort>, output_idx: PortIndex<OutputPort>) -> Self {
|
||||
Connection {
|
||||
input_idx,
|
||||
output_idx,
|
||||
cache: RefCell::new(None),
|
||||
}
|
||||
}
|
||||
}
|
||||
==== new file: components/media/audio/iir_filter_node.rs (188 lines) ====
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::collections::VecDeque;
|
||||
use std::sync::Arc;
|
||||
|
||||
use log::warn;
|
||||
use num_complex::Complex64;
|
||||
|
||||
use crate::block::Chunk;
|
||||
use crate::node::{AudioNodeEngine, AudioNodeType, BlockInfo, ChannelInfo};
|
||||
|
||||
/// Maximum number of feedforward/feedback coefficients supported; this
/// also caps the input/output history length kept by each filter.
const MAX_COEFFS: usize = 20;

/// Coefficients for constructing an IIR filter node. Shared via `Arc` so
/// each per-channel filter clone references them without copying.
#[derive(Debug)]
pub struct IIRFilterNodeOptions {
    /// Feedforward (numerator) coefficients.
    pub feedforward: Arc<Vec<f64>>,
    /// Feedback (denominator) coefficients.
    pub feedback: Arc<Vec<f64>>,
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct IIRFilter {
|
||||
feedforward: Arc<Vec<f64>>,
|
||||
feedback: Arc<Vec<f64>>,
|
||||
inputs: VecDeque<f64>,
|
||||
outputs: VecDeque<f64>,
|
||||
}
|
||||
|
||||
impl IIRFilter {
|
||||
fn new(feedforward: Arc<Vec<f64>>, feedback: Arc<Vec<f64>>) -> Self {
|
||||
Self {
|
||||
feedforward,
|
||||
feedback,
|
||||
inputs: VecDeque::with_capacity(MAX_COEFFS),
|
||||
outputs: VecDeque::with_capacity(MAX_COEFFS),
|
||||
}
|
||||
}
|
||||
|
||||
fn calculate_output(&mut self, input: f32) -> f32 {
|
||||
self.inputs.push_front(input as f64);
|
||||
|
||||
if self.inputs.len() > MAX_COEFFS {
|
||||
self.inputs.pop_back();
|
||||
}
|
||||
|
||||
let inputs_sum = self
|
||||
.feedforward
|
||||
.iter()
|
||||
.zip(self.inputs.iter())
|
||||
.fold(0.0, |acc, (c, v)| acc + c * v);
|
||||
|
||||
let outputs_sum = self
|
||||
.feedback
|
||||
.iter()
|
||||
.skip(1)
|
||||
.zip(self.outputs.iter())
|
||||
.fold(0.0, |acc, (c, v)| acc + c * v);
|
||||
|
||||
let output = (inputs_sum - outputs_sum) / self.feedback[0];
|
||||
|
||||
if output.is_nan() {
|
||||
// Per spec:
|
||||
// Note: The UA may produce a warning to notify the user that NaN values have occurred in the filter state.
|
||||
// This is usually indicative of an unstable filter.
|
||||
//
|
||||
// But idk how to produce warnings
|
||||
warn!("NaN in IIRFilter state");
|
||||
}
|
||||
|
||||
self.outputs.push_front(output);
|
||||
|
||||
if self.outputs.len() > MAX_COEFFS {
|
||||
self.outputs.pop_back();
|
||||
}
|
||||
|
||||
output as f32
|
||||
}
|
||||
}
|
||||
|
||||
/// An IIR (infinite impulse response) filter audio node.
#[derive(AudioNodeCommon)]
pub struct IIRFilterNode {
    channel_info: ChannelInfo,
    /// One independent filter (with its own sample history) per channel.
    filters: Vec<IIRFilter>,
}

impl IIRFilterNode {
    /// Build the node, validating the coefficient lists.
    ///
    /// NOTE(review): validation uses `debug_assert!`, so in release builds
    /// invalid coefficients are accepted silently; the error names in the
    /// messages mirror the DOM exceptions callers are expected to raise.
    pub fn new(options: IIRFilterNodeOptions, channel_info: ChannelInfo) -> Self {
        debug_assert!(
            !options.feedforward.is_empty(),
            "NotSupportedError: feedforward must have at least one coeff"
        );

        debug_assert!(
            options.feedforward.len() <= MAX_COEFFS,
            "NotSupportedError: feedforward max length is {}",
            MAX_COEFFS
        );

        debug_assert!(
            options.feedforward.iter().any(|&v| v != 0.0_f64),
            "InvalidStateError: all coeffs are zero"
        );

        debug_assert!(
            !options.feedback.is_empty(),
            "NotSupportedError: feedback must have at least one coeff"
        );

        debug_assert!(
            options.feedback.len() <= MAX_COEFFS,
            "NotSupportedError: feedback max length is {}",
            MAX_COEFFS
        );

        debug_assert!(
            options.feedback[0] != 0.0,
            "InvalidStateError: first feedback coeff must not be zero"
        );

        // One template filter, cloned per channel: each clone gets its own
        // (empty) history while sharing the coefficient vectors via Arc.
        let filter = IIRFilter::new(options.feedforward.clone(), options.feedback.clone());

        Self {
            filters: vec![filter; channel_info.computed_number_of_channels() as usize],
            channel_info,
        }
    }

    /// Compute the filter's frequency response at each frequency in
    /// `frequency_hz`, writing magnitude and phase into the parallel
    /// output slices. Frequencies outside [0.0, 1.0) produce NaN.
    pub fn get_frequency_response(
        feedforward: &[f64],
        feedback: &[f64],
        frequency_hz: &[f32],
        mag_response: &mut [f32],
        phase_response: &mut [f32],
    ) {
        debug_assert!(
            frequency_hz.len() == mag_response.len() && frequency_hz.len() == phase_response.len(),
            "get_frequency_response params are of different length"
        );

        frequency_hz.iter().enumerate().for_each(|(idx, &f)| {
            if !(0.0..1.0).contains(&f) {
                mag_response[idx] = f32::NAN;
                phase_response[idx] = f32::NAN;
            } else {
                // Evaluate H = B(z)/A(z) on the unit circle at z = e^(-i*pi*f).
                let f = (-f as f64) * std::f64::consts::PI;
                let z = Complex64::new(f64::cos(f), f64::sin(f));
                let numerator = Self::sum(feedforward, z);
                let denominator = Self::sum(feedback, z);

                let response = numerator / denominator;
                mag_response[idx] = response.norm() as f32;
                phase_response[idx] = response.arg() as f32;
            }
        });
    }

    /// Horner-style evaluation of the coefficient polynomial at `z`.
    fn sum(coeffs: &[f64], z: Complex64) -> Complex64 {
        coeffs.iter().fold(Complex64::new(0.0, 0.0), |acc, &coeff| {
            acc * z + Complex64::new(coeff, 0.0)
        })
    }
}
|
||||
|
||||
impl AudioNodeEngine for IIRFilterNode {
    fn node_type(&self) -> AudioNodeType {
        AudioNodeType::IIRFilterNode
    }

    /// Run each channel of the single input block through its filter.
    fn process(&mut self, inputs: Chunk, _info: &BlockInfo) -> Chunk {
        debug_assert!(inputs.len() == 1);

        // Silence is deliberately NOT short-circuited: it is expanded into
        // an explicit (all-zero) chunk and still pushed through the filters
        // — presumably because an IIR filter holds internal state and can
        // keep producing output after the input goes quiet.
        let mut inputs = if inputs.blocks[0].is_silence() {
            Chunk::explicit_silence()
        } else {
            inputs
        };

        let mut iter = inputs.blocks[0].iter();

        while let Some(mut frame) = iter.next() {
            frame.mutate_with(|sample, chan_idx| {
                // Each channel owns an independent filter history.
                *sample = self.filters[chan_idx as usize].calculate_output(*sample);
            });
        }
        inputs
    }
}
|
||||
==== new file: components/media/audio/lib.rs (66 lines) ====
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
#[macro_use]
|
||||
extern crate serde_derive;
|
||||
|
||||
#[macro_use]
|
||||
extern crate servo_media_derive;
|
||||
|
||||
extern crate servo_media_player as player;
|
||||
|
||||
extern crate byte_slice_cast;
|
||||
extern crate euclid;
|
||||
extern crate log;
|
||||
extern crate num_complex;
|
||||
extern crate num_traits;
|
||||
extern crate petgraph;
|
||||
extern crate smallvec;
|
||||
extern crate speexdsp_resampler;
|
||||
#[macro_use]
|
||||
pub mod macros;
|
||||
extern crate servo_media_streams;
|
||||
extern crate servo_media_traits;
|
||||
|
||||
pub mod analyser_node;
|
||||
pub mod biquad_filter_node;
|
||||
pub mod block;
|
||||
pub mod buffer_source_node;
|
||||
pub mod channel_node;
|
||||
pub mod constant_source_node;
|
||||
pub mod context;
|
||||
pub mod decoder;
|
||||
pub mod destination_node;
|
||||
pub mod gain_node;
|
||||
pub mod graph;
|
||||
pub mod iir_filter_node;
|
||||
pub mod listener;
|
||||
pub mod media_element_source_node;
|
||||
pub mod media_stream_destination_node;
|
||||
pub mod media_stream_source_node;
|
||||
pub mod node;
|
||||
pub mod offline_sink;
|
||||
pub mod oscillator_node;
|
||||
pub mod panner_node;
|
||||
pub mod param;
|
||||
pub mod render_thread;
|
||||
pub mod sink;
|
||||
pub mod stereo_panner;
|
||||
pub mod wave_shaper_node;
|
||||
|
||||
/// The contract an audio backend (e.g. GStreamer) implements to power
/// the audio pipeline.
pub trait AudioBackend {
    /// The backend's audio output sink type.
    type Sink: sink::AudioSink + 'static;
    /// Create a decoder for compressed audio data.
    fn make_decoder() -> Box<dyn decoder::AudioDecoder>;
    /// Create an output sink, or fail with an `AudioSinkError`.
    fn make_sink() -> Result<Self::Sink, sink::AudioSinkError>;
    /// Create a reader that pulls audio blocks out of the media stream
    /// identified by `id`, using the given `sample_rate`.
    fn make_streamreader(
        id: servo_media_streams::MediaStreamId,
        sample_rate: f32,
    ) -> Box<dyn AudioStreamReader + Send>;
}

/// A pull-based source of audio blocks backed by a media stream.
pub trait AudioStreamReader {
    /// Fetch the next block of audio.
    fn pull(&self) -> block::Block;
    /// Begin producing data.
    fn start(&self);
    /// Stop producing data.
    fn stop(&self);
}
|
||||
==== new file: components/media/audio/listener.rs (93 lines) ====
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use crate::block::{Block, Chunk};
|
||||
use crate::node::{AudioNodeEngine, AudioNodeType, BlockInfo, ChannelInfo};
|
||||
use crate::param::{Param, ParamDir, ParamType};
|
||||
|
||||
/// AudioListeners are fake nodes; from the user's point of view they're
|
||||
/// a non-node entity with zero inputs and outputs, but with AudioParams
|
||||
/// that can be manipulated.
|
||||
///
|
||||
/// Internally, PannerNodes all have an implicit PortIndex::Listener connection
|
||||
/// from a hidden output port on AudioListeners that contains all the position data.
|
||||
///
|
||||
/// This encodes the otherwise implicit dependency between AudioListeners and PannerNodes
|
||||
/// so that if there is a cycle involving panner nodes and the audio params on the listener,
|
||||
/// the cycle breaking algorithm can deal with it.
|
||||
#[derive(AudioNodeCommon)]
|
||||
pub(crate) struct AudioListenerNode {
|
||||
channel_info: ChannelInfo,
|
||||
position_x: Param,
|
||||
position_y: Param,
|
||||
position_z: Param,
|
||||
forward_x: Param,
|
||||
forward_y: Param,
|
||||
forward_z: Param,
|
||||
up_x: Param,
|
||||
up_y: Param,
|
||||
up_z: Param,
|
||||
}
|
||||
|
||||
impl AudioListenerNode {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
channel_info: Default::default(),
|
||||
position_x: Param::new(0.),
|
||||
position_y: Param::new(0.),
|
||||
position_z: Param::new(0.),
|
||||
forward_x: Param::new(0.),
|
||||
forward_y: Param::new(0.),
|
||||
forward_z: Param::new(-1.),
|
||||
up_x: Param::new(0.),
|
||||
up_y: Param::new(1.),
|
||||
up_z: Param::new(0.),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioNodeEngine for AudioListenerNode {
|
||||
fn node_type(&self) -> AudioNodeType {
|
||||
AudioNodeType::AudioListenerNode
|
||||
}
|
||||
|
||||
fn process(&mut self, mut inputs: Chunk, info: &BlockInfo) -> Chunk {
|
||||
debug_assert!(inputs.is_empty());
|
||||
|
||||
// XXXManishearth in the common case when all of these are constant,
|
||||
// it would be nice to instead send just the constant values down
|
||||
let mut block = Block::for_channels_explicit(9);
|
||||
self.position_x.flush_to_block(info, block.data_chan_mut(0));
|
||||
self.position_y.flush_to_block(info, block.data_chan_mut(1));
|
||||
self.position_z.flush_to_block(info, block.data_chan_mut(2));
|
||||
self.forward_x.flush_to_block(info, block.data_chan_mut(3));
|
||||
self.forward_y.flush_to_block(info, block.data_chan_mut(4));
|
||||
self.forward_z.flush_to_block(info, block.data_chan_mut(5));
|
||||
self.up_x.flush_to_block(info, block.data_chan_mut(6));
|
||||
self.up_y.flush_to_block(info, block.data_chan_mut(7));
|
||||
self.up_z.flush_to_block(info, block.data_chan_mut(8));
|
||||
|
||||
inputs.blocks.push(block);
|
||||
inputs
|
||||
}
|
||||
|
||||
fn input_count(&self) -> u32 {
|
||||
0
|
||||
}
|
||||
|
||||
fn get_param(&mut self, id: ParamType) -> &mut Param {
|
||||
match id {
|
||||
ParamType::Position(ParamDir::X) => &mut self.position_x,
|
||||
ParamType::Position(ParamDir::Y) => &mut self.position_y,
|
||||
ParamType::Position(ParamDir::Z) => &mut self.position_z,
|
||||
ParamType::Forward(ParamDir::X) => &mut self.forward_x,
|
||||
ParamType::Forward(ParamDir::Y) => &mut self.forward_y,
|
||||
ParamType::Forward(ParamDir::Z) => &mut self.forward_z,
|
||||
ParamType::Up(ParamDir::X) => &mut self.up_x,
|
||||
ParamType::Up(ParamDir::Y) => &mut self.up_y,
|
||||
ParamType::Up(ParamDir::Z) => &mut self.up_z,
|
||||
_ => panic!("Unknown param {:?} for AudioListenerNode", id),
|
||||
}
|
||||
}
|
||||
}
|
||||
44
components/media/audio/macros.rs
Normal file
44
components/media/audio/macros.rs
Normal file
@@ -0,0 +1,44 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! make_message_handler(
|
||||
(
|
||||
$(
|
||||
$node:ident: $handler:ident
|
||||
),+
|
||||
) => (
|
||||
fn message_specific(&mut self, msg: $crate::node::AudioNodeMessage, sample_rate: f32) {
|
||||
match msg {
|
||||
$($crate::node::AudioNodeMessage::$node(m) => self.$handler(m, sample_rate)),+,
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
);
|
||||
);
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! make_state_change(
|
||||
($fn_name:ident, $state:ident, $render_msg:ident) => (
|
||||
pub fn $fn_name(&self) -> StateChangeResult {
|
||||
self.state.set(ProcessingState::$state);
|
||||
let (tx, rx) = mpsc::channel();
|
||||
let _ = self.sender.send(AudioRenderThreadMsg::$render_msg(tx));
|
||||
rx.recv().unwrap()
|
||||
}
|
||||
);
|
||||
);
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! make_render_thread_state_change(
|
||||
($fn_name:ident, $state:ident, $sink_method:ident) => (
|
||||
fn $fn_name(&mut self) -> StateChangeResult {
|
||||
if self.state == ProcessingState::$state {
|
||||
return Some(());
|
||||
}
|
||||
self.state = ProcessingState::$state;
|
||||
self.sink.$sink_method().ok()
|
||||
}
|
||||
);
|
||||
);
|
||||
136
components/media/audio/media_element_source_node.rs
Normal file
136
components/media/audio/media_element_source_node.rs
Normal file
@@ -0,0 +1,136 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::sync::mpsc::Sender;
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use player::audio::AudioRenderer;
|
||||
|
||||
use crate::block::{Block, Chunk, FRAMES_PER_BLOCK};
|
||||
use crate::node::{AudioNodeEngine, AudioNodeType, BlockInfo, ChannelInfo};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum MediaElementSourceNodeMessage {
|
||||
GetAudioRenderer(Sender<Arc<Mutex<dyn AudioRenderer>>>),
|
||||
}
|
||||
|
||||
#[derive(AudioNodeCommon)]
|
||||
pub(crate) struct MediaElementSourceNode {
|
||||
channel_info: ChannelInfo,
|
||||
renderer: Arc<Mutex<dyn AudioRenderer>>,
|
||||
buffers: Arc<Mutex<Vec<Vec<f32>>>>,
|
||||
playback_offset: usize,
|
||||
}
|
||||
|
||||
impl MediaElementSourceNode {
|
||||
pub fn new(channel_info: ChannelInfo) -> Self {
|
||||
let buffers = Arc::new(Mutex::new(Vec::new()));
|
||||
let renderer = Arc::new(Mutex::new(MediaElementSourceNodeRenderer::new(
|
||||
buffers.clone(),
|
||||
)));
|
||||
Self {
|
||||
channel_info,
|
||||
renderer,
|
||||
buffers,
|
||||
playback_offset: 0,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn handle_message(&mut self, message: MediaElementSourceNodeMessage, _: f32) {
|
||||
match message {
|
||||
MediaElementSourceNodeMessage::GetAudioRenderer(sender) => {
|
||||
let _ = sender.send(self.renderer.clone());
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioNodeEngine for MediaElementSourceNode {
|
||||
fn node_type(&self) -> AudioNodeType {
|
||||
AudioNodeType::MediaElementSourceNode
|
||||
}
|
||||
|
||||
fn process(&mut self, mut inputs: Chunk, _info: &BlockInfo) -> Chunk {
|
||||
debug_assert!(inputs.is_empty());
|
||||
|
||||
let buffers = self.buffers.lock().unwrap();
|
||||
let chans = buffers.len() as u8;
|
||||
|
||||
if chans == 0 {
|
||||
inputs.blocks.push(Default::default());
|
||||
return inputs;
|
||||
}
|
||||
|
||||
let len = buffers[0].len();
|
||||
|
||||
let frames_per_block = FRAMES_PER_BLOCK.0 as usize;
|
||||
let samples_to_copy = if self.playback_offset + frames_per_block > len {
|
||||
len - self.playback_offset
|
||||
} else {
|
||||
frames_per_block
|
||||
};
|
||||
let next_offset = self.playback_offset + samples_to_copy;
|
||||
if samples_to_copy == FRAMES_PER_BLOCK.0 as usize {
|
||||
// copy entire chan
|
||||
let mut block = Block::empty();
|
||||
for chan in 0..chans {
|
||||
block.push_chan(&buffers[chan as usize][self.playback_offset..next_offset]);
|
||||
}
|
||||
inputs.blocks.push(block)
|
||||
} else {
|
||||
// silent fill and copy
|
||||
let mut block = Block::default();
|
||||
block.repeat(chans);
|
||||
block.explicit_repeat();
|
||||
for chan in 0..chans {
|
||||
let data = block.data_chan_mut(chan);
|
||||
let (_, data) = data.split_at_mut(0);
|
||||
let (data, _) = data.split_at_mut(samples_to_copy);
|
||||
data.copy_from_slice(&buffers[chan as usize][self.playback_offset..next_offset]);
|
||||
}
|
||||
inputs.blocks.push(block)
|
||||
}
|
||||
|
||||
self.playback_offset = next_offset;
|
||||
|
||||
inputs
|
||||
}
|
||||
|
||||
fn input_count(&self) -> u32 {
|
||||
0
|
||||
}
|
||||
|
||||
make_message_handler!(MediaElementSourceNode: handle_message);
|
||||
}
|
||||
|
||||
struct MediaElementSourceNodeRenderer {
|
||||
buffers: Arc<Mutex<Vec<Vec<f32>>>>,
|
||||
channels: HashMap<u32, usize>,
|
||||
}
|
||||
|
||||
impl MediaElementSourceNodeRenderer {
|
||||
pub fn new(buffers: Arc<Mutex<Vec<Vec<f32>>>>) -> Self {
|
||||
Self {
|
||||
buffers,
|
||||
channels: HashMap::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioRenderer for MediaElementSourceNodeRenderer {
|
||||
fn render(&mut self, sample: Box<dyn AsRef<[f32]>>, channel_pos: u32) {
|
||||
let channel = match self.channels.entry(channel_pos) {
|
||||
Entry::Occupied(entry) => *entry.get(),
|
||||
Entry::Vacant(entry) => {
|
||||
let mut buffers = self.buffers.lock().unwrap();
|
||||
let len = buffers.len();
|
||||
buffers.resize(len + 1, Vec::new());
|
||||
*entry.insert(buffers.len())
|
||||
},
|
||||
};
|
||||
self.buffers.lock().unwrap()[channel - 1].extend_from_slice((*sample).as_ref());
|
||||
}
|
||||
}
|
||||
46
components/media/audio/media_stream_destination_node.rs
Normal file
46
components/media/audio/media_stream_destination_node.rs
Normal file
@@ -0,0 +1,46 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use servo_media_streams::MediaSocket;
|
||||
|
||||
use crate::block::Chunk;
|
||||
use crate::node::{AudioNodeEngine, AudioNodeType, BlockInfo, ChannelInfo};
|
||||
use crate::sink::AudioSink;
|
||||
|
||||
#[derive(AudioNodeCommon)]
|
||||
pub(crate) struct MediaStreamDestinationNode {
|
||||
channel_info: ChannelInfo,
|
||||
sink: Box<dyn AudioSink + 'static>,
|
||||
}
|
||||
|
||||
impl MediaStreamDestinationNode {
|
||||
pub fn new(
|
||||
socket: Box<dyn MediaSocket>,
|
||||
sample_rate: f32,
|
||||
sink: Box<dyn AudioSink + 'static>,
|
||||
channel_info: ChannelInfo,
|
||||
) -> Self {
|
||||
sink.init_stream(channel_info.count, sample_rate, socket)
|
||||
.expect("init_stream failed");
|
||||
sink.play().expect("Sink didn't start");
|
||||
MediaStreamDestinationNode { channel_info, sink }
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioNodeEngine for MediaStreamDestinationNode {
|
||||
fn node_type(&self) -> AudioNodeType {
|
||||
AudioNodeType::MediaStreamDestinationNode
|
||||
}
|
||||
|
||||
fn process(&mut self, inputs: Chunk, _: &BlockInfo) -> Chunk {
|
||||
self.sink
|
||||
.push_data(inputs)
|
||||
.expect("Pushing to stream failed");
|
||||
Chunk::default()
|
||||
}
|
||||
|
||||
fn output_count(&self) -> u32 {
|
||||
0
|
||||
}
|
||||
}
|
||||
53
components/media/audio/media_stream_source_node.rs
Normal file
53
components/media/audio/media_stream_source_node.rs
Normal file
@@ -0,0 +1,53 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use crate::AudioStreamReader;
|
||||
use crate::block::Chunk;
|
||||
use crate::node::{AudioNodeEngine, AudioNodeType, BlockInfo, ChannelInfo};
|
||||
use crate::param::{Param, ParamType};
|
||||
|
||||
#[derive(AudioNodeCommon)]
|
||||
pub(crate) struct MediaStreamSourceNode {
|
||||
channel_info: ChannelInfo,
|
||||
reader: Box<dyn AudioStreamReader + Send>,
|
||||
playing: bool,
|
||||
}
|
||||
|
||||
impl MediaStreamSourceNode {
|
||||
pub fn new(reader: Box<dyn AudioStreamReader + Send>, channel_info: ChannelInfo) -> Self {
|
||||
Self {
|
||||
channel_info,
|
||||
reader,
|
||||
playing: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioNodeEngine for MediaStreamSourceNode {
|
||||
fn node_type(&self) -> AudioNodeType {
|
||||
AudioNodeType::MediaStreamSourceNode
|
||||
}
|
||||
|
||||
fn process(&mut self, mut inputs: Chunk, _: &BlockInfo) -> Chunk {
|
||||
debug_assert!(inputs.is_empty());
|
||||
|
||||
if !self.playing {
|
||||
self.playing = true;
|
||||
self.reader.start();
|
||||
}
|
||||
|
||||
let block = self.reader.pull();
|
||||
inputs.blocks.push(block);
|
||||
|
||||
inputs
|
||||
}
|
||||
|
||||
fn input_count(&self) -> u32 {
|
||||
0
|
||||
}
|
||||
|
||||
fn get_param(&mut self, _: ParamType) -> &mut Param {
|
||||
panic!("No params on MediaStreamSourceNode");
|
||||
}
|
||||
}
|
||||
250
components/media/audio/node.rs
Normal file
250
components/media/audio/node.rs
Normal file
@@ -0,0 +1,250 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::cmp::min;
|
||||
use std::sync::mpsc::Sender;
|
||||
|
||||
use servo_media_streams::{MediaSocket, MediaStreamId};
|
||||
|
||||
use crate::biquad_filter_node::{BiquadFilterNodeMessage, BiquadFilterNodeOptions};
|
||||
use crate::block::{Block, Chunk, Tick};
|
||||
use crate::buffer_source_node::{AudioBufferSourceNodeMessage, AudioBufferSourceNodeOptions};
|
||||
use crate::channel_node::ChannelNodeOptions;
|
||||
use crate::constant_source_node::ConstantSourceNodeOptions;
|
||||
use crate::gain_node::GainNodeOptions;
|
||||
use crate::iir_filter_node::IIRFilterNodeOptions;
|
||||
use crate::media_element_source_node::MediaElementSourceNodeMessage;
|
||||
use crate::oscillator_node::{OscillatorNodeMessage, OscillatorNodeOptions};
|
||||
use crate::panner_node::{PannerNodeMessage, PannerNodeOptions};
|
||||
use crate::param::{Param, ParamRate, ParamType, UserAutomationEvent};
|
||||
use crate::stereo_panner::StereoPannerOptions;
|
||||
use crate::wave_shaper_node::{WaveShaperNodeMessage, WaveShaperNodeOptions};
|
||||
|
||||
/// Information required to construct an audio node
|
||||
pub enum AudioNodeInit {
|
||||
AnalyserNode(Box<dyn FnMut(Block) + Send>),
|
||||
BiquadFilterNode(BiquadFilterNodeOptions),
|
||||
AudioBuffer,
|
||||
AudioBufferSourceNode(AudioBufferSourceNodeOptions),
|
||||
ChannelMergerNode(ChannelNodeOptions),
|
||||
ChannelSplitterNode,
|
||||
ConstantSourceNode(ConstantSourceNodeOptions),
|
||||
ConvolverNode,
|
||||
DelayNode,
|
||||
DynamicsCompressionNode,
|
||||
GainNode(GainNodeOptions),
|
||||
IIRFilterNode(IIRFilterNodeOptions),
|
||||
MediaElementSourceNode,
|
||||
MediaStreamDestinationNode(Box<dyn MediaSocket>),
|
||||
MediaStreamSourceNode(MediaStreamId),
|
||||
OscillatorNode(OscillatorNodeOptions),
|
||||
PannerNode(PannerNodeOptions),
|
||||
PeriodicWave,
|
||||
ScriptProcessorNode,
|
||||
StereoPannerNode(StereoPannerOptions),
|
||||
WaveShaperNode(WaveShaperNodeOptions),
|
||||
}
|
||||
|
||||
/// Type of AudioNodeEngine.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum AudioNodeType {
|
||||
/// Not a constructable node
|
||||
AudioListenerNode,
|
||||
AnalyserNode,
|
||||
BiquadFilterNode,
|
||||
AudioBuffer,
|
||||
AudioBufferSourceNode,
|
||||
ChannelMergerNode,
|
||||
ChannelSplitterNode,
|
||||
ConstantSourceNode,
|
||||
ConvolverNode,
|
||||
DelayNode,
|
||||
DestinationNode,
|
||||
DynamicsCompressionNode,
|
||||
GainNode,
|
||||
IIRFilterNode,
|
||||
MediaElementSourceNode,
|
||||
MediaStreamDestinationNode,
|
||||
MediaStreamSourceNode,
|
||||
OscillatorNode,
|
||||
PannerNode,
|
||||
PeriodicWave,
|
||||
ScriptProcessorNode,
|
||||
StereoPannerNode,
|
||||
WaveShaperNode,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
|
||||
pub enum ChannelCountMode {
|
||||
Max,
|
||||
ClampedMax,
|
||||
Explicit,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
|
||||
pub enum ChannelInterpretation {
|
||||
Discrete,
|
||||
Speakers,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct BlockInfo {
|
||||
pub sample_rate: f32,
|
||||
pub frame: Tick,
|
||||
pub time: f64,
|
||||
}
|
||||
|
||||
impl BlockInfo {
|
||||
/// Given the current block, calculate the absolute zero-relative
|
||||
/// tick of the given tick
|
||||
pub fn absolute_tick(&self, tick: Tick) -> Tick {
|
||||
self.frame + tick
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ChannelInfo {
|
||||
pub count: u8,
|
||||
pub mode: ChannelCountMode,
|
||||
pub interpretation: ChannelInterpretation,
|
||||
pub context_channel_count: u8,
|
||||
}
|
||||
|
||||
impl Default for ChannelInfo {
|
||||
fn default() -> Self {
|
||||
ChannelInfo {
|
||||
count: 2,
|
||||
mode: ChannelCountMode::Max,
|
||||
interpretation: ChannelInterpretation::Speakers,
|
||||
context_channel_count: 2,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ChannelInfo {
|
||||
/// <https://webaudio.github.io/web-audio-api/#computednumberofchannels>
|
||||
pub fn computed_number_of_channels(&self) -> u8 {
|
||||
match self.mode {
|
||||
ChannelCountMode::Max => self.context_channel_count,
|
||||
ChannelCountMode::ClampedMax => min(self.count, self.context_channel_count),
|
||||
ChannelCountMode::Explicit => self.count,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) trait AudioNodeCommon {
|
||||
fn channel_info(&self) -> &ChannelInfo;
|
||||
|
||||
fn channel_info_mut(&mut self) -> &mut ChannelInfo;
|
||||
}
|
||||
|
||||
/// This trait represents the common features of all audio nodes.
|
||||
pub(crate) trait AudioNodeEngine: Send + AudioNodeCommon {
|
||||
fn node_type(&self) -> AudioNodeType;
|
||||
|
||||
fn process(&mut self, inputs: Chunk, info: &BlockInfo) -> Chunk;
|
||||
|
||||
fn message(&mut self, msg: AudioNodeMessage, sample_rate: f32) {
|
||||
match msg {
|
||||
AudioNodeMessage::GetParamValue(id, tx) => {
|
||||
let _ = tx.send(self.get_param(id).value());
|
||||
},
|
||||
AudioNodeMessage::SetChannelCount(c) => self.set_channel_count(c),
|
||||
AudioNodeMessage::SetChannelMode(c) => self.set_channel_count_mode(c),
|
||||
AudioNodeMessage::SetChannelInterpretation(c) => self.set_channel_interpretation(c),
|
||||
AudioNodeMessage::SetParam(id, event) => self
|
||||
.get_param(id)
|
||||
.insert_event(event.convert_to_event(sample_rate)),
|
||||
AudioNodeMessage::SetParamRate(id, rate) => self.get_param(id).set_rate(rate),
|
||||
_ => self.message_specific(msg, sample_rate),
|
||||
}
|
||||
}
|
||||
|
||||
/// Messages specific to this node
|
||||
fn message_specific(&mut self, _: AudioNodeMessage, _sample_rate: f32) {}
|
||||
|
||||
fn input_count(&self) -> u32 {
|
||||
1
|
||||
}
|
||||
fn output_count(&self) -> u32 {
|
||||
1
|
||||
}
|
||||
|
||||
/// Number of input channels for each input port
|
||||
fn channel_count(&self) -> u8 {
|
||||
self.channel_info().count
|
||||
}
|
||||
|
||||
fn channel_count_mode(&self) -> ChannelCountMode {
|
||||
self.channel_info().mode
|
||||
}
|
||||
|
||||
fn channel_interpretation(&self) -> ChannelInterpretation {
|
||||
self.channel_info().interpretation
|
||||
}
|
||||
|
||||
fn set_channel_interpretation(&mut self, i: ChannelInterpretation) {
|
||||
self.channel_info_mut().interpretation = i
|
||||
}
|
||||
fn set_channel_count(&mut self, c: u8) {
|
||||
self.channel_info_mut().count = c;
|
||||
}
|
||||
fn set_channel_count_mode(&mut self, m: ChannelCountMode) {
|
||||
self.channel_info_mut().mode = m;
|
||||
}
|
||||
|
||||
/// If we're the destination node, extract the contained data
|
||||
fn destination_data(&mut self) -> Option<Chunk> {
|
||||
None
|
||||
}
|
||||
|
||||
fn get_param(&mut self, _: ParamType) -> &mut Param {
|
||||
panic!("No params on node {:?}", self.node_type())
|
||||
}
|
||||
|
||||
fn set_listenerdata(&mut self, _: Block) {
|
||||
panic!("can't accept listener connections")
|
||||
}
|
||||
}
|
||||
|
||||
pub enum AudioNodeMessage {
|
||||
AudioBufferSourceNode(AudioBufferSourceNodeMessage),
|
||||
AudioScheduledSourceNode(AudioScheduledSourceNodeMessage),
|
||||
BiquadFilterNode(BiquadFilterNodeMessage),
|
||||
GetParamValue(ParamType, Sender<f32>),
|
||||
MediaElementSourceNode(MediaElementSourceNodeMessage),
|
||||
OscillatorNode(OscillatorNodeMessage),
|
||||
PannerNode(PannerNodeMessage),
|
||||
SetChannelCount(u8),
|
||||
SetChannelMode(ChannelCountMode),
|
||||
SetChannelInterpretation(ChannelInterpretation),
|
||||
SetParam(ParamType, UserAutomationEvent),
|
||||
SetParamRate(ParamType, ParamRate),
|
||||
WaveShaperNode(WaveShaperNodeMessage),
|
||||
}
|
||||
|
||||
pub struct OnEndedCallback(pub Box<dyn FnOnce() + Send + 'static>);
|
||||
|
||||
impl OnEndedCallback {
|
||||
pub fn new<F: FnOnce() + Send + 'static>(callback: F) -> Self {
|
||||
OnEndedCallback(Box::new(callback))
|
||||
}
|
||||
}
|
||||
|
||||
/// Type of message directed to AudioScheduledSourceNodes.
|
||||
pub enum AudioScheduledSourceNodeMessage {
|
||||
/// Schedules a sound to playback at an exact time.
|
||||
Start(f64),
|
||||
/// Schedules a sound to stop playback at an exact time.
|
||||
Stop(f64),
|
||||
/// Register onended event callback.
|
||||
RegisterOnEndedCallback(OnEndedCallback),
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq)]
|
||||
pub enum ShouldPlay {
|
||||
/// Don't play anything
|
||||
No,
|
||||
/// Play, given start and end tick offsets
|
||||
Between(Tick, Tick),
|
||||
}
|
||||
109
components/media/audio/offline_sink.rs
Normal file
109
components/media/audio/offline_sink.rs
Normal file
@@ -0,0 +1,109 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::cell::{Cell, RefCell};
|
||||
use std::sync::mpsc::Sender;
|
||||
|
||||
use servo_media_streams::MediaSocket;
|
||||
|
||||
use crate::block::{Chunk, FRAMES_PER_BLOCK_USIZE};
|
||||
use crate::render_thread::{AudioRenderThreadMsg, SinkEosCallback};
|
||||
use crate::sink::{AudioSink, AudioSinkError};
|
||||
|
||||
pub struct ProcessedAudio(Box<[f32]>);
|
||||
|
||||
impl AsRef<[f32]> for ProcessedAudio {
|
||||
fn as_ref(&self) -> &[f32] {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
pub struct OfflineAudioSink {
|
||||
buffer: RefCell<Option<Vec<f32>>>,
|
||||
channel_count: usize,
|
||||
has_enough_data: Cell<bool>,
|
||||
length: usize,
|
||||
rendered_blocks: Cell<usize>,
|
||||
eos_callback: RefCell<Option<SinkEosCallback>>,
|
||||
}
|
||||
|
||||
impl OfflineAudioSink {
|
||||
pub fn new(channel_count: usize, length: usize) -> Self {
|
||||
Self {
|
||||
buffer: RefCell::new(None),
|
||||
channel_count,
|
||||
has_enough_data: Cell::new(false),
|
||||
length,
|
||||
rendered_blocks: Cell::new(0),
|
||||
eos_callback: RefCell::new(None),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioSink for OfflineAudioSink {
|
||||
fn init(&self, _: f32, _: Sender<AudioRenderThreadMsg>) -> Result<(), AudioSinkError> {
|
||||
Ok(())
|
||||
}
|
||||
fn init_stream(&self, _: u8, _: f32, _: Box<dyn MediaSocket>) -> Result<(), AudioSinkError> {
|
||||
unreachable!("OfflineAudioSink should never be used for MediaStreamDestinationNode")
|
||||
}
|
||||
fn play(&self) -> Result<(), AudioSinkError> {
|
||||
self.has_enough_data.set(false);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn stop(&self) -> Result<(), AudioSinkError> {
|
||||
self.has_enough_data.set(true);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn has_enough_data(&self) -> bool {
|
||||
self.has_enough_data.get() ||
|
||||
(self.rendered_blocks.get() * FRAMES_PER_BLOCK_USIZE >= self.length)
|
||||
}
|
||||
|
||||
fn push_data(&self, mut chunk: Chunk) -> Result<(), AudioSinkError> {
|
||||
let offset = self.rendered_blocks.get() * FRAMES_PER_BLOCK_USIZE;
|
||||
let (last, copy_len) = if self.length - offset <= FRAMES_PER_BLOCK_USIZE {
|
||||
(true, self.length - offset)
|
||||
} else {
|
||||
(false, FRAMES_PER_BLOCK_USIZE)
|
||||
};
|
||||
let mut buffer = self.buffer.borrow_mut();
|
||||
if buffer.is_none() {
|
||||
*buffer = Some(vec![0.; self.channel_count * self.length]);
|
||||
}
|
||||
if chunk.is_empty() {
|
||||
chunk.blocks.push(Default::default());
|
||||
}
|
||||
if chunk.blocks[0].is_empty() {
|
||||
chunk.blocks[0].explicit_silence();
|
||||
}
|
||||
if let Some(ref mut buffer) = *buffer {
|
||||
for channel_number in 0..self.channel_count {
|
||||
let channel_offset = offset + (channel_number * self.length);
|
||||
let channel_data = &mut buffer[channel_offset..channel_offset + copy_len];
|
||||
channel_data
|
||||
.copy_from_slice(&chunk.blocks[0].data_chan(channel_number as u8)[0..copy_len]);
|
||||
}
|
||||
};
|
||||
self.rendered_blocks.set(self.rendered_blocks.get() + 1);
|
||||
|
||||
if last {
|
||||
if let Some(callback) = self.eos_callback.borrow_mut().take() {
|
||||
let processed_audio = ProcessedAudio(buffer.take().unwrap().into_boxed_slice());
|
||||
callback(Box::new(processed_audio));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn set_eos_callback(
|
||||
&self,
|
||||
callback: Box<dyn Fn(Box<dyn AsRef<[f32]>>) + Send + Sync + 'static>,
|
||||
) {
|
||||
*self.eos_callback.borrow_mut() = Some(callback);
|
||||
}
|
||||
}
|
||||
196
components/media/audio/oscillator_node.rs
Normal file
196
components/media/audio/oscillator_node.rs
Normal file
@@ -0,0 +1,196 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use num_traits::cast::NumCast;
|
||||
|
||||
use crate::block::{Chunk, Tick};
|
||||
use crate::node::{
|
||||
AudioNodeEngine, AudioNodeType, AudioScheduledSourceNodeMessage, BlockInfo, ChannelInfo,
|
||||
OnEndedCallback, ShouldPlay,
|
||||
};
|
||||
use crate::param::{Param, ParamType};
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct PeriodicWaveOptions {
|
||||
// XXX https://webaudio.github.io/web-audio-api/#dictdef-periodicwaveoptions
|
||||
}
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum OscillatorType {
|
||||
Sine,
|
||||
Square,
|
||||
Sawtooth,
|
||||
Triangle,
|
||||
Custom,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct OscillatorNodeOptions {
|
||||
pub oscillator_type: OscillatorType,
|
||||
pub freq: f32,
|
||||
pub detune: f32,
|
||||
pub periodic_wave_options: Option<PeriodicWaveOptions>,
|
||||
}
|
||||
|
||||
impl Default for OscillatorNodeOptions {
|
||||
fn default() -> Self {
|
||||
OscillatorNodeOptions {
|
||||
oscillator_type: OscillatorType::Sine,
|
||||
freq: 440.,
|
||||
detune: 0.,
|
||||
periodic_wave_options: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum OscillatorNodeMessage {
|
||||
SetOscillatorType(OscillatorType),
|
||||
}
|
||||
|
||||
#[derive(AudioScheduledSourceNode, AudioNodeCommon)]
|
||||
pub(crate) struct OscillatorNode {
|
||||
channel_info: ChannelInfo,
|
||||
oscillator_type: OscillatorType,
|
||||
frequency: Param,
|
||||
detune: Param,
|
||||
phase: f64,
|
||||
/// Time at which the source should start playing.
|
||||
start_at: Option<Tick>,
|
||||
/// Time at which the source should stop playing.
|
||||
stop_at: Option<Tick>,
|
||||
/// The ended event callback.
|
||||
onended_callback: Option<OnEndedCallback>,
|
||||
}
|
||||
|
||||
impl OscillatorNode {
|
||||
pub fn new(options: OscillatorNodeOptions, channel_info: ChannelInfo) -> Self {
|
||||
Self {
|
||||
channel_info,
|
||||
oscillator_type: options.oscillator_type,
|
||||
frequency: Param::new(options.freq),
|
||||
detune: Param::new(options.detune),
|
||||
phase: 0.,
|
||||
start_at: None,
|
||||
stop_at: None,
|
||||
onended_callback: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn update_parameters(&mut self, info: &BlockInfo, tick: Tick) -> bool {
|
||||
self.frequency.update(info, tick)
|
||||
}
|
||||
|
||||
fn handle_oscillator_message(&mut self, message: OscillatorNodeMessage, _sample_rate: f32) {
|
||||
match message {
|
||||
OscillatorNodeMessage::SetOscillatorType(o) => {
|
||||
self.oscillator_type = o;
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioNodeEngine for OscillatorNode {
|
||||
fn node_type(&self) -> AudioNodeType {
|
||||
AudioNodeType::OscillatorNode
|
||||
}
|
||||
|
||||
fn process(&mut self, mut inputs: Chunk, info: &BlockInfo) -> Chunk {
|
||||
// XXX Implement this properly and according to self.options
|
||||
// as defined in https://webaudio.github.io/web-audio-api/#oscillatornode
|
||||
use std::f64::consts::PI;
|
||||
debug_assert!(inputs.is_empty());
|
||||
inputs.blocks.push(Default::default());
|
||||
let (start_at, stop_at) = match self.should_play_at(info.frame) {
|
||||
ShouldPlay::No => {
|
||||
return inputs;
|
||||
},
|
||||
ShouldPlay::Between(start, end) => (start, end),
|
||||
};
|
||||
|
||||
{
|
||||
inputs.blocks[0].explicit_silence();
|
||||
let mut iter = inputs.blocks[0].iter();
|
||||
|
||||
// Convert all our parameters to the target type for calculations
|
||||
let vol: f32 = 1.0;
|
||||
let sample_rate = info.sample_rate as f64;
|
||||
let two_pi = 2.0 * PI;
|
||||
|
||||
// We're carrying a phase with up to 2pi around instead of working
|
||||
// on the sample offset. High sample offsets cause too much inaccuracy when
|
||||
// converted to floating point numbers and then iterated over in 1-steps
|
||||
//
|
||||
// Also, if the frequency changes the phase should not
|
||||
let mut step = two_pi * self.frequency.value() as f64 / sample_rate;
|
||||
while let Some(mut frame) = iter.next() {
|
||||
let tick = frame.tick();
|
||||
if tick < start_at {
|
||||
continue;
|
||||
} else if tick > stop_at {
|
||||
break;
|
||||
}
|
||||
|
||||
if self.update_parameters(info, tick) {
|
||||
step = two_pi * self.frequency.value() as f64 / sample_rate;
|
||||
}
|
||||
let mut value = vol;
|
||||
match self.oscillator_type {
|
||||
OscillatorType::Sine => {
|
||||
value = vol * f32::sin(NumCast::from(self.phase).unwrap());
|
||||
},
|
||||
|
||||
OscillatorType::Square => {
|
||||
if self.phase >= PI && self.phase < two_pi {
|
||||
value = vol * 1.0;
|
||||
} else if self.phase > 0.0 && self.phase < PI {
|
||||
value = -vol;
|
||||
}
|
||||
},
|
||||
|
||||
OscillatorType::Sawtooth => {
|
||||
value = vol * (self.phase / (PI)) as f32;
|
||||
},
|
||||
|
||||
OscillatorType::Triangle => {
|
||||
if self.phase >= 0. && self.phase < PI / 2. {
|
||||
value = vol * 2.0 * (self.phase / (PI)) as f32;
|
||||
} else if self.phase >= PI / 2. && self.phase < PI {
|
||||
value = vol * (1. - ((self.phase - (PI / 2.)) * (2. / PI)) as f32);
|
||||
} else if self.phase >= PI && self.phase < (3. * PI / 2.) {
|
||||
value = -vol * (1. - ((self.phase - (PI / 2.)) * (2. / PI)) as f32);
|
||||
} else if self.phase >= 3. * PI / 2. && self.phase < 2. * PI {
|
||||
value = vol * (-2.0) * (self.phase / (PI)) as f32;
|
||||
}
|
||||
},
|
||||
|
||||
OscillatorType::Custom => {},
|
||||
}
|
||||
|
||||
frame.mutate_with(|sample, _| *sample = value);
|
||||
|
||||
self.phase += step;
|
||||
if self.phase >= two_pi {
|
||||
self.phase -= two_pi;
|
||||
}
|
||||
}
|
||||
}
|
||||
inputs
|
||||
}
|
||||
|
||||
fn input_count(&self) -> u32 {
|
||||
0
|
||||
}
|
||||
|
||||
fn get_param(&mut self, id: ParamType) -> &mut Param {
|
||||
match id {
|
||||
ParamType::Frequency => &mut self.frequency,
|
||||
ParamType::Detune => &mut self.detune,
|
||||
_ => panic!("Unknown param {:?} for OscillatorNode", id),
|
||||
}
|
||||
}
|
||||
make_message_handler!(
|
||||
AudioScheduledSourceNode: handle_source_node_message,
|
||||
OscillatorNode: handle_oscillator_message
|
||||
);
|
||||
}
|
||||
410
components/media/audio/panner_node.rs
Normal file
410
components/media/audio/panner_node.rs
Normal file
@@ -0,0 +1,410 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::f32::consts::PI;
|
||||
|
||||
use euclid::default::Vector3D;
|
||||
|
||||
use crate::block::{Block, Chunk, FRAMES_PER_BLOCK, Tick};
|
||||
use crate::node::{AudioNodeEngine, AudioNodeMessage, AudioNodeType, BlockInfo, ChannelInfo};
|
||||
use crate::param::{Param, ParamDir, ParamType};
|
||||
|
||||
// .normalize(), but it takes into account zero vectors
|
||||
pub fn normalize_zero(v: Vector3D<f32>) -> Vector3D<f32> {
|
||||
let len = v.length();
|
||||
if len == 0. { v } else { v / len }
|
||||
}
|
||||
|
||||
/// Spatialization algorithm used by a [`PannerNode`].
///
/// <https://webaudio.github.io/web-audio-api/#enumdef-panningmodeltype>
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum PanningModel {
    /// Simple equal-power (amplitude) panning; the only model this engine
    /// implements.
    EqualPower,
    /// Head-related transfer function panning; accepted but unsupported —
    /// requesting it only logs a warning.
    HRTF,
}
|
||||
|
||||
/// Distance-attenuation formula for a [`PannerNode`].
///
/// <https://webaudio.github.io/web-audio-api/#enumdef-distancemodeltype>
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum DistanceModel {
    /// Linear falloff between `ref_distance` and `max_distance`.
    Linear,
    /// Inverse falloff scaled by `rolloff_factor` (the option default).
    Inverse,
    /// Exponential falloff: `(d / ref_distance).powf(-rolloff_factor)`.
    Exponential,
}
|
||||
|
||||
/// Construction-time configuration for a [`PannerNode`], mirroring the
/// Web Audio API `PannerOptions` dictionary.
#[derive(Copy, Clone, Debug)]
pub struct PannerNodeOptions {
    pub panning_model: PanningModel,
    pub distance_model: DistanceModel,
    // Initial source position.
    pub position_x: f32,
    pub position_y: f32,
    pub position_z: f32,
    // Initial direction the source's cone points in.
    pub orientation_x: f32,
    pub orientation_y: f32,
    pub orientation_z: f32,
    /// Reference distance used by all distance models.
    pub ref_distance: f64,
    /// Upper clamp on distance (applied by the linear model).
    pub max_distance: f64,
    /// How quickly volume falls off with distance.
    pub rolloff_factor: f64,
    /// Full (not half) inner cone angle, in degrees; no attenuation inside.
    pub cone_inner_angle: f64,
    /// Full outer cone angle, in degrees; `cone_outer_gain` applies outside.
    pub cone_outer_angle: f64,
    /// Gain applied beyond the outer cone angle.
    pub cone_outer_gain: f64,
}
|
||||
|
||||
/// Control messages for mutating a live [`PannerNode`] on the render thread
/// (handled in `message_specific`).
pub enum PannerNodeMessage {
    SetPanningModel(PanningModel),
    SetDistanceModel(DistanceModel),
    SetRefDistance(f64),
    SetMaxDistance(f64),
    SetRolloff(f64),
    SetConeInner(f64),
    SetConeOuter(f64),
    SetConeGain(f64),
}
|
||||
|
||||
impl Default for PannerNodeOptions {
    /// Defaults matching the Web Audio `PannerOptions` dictionary:
    /// equal-power panning, inverse distance model, source at the origin
    /// pointing down +x, and a cone covering the full sphere (360°/360°).
    fn default() -> Self {
        PannerNodeOptions {
            panning_model: PanningModel::EqualPower,
            distance_model: DistanceModel::Inverse,
            position_x: 0.,
            position_y: 0.,
            position_z: 0.,
            orientation_x: 1.,
            orientation_y: 0.,
            orientation_z: 0.,
            ref_distance: 1.,
            max_distance: 10000.,
            rolloff_factor: 1.,
            cone_inner_angle: 360.,
            cone_outer_angle: 360.,
            cone_outer_gain: 0.,
        }
    }
}
|
||||
|
||||
/// Engine-side implementation of the Web Audio `PannerNode`.
///
/// Spatializes its mono or stereo input relative to the listener, whose
/// per-frame state arrives as a [`Block`] via `set_listenerdata`.
#[derive(AudioNodeCommon)]
pub(crate) struct PannerNode {
    channel_info: ChannelInfo,
    panning_model: PanningModel,
    distance_model: DistanceModel,
    // Automatable source position components.
    position_x: Param,
    position_y: Param,
    position_z: Param,
    // Automatable source orientation components.
    orientation_x: Param,
    orientation_y: Param,
    orientation_z: Param,
    ref_distance: f64,
    max_distance: f64,
    rolloff_factor: f64,
    cone_inner_angle: f64,
    cone_outer_angle: f64,
    cone_outer_gain: f64,
    /// Listener state for the current block; `take()`n (consumed) by each
    /// `process()` call, so `None` means "no listener data this block".
    listener_data: Option<Block>,
}
|
||||
|
||||
impl PannerNode {
|
||||
pub fn new(options: PannerNodeOptions, channel_info: ChannelInfo) -> Self {
|
||||
if options.panning_model == PanningModel::HRTF {
|
||||
log::warn!("HRTF requested but not supported")
|
||||
}
|
||||
Self {
|
||||
channel_info,
|
||||
panning_model: options.panning_model,
|
||||
distance_model: options.distance_model,
|
||||
position_x: Param::new(options.position_x),
|
||||
position_y: Param::new(options.position_y),
|
||||
position_z: Param::new(options.position_z),
|
||||
orientation_x: Param::new(options.orientation_x),
|
||||
orientation_y: Param::new(options.orientation_y),
|
||||
orientation_z: Param::new(options.orientation_z),
|
||||
ref_distance: options.ref_distance,
|
||||
max_distance: options.max_distance,
|
||||
rolloff_factor: options.rolloff_factor,
|
||||
cone_inner_angle: options.cone_inner_angle,
|
||||
cone_outer_angle: options.cone_outer_angle,
|
||||
cone_outer_gain: options.cone_outer_gain,
|
||||
listener_data: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn update_parameters(&mut self, info: &BlockInfo, tick: Tick) -> bool {
|
||||
let mut changed = self.position_x.update(info, tick);
|
||||
changed |= self.position_y.update(info, tick);
|
||||
changed |= self.position_z.update(info, tick);
|
||||
changed |= self.orientation_x.update(info, tick);
|
||||
changed |= self.orientation_y.update(info, tick);
|
||||
changed |= self.orientation_z.update(info, tick);
|
||||
changed
|
||||
}
|
||||
|
||||
/// Computes azimuth, elevation, and distance of source with respect to a
|
||||
/// given AudioListener's position, forward, and up vectors
|
||||
/// in degrees
|
||||
///
|
||||
/// <https://webaudio.github.io/web-audio-api/#azimuth-elevation>
|
||||
/// <https://webaudio.github.io/web-audio-api/#Spatialization-distance-effects>
|
||||
fn azimuth_elevation_distance(
|
||||
&self,
|
||||
listener: (Vector3D<f32>, Vector3D<f32>, Vector3D<f32>),
|
||||
) -> (f32, f32, f64) {
|
||||
let (listener_position, listener_forward, listener_up) = listener;
|
||||
let source_position = Vector3D::new(
|
||||
self.position_x.value(),
|
||||
self.position_y.value(),
|
||||
self.position_z.value(),
|
||||
);
|
||||
|
||||
// degenerate case
|
||||
if source_position == listener_position {
|
||||
return (0., 0., 0.);
|
||||
}
|
||||
|
||||
let diff = source_position - listener_position;
|
||||
let distance = diff.length();
|
||||
let source_listener = normalize_zero(diff);
|
||||
let listener_right = listener_forward.cross(listener_up);
|
||||
let listener_right_norm = normalize_zero(listener_right);
|
||||
let listener_forward_norm = normalize_zero(listener_forward);
|
||||
|
||||
let up = listener_right_norm.cross(listener_forward_norm);
|
||||
|
||||
let up_projection = source_listener.dot(up);
|
||||
let projected_source = normalize_zero(source_listener - up * up_projection);
|
||||
let mut azimuth = 180. * projected_source.dot(listener_right_norm).acos() / PI;
|
||||
|
||||
let front_back = projected_source.dot(listener_forward_norm);
|
||||
if front_back < 0. {
|
||||
azimuth = 360. - azimuth;
|
||||
}
|
||||
if (0. ..=270.).contains(&azimuth) {
|
||||
azimuth = 90. - azimuth;
|
||||
} else {
|
||||
azimuth = 450. - azimuth;
|
||||
}
|
||||
|
||||
let mut elevation = 90. - 180. * source_listener.dot(up).acos() / PI;
|
||||
|
||||
if elevation > 90. {
|
||||
elevation = 180. - elevation;
|
||||
} else if elevation < -90. {
|
||||
elevation = -180. - elevation;
|
||||
}
|
||||
|
||||
(azimuth, elevation, distance as f64)
|
||||
}
|
||||
|
||||
/// <https://webaudio.github.io/web-audio-api/#Spatialization-sound-cones>
|
||||
fn cone_gain(&self, listener: (Vector3D<f32>, Vector3D<f32>, Vector3D<f32>)) -> f64 {
|
||||
let (listener_position, _, _) = listener;
|
||||
let source_position = Vector3D::new(
|
||||
self.position_x.value(),
|
||||
self.position_y.value(),
|
||||
self.position_z.value(),
|
||||
);
|
||||
let source_orientation = Vector3D::new(
|
||||
self.orientation_x.value(),
|
||||
self.orientation_y.value(),
|
||||
self.orientation_z.value(),
|
||||
);
|
||||
|
||||
if source_orientation == Vector3D::zero() ||
|
||||
(self.cone_inner_angle == 360. && self.cone_outer_angle == 360.)
|
||||
{
|
||||
return 0.;
|
||||
}
|
||||
|
||||
let normalized_source_orientation = normalize_zero(source_orientation);
|
||||
|
||||
let source_to_listener = normalize_zero(source_position - listener_position);
|
||||
// Angle between the source orientation vector and the source-listener vector
|
||||
let angle = 180. * source_to_listener.dot(normalized_source_orientation).acos() / PI;
|
||||
let abs_angle = angle.abs() as f64;
|
||||
|
||||
// Divide by 2 here since API is entire angle (not half-angle)
|
||||
let abs_inner_angle = self.cone_inner_angle.abs() / 2.;
|
||||
let abs_outer_angle = self.cone_outer_angle.abs() / 2.;
|
||||
|
||||
if abs_angle < abs_inner_angle {
|
||||
// no attenuation
|
||||
1.
|
||||
} else if abs_angle >= abs_outer_angle {
|
||||
// max attenuation
|
||||
self.cone_outer_gain
|
||||
} else {
|
||||
// gain changes linearly from 1 to cone_outer_gain
|
||||
// as we go from inner to outer
|
||||
let x = (abs_angle - abs_inner_angle) / (abs_outer_angle - abs_inner_angle);
|
||||
(1. - x) + self.cone_outer_gain * x
|
||||
}
|
||||
}
|
||||
|
||||
fn linear_distance(&self, mut distance: f64, rolloff_factor: f64) -> f64 {
|
||||
if distance > self.max_distance {
|
||||
distance = self.max_distance;
|
||||
}
|
||||
if distance < self.ref_distance {
|
||||
distance = self.ref_distance;
|
||||
}
|
||||
let denom = self.max_distance - self.ref_distance;
|
||||
1. - rolloff_factor * (distance - self.ref_distance) / denom
|
||||
}
|
||||
|
||||
fn inverse_distance(&self, mut distance: f64, rolloff_factor: f64) -> f64 {
|
||||
if distance < self.ref_distance {
|
||||
distance = self.ref_distance;
|
||||
}
|
||||
let denom = self.ref_distance + rolloff_factor * (distance - self.ref_distance);
|
||||
self.ref_distance / denom
|
||||
}
|
||||
|
||||
fn exponential_distance(&self, mut distance: f64, rolloff_factor: f64) -> f64 {
|
||||
if distance < self.ref_distance {
|
||||
distance = self.ref_distance;
|
||||
}
|
||||
|
||||
(distance / self.ref_distance).powf(-rolloff_factor)
|
||||
}
|
||||
|
||||
fn distance_gain_fn(&self) -> fn(&Self, f64, f64) -> f64 {
|
||||
match self.distance_model {
|
||||
DistanceModel::Linear => |x, d, r| x.linear_distance(d, r),
|
||||
DistanceModel::Inverse => |x, d, r| x.inverse_distance(d, r),
|
||||
DistanceModel::Exponential => |x, d, r| x.exponential_distance(d, r),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioNodeEngine for PannerNode {
    fn node_type(&self) -> AudioNodeType {
        AudioNodeType::PannerNode
    }

    /// Spatializes the single input block in place.
    ///
    /// If no listener data was supplied for this block, the input passes
    /// through untouched. Only the EqualPower model does any work; the
    /// mono input case is upmixed to stereo first. Per frame, the azimuth
    /// drives equal-power left/right gains, which are then scaled by the
    /// distance and cone gains.
    fn process(&mut self, mut inputs: Chunk, info: &BlockInfo) -> Chunk {
        debug_assert!(inputs.len() == 1);

        // Listener data is consumed each block; absence means pass-through.
        let listener_data = if let Some(listener_data) = self.listener_data.take() {
            listener_data
        } else {
            return inputs;
        };

        // We clamp this early: the linear model's formula assumes a rolloff
        // factor of at most 1.
        let rolloff_factor =
            if self.distance_model == DistanceModel::Linear && self.rolloff_factor > 1. {
                1.
            } else {
                self.rolloff_factor
            };

        {
            let block = &mut inputs.blocks[0];

            block.explicit_repeat();

            // Upmix mono to stereo (second channel silent for now); stereo
            // input is processed as-is.
            let mono = if block.chan_count() == 1 {
                block.resize_silence(2);
                true
            } else {
                debug_assert!(block.chan_count() == 2);
                false
            };

            // Resolve the distance model once per block.
            let distance_gain_fn = self.distance_gain_fn();

            if self.panning_model == PanningModel::EqualPower {
                // Planar layout: the first FRAMES_PER_BLOCK samples are the
                // left channel, the rest the right channel.
                let (l, r) = block.data_mut().split_at_mut(FRAMES_PER_BLOCK.0 as usize);
                for frame in 0..FRAMES_PER_BLOCK.0 {
                    let frame = Tick(frame);
                    self.update_parameters(info, frame);
                    let data = listener_data.listener_data(frame);
                    let (mut azimuth, _elev, dist) = self.azimuth_elevation_distance(data);
                    let distance_gain = distance_gain_fn(self, dist, rolloff_factor);
                    let cone_gain = self.cone_gain(data);

                    // https://webaudio.github.io/web-audio-api/#Spatialization-equal-power-panning

                    // clamp to [-180, 180], then wrap to [-90, 90]
                    azimuth = azimuth.clamp(-180., 180.);
                    if azimuth < -90. {
                        azimuth = -180. - azimuth;
                    } else if azimuth > 90. {
                        azimuth = 180. - azimuth;
                    }

                    // Normalized pan position: mono pans over the whole
                    // [-90, 90] range, stereo pans each half-range.
                    let x = if mono {
                        (azimuth + 90.) / 180.
                    } else if azimuth <= 0. {
                        (azimuth + 90.) / 90.
                    } else {
                        azimuth / 90.
                    };
                    let x = x * PI / 2.;

                    let mut gain_l = x.cos();
                    let mut gain_r = x.sin();
                    // cos/sin at the interval edges can come out slightly
                    // negative due to floating point; clamp to zero.
                    if gain_l <= 0. {
                        gain_l = 0.
                    }
                    if gain_r <= 0. {
                        gain_r = 0.;
                    }

                    let index = frame.0 as usize;
                    if mono {
                        let input = l[index];
                        l[index] = input * gain_l;
                        r[index] = input * gain_r;
                    } else if azimuth <= 0. {
                        // Panning left: bleed some right channel into left.
                        l[index] += r[index] * gain_l;
                        r[index] *= gain_r;
                    } else {
                        // Panning right: bleed some left channel into right.
                        r[index] += l[index] * gain_r;
                        l[index] *= gain_l;
                    }
                    // Apply distance and cone attenuation to both channels.
                    l[index] = l[index] * distance_gain as f32 * cone_gain as f32;
                    r[index] = r[index] * distance_gain as f32 * cone_gain as f32;
                }
            }
        }

        inputs
    }

    /// Panner processes exactly one input.
    fn input_count(&self) -> u32 {
        1
    }

    /// Returns the automatable position/orientation param for `id`; panics
    /// on any other `ParamType`.
    fn get_param(&mut self, id: ParamType) -> &mut Param {
        match id {
            ParamType::Position(ParamDir::X) => &mut self.position_x,
            ParamType::Position(ParamDir::Y) => &mut self.position_y,
            ParamType::Position(ParamDir::Z) => &mut self.position_z,
            ParamType::Orientation(ParamDir::X) => &mut self.orientation_x,
            ParamType::Orientation(ParamDir::Y) => &mut self.orientation_y,
            ParamType::Orientation(ParamDir::Z) => &mut self.orientation_z,
            _ => panic!("Unknown param {:?} for PannerNode", id),
        }
    }

    /// Stores the listener state block to be consumed by the next
    /// `process()` call.
    fn set_listenerdata(&mut self, data: Block) {
        self.listener_data = Some(data);
    }

    /// Applies PannerNode control messages; other message types are ignored.
    fn message_specific(&mut self, message: AudioNodeMessage, _sample_rate: f32) {
        if let AudioNodeMessage::PannerNode(p) = message {
            match p {
                PannerNodeMessage::SetPanningModel(p) => {
                    if p == PanningModel::HRTF {
                        log::warn!("HRTF requested but not supported");
                    }
                    self.panning_model = p;
                },
                PannerNodeMessage::SetDistanceModel(d) => self.distance_model = d,
                PannerNodeMessage::SetRefDistance(val) => self.ref_distance = val,
                PannerNodeMessage::SetMaxDistance(val) => self.max_distance = val,
                PannerNodeMessage::SetRolloff(val) => self.rolloff_factor = val,
                PannerNodeMessage::SetConeInner(val) => self.cone_inner_angle = val,
                PannerNodeMessage::SetConeOuter(val) => self.cone_outer_angle = val,
                PannerNodeMessage::SetConeGain(val) => self.cone_outer_gain = val,
            }
        }
    }
}
|
||||
464
components/media/audio/param.rs
Normal file
464
components/media/audio/param.rs
Normal file
@@ -0,0 +1,464 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use crate::block::{Block, FRAMES_PER_BLOCK_USIZE, Tick};
|
||||
use crate::node::BlockInfo;
|
||||
|
||||
/// Identifies which automatable parameter of a node a message or connection
/// targets (resolved by each node's `get_param`).
#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq, Ord, PartialOrd)]
pub enum ParamType {
    Frequency,
    Detune,
    Gain,
    Q,
    Pan,
    PlaybackRate,
    /// Spatial position component (e.g. PannerNode's position params).
    Position(ParamDir),
    // Forward/Up: listener-style direction vectors — presumably used by the
    // AudioListener; confirm against its get_param implementation.
    Forward(ParamDir),
    Up(ParamDir),
    /// Spatial orientation component (e.g. PannerNode's orientation params).
    Orientation(ParamDir),
    Offset,
}
|
||||
|
||||
/// Axis selector for vector-valued param types (position, orientation, etc.).
#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq, Ord, PartialOrd)]
pub enum ParamDir {
    X,
    Y,
    Z,
}
|
||||
|
||||
/// An AudioParam.
///
/// Holds an intrinsic value, an automation-event timeline, and any audio
/// blocks connect()ed into the param, which are mixed with the intrinsic
/// value each frame.
///
/// <https://webaudio.github.io/web-audio-api/#AudioParam>
pub struct Param {
    /// Current intrinsic value (excluding connected-input contributions).
    val: f32,
    /// Whether the value updates per frame (a-rate) or per block (k-rate).
    kind: ParamRate,
    /// Automation timeline, kept ordered by event time (see `insert_event`).
    events: Vec<AutomationEvent>,
    /// Index into `events` of the event currently in effect.
    current_event: usize,
    /// Tick at which the current event began running.
    event_start_time: Tick,
    /// Param value at the moment the current event began.
    event_start_value: f32,
    /// Cache of inputs from connect()ed nodes
    blocks: Vec<Block>,
    /// The value of all connect()ed inputs mixed together, for this frame
    block_mix_val: f32,
    /// If true, `blocks` has been summed together into a single block
    summed: bool,
    /// Set when `insert_event` applies a SetValue immediately, so the next
    /// `update()` reports a change even without timeline activity.
    dirty: bool,
}
|
||||
|
||||
/// How often a [`Param`]'s value is recomputed during a render quantum.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum ParamRate {
    /// Value is held for entire block
    KRate,
    /// Value is updated each frame
    ARate,
}
|
||||
|
||||
impl Param {
    /// Creates an a-rate param with initial value `val` and an empty
    /// automation timeline.
    pub fn new(val: f32) -> Self {
        Param {
            val,
            kind: ParamRate::ARate,
            events: vec![],
            current_event: 0,
            event_start_time: Tick(0),
            event_start_value: val,
            blocks: Vec::new(),
            block_mix_val: 0.,
            summed: false,
            dirty: false,
        }
    }

    /// Creates a k-rate param (value held constant across each block).
    pub fn new_krate(val: f32) -> Self {
        Param {
            val,
            kind: ParamRate::KRate,
            events: vec![],
            current_event: 0,
            event_start_time: Tick(0),
            event_start_value: val,
            blocks: Vec::new(),
            block_mix_val: 0.,
            summed: false,
            dirty: false,
        }
    }

    /// Update the value of this param to the next
    ///
    /// Invariant: This should be called with monotonically increasing
    /// ticks, and Tick(0) should never be skipped.
    ///
    /// Returns true if anything changed
    pub fn update(&mut self, block: &BlockInfo, tick: Tick) -> bool {
        let mut changed = self.dirty;
        self.dirty = false;
        if tick.0 == 0 {
            self.summed = true;
            if let Some(first) = self.blocks.pop() {
                // first sum them together
                // https://webaudio.github.io/web-audio-api/#dom-audionode-connect-destinationparam-output
                let block = self
                    .blocks
                    .drain(..)
                    .fold(first, |acc, block| acc.sum(block));
                self.blocks.push(block);
            }
        } else if self.kind == ParamRate::KRate {
            // k-rate params only change on the first tick of a block.
            return changed;
        }

        // Even if the timeline does nothing, it's still possible
        // that there were connected inputs, so we should not
        // directly return `false` after this point, instead returning
        // `changed`
        changed |= if let Some(block) = self.blocks.first() {
            // store to be summed with `val` later
            self.block_mix_val = block.data_chan_frame(tick.0 as usize, 0);
            true
        } else {
            false
        };

        // No (remaining) timeline events: nothing more to do.
        if self.events.len() <= self.current_event {
            return changed;
        }

        let current_tick = block.absolute_tick(tick);
        let mut current_event = &self.events[self.current_event];

        // move to next event if necessary
        // XXXManishearth k-rate events may get skipped over completely by this
        // method. Firefox currently doesn't support these, however, so we can
        // handle those later
        loop {
            let mut move_next = false;
            if let Some(done_time) = current_event.done_time() {
                // If this event is done, move on
                if done_time < current_tick {
                    move_next = true;
                }
            } else if let Some(next) = self.events.get(self.current_event + 1) {
                // this event has no done time and we must run it till the next one
                // starts
                if let Some(start_time) = next.start_time() {
                    // if the next one is ready to start, move on
                    if start_time <= current_tick {
                        move_next = true;
                    }
                } else {
                    // If we have a next event with no start time and
                    // the current event has no done time, this *has* to be because
                    // the current event is SetTargetAtTime and the next is a Ramp
                    // event. In this case we skip directly to the ramp assuming
                    // the SetTarget is ready to start (or has started already)
                    if current_event.time() <= current_tick {
                        move_next = true;
                    } else {
                        // This is a SetTarget event before its start time, ignore
                        return changed;
                    }
                }
            }
            if move_next {
                self.current_event += 1;
                self.event_start_value = self.val;
                self.event_start_time = current_tick;
                // NOTE(review): after the increment above, the now-current
                // event is events[self.current_event]; indexing
                // current_event + 1 here looks like an off-by-one that both
                // skips running the new current event this tick and advances
                // the loop using the wrong event — confirm against upstream.
                if let Some(next) = self.events.get(self.current_event + 1) {
                    current_event = next;
                    // may need to move multiple times
                    continue;
                } else {
                    return changed;
                }
            }
            break;
        }

        current_event.run(
            &mut self.val,
            current_tick,
            self.event_start_time,
            self.event_start_value,
        )
    }

    /// The effective value of the param for the current frame.
    pub fn value(&self) -> f32 {
        // the data from connect()ed audionodes is first mixed
        // together in update(), and then mixed with the actual param value
        // https://webaudio.github.io/web-audio-api/#dom-audionode-connect-destinationparam-output
        self.val + self.block_mix_val
    }

    /// Switches the param between a-rate and k-rate operation.
    pub fn set_rate(&mut self, rate: ParamRate) {
        self.kind = rate;
    }

    /// Inserts an automation event into the timeline (ordered by time).
    ///
    /// `SetValue` is applied immediately and never stored; cancel events
    /// truncate the timeline instead of being inserted.
    pub(crate) fn insert_event(&mut self, event: AutomationEvent) {
        if let AutomationEvent::SetValue(val) = event {
            self.val = val;
            self.event_start_value = val;
            self.dirty = true;
            return;
        }

        let time = event.time();

        let result = self.events.binary_search_by(|e| e.time().cmp(&time));
        // XXXManishearth this should handle overlapping events
        let idx = match result {
            Ok(idx) => idx,
            Err(idx) => idx,
        };

        // XXXManishearth this isn't quite correct, this
        // doesn't handle cases for when this lands inside a running
        // event
        if let Some(is_hold) = event.cancel_event() {
            self.events.truncate(idx);
            if !is_hold {
                // If we cancelled the current event, reset
                // the value to what it was before
                if self.current_event >= self.events.len() {
                    self.val = self.event_start_value;
                }
            }
            // don't actually insert the event
            return;
        }
        self.events.insert(idx, event);
        // XXXManishearth handle inserting events with a time before that
        // of the current one
    }

    /// Caches a mono block connect()ed into this param; all cached blocks
    /// are summed on the next block's first tick.
    pub(crate) fn add_block(&mut self, block: Block) {
        debug_assert!(block.chan_count() == 1);
        // summed only becomes true during a node's process() call,
        // but add_block is called during graph traversal before processing,
        // so if summed is true that means we've moved on to the next block
        // and should clear our inputs
        if self.summed {
            self.blocks.clear();
        }
        self.blocks.push(block)
    }

    /// Flush an entire block of values into a buffer
    ///
    /// Only for use with AudioListener.
    ///
    /// Invariant: `block` must be a FRAMES_PER_BLOCK length array filled with silence
    pub(crate) fn flush_to_block(&mut self, info: &BlockInfo, block: &mut [f32]) {
        // common case: no automation and no connected inputs, so the value
        // is constant over the whole block
        if self.current_event >= self.events.len() && self.blocks.is_empty() {
            if self.val != 0. {
                for block_tick in &mut block[0..FRAMES_PER_BLOCK_USIZE] {
                    // ideally this can use some kind of vectorized memset()
                    *block_tick = self.val;
                }
            }
            // if the value is zero, our buffer is already zeroed
        } else {
            for block_tick in &mut block[0..FRAMES_PER_BLOCK_USIZE] {
                self.update(info, Tick(*block_tick as u64));
                *block_tick = self.val;
            }
        }
    }
}
|
||||
|
||||
/// Interpolation shape used by ramp automation events.
#[derive(Clone, Copy, Eq, PartialEq, Debug)]
pub enum RampKind {
    Linear,
    Exponential,
}
|
||||
|
||||
#[derive(Clone, PartialEq, Debug)]
/// <https://webaudio.github.io/web-audio-api/#dfn-automation-event>
///
/// Engine-internal automation event with times in ticks. `SetValue` is
/// applied immediately by `Param::insert_event` and is never stored in the
/// timeline, hence the `unreachable!`s for it in the accessors below.
pub(crate) enum AutomationEvent {
    SetValue(f32),
    SetValueAtTime(f32, Tick),
    RampToValueAtTime(RampKind, f32, Tick),
    SetTargetAtTime(f32, Tick, /* time constant, units of Tick */ f64),
    SetValueCurveAtTime(
        Vec<f32>,
        /* start time */ Tick,
        /* duration */ Tick,
    ),
    CancelAndHoldAtTime(Tick),
    CancelScheduledValues(Tick),
}
|
||||
|
||||
#[derive(Clone, PartialEq, Debug)]
/// An AutomationEvent that uses times in s instead of Ticks
///
/// This is the user-facing form; `convert_to_event` translates it into the
/// engine's tick-based [`AutomationEvent`] at a given sample rate.
pub enum UserAutomationEvent {
    SetValue(f32),
    SetValueAtTime(f32, /* time */ f64),
    RampToValueAtTime(RampKind, f32, /* time */ f64),
    SetTargetAtTime(f32, f64, /* time constant, units of s */ f64),
    SetValueCurveAtTime(Vec<f32>, /* start time */ f64, /* duration */ f64),
    CancelAndHoldAtTime(f64),
    CancelScheduledValues(f64),
}
|
||||
|
||||
impl UserAutomationEvent {
|
||||
pub(crate) fn convert_to_event(self, rate: f32) -> AutomationEvent {
|
||||
match self {
|
||||
UserAutomationEvent::SetValue(val) => AutomationEvent::SetValue(val),
|
||||
UserAutomationEvent::SetValueAtTime(val, time) => {
|
||||
AutomationEvent::SetValueAtTime(val, Tick::from_time(time, rate))
|
||||
},
|
||||
UserAutomationEvent::RampToValueAtTime(kind, val, time) => {
|
||||
AutomationEvent::RampToValueAtTime(kind, val, Tick::from_time(time, rate))
|
||||
},
|
||||
UserAutomationEvent::SetValueCurveAtTime(values, start, duration) => {
|
||||
AutomationEvent::SetValueCurveAtTime(
|
||||
values,
|
||||
Tick::from_time(start, rate),
|
||||
Tick::from_time(duration, rate),
|
||||
)
|
||||
},
|
||||
UserAutomationEvent::SetTargetAtTime(val, start, tau) => {
|
||||
AutomationEvent::SetTargetAtTime(
|
||||
val,
|
||||
Tick::from_time(start, rate),
|
||||
tau * rate as f64,
|
||||
)
|
||||
},
|
||||
UserAutomationEvent::CancelScheduledValues(t) => {
|
||||
AutomationEvent::CancelScheduledValues(Tick::from_time(t, rate))
|
||||
},
|
||||
UserAutomationEvent::CancelAndHoldAtTime(t) => {
|
||||
AutomationEvent::CancelAndHoldAtTime(Tick::from_time(t, rate))
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AutomationEvent {
    /// The time of the event used for ordering
    pub fn time(&self) -> Tick {
        match *self {
            AutomationEvent::SetValueAtTime(_, tick) => tick,
            AutomationEvent::SetValueCurveAtTime(_, start, _) => start,
            AutomationEvent::RampToValueAtTime(_, _, tick) => tick,
            AutomationEvent::SetTargetAtTime(_, start, _) => start,
            AutomationEvent::CancelAndHoldAtTime(t) => t,
            AutomationEvent::CancelScheduledValues(tick) => tick,
            AutomationEvent::SetValue(..) => {
                unreachable!("SetValue should never appear in the timeline")
            },
        }
    }

    /// Tick at which the event stops having an effect, or `None` for
    /// events with no intrinsic end (SetTargetAtTime runs until superseded).
    pub fn done_time(&self) -> Option<Tick> {
        match *self {
            AutomationEvent::SetValueAtTime(_, tick) => Some(tick),
            AutomationEvent::RampToValueAtTime(_, _, tick) => Some(tick),
            AutomationEvent::SetValueCurveAtTime(_, start, duration) => Some(start + duration),
            AutomationEvent::SetTargetAtTime(..) => None,
            AutomationEvent::CancelAndHoldAtTime(t) => Some(t),
            AutomationEvent::CancelScheduledValues(..) | AutomationEvent::SetValue(..) => {
                unreachable!("CancelScheduledValues/SetValue should never appear in the timeline")
            },
        }
    }

    /// Tick at which the event begins, or `None` for ramps, which start
    /// when the previous event ends.
    pub fn start_time(&self) -> Option<Tick> {
        match *self {
            AutomationEvent::SetValueAtTime(_, tick) => Some(tick),
            AutomationEvent::RampToValueAtTime(..) => None,
            AutomationEvent::SetValueCurveAtTime(_, start, _) => Some(start),
            AutomationEvent::SetTargetAtTime(_, start, _) => Some(start),
            AutomationEvent::CancelAndHoldAtTime(t) => Some(t),
            AutomationEvent::CancelScheduledValues(..) | AutomationEvent::SetValue(..) => {
                unreachable!("CancelScheduledValues/SetValue should never appear in the timeline")
            },
        }
    }

    /// Returns Some if it's a cancel event
    /// the boolean is if it's CancelAndHold
    pub fn cancel_event(&self) -> Option<bool> {
        match *self {
            AutomationEvent::CancelAndHoldAtTime(..) => Some(true),
            AutomationEvent::CancelScheduledValues(..) => Some(false),
            _ => None,
        }
    }

    /// Update a parameter based on this event
    ///
    /// `event_start_time`/`event_start_value` are the tick and param value
    /// at which this event took over, recorded by `Param::update`.
    ///
    /// Returns true if something changed
    pub fn run(
        &self,
        value: &mut f32,
        current_tick: Tick,
        event_start_time: Tick,
        event_start_value: f32,
    ) -> bool {
        if matches!(self.start_time(), Some(start_time) if start_time > current_tick) {
            // The previous event finished and we advanced to this
            // event, but it's not started yet. Return early
            return false;
        }

        match *self {
            AutomationEvent::SetValueAtTime(val, time) => {
                // Step change exactly at the scheduled tick.
                if current_tick == time {
                    *value = val;
                    true
                } else {
                    false
                }
            },
            AutomationEvent::RampToValueAtTime(kind, val, time) => {
                // Fraction of the ramp elapsed, in [0, 1].
                let progress =
                    (current_tick - event_start_time).0 as f32 / (time - event_start_time).0 as f32;
                match kind {
                    RampKind::Linear => {
                        *value = event_start_value + (val - event_start_value) * progress;
                    },
                    RampKind::Exponential => {
                        let ratio = val / event_start_value;
                        // An exponential ramp is undefined when the start
                        // value is zero or the endpoints differ in sign:
                        // hold the start value, then jump at the end tick.
                        if event_start_value == 0. || ratio < 0. {
                            if time == current_tick {
                                *value = val;
                            } else {
                                *value = event_start_value;
                            }
                        } else {
                            *value = event_start_value * (ratio).powf(progress);
                        }
                    },
                }
                true
            },
            AutomationEvent::SetTargetAtTime(val, start, tau) => {
                // Exponential approach toward `val` with time constant tau.
                let exp = -((current_tick - start) / tau);
                *value = val + (event_start_value - val) * exp.exp() as f32;
                true
            },
            AutomationEvent::SetValueCurveAtTime(ref values, start, duration) => {
                let progress = ((current_tick.0 as f32) - (start.0 as f32)) / (duration.0 as f32);
                debug_assert!(progress >= 0.);
                // Linearly interpolate between the two curve points
                // bracketing `progress`; clamp to the last point at the end.
                let n = values.len() as f32;
                let k_float = (n - 1.) * progress;
                let k = k_float.floor();
                if (k + 1.) < n {
                    let progress = k_float - k;
                    *value =
                        values[k as usize] * (1. - progress) + values[(k + 1.) as usize] * progress;
                } else {
                    *value = values[(n - 1.) as usize];
                }
                true
            },
            AutomationEvent::CancelAndHoldAtTime(..) => false,
            AutomationEvent::CancelScheduledValues(..) | AutomationEvent::SetValue(..) => {
                unreachable!("CancelScheduledValues/SetValue should never appear in the timeline")
            },
        }
    }
}
|
||||
376
components/media/audio/render_thread.rs
Normal file
376
components/media/audio/render_thread.rs
Normal file
@@ -0,0 +1,376 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::sync::mpsc::{Receiver, Sender};
|
||||
|
||||
use servo_media_streams::{MediaSocket, MediaStreamId};
|
||||
|
||||
use crate::analyser_node::AnalyserNode;
|
||||
use crate::biquad_filter_node::BiquadFilterNode;
|
||||
use crate::block::{Chunk, FRAMES_PER_BLOCK, Tick};
|
||||
use crate::buffer_source_node::AudioBufferSourceNode;
|
||||
use crate::channel_node::{ChannelMergerNode, ChannelSplitterNode};
|
||||
use crate::constant_source_node::ConstantSourceNode;
|
||||
use crate::context::{AudioContextOptions, ProcessingState, StateChangeResult};
|
||||
use crate::gain_node::GainNode;
|
||||
use crate::graph::{AudioGraph, InputPort, NodeId, OutputPort, PortId};
|
||||
use crate::iir_filter_node::IIRFilterNode;
|
||||
use crate::media_element_source_node::MediaElementSourceNode;
|
||||
use crate::media_stream_destination_node::MediaStreamDestinationNode;
|
||||
use crate::media_stream_source_node::MediaStreamSourceNode;
|
||||
use crate::node::{AudioNodeEngine, AudioNodeInit, AudioNodeMessage, BlockInfo, ChannelInfo};
|
||||
use crate::offline_sink::OfflineAudioSink;
|
||||
use crate::oscillator_node::OscillatorNode;
|
||||
use crate::panner_node::PannerNode;
|
||||
use crate::sink::{AudioSink, AudioSinkError};
|
||||
use crate::stereo_panner::StereoPannerNode;
|
||||
use crate::wave_shaper_node::WaveShaperNode;
|
||||
use crate::{AudioBackend, AudioStreamReader};
|
||||
|
||||
/// Callback invoked by a sink when it reaches end-of-stream, receiving the
/// rendered samples.
pub type SinkEosCallback = Box<dyn Fn(Box<dyn AsRef<[f32]>>) + Send + Sync + 'static>;
|
||||
|
||||
/// Control messages consumed by the audio render thread's event loop
/// (see `AudioRenderThread::event_loop`).
pub enum AudioRenderThreadMsg {
    /// Create a node in the audio graph and send its id back.
    CreateNode(AudioNodeInit, Sender<NodeId>, ChannelInfo),
    /// Connect an output port to an input port in the graph.
    ConnectPorts(PortId<OutputPort>, PortId<InputPort>),
    /// Deliver a node-specific message to one node.
    MessageNode(NodeId, AudioNodeMessage),
    /// Move to the Running state and report the result.
    Resume(Sender<StateChangeResult>),
    /// Move to the Suspended state and report the result.
    Suspend(Sender<StateChangeResult>),
    /// Suspend processing and exit the event loop.
    Close(Sender<StateChangeResult>),
    /// Sent by the sink to unblock the event loop when it needs more data.
    SinkNeedData,
    /// Report the current playback time in seconds.
    GetCurrentTime(Sender<f64>),

    // Graph edge removal; each variant maps onto the matching
    // `AudioGraph::disconnect_*` method.
    DisconnectAllFrom(NodeId),
    DisconnectOutput(PortId<OutputPort>),
    DisconnectBetween(NodeId, NodeId),
    DisconnectTo(NodeId, PortId<InputPort>),
    DisconnectOutputBetween(PortId<OutputPort>, NodeId),
    DisconnectOutputBetweenTo(PortId<OutputPort>, PortId<InputPort>),

    /// Install a callback run by the sink at end-of-stream.
    SetSinkEosCallback(SinkEosCallback),

    /// Mute/unmute: while muted the render thread produces silence.
    SetMute(bool),
}
|
||||
|
||||
/// Destination the render thread pushes processed audio into: either a
/// backend-provided real-time sink, or an in-memory offline sink used for
/// offline audio contexts.
pub enum Sink {
    RealTime(Box<dyn AudioSink>),
    Offline(OfflineAudioSink),
}
|
||||
|
||||
/// Forward the `AudioSink` API to whichever sink variant is active.
///
/// NOTE(review): the `.unwrap()`s in the Offline arms turn any offline-sink
/// failure into a panic instead of an `Err`. If `OfflineAudioSink`'s methods
/// return `Result<(), AudioSinkError>`, these could propagate the result
/// directly — confirm against `offline_sink.rs`.
impl AudioSink for Sink {
    fn init(
        &self,
        sample_rate: f32,
        sender: Sender<AudioRenderThreadMsg>,
    ) -> Result<(), AudioSinkError> {
        match *self {
            Sink::RealTime(ref sink) => sink.init(sample_rate, sender),
            Sink::Offline(ref sink) => {
                // Offline initialization failure is treated as a bug.
                sink.init(sample_rate, sender).unwrap();
                Ok(())
            },
        }
    }

    /// Stream output is handled by per-node backend sinks, never by the
    /// render thread's main sink.
    fn init_stream(&self, _: u8, _: f32, _: Box<dyn MediaSocket>) -> Result<(), AudioSinkError> {
        unreachable!("Sink should never be used for MediaStreamDestinationNode")
    }

    fn play(&self) -> Result<(), AudioSinkError> {
        match *self {
            Sink::RealTime(ref sink) => sink.play(),
            Sink::Offline(ref sink) => {
                sink.play().unwrap();
                Ok(())
            },
        }
    }

    fn stop(&self) -> Result<(), AudioSinkError> {
        match *self {
            Sink::RealTime(ref sink) => sink.stop(),
            Sink::Offline(ref sink) => {
                sink.stop().unwrap();
                Ok(())
            },
        }
    }

    /// True while the sink is saturated; the render loop blocks on control
    /// messages while this holds.
    fn has_enough_data(&self) -> bool {
        match *self {
            Sink::RealTime(ref sink) => sink.has_enough_data(),
            Sink::Offline(ref sink) => sink.has_enough_data(),
        }
    }

    fn push_data(&self, chunk: Chunk) -> Result<(), AudioSinkError> {
        match *self {
            Sink::RealTime(ref sink) => sink.push_data(chunk),
            Sink::Offline(ref sink) => {
                sink.push_data(chunk).unwrap();
                Ok(())
            },
        }
    }

    fn set_eos_callback(
        &self,
        callback: Box<dyn Fn(Box<dyn AsRef<[f32]>>) + Send + Sync + 'static>,
    ) {
        match *self {
            Sink::RealTime(ref sink) => sink.set_eos_callback(callback),
            Sink::Offline(ref sink) => sink.set_eos_callback(callback),
        }
    }
}
|
||||
|
||||
/// State owned by the audio render thread: the audio graph, the sink that
/// rendered blocks are pushed into, and the playback clock.
pub struct AudioRenderThread {
    pub graph: AudioGraph,
    pub sink: Sink,
    /// Creates backend sinks for nodes that need their own output
    /// (used when building a `MediaStreamDestinationNode`).
    pub sink_factory: Box<dyn Fn() -> Result<Box<dyn AudioSink + 'static>, AudioSinkError>>,
    /// Creates stream readers for `MediaStreamSourceNode` inputs.
    pub reader_factory: Box<dyn Fn(MediaStreamId, f32) -> Box<dyn AudioStreamReader + Send>>,
    pub state: ProcessingState,
    pub sample_rate: f32,
    /// Playback time in seconds, derived from `current_frame`.
    pub current_time: f64,
    /// Number of frames rendered so far.
    pub current_frame: Tick,
    /// When true, `process()` yields silence instead of running the graph.
    pub muted: bool,
}
|
||||
|
||||
impl AudioRenderThread {
    /// Initializes the AudioRenderThread object
    ///
    /// You must call .event_loop() on this to run it!
    fn prepare_thread<B: AudioBackend>(
        sender: Sender<AudioRenderThreadMsg>,
        sample_rate: f32,
        graph: AudioGraph,
        options: AudioContextOptions,
    ) -> Result<Self, AudioSinkError> {
        // Factories are stored so nodes created later (e.g.
        // MediaStreamDestinationNode) can get their own sinks/readers.
        let sink_factory = Box::new(|| B::make_sink().map(|s| Box::new(s) as Box<dyn AudioSink>));
        let reader_factory = Box::new(|id, sample_rate| B::make_streamreader(id, sample_rate));
        // Real-time contexts render through the backend; offline contexts
        // render into an in-memory buffer of the requested length.
        let sink = match options {
            AudioContextOptions::RealTimeAudioContext(_) => Sink::RealTime(sink_factory()?),
            AudioContextOptions::OfflineAudioContext(options) => Sink::Offline(
                OfflineAudioSink::new(options.channels as usize, options.length),
            ),
        };

        sink.init(sample_rate, sender)?;

        Ok(Self {
            graph,
            sink,
            sink_factory,
            reader_factory,
            // Contexts start suspended; a Resume message begins playback.
            state: ProcessingState::Suspended,
            sample_rate,
            current_time: 0.,
            current_frame: Tick(0),
            muted: false,
        })
    }

    /// Start the audio render thread
    ///
    /// In case something fails, it will instead start a thread with a dummy backend
    pub fn start<B: AudioBackend>(
        event_queue: Receiver<AudioRenderThreadMsg>,
        sender: Sender<AudioRenderThreadMsg>,
        sample_rate: f32,
        graph: AudioGraph,
        options: AudioContextOptions,
        init_sender: Sender<Result<(), AudioSinkError>>,
    ) {
        // Report initialization success/failure back to the spawner before
        // entering (or skipping) the event loop.
        let mut thread =
            match Self::prepare_thread::<B>(sender.clone(), sample_rate, graph, options) {
                Ok(thread) => {
                    let _ = init_sender.send(Ok(()));
                    thread
                },
                Err(e) => {
                    let _ = init_sender.send(Err(e));
                    return;
                },
            };

        thread.event_loop(event_queue);
    }

    // Macro-generated state transitions: resume() moves to Running via
    // sink.play(); suspend() moves to Suspended via sink.stop().
    make_render_thread_state_change!(resume, Running, play);

    make_render_thread_state_change!(suspend, Suspended, stop);

    /// Instantiate the engine for `node_type`, insert it into the graph and
    /// return its id. Panner nodes are additionally wired to the listener;
    /// stream-destination nodes are registered as extra graph outputs.
    fn create_node(&mut self, node_type: AudioNodeInit, ch: ChannelInfo) -> NodeId {
        let mut needs_listener = false;
        let mut is_dest = false;
        let node: Box<dyn AudioNodeEngine> = match node_type {
            AudioNodeInit::AnalyserNode(sender) => Box::new(AnalyserNode::new(sender, ch)),
            AudioNodeInit::AudioBufferSourceNode(options) => {
                Box::new(AudioBufferSourceNode::new(options, ch))
            },
            AudioNodeInit::BiquadFilterNode(options) => {
                Box::new(BiquadFilterNode::new(options, ch, self.sample_rate))
            },
            AudioNodeInit::GainNode(options) => Box::new(GainNode::new(options, ch)),
            AudioNodeInit::StereoPannerNode(options) => {
                Box::new(StereoPannerNode::new(options, ch))
            },
            AudioNodeInit::PannerNode(options) => {
                needs_listener = true;
                Box::new(PannerNode::new(options, ch))
            },
            AudioNodeInit::MediaStreamSourceNode(id) => {
                let reader = (self.reader_factory)(id, self.sample_rate);
                Box::new(MediaStreamSourceNode::new(reader, ch))
            },
            AudioNodeInit::OscillatorNode(options) => Box::new(OscillatorNode::new(options, ch)),
            AudioNodeInit::ChannelMergerNode(options) => {
                Box::new(ChannelMergerNode::new(options, ch))
            },
            AudioNodeInit::ConstantSourceNode(options) => {
                Box::new(ConstantSourceNode::new(options, ch))
            },
            AudioNodeInit::MediaStreamDestinationNode(socket) => {
                is_dest = true;
                // The destination node gets its own backend sink.
                Box::new(MediaStreamDestinationNode::new(
                    socket,
                    self.sample_rate,
                    (self.sink_factory)().unwrap(),
                    ch,
                ))
            },
            AudioNodeInit::ChannelSplitterNode => Box::new(ChannelSplitterNode::new(ch)),
            AudioNodeInit::WaveShaperNode(options) => Box::new(WaveShaperNode::new(options, ch)),
            AudioNodeInit::MediaElementSourceNode => Box::new(MediaElementSourceNode::new(ch)),
            AudioNodeInit::IIRFilterNode(options) => Box::new(IIRFilterNode::new(options, ch)),
            _ => unimplemented!(),
        };
        let id = self.graph.add_node(node);
        if needs_listener {
            let listener = self.graph.listener_id().output(0);
            self.graph.add_edge(listener, id.listener());
        }
        if is_dest {
            self.graph.add_extra_dest(id);
        }
        id
    }

    /// Add an edge between an output port and an input port.
    fn connect_ports(&mut self, output: PortId<OutputPort>, input: PortId<InputPort>) {
        self.graph.add_edge(output, input)
    }

    /// Render one quantum: silence if muted, otherwise run the graph for
    /// the current frame/time.
    fn process(&mut self) -> Chunk {
        if self.muted {
            return Chunk::explicit_silence();
        }

        let info = BlockInfo {
            sample_rate: self.sample_rate,
            frame: self.current_frame,
            time: self.current_time,
        };
        self.graph.process(&info)
    }

    fn set_mute(&mut self, val: bool) {
        self.muted = val;
    }

    /// Run until a Close message arrives: handle control messages and keep
    /// the sink fed with rendered quanta.
    fn event_loop(&mut self, event_queue: Receiver<AudioRenderThreadMsg>) {
        let sample_rate = self.sample_rate;
        // Returns true when the loop should exit (Close was received).
        let handle_msg = move |context: &mut Self, msg: AudioRenderThreadMsg| -> bool {
            let mut break_loop = false;
            match msg {
                AudioRenderThreadMsg::CreateNode(node_type, tx, ch) => {
                    let _ = tx.send(context.create_node(node_type, ch));
                },
                AudioRenderThreadMsg::ConnectPorts(output, input) => {
                    context.connect_ports(output, input);
                },
                AudioRenderThreadMsg::Resume(tx) => {
                    let _ = tx.send(context.resume());
                },
                AudioRenderThreadMsg::Suspend(tx) => {
                    let _ = tx.send(context.suspend());
                },
                AudioRenderThreadMsg::Close(tx) => {
                    // Closing suspends processing and then exits the loop.
                    let _ = tx.send(context.suspend());
                    break_loop = true;
                },
                AudioRenderThreadMsg::GetCurrentTime(response) => {
                    response.send(context.current_time).unwrap()
                },
                AudioRenderThreadMsg::MessageNode(id, msg) => {
                    context.graph.node_mut(id).message(msg, sample_rate)
                },
                AudioRenderThreadMsg::SinkNeedData => {
                    // Do nothing. This will simply unblock the thread so we
                    // can restart the non-blocking event loop.
                },
                AudioRenderThreadMsg::DisconnectAllFrom(id) => {
                    context.graph.disconnect_all_from(id)
                },
                AudioRenderThreadMsg::DisconnectOutput(out) => context.graph.disconnect_output(out),
                AudioRenderThreadMsg::DisconnectBetween(from, to) => {
                    context.graph.disconnect_between(from, to)
                },
                AudioRenderThreadMsg::DisconnectTo(from, to) => {
                    context.graph.disconnect_to(from, to)
                },
                AudioRenderThreadMsg::DisconnectOutputBetween(from, to) => {
                    context.graph.disconnect_output_between(from, to)
                },
                AudioRenderThreadMsg::DisconnectOutputBetweenTo(from, to) => {
                    context.graph.disconnect_output_between_to(from, to)
                },
                AudioRenderThreadMsg::SetSinkEosCallback(callback) => {
                    context.sink.set_eos_callback(callback);
                },
                AudioRenderThreadMsg::SetMute(val) => {
                    context.set_mute(val);
                },
            };

            break_loop
        };

        loop {
            if self.sink.has_enough_data() || self.state == ProcessingState::Suspended {
                // If we are not processing audio or
                // if we have already pushed enough data into the audio sink
                // we wait for messages coming from the control thread or
                // the audio sink. The audio sink will notify whenever it
                // needs more data.
                if event_queue.recv().is_ok_and(|msg| handle_msg(self, msg)) {
                    break;
                }
            } else {
                // If we have not pushed enough data into the audio sink yet,
                // we process the control message queue
                if event_queue
                    .try_recv()
                    .is_ok_and(|msg| handle_msg(self, msg))
                {
                    break;
                }

                if self.state == ProcessingState::Suspended {
                    // Bail out if we just suspended processing.
                    continue;
                }

                // push into the audio sink the result of processing a
                // render quantum.
                let data = self.process();
                if self.sink.push_data(data).is_ok() {
                    // increment current frame by the render quantum size.
                    self.current_frame += FRAMES_PER_BLOCK;
                    self.current_time = self.current_frame / self.sample_rate as f64;
                } else {
                    eprintln!("Could not push data to audio sink");
                }
            }
        }
    }
}
|
||||
39
components/media/audio/sink.rs
Normal file
39
components/media/audio/sink.rs
Normal file
@@ -0,0 +1,39 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::sync::mpsc::Sender;
|
||||
|
||||
use servo_media_streams::MediaSocket;
|
||||
|
||||
use crate::block::Chunk;
|
||||
use crate::render_thread::{AudioRenderThreadMsg, SinkEosCallback};
|
||||
|
||||
/// Errors reported by `AudioSink` implementations.
#[derive(Debug, PartialEq)]
pub enum AudioSinkError {
    /// Backend specific error.
    Backend(String),
    /// Could not push buffer into the audio sink.
    BufferPushFailed,
    /// Could not move to a different state.
    StateChangeFailed,
}
|
||||
|
||||
/// Interface the audio render thread uses to hand rendered audio to a
/// backend. Implementations must be transferable to the render thread
/// (`Send`).
pub trait AudioSink: Send {
    /// Prepare the sink for `sample_rate` output; the sink may use
    /// `render_thread_channel` to notify the render thread (e.g. that it
    /// needs more data).
    fn init(
        &self,
        sample_rate: f32,
        render_thread_channel: Sender<AudioRenderThreadMsg>,
    ) -> Result<(), AudioSinkError>;
    /// Prepare the sink for pushing into a media stream socket instead of
    /// an audio device.
    fn init_stream(
        &self,
        channels: u8,
        sample_rate: f32,
        socket: Box<dyn MediaSocket>,
    ) -> Result<(), AudioSinkError>;
    fn play(&self) -> Result<(), AudioSinkError>;
    fn stop(&self) -> Result<(), AudioSinkError>;
    /// True while the sink is saturated; producers should pause rendering.
    fn has_enough_data(&self) -> bool;
    /// Queue one rendered chunk for output.
    fn push_data(&self, chunk: Chunk) -> Result<(), AudioSinkError>;
    /// Register a callback invoked at end-of-stream with the rendered data.
    fn set_eos_callback(&self, callback: SinkEosCallback);
}
|
||||
122
components/media/audio/stereo_panner.rs
Normal file
122
components/media/audio/stereo_panner.rs
Normal file
@@ -0,0 +1,122 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::f32::consts::PI;
|
||||
|
||||
use crate::block::{Chunk, FRAMES_PER_BLOCK, Tick};
|
||||
use crate::node::{AudioNodeEngine, AudioNodeType, BlockInfo, ChannelInfo};
|
||||
use crate::param::{Param, ParamType};
|
||||
|
||||
/// Initial parameter values for a `StereoPannerNode`.
#[derive(Copy, Clone, Debug)]
pub struct StereoPannerOptions {
    /// Pan position; processing clamps it to [-1, 1]
    /// (-1 full left, 0 center, 1 full right).
    pub pan: f32,
}
|
||||
|
||||
impl Default for StereoPannerOptions {
|
||||
fn default() -> Self {
|
||||
StereoPannerOptions { pan: 0. }
|
||||
}
|
||||
}
|
||||
|
||||
/// Engine for the StereoPanner node: positions incoming audio in a stereo
/// image. <https://webaudio.github.io/web-audio-api/#stereopannernode>
#[derive(AudioNodeCommon)]
pub(crate) struct StereoPannerNode {
    channel_info: ChannelInfo,
    /// The `pan` AudioParam, updated per-frame via automation.
    pan: Param,
}
|
||||
|
||||
impl StereoPannerNode {
|
||||
pub fn new(options: StereoPannerOptions, channel_info: ChannelInfo) -> Self {
|
||||
Self {
|
||||
channel_info,
|
||||
pan: Param::new(options.pan),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn update_parameters(&mut self, info: &BlockInfo, tick: Tick) -> bool {
|
||||
self.pan.update(info, tick)
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioNodeEngine for StereoPannerNode {
|
||||
fn node_type(&self) -> AudioNodeType {
|
||||
AudioNodeType::StereoPannerNode
|
||||
}
|
||||
|
||||
fn process(&mut self, mut inputs: Chunk, info: &BlockInfo) -> Chunk {
|
||||
debug_assert!(inputs.len() == 1);
|
||||
|
||||
{
|
||||
let block = &mut inputs.blocks[0];
|
||||
|
||||
block.explicit_repeat();
|
||||
|
||||
let mono = if block.chan_count() == 1 {
|
||||
block.resize_silence(2);
|
||||
true
|
||||
} else {
|
||||
debug_assert!(block.chan_count() == 2);
|
||||
false
|
||||
};
|
||||
|
||||
let (l, r) = block.data_mut().split_at_mut(FRAMES_PER_BLOCK.0 as usize);
|
||||
let mut pan = self.pan.value();
|
||||
for frame in 0..FRAMES_PER_BLOCK.0 {
|
||||
let frame = Tick(frame);
|
||||
if self.update_parameters(info, frame) {
|
||||
pan = self.pan.value();
|
||||
}
|
||||
|
||||
// https://webaudio.github.io/web-audio-api/#stereopanner-algorithm
|
||||
|
||||
// clamp pan to [-1, 1]
|
||||
pan = pan.clamp(-1., 1.);
|
||||
|
||||
let x = if mono {
|
||||
(pan + 1.) / 2.
|
||||
} else if pan <= 0. {
|
||||
pan + 1.
|
||||
} else {
|
||||
pan
|
||||
};
|
||||
let x = x * PI / 2.;
|
||||
|
||||
let mut gain_l = x.cos();
|
||||
let mut gain_r = x.sin();
|
||||
// 9. * PI / 2 is often slightly negative, clamp
|
||||
if gain_l <= 0. {
|
||||
gain_l = 0.
|
||||
}
|
||||
if gain_r <= 0. {
|
||||
gain_r = 0.;
|
||||
}
|
||||
|
||||
let index = frame.0 as usize;
|
||||
if mono {
|
||||
let input = l[index];
|
||||
l[index] = input * gain_l;
|
||||
r[index] = input * gain_r;
|
||||
} else if pan <= 0. {
|
||||
l[index] += r[index] * gain_l;
|
||||
r[index] *= gain_r;
|
||||
} else {
|
||||
r[index] += l[index] * gain_r;
|
||||
l[index] *= gain_l;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
inputs
|
||||
}
|
||||
|
||||
fn input_count(&self) -> u32 {
|
||||
1
|
||||
}
|
||||
|
||||
fn get_param(&mut self, id: ParamType) -> &mut Param {
|
||||
match id {
|
||||
ParamType::Pan => &mut self.pan,
|
||||
_ => panic!("Unknown param {:?} for PannerNode", id),
|
||||
}
|
||||
}
|
||||
}
|
||||
255
components/media/audio/wave_shaper_node.rs
Normal file
255
components/media/audio/wave_shaper_node.rs
Normal file
@@ -0,0 +1,255 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use speexdsp_resampler::State as SpeexResamplerState;
|
||||
|
||||
use crate::block::{Chunk, FRAMES_PER_BLOCK_USIZE};
|
||||
use crate::node::{AudioNodeEngine, AudioNodeType, BlockInfo, ChannelInfo};
|
||||
|
||||
/// Oversampling setting for applying a WaveShaper curve.
/// <https://webaudio.github.io/web-audio-api/#enumdef-oversampletype>
#[derive(Clone, Debug, PartialEq)]
pub enum OverSampleType {
    /// Apply the curve at the context sample rate.
    None,
    /// Apply the curve at 2x the context sample rate.
    Double,
    /// Apply the curve at 4x the context sample rate.
    Quadruple,
}
|
||||
|
||||
/// Countdown of blocks still processed after the input goes silent, so the
/// resampler state can drain before the node stops producing output.
#[derive(Clone, Debug, PartialEq)]
enum TailtimeBlocks {
    Zero,
    One,
    Two,
}
|
||||
|
||||
// Speex resampler quality used when oversampling (0 = lowest/fastest).
const OVERSAMPLING_QUALITY: usize = 0;

impl OverSampleType {
    /// The factor this setting multiplies the context sample rate by.
    fn value(&self) -> usize {
        match self {
            OverSampleType::None => 1,
            OverSampleType::Double => 2,
            OverSampleType::Quadruple => 4,
        }
    }
}
|
||||
|
||||
/// A shaping curve; `None` means the node passes audio through unchanged.
type WaveShaperCurve = Option<Vec<f32>>;

/// Initial configuration for a `WaveShaperNode`.
#[derive(Clone, Debug)]
pub struct WaveShaperNodeOptions {
    pub curve: WaveShaperCurve,
    pub oversample: OverSampleType,
}
|
||||
|
||||
impl Default for WaveShaperNodeOptions {
|
||||
fn default() -> Self {
|
||||
WaveShaperNodeOptions {
|
||||
curve: None,
|
||||
oversample: OverSampleType::None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Control messages specific to the WaveShaper node.
#[derive(Clone, Debug)]
pub enum WaveShaperNodeMessage {
    /// Install (or clear, with `None`) the shaping curve.
    SetCurve(WaveShaperCurve),
}
|
||||
|
||||
#[derive(AudioNodeCommon)]
|
||||
pub(crate) struct WaveShaperNode {
|
||||
curve_set: bool,
|
||||
curve: WaveShaperCurve,
|
||||
#[allow(dead_code)]
|
||||
oversample: OverSampleType,
|
||||
channel_info: ChannelInfo,
|
||||
upsampler: Option<SpeexResamplerState>,
|
||||
downsampler: Option<SpeexResamplerState>,
|
||||
tailtime_blocks_left: TailtimeBlocks,
|
||||
}
|
||||
|
||||
impl WaveShaperNode {
    /// Build the engine from its options.
    ///
    /// # Panics
    /// Panics if a curve with fewer than 2 points is provided.
    pub fn new(options: WaveShaperNodeOptions, channel_info: ChannelInfo) -> Self {
        if let Some(vec) = &options.curve {
            assert!(
                vec.len() > 1,
                "WaveShaperNode curve must have length of 2 or more"
            )
        }

        Self {
            curve_set: options.curve.is_some(),
            curve: options.curve,
            oversample: options.oversample,
            channel_info,
            // Resamplers are created lazily in process().
            upsampler: None,
            downsampler: None,
            tailtime_blocks_left: TailtimeBlocks::Zero,
        }
    }

    /// Handle node-specific control messages.
    ///
    /// # Panics
    /// Panics when trying to replace a curve that was already set.
    fn handle_waveshaper_message(&mut self, message: WaveShaperNodeMessage, _sample_rate: f32) {
        match message {
            WaveShaperNodeMessage::SetCurve(new_curve) => {
                if self.curve_set && new_curve.is_some() {
                    panic!("InvalidStateError: cant set curve if it was already set");
                }
                self.curve_set = new_curve.is_some();
                self.curve = new_curve;
            },
        }
    }
}
|
||||
|
||||
impl AudioNodeEngine for WaveShaperNode {
    fn node_type(&self) -> AudioNodeType {
        AudioNodeType::WaveShaperNode
    }

    /// Apply the shaping curve to the input block, resampling up and back
    /// down when oversampling is enabled.
    fn process(&mut self, mut inputs: Chunk, info: &BlockInfo) -> Chunk {
        debug_assert!(inputs.len() == 1);

        // Without a curve the node is a pass-through.
        if self.curve.is_none() {
            return inputs;
        }

        let curve = &self.curve.as_ref().expect("Just checked for is_none()");

        if inputs.blocks[0].is_silence() {
            if WaveShaperNode::silence_produces_nonsilent_output(curve) {
                // Materialize the silence so the curve can be applied below.
                inputs.blocks[0].explicit_silence();
                self.tailtime_blocks_left = TailtimeBlocks::Two;
            } else if self.tailtime_blocks_left != TailtimeBlocks::Zero {
                // Keep processing for a couple more blocks so the resampler
                // state drains, decrementing the tail counter.
                inputs.blocks[0].explicit_silence();

                self.tailtime_blocks_left = match self.tailtime_blocks_left {
                    TailtimeBlocks::Zero => TailtimeBlocks::Zero,
                    TailtimeBlocks::One => TailtimeBlocks::Zero,
                    TailtimeBlocks::Two => TailtimeBlocks::One,
                }
            } else {
                // Silent in, silent out, no tail owed: skip the work.
                return inputs;
            }
        } else {
            // Non-silent input re-arms the tail counter.
            self.tailtime_blocks_left = TailtimeBlocks::Two;
        }

        let block = &mut inputs.blocks[0];
        let channels = block.chan_count();

        if self.oversample != OverSampleType::None {
            let rate: usize = info.sample_rate as usize;
            let sampling_factor = self.oversample.value();

            // Resamplers are created once and reused across blocks.
            if self.upsampler.is_none() {
                self.upsampler = Some(
                    SpeexResamplerState::new(
                        channels as usize,
                        rate,
                        rate * sampling_factor,
                        OVERSAMPLING_QUALITY,
                    )
                    .expect("Couldnt create upsampler"),
                );
            };

            if self.downsampler.is_none() {
                self.downsampler = Some(
                    SpeexResamplerState::new(
                        channels as usize,
                        rate * sampling_factor,
                        rate,
                        OVERSAMPLING_QUALITY,
                    )
                    .expect("Couldnt create downsampler"),
                );
            };

            let upsampler = self.upsampler.as_mut().unwrap();
            let downsampler = self.downsampler.as_mut().unwrap();

            let mut oversampled_buffer: Vec<f32> =
                vec![0.; FRAMES_PER_BLOCK_USIZE * sampling_factor];

            // Per channel: upsample, shape, then downsample back in place.
            for chan in 0..channels {
                let out_len = WaveShaperNode::resample(
                    upsampler,
                    chan,
                    block.data_chan(chan),
                    &mut oversampled_buffer,
                );

                debug_assert!(
                    out_len == 128 * sampling_factor,
                    "Expected {} samples in output after upsampling, got: {}",
                    128 * sampling_factor,
                    out_len
                );

                WaveShaperNode::apply_curve(&mut oversampled_buffer, curve);

                let out_len = WaveShaperNode::resample(
                    downsampler,
                    chan,
                    &oversampled_buffer,
                    block.data_chan_mut(chan),
                );

                debug_assert!(
                    out_len == 128,
                    "Expected 128 samples in output after downsampling, got {}",
                    out_len
                );
            }
        } else {
            WaveShaperNode::apply_curve(block.data_mut(), curve);
        }

        inputs
    }

    make_message_handler!(WaveShaperNode: handle_waveshaper_message);
}
|
||||
|
||||
impl WaveShaperNode {
|
||||
fn silence_produces_nonsilent_output(curve: &[f32]) -> bool {
|
||||
let len = curve.len();
|
||||
let len_halved = ((len - 1) as f32) / 2.;
|
||||
let curve_index: f32 = len_halved;
|
||||
let index_lo = curve_index as usize;
|
||||
let index_hi = index_lo + 1;
|
||||
let interp_factor: f32 = curve_index - index_lo as f32;
|
||||
let shaped_val = (1. - interp_factor) * curve[index_lo] + interp_factor * curve[index_hi];
|
||||
shaped_val == 0.0
|
||||
}
|
||||
|
||||
fn apply_curve(buf: &mut [f32], curve: &[f32]) {
|
||||
let len = curve.len();
|
||||
let len_halved = ((len - 1) as f32) / 2.;
|
||||
buf.iter_mut().for_each(|sample| {
|
||||
let curve_index: f32 = len_halved * (*sample + 1.);
|
||||
|
||||
if curve_index <= 0. {
|
||||
*sample = curve[0];
|
||||
} else if curve_index >= (len - 1) as f32 {
|
||||
*sample = curve[len - 1];
|
||||
} else {
|
||||
let index_lo = curve_index as usize;
|
||||
let index_hi = index_lo + 1;
|
||||
let interp_factor: f32 = curve_index - index_lo as f32;
|
||||
*sample = (1. - interp_factor) * curve[index_lo] + interp_factor * curve[index_hi];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn resample(
|
||||
st: &mut SpeexResamplerState,
|
||||
chan: u8,
|
||||
input: &[f32],
|
||||
output: &mut [f32],
|
||||
) -> usize {
|
||||
let (_in_len, out_len) = st
|
||||
.process_float(chan as usize, input, output)
|
||||
.expect("Resampling failed");
|
||||
out_len
|
||||
}
|
||||
}
|
||||
18
components/media/backends/auto/Cargo.toml
Normal file
18
components/media/backends/auto/Cargo.toml
Normal file
@@ -0,0 +1,18 @@
|
||||
[package]
|
||||
name = "servo-media-auto"
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
license.workspace = true
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
rust-version.workspace = true
|
||||
|
||||
[lib]
|
||||
name = "servo_media_auto"
|
||||
path = "lib.rs"
|
||||
|
||||
[target.'cfg(any(all(target_os = "android", any(target_arch = "arm", target_arch = "aarch64")), target_arch = "x86_64",target_arch = "aarch64"))'.dependencies.servo-media-gstreamer]
|
||||
path = "../gstreamer"
|
||||
|
||||
[target.'cfg(not(any(all(target_os = "android", any(target_arch = "arm", target_arch = "aarch64")), target_arch = "x86_64", target_arch = "aarch64")))'.dependencies.servo-media-dummy]
|
||||
path = "../dummy"
|
||||
29
components/media/backends/auto/lib.rs
Normal file
29
components/media/backends/auto/lib.rs
Normal file
@@ -0,0 +1,29 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
// Select the media backend at compile time: GStreamer on the architectures
// where it is supported, otherwise the dummy (no-op) backend.
#[cfg(any(
    all(
        target_os = "android",
        any(target_arch = "arm", target_arch = "aarch64")
    ),
    target_arch = "x86_64",
    target_arch = "aarch64",
))]
mod platform {
    pub use servo_media_gstreamer::GStreamerBackend as Backend;
}

// Fallback for targets without GStreamer support: a backend that implements
// the media traits but performs no real media work.
#[cfg(not(any(
    all(
        target_os = "android",
        any(target_arch = "arm", target_arch = "aarch64")
    ),
    target_arch = "x86_64",
    target_arch = "aarch64",
)))]
mod platform {
    pub use servo_media_dummy::DummyBackend as Backend;
}

/// The backend chosen for the current target.
pub type Backend = platform::Backend;
|
||||
21
components/media/backends/dummy/Cargo.toml
Normal file
21
components/media/backends/dummy/Cargo.toml
Normal file
@@ -0,0 +1,21 @@
|
||||
[package]
|
||||
name = "servo-media-dummy"
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
license.workspace = true
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
rust-version.workspace = true
|
||||
|
||||
[lib]
|
||||
name = "servo_media_dummy"
|
||||
path = "lib.rs"
|
||||
|
||||
[dependencies]
|
||||
ipc-channel = { workspace = true }
|
||||
servo-media = { path = "../../servo-media" }
|
||||
servo-media-audio = { path = "../../audio" }
|
||||
servo-media-player = { path = "../../player" }
|
||||
servo-media-streams = { path = "../../streams" }
|
||||
servo-media-traits = { path = "../../traits" }
|
||||
servo-media-webrtc = { path = "../../webrtc" }
|
||||
393
components/media/backends/dummy/lib.rs
Normal file
393
components/media/backends/dummy/lib.rs
Normal file
@@ -0,0 +1,393 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate ipc_channel;
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_audio;
|
||||
extern crate servo_media_player;
|
||||
extern crate servo_media_streams;
|
||||
extern crate servo_media_traits;
|
||||
extern crate servo_media_webrtc;
|
||||
|
||||
use std::any::Any;
|
||||
use std::ops::Range;
|
||||
use std::sync::mpsc::{self, Sender};
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use ipc_channel::ipc::IpcSender;
|
||||
use servo_media::{Backend, BackendInit, MediaInstanceError, SupportsMediaType};
|
||||
use servo_media_audio::block::{Block, Chunk};
|
||||
use servo_media_audio::context::{AudioContext, AudioContextOptions};
|
||||
use servo_media_audio::decoder::{AudioDecoder, AudioDecoderCallbacks, AudioDecoderOptions};
|
||||
use servo_media_audio::render_thread::AudioRenderThreadMsg;
|
||||
use servo_media_audio::sink::{AudioSink, AudioSinkError};
|
||||
use servo_media_audio::{AudioBackend, AudioStreamReader};
|
||||
use servo_media_player::context::PlayerGLContext;
|
||||
use servo_media_player::{Player, PlayerError, PlayerEvent, StreamType, audio, video};
|
||||
use servo_media_streams::capture::MediaTrackConstraintSet;
|
||||
use servo_media_streams::device_monitor::{MediaDeviceInfo, MediaDeviceMonitor};
|
||||
use servo_media_streams::registry::{MediaStreamId, register_stream, unregister_stream};
|
||||
use servo_media_streams::{MediaOutput, MediaSocket, MediaStream, MediaStreamType};
|
||||
use servo_media_traits::{ClientContextId, MediaInstance};
|
||||
use servo_media_webrtc::{
|
||||
BundlePolicy, DataChannelId, DataChannelInit, DataChannelMessage, IceCandidate,
|
||||
SessionDescription, WebRtcBackend, WebRtcController, WebRtcControllerBackend,
|
||||
WebRtcDataChannelResult, WebRtcResult, WebRtcSignaller, thread,
|
||||
};
|
||||
|
||||
/// A media backend that implements the servo-media traits without doing any
/// real media work; used where no functional backend is available.
pub struct DummyBackend;
|
||||
|
||||
impl BackendInit for DummyBackend {
|
||||
fn init() -> Box<dyn Backend> {
|
||||
Box::new(DummyBackend)
|
||||
}
|
||||
}
|
||||
|
||||
impl Backend for DummyBackend {
    /// All stream constructors register a `DummyMediaStream` with a fresh id.
    fn create_audiostream(&self) -> MediaStreamId {
        register_stream(Arc::new(Mutex::new(DummyMediaStream {
            id: MediaStreamId::new(),
        })))
    }

    fn create_videostream(&self) -> MediaStreamId {
        register_stream(Arc::new(Mutex::new(DummyMediaStream {
            id: MediaStreamId::new(),
        })))
    }

    fn create_stream_output(&self) -> Box<dyn MediaOutput> {
        Box::new(DummyMediaOutput)
    }

    /// Capture constraints are ignored; a dummy stream is always returned.
    fn create_audioinput_stream(&self, _: MediaTrackConstraintSet) -> Option<MediaStreamId> {
        Some(register_stream(Arc::new(Mutex::new(DummyMediaStream {
            id: MediaStreamId::new(),
        }))))
    }

    fn create_stream_and_socket(
        &self,
        _: MediaStreamType,
    ) -> (Box<dyn MediaSocket>, MediaStreamId) {
        let id = register_stream(Arc::new(Mutex::new(DummyMediaStream {
            id: MediaStreamId::new(),
        })));
        (Box::new(DummySocket), id)
    }

    fn create_videoinput_stream(&self, _: MediaTrackConstraintSet) -> Option<MediaStreamId> {
        Some(register_stream(Arc::new(Mutex::new(DummyMediaStream {
            id: MediaStreamId::new(),
        }))))
    }

    /// Renderers, event channels and GL context are dropped; the returned
    /// player accepts every call without doing anything.
    fn create_player(
        &self,
        _id: &ClientContextId,
        _: StreamType,
        _: IpcSender<PlayerEvent>,
        _: Option<Arc<Mutex<dyn video::VideoFrameRenderer>>>,
        _: Option<Arc<Mutex<dyn audio::AudioRenderer>>>,
        _: Box<dyn PlayerGLContext>,
    ) -> Arc<Mutex<dyn Player>> {
        Arc::new(Mutex::new(DummyPlayer))
    }

    /// Builds a real `AudioContext` over this backend's dummy sink. The
    /// receiving half of the channel is dropped, so messages go nowhere.
    fn create_audio_context(
        &self,
        _id: &ClientContextId,
        options: AudioContextOptions,
    ) -> Result<Arc<Mutex<AudioContext>>, AudioSinkError> {
        let (sender, _) = mpsc::channel();
        let sender = Arc::new(Mutex::new(sender));
        Ok(Arc::new(Mutex::new(AudioContext::new::<Self>(
            0,
            &ClientContextId::build(1, 1),
            sender,
            options,
        )?)))
    }

    fn create_webrtc(&self, signaller: Box<dyn WebRtcSignaller>) -> WebRtcController {
        WebRtcController::new::<Self>(signaller)
    }

    /// The dummy backend cannot play anything.
    fn can_play_type(&self, _media_type: &str) -> SupportsMediaType {
        SupportsMediaType::No
    }

    fn get_device_monitor(&self) -> Box<dyn MediaDeviceMonitor> {
        Box::new(DummyMediaDeviceMonitor {})
    }
}
|
||||
|
||||
impl AudioBackend for DummyBackend {
|
||||
type Sink = DummyAudioSink;
|
||||
fn make_decoder() -> Box<dyn AudioDecoder> {
|
||||
Box::new(DummyAudioDecoder)
|
||||
}
|
||||
|
||||
fn make_sink() -> Result<Self::Sink, AudioSinkError> {
|
||||
Ok(DummyAudioSink)
|
||||
}
|
||||
fn make_streamreader(
|
||||
_id: MediaStreamId,
|
||||
_sample_rate: f32,
|
||||
) -> Box<dyn AudioStreamReader + Send> {
|
||||
Box::new(DummyStreamReader)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DummyPlayer;
|
||||
|
||||
pub struct DummyStreamReader;
|
||||
|
||||
impl AudioStreamReader for DummyStreamReader {
|
||||
fn pull(&self) -> Block {
|
||||
Default::default()
|
||||
}
|
||||
fn start(&self) {}
|
||||
fn stop(&self) {}
|
||||
}
|
||||
|
||||
impl Player for DummyPlayer {
|
||||
fn play(&self) -> Result<(), PlayerError> {
|
||||
Ok(())
|
||||
}
|
||||
fn pause(&self) -> Result<(), PlayerError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn paused(&self) -> bool {
|
||||
true
|
||||
}
|
||||
fn can_resume(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn stop(&self) -> Result<(), PlayerError> {
|
||||
Ok(())
|
||||
}
|
||||
fn seek(&self, _: f64) -> Result<(), PlayerError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn set_mute(&self, _: bool) -> Result<(), PlayerError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn muted(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn set_volume(&self, _: f64) -> Result<(), PlayerError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn volume(&self) -> f64 {
|
||||
1.0
|
||||
}
|
||||
|
||||
fn set_input_size(&self, _: u64) -> Result<(), PlayerError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn set_playback_rate(&self, _: f64) -> Result<(), PlayerError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn playback_rate(&self) -> f64 {
|
||||
1.0
|
||||
}
|
||||
|
||||
fn push_data(&self, _: Vec<u8>) -> Result<(), PlayerError> {
|
||||
Ok(())
|
||||
}
|
||||
fn end_of_stream(&self) -> Result<(), PlayerError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn buffered(&self) -> Vec<Range<f64>> {
|
||||
vec![]
|
||||
}
|
||||
|
||||
fn seekable(&self) -> Vec<Range<f64>> {
|
||||
vec![]
|
||||
}
|
||||
|
||||
fn set_stream(&self, _: &MediaStreamId, _: bool) -> Result<(), PlayerError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn render_use_gl(&self) -> bool {
|
||||
false
|
||||
}
|
||||
fn set_audio_track(&self, _: i32, _: bool) -> Result<(), PlayerError> {
|
||||
Ok(())
|
||||
}
|
||||
fn set_video_track(&self, _: i32, _: bool) -> Result<(), PlayerError> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl WebRtcBackend for DummyBackend {
|
||||
type Controller = DummyWebRtcController;
|
||||
fn construct_webrtc_controller(
|
||||
_: Box<dyn WebRtcSignaller>,
|
||||
_: WebRtcController,
|
||||
) -> Self::Controller {
|
||||
DummyWebRtcController
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DummyAudioDecoder;
|
||||
|
||||
impl AudioDecoder for DummyAudioDecoder {
|
||||
fn decode(&self, _: Vec<u8>, _: AudioDecoderCallbacks, _: Option<AudioDecoderOptions>) {}
|
||||
}
|
||||
|
||||
pub struct DummySocket;
|
||||
|
||||
impl MediaSocket for DummySocket {
|
||||
fn as_any(&self) -> &dyn Any {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DummyMediaStream {
|
||||
id: MediaStreamId,
|
||||
}
|
||||
|
||||
impl MediaStream for DummyMediaStream {
|
||||
fn as_any(&self) -> &dyn Any {
|
||||
self
|
||||
}
|
||||
fn as_mut_any(&mut self) -> &mut dyn Any {
|
||||
self
|
||||
}
|
||||
fn set_id(&mut self, _: MediaStreamId) {}
|
||||
|
||||
fn ty(&self) -> MediaStreamType {
|
||||
MediaStreamType::Audio
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for DummyMediaStream {
|
||||
fn drop(&mut self) {
|
||||
unregister_stream(&self.id);
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DummyAudioSink;
|
||||
|
||||
impl AudioSink for DummyAudioSink {
|
||||
fn init(&self, _: f32, _: Sender<AudioRenderThreadMsg>) -> Result<(), AudioSinkError> {
|
||||
Ok(())
|
||||
}
|
||||
fn init_stream(&self, _: u8, _: f32, _: Box<dyn MediaSocket>) -> Result<(), AudioSinkError> {
|
||||
Ok(())
|
||||
}
|
||||
fn play(&self) -> Result<(), AudioSinkError> {
|
||||
Ok(())
|
||||
}
|
||||
fn stop(&self) -> Result<(), AudioSinkError> {
|
||||
Ok(())
|
||||
}
|
||||
fn has_enough_data(&self) -> bool {
|
||||
true
|
||||
}
|
||||
fn push_data(&self, _: Chunk) -> Result<(), AudioSinkError> {
|
||||
Ok(())
|
||||
}
|
||||
fn set_eos_callback(&self, _: Box<dyn Fn(Box<dyn AsRef<[f32]>>) + Send + Sync + 'static>) {}
|
||||
}
|
||||
|
||||
pub struct DummyMediaOutput;
|
||||
impl MediaOutput for DummyMediaOutput {
|
||||
fn add_stream(&mut self, _stream: &MediaStreamId) {}
|
||||
}
|
||||
|
||||
pub struct DummyWebRtcController;
|
||||
|
||||
impl WebRtcControllerBackend for DummyWebRtcController {
|
||||
fn configure(&mut self, _: &str, _: BundlePolicy) -> WebRtcResult {
|
||||
Ok(())
|
||||
}
|
||||
fn set_remote_description(
|
||||
&mut self,
|
||||
_: SessionDescription,
|
||||
_: Box<dyn FnOnce() + Send + 'static>,
|
||||
) -> WebRtcResult {
|
||||
Ok(())
|
||||
}
|
||||
fn set_local_description(
|
||||
&mut self,
|
||||
_: SessionDescription,
|
||||
_: Box<dyn FnOnce() + Send + 'static>,
|
||||
) -> WebRtcResult {
|
||||
Ok(())
|
||||
}
|
||||
fn add_ice_candidate(&mut self, _: IceCandidate) -> WebRtcResult {
|
||||
Ok(())
|
||||
}
|
||||
fn create_offer(
|
||||
&mut self,
|
||||
_: Box<dyn FnOnce(SessionDescription) + Send + 'static>,
|
||||
) -> WebRtcResult {
|
||||
Ok(())
|
||||
}
|
||||
fn create_answer(
|
||||
&mut self,
|
||||
_: Box<dyn FnOnce(SessionDescription) + Send + 'static>,
|
||||
) -> WebRtcResult {
|
||||
Ok(())
|
||||
}
|
||||
fn add_stream(&mut self, _: &MediaStreamId) -> WebRtcResult {
|
||||
Ok(())
|
||||
}
|
||||
fn create_data_channel(&mut self, _: &DataChannelInit) -> WebRtcDataChannelResult {
|
||||
Ok(0)
|
||||
}
|
||||
fn close_data_channel(&mut self, _: &DataChannelId) -> WebRtcResult {
|
||||
Ok(())
|
||||
}
|
||||
fn send_data_channel_message(
|
||||
&mut self,
|
||||
_: &DataChannelId,
|
||||
_: &DataChannelMessage,
|
||||
) -> WebRtcResult {
|
||||
Ok(())
|
||||
}
|
||||
fn internal_event(&mut self, _: thread::InternalEvent) -> WebRtcResult {
|
||||
Ok(())
|
||||
}
|
||||
fn quit(&mut self) {}
|
||||
}
|
||||
|
||||
impl MediaInstance for DummyPlayer {
|
||||
fn get_id(&self) -> usize {
|
||||
0
|
||||
}
|
||||
|
||||
fn mute(&self, _val: bool) -> Result<(), MediaInstanceError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn suspend(&self) -> Result<(), MediaInstanceError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn resume(&self) -> Result<(), MediaInstanceError> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
struct DummyMediaDeviceMonitor;
|
||||
|
||||
impl MediaDeviceMonitor for DummyMediaDeviceMonitor {
|
||||
fn enumerate_devices(&self) -> Option<Vec<MediaDeviceInfo>> {
|
||||
Some(vec![])
|
||||
}
|
||||
}
|
||||
50
components/media/backends/gstreamer/Cargo.toml
Normal file
50
components/media/backends/gstreamer/Cargo.toml
Normal file
@@ -0,0 +1,50 @@
|
||||
[package]
|
||||
name = "servo-media-gstreamer"
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
license.workspace = true
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
rust-version.workspace = true
|
||||
|
||||
[lib]
|
||||
name = "servo_media_gstreamer"
|
||||
path = "lib.rs"
|
||||
|
||||
[features]
|
||||
glx = ["servo-media-gstreamer-render-unix/gl-x11"]
|
||||
wayland = ["servo-media-gstreamer-render-unix/gl-wayland"]
|
||||
|
||||
[dependencies]
|
||||
byte-slice-cast = "1"
|
||||
glib = { workspace = true }
|
||||
glib-sys = { workspace = true }
|
||||
gstreamer = { workspace = true }
|
||||
gstreamer-app = { workspace = true }
|
||||
gstreamer-audio = { workspace = true }
|
||||
gstreamer-video = { workspace = true }
|
||||
gstreamer-base = { workspace = true }
|
||||
gstreamer-play = { workspace = true }
|
||||
gstreamer-webrtc = { workspace = true }
|
||||
gstreamer-sdp = { workspace = true }
|
||||
gstreamer-sys = { workspace = true }
|
||||
ipc-channel = { workspace = true }
|
||||
log = "0.4"
|
||||
mime = "0.3.13"
|
||||
once_cell = "1.18.0"
|
||||
servo-media = { path = "../../servo-media" }
|
||||
servo-media-audio = { path = "../../audio" }
|
||||
servo-media-gstreamer-render = { path = "render" }
|
||||
servo-media-player = { path = "../../player" }
|
||||
servo-media-streams = { path = "../../streams" }
|
||||
servo-media-traits = { path = "../../traits" }
|
||||
servo-media-webrtc = { path = "../../webrtc" }
|
||||
url = "2.0"
|
||||
|
||||
[target.'cfg(any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "netbsd", target_os = "openbsd"))'.dependencies]
|
||||
servo-media-gstreamer-render-unix = { path = "render-unix", features = [
|
||||
"gl-egl",
|
||||
] }
|
||||
|
||||
[target.'cfg(target_os = "android")'.dependencies]
|
||||
servo-media-gstreamer-render-android = { path = "render-android" }
|
||||
408
components/media/backends/gstreamer/audio_decoder.rs
Normal file
408
components/media/backends/gstreamer/audio_decoder.rs
Normal file
@@ -0,0 +1,408 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::io::{Cursor, Read};
|
||||
use std::sync::{Arc, Mutex, mpsc};
|
||||
|
||||
use byte_slice_cast::*;
|
||||
use gstreamer::prelude::*;
|
||||
use servo_media_audio::decoder::{
|
||||
AudioDecoder, AudioDecoderCallbacks, AudioDecoderError, AudioDecoderOptions,
|
||||
};
|
||||
use {gstreamer, gstreamer_app, gstreamer_audio};
|
||||
|
||||
pub struct GStreamerAudioDecoderProgress(
|
||||
gstreamer::buffer::MappedBuffer<gstreamer::buffer::Readable>,
|
||||
);
|
||||
|
||||
impl AsRef<[f32]> for GStreamerAudioDecoderProgress {
|
||||
fn as_ref(&self) -> &[f32] {
|
||||
self.0.as_ref().as_slice_of::<f32>().unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct GStreamerAudioDecoder {}
|
||||
|
||||
impl GStreamerAudioDecoder {
|
||||
pub fn new() -> Self {
|
||||
Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioDecoder for GStreamerAudioDecoder {
|
||||
fn decode(
|
||||
&self,
|
||||
data: Vec<u8>,
|
||||
callbacks: AudioDecoderCallbacks,
|
||||
options: Option<AudioDecoderOptions>,
|
||||
) {
|
||||
let pipeline = gstreamer::Pipeline::new();
|
||||
let callbacks = Arc::new(callbacks);
|
||||
|
||||
let appsrc = match gstreamer::ElementFactory::make("appsrc").build() {
|
||||
Ok(appsrc) => appsrc,
|
||||
_ => {
|
||||
return callbacks.error(AudioDecoderError::Backend(
|
||||
"appsrc creation failed".to_owned(),
|
||||
));
|
||||
},
|
||||
};
|
||||
|
||||
let decodebin = match gstreamer::ElementFactory::make("decodebin").build() {
|
||||
Ok(decodebin) => decodebin,
|
||||
_ => {
|
||||
return callbacks.error(AudioDecoderError::Backend(
|
||||
"decodebin creation failed".to_owned(),
|
||||
));
|
||||
},
|
||||
};
|
||||
|
||||
// decodebin uses something called a "sometimes-pad", which is basically
|
||||
// a pad that will show up when a certain condition is met,
|
||||
// in decodebins case that is media being decoded
|
||||
if let Err(e) = pipeline.add_many([&appsrc, &decodebin]) {
|
||||
return callbacks.error(AudioDecoderError::Backend(e.to_string()));
|
||||
}
|
||||
|
||||
if let Err(e) = gstreamer::Element::link_many([&appsrc, &decodebin]) {
|
||||
return callbacks.error(AudioDecoderError::Backend(e.to_string()));
|
||||
}
|
||||
|
||||
let appsrc = appsrc.downcast::<gstreamer_app::AppSrc>().unwrap();
|
||||
|
||||
let options = options.unwrap_or_default();
|
||||
|
||||
let (sender, receiver) = mpsc::channel();
|
||||
let sender = Arc::new(Mutex::new(sender));
|
||||
|
||||
let pipeline_ = pipeline.downgrade();
|
||||
let callbacks_ = callbacks.clone();
|
||||
let sender_ = sender.clone();
|
||||
// Initial pipeline looks like
|
||||
//
|
||||
// appsrc ! decodebin2! ...
|
||||
//
|
||||
// We plug in the second part of the pipeline, including the deinterleave element,
|
||||
// once the media starts being decoded.
|
||||
decodebin.connect_pad_added(move |_, src_pad| {
|
||||
// A decodebin pad was added, if this is an audio file,
|
||||
// plug in a deinterleave element to separate each planar channel.
|
||||
//
|
||||
// Sub pipeline looks like
|
||||
//
|
||||
// ... decodebin2 ! audioconvert ! audioresample ! capsfilter ! deinterleave ...
|
||||
//
|
||||
// deinterleave also uses a sometime-pad, so we need to wait until
|
||||
// a pad for a planar channel is added to plug in the last part of
|
||||
// the pipeline, with the appsink that will be pulling the data from
|
||||
// each channel.
|
||||
|
||||
let callbacks = &callbacks_;
|
||||
let sender = &sender_;
|
||||
let pipeline = match pipeline_.upgrade() {
|
||||
Some(pipeline) => pipeline,
|
||||
None => {
|
||||
callbacks.error(AudioDecoderError::Backend(
|
||||
"Pipeline failed upgrade".to_owned(),
|
||||
));
|
||||
let _ = sender.lock().unwrap().send(());
|
||||
return;
|
||||
},
|
||||
};
|
||||
|
||||
let (is_audio, caps) = {
|
||||
let media_type = src_pad.current_caps().and_then(|caps| {
|
||||
caps.structure(0).map(|s| {
|
||||
let name = s.name();
|
||||
(name.starts_with("audio/"), caps.clone())
|
||||
})
|
||||
});
|
||||
|
||||
match media_type {
|
||||
None => {
|
||||
callbacks.error(AudioDecoderError::Backend(
|
||||
"Failed to get media type from pad".to_owned(),
|
||||
));
|
||||
let _ = sender.lock().unwrap().send(());
|
||||
return;
|
||||
},
|
||||
Some(media_type) => media_type,
|
||||
}
|
||||
};
|
||||
|
||||
if !is_audio {
|
||||
callbacks.error(AudioDecoderError::InvalidMediaFormat);
|
||||
let _ = sender.lock().unwrap().send(());
|
||||
return;
|
||||
}
|
||||
|
||||
let sample_audio_info = match gstreamer_audio::AudioInfo::from_caps(&caps) {
|
||||
Ok(sample_audio_info) => sample_audio_info,
|
||||
_ => {
|
||||
callbacks.error(AudioDecoderError::Backend("AudioInfo failed".to_owned()));
|
||||
let _ = sender.lock().unwrap().send(());
|
||||
return;
|
||||
},
|
||||
};
|
||||
let channels = sample_audio_info.channels();
|
||||
callbacks.ready(channels);
|
||||
|
||||
let insert_deinterleave = || -> Result<(), AudioDecoderError> {
|
||||
let convert = gstreamer::ElementFactory::make("audioconvert")
|
||||
.build()
|
||||
.map_err(|error| {
|
||||
AudioDecoderError::Backend(format!(
|
||||
"audioconvert creation failed: {error:?}"
|
||||
))
|
||||
})?;
|
||||
let resample = gstreamer::ElementFactory::make("audioresample")
|
||||
.build()
|
||||
.map_err(|error| {
|
||||
AudioDecoderError::Backend(format!(
|
||||
"audioresample creation failed: {error:?}"
|
||||
))
|
||||
})?;
|
||||
let filter = gstreamer::ElementFactory::make("capsfilter")
|
||||
.build()
|
||||
.map_err(|error| {
|
||||
AudioDecoderError::Backend(format!("capsfilter creation failed: {error:?}"))
|
||||
})?;
|
||||
let deinterleave = gstreamer::ElementFactory::make("deinterleave")
|
||||
.name("deinterleave")
|
||||
.property("keep-positions", true)
|
||||
.build()
|
||||
.map_err(|error| {
|
||||
AudioDecoderError::Backend(format!(
|
||||
"deinterleave creation failed: {error:?}"
|
||||
))
|
||||
})?;
|
||||
|
||||
let pipeline_ = pipeline.downgrade();
|
||||
let callbacks_ = callbacks.clone();
|
||||
deinterleave.connect_pad_added(move |_, src_pad| {
|
||||
// A new pad for a planar channel was added in deinterleave.
|
||||
// Plug in an appsink so we can pull the data from each channel.
|
||||
//
|
||||
// The end of the pipeline looks like:
|
||||
//
|
||||
// ... deinterleave ! queue ! appsink.
|
||||
let callbacks = &callbacks_;
|
||||
let pipeline = match pipeline_.upgrade() {
|
||||
Some(pipeline) => pipeline,
|
||||
None => {
|
||||
return callbacks.error(AudioDecoderError::Backend(
|
||||
"Pipeline failedupgrade".to_owned(),
|
||||
));
|
||||
},
|
||||
};
|
||||
let insert_sink = || -> Result<(), AudioDecoderError> {
|
||||
let queue =
|
||||
gstreamer::ElementFactory::make("queue")
|
||||
.build()
|
||||
.map_err(|error| {
|
||||
AudioDecoderError::Backend(format!(
|
||||
"queue creation failed: {error:?}"
|
||||
))
|
||||
})?;
|
||||
let sink = gstreamer::ElementFactory::make("appsink").build().map_err(
|
||||
|error| {
|
||||
AudioDecoderError::Backend(format!(
|
||||
"appsink creation failed: {error:?}"
|
||||
))
|
||||
},
|
||||
)?;
|
||||
let appsink = sink
|
||||
.clone()
|
||||
.dynamic_cast::<gstreamer_app::AppSink>()
|
||||
.unwrap();
|
||||
sink.set_property("sync", false);
|
||||
|
||||
let callbacks_ = callbacks.clone();
|
||||
appsink.set_callbacks(
|
||||
gstreamer_app::AppSinkCallbacks::builder()
|
||||
.new_sample(move |appsink| {
|
||||
let sample = appsink
|
||||
.pull_sample()
|
||||
.map_err(|_| gstreamer::FlowError::Eos)?;
|
||||
let buffer = sample.buffer_owned().ok_or_else(|| {
|
||||
callbacks_.error(AudioDecoderError::InvalidSample);
|
||||
gstreamer::FlowError::Error
|
||||
})?;
|
||||
|
||||
let audio_info = sample
|
||||
.caps()
|
||||
.and_then(|caps| {
|
||||
gstreamer_audio::AudioInfo::from_caps(caps).ok()
|
||||
})
|
||||
.ok_or_else(|| {
|
||||
callbacks_.error(AudioDecoderError::Backend(
|
||||
"Could not get caps from sample".to_owned(),
|
||||
));
|
||||
gstreamer::FlowError::Error
|
||||
})?;
|
||||
let positions = audio_info.positions().ok_or_else(|| {
|
||||
callbacks_.error(AudioDecoderError::Backend(
|
||||
"AudioInfo failed".to_owned(),
|
||||
));
|
||||
gstreamer::FlowError::Error
|
||||
})?;
|
||||
|
||||
for position in positions.iter() {
|
||||
let buffer = buffer.clone();
|
||||
let map = match buffer.into_mapped_buffer_readable() {
|
||||
Ok(map) => map,
|
||||
_ => {
|
||||
callbacks_
|
||||
.error(AudioDecoderError::BufferReadFailed);
|
||||
return Err(gstreamer::FlowError::Error);
|
||||
},
|
||||
};
|
||||
let progress = Box::new(GStreamerAudioDecoderProgress(map));
|
||||
let channel = position.to_mask() as u32;
|
||||
callbacks_.progress(progress, channel);
|
||||
}
|
||||
|
||||
Ok(gstreamer::FlowSuccess::Ok)
|
||||
})
|
||||
.build(),
|
||||
);
|
||||
|
||||
let elements = &[&queue, &sink];
|
||||
pipeline
|
||||
.add_many(elements)
|
||||
.map_err(|e| AudioDecoderError::Backend(e.to_string()))?;
|
||||
gstreamer::Element::link_many(elements)
|
||||
.map_err(|e| AudioDecoderError::Backend(e.to_string()))?;
|
||||
|
||||
for e in elements {
|
||||
e.sync_state_with_parent()
|
||||
.map_err(|e| AudioDecoderError::Backend(e.to_string()))?;
|
||||
}
|
||||
|
||||
let sink_pad = queue.static_pad("sink").ok_or(
|
||||
AudioDecoderError::Backend("Could not get static pad sink".to_owned()),
|
||||
)?;
|
||||
src_pad.link(&sink_pad).map(|_| ()).map_err(|e| {
|
||||
AudioDecoderError::Backend(format!("Sink pad link failed: {}", e))
|
||||
})
|
||||
};
|
||||
|
||||
if let Err(e) = insert_sink() {
|
||||
callbacks.error(e);
|
||||
}
|
||||
});
|
||||
|
||||
let mut audio_info_builder = gstreamer_audio::AudioInfo::builder(
|
||||
gstreamer_audio::AUDIO_FORMAT_F32,
|
||||
options.sample_rate as u32,
|
||||
channels,
|
||||
);
|
||||
if let Some(positions) = sample_audio_info.positions() {
|
||||
audio_info_builder = audio_info_builder.positions(positions);
|
||||
}
|
||||
let audio_info = audio_info_builder.build().map_err(|error| {
|
||||
AudioDecoderError::Backend(format!("AudioInfo failed: {error:?}"))
|
||||
})?;
|
||||
let caps = audio_info.to_caps().map_err(|error| {
|
||||
AudioDecoderError::Backend(format!("AudioInfo failed: {error:?}"))
|
||||
})?;
|
||||
filter.set_property("caps", caps);
|
||||
|
||||
let elements = &[&convert, &resample, &filter, &deinterleave];
|
||||
pipeline
|
||||
.add_many(elements)
|
||||
.map_err(|e| AudioDecoderError::Backend(e.to_string()))?;
|
||||
gstreamer::Element::link_many(elements)
|
||||
.map_err(|e| AudioDecoderError::Backend(e.to_string()))?;
|
||||
|
||||
for e in elements {
|
||||
e.sync_state_with_parent()
|
||||
.map_err(|e| AudioDecoderError::Backend(e.to_string()))?;
|
||||
}
|
||||
|
||||
let sink_pad = convert
|
||||
.static_pad("sink")
|
||||
.ok_or(AudioDecoderError::Backend(
|
||||
"Get static pad sink failed".to_owned(),
|
||||
))?;
|
||||
src_pad
|
||||
.link(&sink_pad)
|
||||
.map(|_| ())
|
||||
.map_err(|e| AudioDecoderError::Backend(format!("Sink pad link failed: {}", e)))
|
||||
};
|
||||
|
||||
if let Err(e) = insert_deinterleave() {
|
||||
callbacks.error(e);
|
||||
let _ = sender.lock().unwrap().send(());
|
||||
}
|
||||
});
|
||||
|
||||
appsrc.set_format(gstreamer::Format::Bytes);
|
||||
appsrc.set_block(true);
|
||||
|
||||
let bus = match pipeline.bus() {
|
||||
Some(bus) => bus,
|
||||
None => {
|
||||
callbacks.error(AudioDecoderError::Backend(
|
||||
"Pipeline without bus. Shouldn't happen!".to_owned(),
|
||||
));
|
||||
let _ = sender.lock().unwrap().send(());
|
||||
return;
|
||||
},
|
||||
};
|
||||
|
||||
let callbacks_ = callbacks.clone();
|
||||
bus.set_sync_handler(move |_, msg| {
|
||||
use gstreamer::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
MessageView::Error(e) => {
|
||||
callbacks_.error(AudioDecoderError::Backend(
|
||||
e.debug()
|
||||
.map(|d| d.to_string())
|
||||
.unwrap_or_else(|| "Unknown".to_owned()),
|
||||
));
|
||||
let _ = sender.lock().unwrap().send(());
|
||||
},
|
||||
MessageView::Eos(_) => {
|
||||
callbacks_.eos();
|
||||
let _ = sender.lock().unwrap().send(());
|
||||
},
|
||||
_ => (),
|
||||
}
|
||||
gstreamer::BusSyncReply::Drop
|
||||
});
|
||||
|
||||
if pipeline.set_state(gstreamer::State::Playing).is_err() {
|
||||
callbacks.error(AudioDecoderError::StateChangeFailed);
|
||||
return;
|
||||
}
|
||||
|
||||
let max_bytes = appsrc.max_bytes() as usize;
|
||||
let data_len = data.len();
|
||||
let mut reader = Cursor::new(data);
|
||||
while (reader.position() as usize) < data_len {
|
||||
let data_left = data_len - reader.position() as usize;
|
||||
let buffer_size = if data_left < max_bytes {
|
||||
data_left
|
||||
} else {
|
||||
max_bytes
|
||||
};
|
||||
let mut buffer = gstreamer::Buffer::with_size(buffer_size).unwrap();
|
||||
{
|
||||
let buffer = buffer.get_mut().unwrap();
|
||||
let mut map = buffer.map_writable().unwrap();
|
||||
let buffer = map.as_mut_slice();
|
||||
let _ = reader.read(buffer);
|
||||
}
|
||||
let _ = appsrc.push_buffer(buffer);
|
||||
}
|
||||
let _ = appsrc.end_of_stream();
|
||||
|
||||
// Wait until we get an error or EOS.
|
||||
receiver.recv().unwrap();
|
||||
let _ = pipeline.set_state(gstreamer::State::Null);
|
||||
}
|
||||
}
|
||||
252
components/media/backends/gstreamer/audio_sink.rs
Normal file
252
components/media/backends/gstreamer/audio_sink.rs
Normal file
@@ -0,0 +1,252 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::cell::{Cell, RefCell};
|
||||
use std::sync::Arc;
|
||||
use std::sync::mpsc::Sender;
|
||||
use std::thread::Builder;
|
||||
|
||||
use byte_slice_cast::*;
|
||||
use gstreamer::prelude::*;
|
||||
use gstreamer_app::{AppSrc, AppSrcCallbacks};
|
||||
use servo_media_audio::block::{Chunk, FRAMES_PER_BLOCK};
|
||||
use servo_media_audio::render_thread::AudioRenderThreadMsg;
|
||||
use servo_media_audio::sink::{AudioSink, AudioSinkError};
|
||||
use servo_media_streams::MediaSocket;
|
||||
use {gstreamer, gstreamer_audio};
|
||||
|
||||
use crate::media_stream::GstreamerMediaSocket;
|
||||
|
||||
const DEFAULT_SAMPLE_RATE: f32 = 44100.;
|
||||
|
||||
pub struct GStreamerAudioSink {
|
||||
pipeline: gstreamer::Pipeline,
|
||||
appsrc: Arc<AppSrc>,
|
||||
sample_rate: Cell<f32>,
|
||||
audio_info: RefCell<Option<gstreamer_audio::AudioInfo>>,
|
||||
sample_offset: Cell<u64>,
|
||||
}
|
||||
|
||||
impl GStreamerAudioSink {
|
||||
pub fn new() -> Result<Self, AudioSinkError> {
|
||||
if let Some(category) = gstreamer::DebugCategory::get("openslessink") {
|
||||
category.set_threshold(gstreamer::DebugLevel::Trace);
|
||||
}
|
||||
gstreamer::init().map_err(|error| {
|
||||
AudioSinkError::Backend(format!("GStreamer init failed: {error:?}"))
|
||||
})?;
|
||||
|
||||
let appsrc = gstreamer::ElementFactory::make("appsrc")
|
||||
.build()
|
||||
.map_err(|error| {
|
||||
AudioSinkError::Backend(format!("appsrc creation failed: {error:?}"))
|
||||
})?;
|
||||
let appsrc = appsrc.downcast::<AppSrc>().unwrap();
|
||||
|
||||
Ok(Self {
|
||||
pipeline: gstreamer::Pipeline::new(),
|
||||
appsrc: Arc::new(appsrc),
|
||||
sample_rate: Cell::new(DEFAULT_SAMPLE_RATE),
|
||||
audio_info: RefCell::new(None),
|
||||
sample_offset: Cell::new(0),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl GStreamerAudioSink {
|
||||
fn set_audio_info(&self, sample_rate: f32, channels: u8) -> Result<(), AudioSinkError> {
|
||||
let audio_info = gstreamer_audio::AudioInfo::builder(
|
||||
gstreamer_audio::AUDIO_FORMAT_F32,
|
||||
sample_rate as u32,
|
||||
channels.into(),
|
||||
)
|
||||
.build()
|
||||
.map_err(|error| AudioSinkError::Backend(format!("AudioInfo failed: {error:?}")))?;
|
||||
self.appsrc.set_caps(audio_info.to_caps().ok().as_ref());
|
||||
*self.audio_info.borrow_mut() = Some(audio_info);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn set_channels_if_changed(&self, channels: u8) -> Result<(), AudioSinkError> {
|
||||
let curr_channels = match self.audio_info.borrow().as_ref() {
|
||||
Some(ch) => ch.channels(),
|
||||
_ => {
|
||||
return Ok(());
|
||||
},
|
||||
};
|
||||
if channels != curr_channels as u8 {
|
||||
self.set_audio_info(self.sample_rate.get(), channels)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioSink for GStreamerAudioSink {
|
||||
fn init(
|
||||
&self,
|
||||
sample_rate: f32,
|
||||
graph_thread_channel: Sender<AudioRenderThreadMsg>,
|
||||
) -> Result<(), AudioSinkError> {
|
||||
self.sample_rate.set(sample_rate);
|
||||
self.set_audio_info(sample_rate, 2)?;
|
||||
self.appsrc.set_format(gstreamer::Format::Time);
|
||||
|
||||
// Allow only a single chunk.
|
||||
self.appsrc.set_max_bytes(1);
|
||||
|
||||
let appsrc = self.appsrc.clone();
|
||||
Builder::new()
|
||||
.name("GstAppSrcCallbacks".to_owned())
|
||||
.spawn(move || {
|
||||
let need_data = move |_: &AppSrc, _: u32| {
|
||||
if let Err(e) = graph_thread_channel.send(AudioRenderThreadMsg::SinkNeedData) {
|
||||
log::warn!("Error sending need data event: {:?}", e);
|
||||
}
|
||||
};
|
||||
appsrc.set_callbacks(AppSrcCallbacks::builder().need_data(need_data).build());
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
let appsrc = self.appsrc.as_ref().clone().upcast();
|
||||
let resample = gstreamer::ElementFactory::make("audioresample")
|
||||
.build()
|
||||
.map_err(|error| {
|
||||
AudioSinkError::Backend(format!("audioresample creation failed: {error:?}"))
|
||||
})?;
|
||||
let convert = gstreamer::ElementFactory::make("audioconvert")
|
||||
.build()
|
||||
.map_err(|error| {
|
||||
AudioSinkError::Backend(format!("audioconvert creation failed: {error:?}"))
|
||||
})?;
|
||||
let sink = gstreamer::ElementFactory::make("autoaudiosink")
|
||||
.build()
|
||||
.map_err(|error| {
|
||||
AudioSinkError::Backend(format!("autoaudiosink creation failed: {error:?}"))
|
||||
})?;
|
||||
self.pipeline
|
||||
.add_many([&appsrc, &resample, &convert, &sink])
|
||||
.map_err(|error| AudioSinkError::Backend(error.to_string()))?;
|
||||
gstreamer::Element::link_many([&appsrc, &resample, &convert, &sink])
|
||||
.map_err(|error| AudioSinkError::Backend(error.to_string()))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn init_stream(
|
||||
&self,
|
||||
channels: u8,
|
||||
sample_rate: f32,
|
||||
socket: Box<dyn MediaSocket>,
|
||||
) -> Result<(), AudioSinkError> {
|
||||
self.sample_rate.set(sample_rate);
|
||||
self.set_audio_info(sample_rate, channels)?;
|
||||
self.appsrc.set_format(gstreamer::Format::Time);
|
||||
|
||||
// Do not set max bytes or callback, we will push as needed
|
||||
|
||||
let appsrc = self.appsrc.as_ref().clone().upcast();
|
||||
let convert = gstreamer::ElementFactory::make("audioconvert")
|
||||
.build()
|
||||
.map_err(|error| {
|
||||
AudioSinkError::Backend(format!("audioconvert creation failed: {error:?}"))
|
||||
})?;
|
||||
let sink = socket
|
||||
.as_any()
|
||||
.downcast_ref::<GstreamerMediaSocket>()
|
||||
.unwrap()
|
||||
.proxy_sink()
|
||||
.clone();
|
||||
|
||||
self.pipeline
|
||||
.add_many([&appsrc, &convert, &sink])
|
||||
.map_err(|error| AudioSinkError::Backend(error.to_string()))?;
|
||||
gstreamer::Element::link_many([&appsrc, &convert, &sink])
|
||||
.map_err(|error| AudioSinkError::Backend(error.to_string()))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn play(&self) -> Result<(), AudioSinkError> {
|
||||
self.pipeline
|
||||
.set_state(gstreamer::State::Playing)
|
||||
.map(|_| ())
|
||||
.map_err(|_| AudioSinkError::StateChangeFailed)
|
||||
}
|
||||
|
||||
fn stop(&self) -> Result<(), AudioSinkError> {
|
||||
self.pipeline
|
||||
.set_state(gstreamer::State::Paused)
|
||||
.map(|_| ())
|
||||
.map_err(|_| AudioSinkError::StateChangeFailed)
|
||||
}
|
||||
|
||||
fn has_enough_data(&self) -> bool {
|
||||
self.appsrc.current_level_bytes() >= self.appsrc.max_bytes()
|
||||
}
|
||||
|
||||
fn push_data(&self, mut chunk: Chunk) -> Result<(), AudioSinkError> {
|
||||
if let Some(block) = chunk.blocks.first() {
|
||||
self.set_channels_if_changed(block.chan_count())?;
|
||||
}
|
||||
|
||||
let sample_rate = self.sample_rate.get() as u64;
|
||||
let audio_info = self.audio_info.borrow();
|
||||
let audio_info = audio_info.as_ref().unwrap();
|
||||
let channels = audio_info.channels();
|
||||
let bpf = audio_info.bpf() as usize;
|
||||
assert_eq!(bpf, 4 * channels as usize);
|
||||
let n_samples = FRAMES_PER_BLOCK.0;
|
||||
let buf_size = (n_samples as usize) * (bpf);
|
||||
let mut buffer = gstreamer::Buffer::with_size(buf_size).unwrap();
|
||||
{
|
||||
let buffer = buffer.get_mut().unwrap();
|
||||
let mut sample_offset = self.sample_offset.get();
|
||||
// Calculate the current timestamp (PTS) and the next one,
|
||||
// and calculate the duration from the difference instead of
|
||||
// simply the number of samples to prevent rounding errors
|
||||
let pts = gstreamer::ClockTime::from_nseconds(
|
||||
sample_offset
|
||||
.mul_div_floor(gstreamer::ClockTime::SECOND.nseconds(), sample_rate)
|
||||
.unwrap(),
|
||||
);
|
||||
let next_pts: gstreamer::ClockTime = gstreamer::ClockTime::from_nseconds(
|
||||
(sample_offset + n_samples)
|
||||
.mul_div_floor(gstreamer::ClockTime::SECOND.nseconds(), sample_rate)
|
||||
.unwrap(),
|
||||
);
|
||||
buffer.set_pts(Some(pts));
|
||||
buffer.set_duration(next_pts - pts);
|
||||
|
||||
// sometimes nothing reaches the output
|
||||
if chunk.is_empty() {
|
||||
chunk.blocks.push(Default::default());
|
||||
chunk.blocks[0].repeat(channels as u8);
|
||||
}
|
||||
debug_assert!(chunk.len() == 1);
|
||||
let mut data = chunk.blocks[0].interleave();
|
||||
let data = data.as_mut_byte_slice();
|
||||
|
||||
// XXXManishearth if we have a safe way to convert
|
||||
// from Box<[f32]> to Box<[u8]> (similarly for Vec)
|
||||
// we can use Buffer::from_slice instead
|
||||
buffer.copy_from_slice(0, data).expect("copying failed");
|
||||
|
||||
sample_offset += n_samples;
|
||||
self.sample_offset.set(sample_offset);
|
||||
}
|
||||
|
||||
self.appsrc
|
||||
.push_buffer(buffer)
|
||||
.map(|_| ())
|
||||
.map_err(|_| AudioSinkError::BufferPushFailed)
|
||||
}
|
||||
|
||||
fn set_eos_callback(&self, _: Box<dyn Fn(Box<dyn AsRef<[f32]>>) + Send + Sync + 'static>) {}
|
||||
}
|
||||
|
||||
impl Drop for GStreamerAudioSink {
|
||||
fn drop(&mut self) {
|
||||
let _ = self.stop();
|
||||
}
|
||||
}
|
||||
119
components/media/backends/gstreamer/audio_stream_reader.rs
Normal file
119
components/media/backends/gstreamer/audio_stream_reader.rs
Normal file
@@ -0,0 +1,119 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::sync::mpsc::{Receiver, channel};
|
||||
|
||||
use byte_slice_cast::*;
|
||||
use gstreamer::Fraction;
|
||||
use gstreamer::prelude::*;
|
||||
use gstreamer_audio::AUDIO_FORMAT_F32;
|
||||
use servo_media_audio::AudioStreamReader;
|
||||
use servo_media_audio::block::{Block, FRAMES_PER_BLOCK_USIZE};
|
||||
use servo_media_streams::registry::{MediaStreamId, get_stream};
|
||||
|
||||
use crate::media_stream::GStreamerMediaStream;
|
||||
|
||||
pub struct GStreamerAudioStreamReader {
|
||||
rx: Receiver<Block>,
|
||||
pipeline: gstreamer::Pipeline,
|
||||
}
|
||||
|
||||
impl GStreamerAudioStreamReader {
|
||||
pub fn new(stream: MediaStreamId, sample_rate: f32) -> Result<Self, String> {
|
||||
let (tx, rx) = channel();
|
||||
let stream = get_stream(&stream).unwrap();
|
||||
let mut stream = stream.lock().unwrap();
|
||||
let g_stream = stream
|
||||
.as_mut_any()
|
||||
.downcast_mut::<GStreamerMediaStream>()
|
||||
.unwrap();
|
||||
let element = g_stream.src_element();
|
||||
let pipeline = g_stream.pipeline_or_new();
|
||||
drop(stream);
|
||||
let time_per_block = Fraction::new(FRAMES_PER_BLOCK_USIZE as i32, sample_rate as i32);
|
||||
|
||||
// XXXManishearth this is only necessary because of an upstream
|
||||
// gstreamer bug. https://github.com/servo/media/pull/362#issuecomment-647947034
|
||||
let caps = gstreamer_audio::AudioCapsBuilder::new()
|
||||
.layout(gstreamer_audio::AudioLayout::Interleaved)
|
||||
.build();
|
||||
let capsfilter0 = gstreamer::ElementFactory::make("capsfilter")
|
||||
.property("caps", caps)
|
||||
.build()
|
||||
.map_err(|error| format!("capsfilter creation failed: {error:?}"))?;
|
||||
|
||||
let split = gstreamer::ElementFactory::make("audiobuffersplit")
|
||||
.property("output-buffer-duration", time_per_block)
|
||||
.build()
|
||||
.map_err(|error| format!("audiobuffersplit creation failed: {error:?}"))?;
|
||||
let convert = gstreamer::ElementFactory::make("audioconvert")
|
||||
.build()
|
||||
.map_err(|error| format!("audioconvert creation failed: {error:?}"))?;
|
||||
let caps = gstreamer_audio::AudioCapsBuilder::new()
|
||||
.layout(gstreamer_audio::AudioLayout::NonInterleaved)
|
||||
.format(AUDIO_FORMAT_F32)
|
||||
.rate(sample_rate as i32)
|
||||
.build();
|
||||
let capsfilter = gstreamer::ElementFactory::make("capsfilter")
|
||||
.property("caps", caps)
|
||||
.build()
|
||||
.map_err(|error| format!("capsfilter creation failed: {error:?}"))?;
|
||||
let sink = gstreamer::ElementFactory::make("appsink")
|
||||
.property("sync", false)
|
||||
.build()
|
||||
.map_err(|error| format!("appsink creation failed: {error:?}"))?;
|
||||
|
||||
let appsink = sink
|
||||
.clone()
|
||||
.dynamic_cast::<gstreamer_app::AppSink>()
|
||||
.unwrap();
|
||||
|
||||
let elements = [&element, &capsfilter0, &split, &convert, &capsfilter, &sink];
|
||||
pipeline
|
||||
.add_many(&elements[1..])
|
||||
.map_err(|error| format!("pipeline adding failed: {error:?}"))?;
|
||||
gstreamer::Element::link_many(elements)
|
||||
.map_err(|error| format!("element linking failed: {error:?}"))?;
|
||||
for e in &elements {
|
||||
e.sync_state_with_parent().map_err(|e| e.to_string())?;
|
||||
}
|
||||
appsink.set_callbacks(
|
||||
gstreamer_app::AppSinkCallbacks::builder()
|
||||
.new_sample(move |appsink| {
|
||||
let sample = appsink
|
||||
.pull_sample()
|
||||
.map_err(|_| gstreamer::FlowError::Eos)?;
|
||||
let buffer = sample.buffer_owned().ok_or(gstreamer::FlowError::Error)?;
|
||||
|
||||
let buffer = buffer
|
||||
.into_mapped_buffer_readable()
|
||||
.map_err(|_| gstreamer::FlowError::Error)?;
|
||||
let floatref = buffer
|
||||
.as_slice()
|
||||
.as_slice_of::<f32>()
|
||||
.map_err(|_| gstreamer::FlowError::Error)?;
|
||||
|
||||
let block = Block::for_vec(floatref.into());
|
||||
tx.send(block).map_err(|_| gstreamer::FlowError::Error)?;
|
||||
Ok(gstreamer::FlowSuccess::Ok)
|
||||
})
|
||||
.build(),
|
||||
);
|
||||
Ok(Self { rx, pipeline })
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioStreamReader for GStreamerAudioStreamReader {
|
||||
fn pull(&self) -> Block {
|
||||
self.rx.recv().unwrap()
|
||||
}
|
||||
|
||||
fn start(&self) {
|
||||
self.pipeline.set_state(gstreamer::State::Playing).unwrap();
|
||||
}
|
||||
|
||||
fn stop(&self) {
|
||||
self.pipeline.set_state(gstreamer::State::Null).unwrap();
|
||||
}
|
||||
}
|
||||
177
components/media/backends/gstreamer/datachannel.rs
Normal file
177
components/media/backends/gstreamer/datachannel.rs
Normal file
@@ -0,0 +1,177 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::sync::Mutex;
|
||||
|
||||
use glib::prelude::*;
|
||||
use gstreamer_webrtc::{WebRTCDataChannel, WebRTCDataChannelState};
|
||||
use servo_media_webrtc::thread::InternalEvent;
|
||||
use servo_media_webrtc::{
|
||||
DataChannelEvent, DataChannelId, DataChannelInit, DataChannelMessage, DataChannelState,
|
||||
WebRtcController as WebRtcThread, WebRtcError,
|
||||
};
|
||||
|
||||
pub struct GStreamerWebRtcDataChannel {
|
||||
channel: WebRTCDataChannel,
|
||||
id: DataChannelId,
|
||||
thread: WebRtcThread,
|
||||
}
|
||||
|
||||
impl GStreamerWebRtcDataChannel {
|
||||
pub fn new(
|
||||
servo_channel_id: &DataChannelId,
|
||||
webrtc: &gstreamer::Element,
|
||||
thread: &WebRtcThread,
|
||||
init: &DataChannelInit,
|
||||
) -> Result<Self, String> {
|
||||
let label = &init.label;
|
||||
let mut init_struct = gstreamer::Structure::builder("options")
|
||||
.field("ordered", init.ordered)
|
||||
.field("protocol", &init.protocol)
|
||||
.field("negotiated", init.negotiated)
|
||||
.build();
|
||||
|
||||
if let Some(max_packet_life_time) = init.max_packet_life_time {
|
||||
init_struct.set_value(
|
||||
"max-packet-lifetime",
|
||||
(max_packet_life_time as u32).to_send_value(),
|
||||
);
|
||||
}
|
||||
|
||||
if let Some(max_retransmits) = init.max_retransmits {
|
||||
init_struct.set_value("max-retransmits", (max_retransmits as u32).to_send_value());
|
||||
}
|
||||
|
||||
if let Some(id) = init.id {
|
||||
init_struct.set_value("id", (id as u32).to_send_value());
|
||||
}
|
||||
|
||||
let channel = webrtc
|
||||
.emit_by_name::<WebRTCDataChannel>("create-data-channel", &[&label, &init_struct]);
|
||||
|
||||
GStreamerWebRtcDataChannel::from(servo_channel_id, channel, thread)
|
||||
}
|
||||
|
||||
pub fn from(
|
||||
id: &DataChannelId,
|
||||
channel: WebRTCDataChannel,
|
||||
thread: &WebRtcThread,
|
||||
) -> Result<Self, String> {
|
||||
let id_ = *id;
|
||||
let thread_ = Mutex::new(thread.clone());
|
||||
channel.connect_on_open(move |_| {
|
||||
thread_
|
||||
.lock()
|
||||
.unwrap()
|
||||
.internal_event(InternalEvent::OnDataChannelEvent(
|
||||
id_,
|
||||
DataChannelEvent::Open,
|
||||
));
|
||||
});
|
||||
|
||||
let id_ = *id;
|
||||
let thread_ = Mutex::new(thread.clone());
|
||||
channel.connect_on_close(move |_| {
|
||||
thread_
|
||||
.lock()
|
||||
.unwrap()
|
||||
.internal_event(InternalEvent::OnDataChannelEvent(
|
||||
id_,
|
||||
DataChannelEvent::Close,
|
||||
));
|
||||
});
|
||||
|
||||
let id_ = *id;
|
||||
let thread_ = Mutex::new(thread.clone());
|
||||
channel.connect_on_error(move |_, error| {
|
||||
thread_
|
||||
.lock()
|
||||
.unwrap()
|
||||
.internal_event(InternalEvent::OnDataChannelEvent(
|
||||
id_,
|
||||
DataChannelEvent::Error(WebRtcError::Backend(error.to_string())),
|
||||
));
|
||||
});
|
||||
|
||||
let id_ = *id;
|
||||
let thread_ = Mutex::new(thread.clone());
|
||||
channel.connect_on_message_string(move |_, message| {
|
||||
let Some(message) = message.map(|s| s.to_owned()) else {
|
||||
return;
|
||||
};
|
||||
thread_
|
||||
.lock()
|
||||
.unwrap()
|
||||
.internal_event(InternalEvent::OnDataChannelEvent(
|
||||
id_,
|
||||
DataChannelEvent::OnMessage(DataChannelMessage::Text(message)),
|
||||
));
|
||||
});
|
||||
|
||||
let id_ = *id;
|
||||
let thread_ = Mutex::new(thread.clone());
|
||||
channel.connect_on_message_data(move |_, message| {
|
||||
let Some(message) = message.map(|b| b.to_owned()) else {
|
||||
return;
|
||||
};
|
||||
thread_
|
||||
.lock()
|
||||
.unwrap()
|
||||
.internal_event(InternalEvent::OnDataChannelEvent(
|
||||
id_,
|
||||
DataChannelEvent::OnMessage(DataChannelMessage::Binary(message.to_vec())),
|
||||
));
|
||||
});
|
||||
|
||||
let id_ = *id;
|
||||
let thread_ = Mutex::new(thread.clone());
|
||||
channel.connect_ready_state_notify(move |channel| {
|
||||
let ready_state = channel.ready_state();
|
||||
let ready_state = match ready_state {
|
||||
WebRTCDataChannelState::Connecting => DataChannelState::Connecting,
|
||||
WebRTCDataChannelState::Open => DataChannelState::Open,
|
||||
WebRTCDataChannelState::Closing => DataChannelState::Closing,
|
||||
WebRTCDataChannelState::Closed => DataChannelState::Closed,
|
||||
WebRTCDataChannelState::__Unknown(state) => DataChannelState::__Unknown(state),
|
||||
_ => return,
|
||||
};
|
||||
thread_
|
||||
.lock()
|
||||
.unwrap()
|
||||
.internal_event(InternalEvent::OnDataChannelEvent(
|
||||
id_,
|
||||
DataChannelEvent::StateChange(ready_state),
|
||||
));
|
||||
});
|
||||
|
||||
Ok(Self {
|
||||
id: *id,
|
||||
thread: thread.to_owned(),
|
||||
channel,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn send(&self, message: &DataChannelMessage) {
|
||||
match message {
|
||||
DataChannelMessage::Text(text) => self.channel.send_string(Some(text)),
|
||||
DataChannelMessage::Binary(data) => self
|
||||
.channel
|
||||
.send_data(Some(&glib::Bytes::from(data.as_slice()))),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn close(&self) {
|
||||
self.channel.close()
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for GStreamerWebRtcDataChannel {
|
||||
fn drop(&mut self) {
|
||||
self.thread
|
||||
.internal_event(InternalEvent::OnDataChannelEvent(
|
||||
self.id,
|
||||
DataChannelEvent::Close,
|
||||
));
|
||||
}
|
||||
}
|
||||
64
components/media/backends/gstreamer/device_monitor.rs
Normal file
64
components/media/backends/gstreamer/device_monitor.rs
Normal file
@@ -0,0 +1,64 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::cell::RefCell;
|
||||
|
||||
use gstreamer::DeviceMonitor as GstDeviceMonitor;
|
||||
use gstreamer::prelude::*;
|
||||
use servo_media_streams::device_monitor::{MediaDeviceInfo, MediaDeviceKind, MediaDeviceMonitor};
|
||||
|
||||
pub struct GStreamerDeviceMonitor {
|
||||
devices: RefCell<Option<Vec<MediaDeviceInfo>>>,
|
||||
}
|
||||
|
||||
impl GStreamerDeviceMonitor {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
devices: RefCell::new(None),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_devices(&self) -> Result<Vec<MediaDeviceInfo>, ()> {
|
||||
const AUDIO_SOURCE: &str = "Audio/Source";
|
||||
const AUDIO_SINK: &str = "Audio/Sink";
|
||||
const VIDEO_SOURCE: &str = "Video/Source";
|
||||
let device_monitor = GstDeviceMonitor::new();
|
||||
let audio_caps = gstreamer_audio::AudioCapsBuilder::new().build();
|
||||
device_monitor.add_filter(Some(AUDIO_SOURCE), Some(&audio_caps));
|
||||
device_monitor.add_filter(Some(AUDIO_SINK), Some(&audio_caps));
|
||||
let video_caps = gstreamer_video::VideoCapsBuilder::new().build();
|
||||
device_monitor.add_filter(Some(VIDEO_SOURCE), Some(&video_caps));
|
||||
let devices = device_monitor
|
||||
.devices()
|
||||
.iter()
|
||||
.filter_map(|device| {
|
||||
let display_name = device.display_name().as_str().to_owned();
|
||||
Some(MediaDeviceInfo {
|
||||
device_id: display_name.clone(),
|
||||
kind: match device.device_class().as_str() {
|
||||
AUDIO_SOURCE => MediaDeviceKind::AudioInput,
|
||||
AUDIO_SINK => MediaDeviceKind::AudioOutput,
|
||||
VIDEO_SOURCE => MediaDeviceKind::VideoInput,
|
||||
_ => return None,
|
||||
},
|
||||
label: display_name,
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
Ok(devices)
|
||||
}
|
||||
}
|
||||
|
||||
impl MediaDeviceMonitor for GStreamerDeviceMonitor {
|
||||
fn enumerate_devices(&self) -> Option<Vec<MediaDeviceInfo>> {
|
||||
{
|
||||
if let Some(ref devices) = *self.devices.borrow() {
|
||||
return Some(devices.clone());
|
||||
}
|
||||
}
|
||||
let devices = self.get_devices().ok()?;
|
||||
*self.devices.borrow_mut() = Some(devices.clone());
|
||||
Some(devices)
|
||||
}
|
||||
}
|
||||
359
components/media/backends/gstreamer/lib.rs
Normal file
359
components/media/backends/gstreamer/lib.rs
Normal file
@@ -0,0 +1,359 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
pub mod audio_decoder;
|
||||
pub mod audio_sink;
|
||||
pub mod audio_stream_reader;
|
||||
mod datachannel;
|
||||
mod device_monitor;
|
||||
pub mod media_capture;
|
||||
pub mod media_stream;
|
||||
mod media_stream_source;
|
||||
pub mod player;
|
||||
mod registry_scanner;
|
||||
mod render;
|
||||
mod source;
|
||||
pub mod webrtc;
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
|
||||
use std::sync::mpsc::{self, Sender};
|
||||
use std::sync::{Arc, Mutex, Weak};
|
||||
use std::thread;
|
||||
use std::vec::Vec;
|
||||
|
||||
use device_monitor::GStreamerDeviceMonitor;
|
||||
use gstreamer::prelude::*;
|
||||
use ipc_channel::ipc::IpcSender;
|
||||
use log::warn;
|
||||
use media_stream::GStreamerMediaStream;
|
||||
use mime::Mime;
|
||||
use once_cell::sync::{Lazy, OnceCell};
|
||||
use registry_scanner::GSTREAMER_REGISTRY_SCANNER;
|
||||
use servo_media::{Backend, BackendDeInit, BackendInit, MediaInstanceError, SupportsMediaType};
|
||||
use servo_media_audio::context::{AudioContext, AudioContextOptions};
|
||||
use servo_media_audio::decoder::AudioDecoder;
|
||||
use servo_media_audio::sink::AudioSinkError;
|
||||
use servo_media_audio::{AudioBackend, AudioStreamReader};
|
||||
use servo_media_player::audio::AudioRenderer;
|
||||
use servo_media_player::context::PlayerGLContext;
|
||||
use servo_media_player::video::VideoFrameRenderer;
|
||||
use servo_media_player::{Player, PlayerEvent, StreamType};
|
||||
use servo_media_streams::capture::MediaTrackConstraintSet;
|
||||
use servo_media_streams::device_monitor::MediaDeviceMonitor;
|
||||
use servo_media_streams::registry::MediaStreamId;
|
||||
use servo_media_streams::{MediaOutput, MediaSocket, MediaStreamType};
|
||||
use servo_media_traits::{BackendMsg, ClientContextId, MediaInstance};
|
||||
use servo_media_webrtc::{WebRtcBackend, WebRtcController, WebRtcSignaller};
|
||||
|
||||
static BACKEND_BASE_TIME: Lazy<gstreamer::ClockTime> =
|
||||
Lazy::new(|| gstreamer::SystemClock::obtain().time());
|
||||
|
||||
static BACKEND_THREAD: OnceCell<bool> = OnceCell::new();
|
||||
|
||||
pub type WeakMediaInstance = Weak<Mutex<dyn MediaInstance>>;
|
||||
pub type WeakMediaInstanceHashMap = HashMap<ClientContextId, Vec<(usize, WeakMediaInstance)>>;
|
||||
|
||||
pub struct GStreamerBackend {
|
||||
capture_mocking: AtomicBool,
|
||||
instances: Arc<Mutex<WeakMediaInstanceHashMap>>,
|
||||
next_instance_id: AtomicUsize,
|
||||
/// Channel to communicate media instances with its owner Backend.
|
||||
backend_chan: Arc<Mutex<Sender<BackendMsg>>>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[allow(dead_code)]
|
||||
pub struct ErrorLoadingPlugins(Vec<&'static str>);
|
||||
|
||||
impl GStreamerBackend {
|
||||
pub fn init_with_plugins(
|
||||
plugin_dir: PathBuf,
|
||||
plugins: &[&'static str],
|
||||
) -> Result<Box<dyn Backend>, ErrorLoadingPlugins> {
|
||||
gstreamer::init().unwrap();
|
||||
|
||||
// GStreamer between 1.19.1 and 1.22.7 will not send messages like "end of stream"
|
||||
// to GstPlayer unless there is a GLib main loop running somewhere. We should remove
|
||||
// this workaround when we raise of required version of GStreamer.
|
||||
// See https://github.com/servo/media/pull/393.
|
||||
let needs_background_glib_main_loop = {
|
||||
let (major, minor, micro, _) = gstreamer::version();
|
||||
(major, minor, micro) >= (1, 19, 1) && (major, minor, micro) <= (1, 22, 7)
|
||||
};
|
||||
|
||||
if needs_background_glib_main_loop {
|
||||
BACKEND_THREAD.get_or_init(|| {
|
||||
thread::spawn(|| glib::MainLoop::new(None, false).run());
|
||||
true
|
||||
});
|
||||
}
|
||||
|
||||
let mut errors = vec![];
|
||||
for plugin in plugins {
|
||||
let mut path = plugin_dir.clone();
|
||||
path.push(plugin);
|
||||
let registry = gstreamer::Registry::get();
|
||||
if gstreamer::Plugin::load_file(&path)
|
||||
.is_ok_and(|plugin| registry.add_plugin(&plugin).is_ok())
|
||||
{
|
||||
continue;
|
||||
}
|
||||
errors.push(*plugin);
|
||||
}
|
||||
|
||||
if !errors.is_empty() {
|
||||
return Err(ErrorLoadingPlugins(errors));
|
||||
}
|
||||
|
||||
type MediaInstancesVec = Vec<(usize, Weak<Mutex<dyn MediaInstance>>)>;
|
||||
let instances: HashMap<ClientContextId, MediaInstancesVec> = Default::default();
|
||||
let instances = Arc::new(Mutex::new(instances));
|
||||
|
||||
let instances_ = instances.clone();
|
||||
let (backend_chan, recvr) = mpsc::channel();
|
||||
thread::Builder::new()
|
||||
.name("GStreamerBackend ShutdownThread".to_owned())
|
||||
.spawn(move || {
|
||||
match recvr.recv().unwrap() {
|
||||
BackendMsg::Shutdown {
|
||||
context,
|
||||
id,
|
||||
tx_ack,
|
||||
} => {
|
||||
let mut instances_ = instances_.lock().unwrap();
|
||||
if let Some(vec) = instances_.get_mut(&context) {
|
||||
vec.retain(|m| m.0 != id);
|
||||
if vec.is_empty() {
|
||||
instances_.remove(&context);
|
||||
}
|
||||
}
|
||||
// tell caller we are done removing this instance
|
||||
let _ = tx_ack.send(());
|
||||
},
|
||||
};
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
Ok(Box::new(GStreamerBackend {
|
||||
capture_mocking: AtomicBool::new(false),
|
||||
instances,
|
||||
next_instance_id: AtomicUsize::new(0),
|
||||
backend_chan: Arc::new(Mutex::new(backend_chan)),
|
||||
}))
|
||||
}
|
||||
|
||||
fn media_instance_action(
|
||||
&self,
|
||||
id: &ClientContextId,
|
||||
cb: &dyn Fn(&dyn MediaInstance) -> Result<(), MediaInstanceError>,
|
||||
) {
|
||||
let mut instances = self.instances.lock().unwrap();
|
||||
match instances.get_mut(id) {
|
||||
Some(vec) => vec.retain(|(_, weak)| match weak.upgrade() {
|
||||
Some(instance) => {
|
||||
if cb(&*(instance.lock().unwrap())).is_err() {
|
||||
warn!("Error executing media instance action");
|
||||
}
|
||||
true
|
||||
},
|
||||
_ => false,
|
||||
}),
|
||||
None => {
|
||||
warn!("Trying to exec media action on an unknown client context");
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Backend for GStreamerBackend {
|
||||
fn create_player(
|
||||
&self,
|
||||
context_id: &ClientContextId,
|
||||
stream_type: StreamType,
|
||||
sender: IpcSender<PlayerEvent>,
|
||||
renderer: Option<Arc<Mutex<dyn VideoFrameRenderer>>>,
|
||||
audio_renderer: Option<Arc<Mutex<dyn AudioRenderer>>>,
|
||||
gl_context: Box<dyn PlayerGLContext>,
|
||||
) -> Arc<Mutex<dyn Player>> {
|
||||
let id = self.next_instance_id.fetch_add(1, Ordering::Relaxed);
|
||||
let player = Arc::new(Mutex::new(player::GStreamerPlayer::new(
|
||||
id,
|
||||
context_id,
|
||||
self.backend_chan.clone(),
|
||||
stream_type,
|
||||
sender,
|
||||
renderer,
|
||||
audio_renderer,
|
||||
gl_context,
|
||||
)));
|
||||
let mut instances = self.instances.lock().unwrap();
|
||||
let entry = instances.entry(*context_id).or_default();
|
||||
entry.push((id, Arc::downgrade(&player).clone()));
|
||||
player
|
||||
}
|
||||
|
||||
fn create_audio_context(
|
||||
&self,
|
||||
client_context_id: &ClientContextId,
|
||||
options: AudioContextOptions,
|
||||
) -> Result<Arc<Mutex<AudioContext>>, AudioSinkError> {
|
||||
let id = self.next_instance_id.fetch_add(1, Ordering::Relaxed);
|
||||
let audio_context =
|
||||
AudioContext::new::<Self>(id, client_context_id, self.backend_chan.clone(), options)?;
|
||||
|
||||
let audio_context = Arc::new(Mutex::new(audio_context));
|
||||
|
||||
let mut instances = self.instances.lock().unwrap();
|
||||
let entry = instances.entry(*client_context_id).or_default();
|
||||
entry.push((id, Arc::downgrade(&audio_context).clone()));
|
||||
|
||||
Ok(audio_context)
|
||||
}
|
||||
|
||||
fn create_webrtc(&self, signaller: Box<dyn WebRtcSignaller>) -> WebRtcController {
|
||||
WebRtcController::new::<Self>(signaller)
|
||||
}
|
||||
|
||||
fn create_audiostream(&self) -> MediaStreamId {
|
||||
GStreamerMediaStream::create_audio()
|
||||
}
|
||||
|
||||
fn create_videostream(&self) -> MediaStreamId {
|
||||
GStreamerMediaStream::create_video()
|
||||
}
|
||||
|
||||
fn create_stream_output(&self) -> Box<dyn MediaOutput> {
|
||||
Box::new(media_stream::MediaSink::default())
|
||||
}
|
||||
|
||||
fn create_stream_and_socket(
|
||||
&self,
|
||||
ty: MediaStreamType,
|
||||
) -> (Box<dyn MediaSocket>, MediaStreamId) {
|
||||
let (id, socket) = GStreamerMediaStream::create_proxy(ty);
|
||||
(Box::new(socket), id)
|
||||
}
|
||||
|
||||
fn create_audioinput_stream(&self, set: MediaTrackConstraintSet) -> Option<MediaStreamId> {
|
||||
if self.capture_mocking.load(Ordering::Acquire) {
|
||||
// XXXManishearth we should caps filter this
|
||||
return Some(self.create_audiostream());
|
||||
}
|
||||
media_capture::create_audioinput_stream(set)
|
||||
}
|
||||
|
||||
fn create_videoinput_stream(&self, set: MediaTrackConstraintSet) -> Option<MediaStreamId> {
|
||||
if self.capture_mocking.load(Ordering::Acquire) {
|
||||
// XXXManishearth we should caps filter this
|
||||
return Some(self.create_videostream());
|
||||
}
|
||||
media_capture::create_videoinput_stream(set)
|
||||
}
|
||||
|
||||
fn can_play_type(&self, media_type: &str) -> SupportsMediaType {
|
||||
if let Ok(mime) = media_type.parse::<Mime>() {
|
||||
let mime_type = mime.type_().as_str().to_owned() + "/" + mime.subtype().as_str();
|
||||
let codecs = match mime.get_param("codecs") {
|
||||
Some(codecs) => codecs
|
||||
.as_str()
|
||||
.split(',')
|
||||
.map(|codec| codec.trim())
|
||||
.collect(),
|
||||
None => vec![],
|
||||
};
|
||||
|
||||
if GSTREAMER_REGISTRY_SCANNER.is_container_type_supported(&mime_type) {
|
||||
if codecs.is_empty() {
|
||||
return SupportsMediaType::Maybe;
|
||||
} else if GSTREAMER_REGISTRY_SCANNER.are_all_codecs_supported(&codecs) {
|
||||
return SupportsMediaType::Probably;
|
||||
} else {
|
||||
return SupportsMediaType::No;
|
||||
}
|
||||
}
|
||||
}
|
||||
SupportsMediaType::No
|
||||
}
|
||||
|
||||
fn set_capture_mocking(&self, mock: bool) {
|
||||
self.capture_mocking.store(mock, Ordering::Release)
|
||||
}
|
||||
|
||||
fn mute(&self, id: &ClientContextId, val: bool) {
|
||||
self.media_instance_action(
|
||||
id,
|
||||
&(move |instance: &dyn MediaInstance| instance.mute(val)),
|
||||
);
|
||||
}
|
||||
|
||||
fn suspend(&self, id: &ClientContextId) {
|
||||
self.media_instance_action(id, &|instance: &dyn MediaInstance| instance.suspend());
|
||||
}
|
||||
|
||||
fn resume(&self, id: &ClientContextId) {
|
||||
self.media_instance_action(id, &|instance: &dyn MediaInstance| instance.resume());
|
||||
}
|
||||
|
||||
fn get_device_monitor(&self) -> Box<dyn MediaDeviceMonitor> {
|
||||
Box::new(GStreamerDeviceMonitor::new())
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioBackend for GStreamerBackend {
|
||||
type Sink = audio_sink::GStreamerAudioSink;
|
||||
fn make_decoder() -> Box<dyn AudioDecoder> {
|
||||
Box::new(audio_decoder::GStreamerAudioDecoder::new())
|
||||
}
|
||||
fn make_sink() -> Result<Self::Sink, AudioSinkError> {
|
||||
audio_sink::GStreamerAudioSink::new()
|
||||
}
|
||||
|
||||
fn make_streamreader(id: MediaStreamId, sample_rate: f32) -> Box<dyn AudioStreamReader + Send> {
|
||||
Box::new(audio_stream_reader::GStreamerAudioStreamReader::new(id, sample_rate).unwrap())
|
||||
}
|
||||
}
|
||||
|
||||
impl WebRtcBackend for GStreamerBackend {
|
||||
type Controller = webrtc::GStreamerWebRtcController;
|
||||
|
||||
fn construct_webrtc_controller(
|
||||
signaller: Box<dyn WebRtcSignaller>,
|
||||
thread: WebRtcController,
|
||||
) -> Self::Controller {
|
||||
webrtc::construct(signaller, thread).expect("WebRTC creation failed")
|
||||
}
|
||||
}
|
||||
|
||||
impl BackendInit for GStreamerBackend {
|
||||
fn init() -> Box<dyn Backend> {
|
||||
Self::init_with_plugins(PathBuf::new(), &[]).unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
impl BackendDeInit for GStreamerBackend {
|
||||
fn deinit(&self) {
|
||||
let to_shutdown: Vec<(ClientContextId, usize)> = {
|
||||
let map = self.instances.lock().unwrap();
|
||||
map.iter()
|
||||
.flat_map(|(ctx, v)| v.iter().map(move |(id, _)| (*ctx, *id)))
|
||||
.collect()
|
||||
};
|
||||
|
||||
for (ctx, id) in to_shutdown {
|
||||
let (tx_ack, rx_ack) = mpsc::channel();
|
||||
let _ = self
|
||||
.backend_chan
|
||||
.lock()
|
||||
.unwrap()
|
||||
.send(BackendMsg::Shutdown {
|
||||
context: ctx,
|
||||
id,
|
||||
tx_ack,
|
||||
});
|
||||
let _ = rx_ack.recv();
|
||||
}
|
||||
}
|
||||
}
|
||||
172
components/media/backends/gstreamer/media_capture.rs
Normal file
172
components/media/backends/gstreamer/media_capture.rs
Normal file
@@ -0,0 +1,172 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use gstreamer;
|
||||
use gstreamer::caps::NoFeature;
|
||||
use gstreamer::prelude::*;
|
||||
use servo_media_streams::MediaStreamType;
|
||||
use servo_media_streams::capture::*;
|
||||
use servo_media_streams::registry::MediaStreamId;
|
||||
|
||||
use crate::media_stream::GStreamerMediaStream;
|
||||
|
||||
trait AddToCaps {
|
||||
type Bound;
|
||||
fn add_to_caps(
|
||||
&self,
|
||||
name: &str,
|
||||
min: Self::Bound,
|
||||
max: Self::Bound,
|
||||
builder: gstreamer::caps::Builder<NoFeature>,
|
||||
) -> Option<gstreamer::caps::Builder<NoFeature>>;
|
||||
}
|
||||
|
||||
impl AddToCaps for Constrain<u32> {
|
||||
type Bound = u32;
|
||||
fn add_to_caps(
|
||||
&self,
|
||||
name: &str,
|
||||
min: u32,
|
||||
max: u32,
|
||||
builder: gstreamer::caps::Builder<NoFeature>,
|
||||
) -> Option<gstreamer::caps::Builder<NoFeature>> {
|
||||
match self {
|
||||
Constrain::Value(v) => Some(builder.field(name, v)),
|
||||
Constrain::Range(r) => {
|
||||
let min = into_i32(r.min.unwrap_or(min));
|
||||
let max = into_i32(r.max.unwrap_or(max));
|
||||
let range = gstreamer::IntRange::<i32>::new(min, max);
|
||||
|
||||
// TODO: Include the ideal caps value in the caps, needs a refactor
|
||||
// of the AddToCaps trait
|
||||
Some(builder.field(name, range))
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn into_i32(x: u32) -> i32 {
|
||||
if x > i32::MAX as u32 {
|
||||
i32::MAX
|
||||
} else {
|
||||
x as i32
|
||||
}
|
||||
}
|
||||
|
||||
impl AddToCaps for Constrain<f64> {
|
||||
type Bound = i32;
|
||||
fn add_to_caps<'a>(
|
||||
&self,
|
||||
name: &str,
|
||||
min: i32,
|
||||
max: i32,
|
||||
builder: gstreamer::caps::Builder<NoFeature>,
|
||||
) -> Option<gstreamer::caps::Builder<NoFeature>> {
|
||||
match self {
|
||||
Constrain::Value(v) => {
|
||||
Some(builder.field("name", gstreamer::Fraction::approximate_f64(*v)?))
|
||||
},
|
||||
Constrain::Range(r) => {
|
||||
let min = r
|
||||
.min
|
||||
.and_then(gstreamer::Fraction::approximate_f64)
|
||||
.unwrap_or(gstreamer::Fraction::new(min, 1));
|
||||
let max = r
|
||||
.max
|
||||
.and_then(gstreamer::Fraction::approximate_f64)
|
||||
.unwrap_or(gstreamer::Fraction::new(max, 1));
|
||||
let range = gstreamer::FractionRange::new(min, max);
|
||||
// TODO: Include the ideal caps value in the caps, needs a refactor
|
||||
// of the AddToCaps trait
|
||||
Some(builder.field(name, range))
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TODO(Manishearth): Should support a set of constraints
|
||||
fn into_caps(set: MediaTrackConstraintSet, format: &str) -> Option<gstreamer::Caps> {
|
||||
let mut builder = gstreamer::Caps::builder(format);
|
||||
if let Some(w) = set.width {
|
||||
builder = w.add_to_caps("width", 0, 1000000, builder)?;
|
||||
}
|
||||
if let Some(h) = set.height {
|
||||
builder = h.add_to_caps("height", 0, 1000000, builder)?;
|
||||
}
|
||||
if let Some(aspect) = set.aspect {
|
||||
builder = aspect.add_to_caps("pixel-aspect-ratio", 0, 1000000, builder)?;
|
||||
}
|
||||
if let Some(fr) = set.frame_rate {
|
||||
builder = fr.add_to_caps("framerate", 0, 1000000, builder)?;
|
||||
}
|
||||
if let Some(sr) = set.sample_rate {
|
||||
builder = sr.add_to_caps("rate", 0, 1000000, builder)?;
|
||||
}
|
||||
Some(builder.build())
|
||||
}
|
||||
|
||||
struct GstMediaDevices {
|
||||
monitor: gstreamer::DeviceMonitor,
|
||||
}
|
||||
|
||||
impl GstMediaDevices {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
monitor: gstreamer::DeviceMonitor::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_track(
|
||||
&self,
|
||||
video: bool,
|
||||
constraints: MediaTrackConstraintSet,
|
||||
) -> Option<GstMediaTrack> {
|
||||
let (format, filter) = if video {
|
||||
("video/x-raw", "Video/Source")
|
||||
} else {
|
||||
("audio/x-raw", "Audio/Source")
|
||||
};
|
||||
let caps = into_caps(constraints, format)?;
|
||||
let f = self.monitor.add_filter(Some(filter), Some(&caps));
|
||||
let devices = self.monitor.devices();
|
||||
if let Some(f) = f {
|
||||
let _ = self.monitor.remove_filter(f);
|
||||
}
|
||||
match devices.front() {
|
||||
Some(d) => {
|
||||
let element = d.create_element(None).ok()?;
|
||||
Some(GstMediaTrack { element })
|
||||
},
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct GstMediaTrack {
|
||||
element: gstreamer::Element,
|
||||
}
|
||||
|
||||
fn create_input_stream(
|
||||
stream_type: MediaStreamType,
|
||||
constraint_set: MediaTrackConstraintSet,
|
||||
) -> Option<MediaStreamId> {
|
||||
let devices = GstMediaDevices::new();
|
||||
devices
|
||||
.get_track(stream_type == MediaStreamType::Video, constraint_set)
|
||||
.map(|track| {
|
||||
let f = match stream_type {
|
||||
MediaStreamType::Audio => GStreamerMediaStream::create_audio_from,
|
||||
MediaStreamType::Video => GStreamerMediaStream::create_video_from,
|
||||
};
|
||||
f(track.element)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn create_audioinput_stream(constraint_set: MediaTrackConstraintSet) -> Option<MediaStreamId> {
|
||||
create_input_stream(MediaStreamType::Audio, constraint_set)
|
||||
}
|
||||
|
||||
pub fn create_videoinput_stream(constraint_set: MediaTrackConstraintSet) -> Option<MediaStreamId> {
|
||||
create_input_stream(MediaStreamType::Video, constraint_set)
|
||||
}
|
||||
298
components/media/backends/gstreamer/media_stream.rs
Normal file
298
components/media/backends/gstreamer/media_stream.rs
Normal file
@@ -0,0 +1,298 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::any::Any;
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use gstreamer;
|
||||
use gstreamer::prelude::*;
|
||||
use once_cell::sync::Lazy;
|
||||
use servo_media_streams::registry::{
|
||||
MediaStreamId, get_stream, register_stream, unregister_stream,
|
||||
};
|
||||
use servo_media_streams::{MediaOutput, MediaSocket, MediaStream, MediaStreamType};
|
||||
|
||||
use super::BACKEND_BASE_TIME;
|
||||
|
||||
/// RTP caps describing an Opus-encoded audio payload; shared by audio
/// streams and the audio source pad template.
pub static RTP_CAPS_OPUS: Lazy<gstreamer::Caps> = Lazy::new(|| {
    gstreamer::Caps::builder("application/x-rtp")
        .field("media", "audio")
        .field("encoding-name", "OPUS")
        .build()
});
|
||||
|
||||
/// RTP caps describing a VP8-encoded video payload; shared by video
/// streams and the video source pad template.
pub static RTP_CAPS_VP8: Lazy<gstreamer::Caps> = Lazy::new(|| {
    gstreamer::Caps::builder("application/x-rtp")
        .field("media", "video")
        .field("encoding-name", "VP8")
        .build()
});
|
||||
|
||||
/// GStreamer-backed implementation of a single audio or video media stream.
pub struct GStreamerMediaStream {
    // Registry id; `None` until `set_id` is called by the streams registry.
    id: Option<MediaStreamId>,
    // Whether this is an audio or a video stream.
    type_: MediaStreamType,
    // Ordered element chain; the last element is the stream's source pad owner.
    elements: Vec<gstreamer::Element>,
    // Pipeline the elements live in; `None` until attached.
    pipeline: Option<gstreamer::Pipeline>,
}
|
||||
|
||||
impl MediaStream for GStreamerMediaStream {
    /// Allows callers to downcast back to the concrete GStreamer type.
    fn as_any(&self) -> &dyn Any {
        self
    }

    /// Mutable counterpart of [`Self::as_any`].
    fn as_mut_any(&mut self) -> &mut dyn Any {
        self
    }

    /// Records the id assigned by the media streams registry.
    fn set_id(&mut self, id: MediaStreamId) {
        self.id = Some(id);
    }

    /// Whether this stream carries audio or video.
    fn ty(&self) -> MediaStreamType {
        self.type_
    }
}
|
||||
|
||||
impl GStreamerMediaStream {
    /// Creates a stream of the given type from an ordered element chain.
    /// The stream starts unregistered (`id == None`) and unattached
    /// (`pipeline == None`).
    pub fn new(type_: MediaStreamType, elements: Vec<gstreamer::Element>) -> Self {
        Self {
            id: None,
            type_,
            elements,
            pipeline: None,
        }
    }

    /// RTP caps matching this stream's type (Opus for audio, VP8 for video).
    pub fn caps(&self) -> &gstreamer::Caps {
        match self.type_ {
            MediaStreamType::Audio => &RTP_CAPS_OPUS,
            MediaStreamType::Video => &RTP_CAPS_VP8,
        }
    }

    /// Like [`Self::caps`], but with an explicit RTP payload-type number
    /// included in the caps.
    pub fn caps_with_payload(&self, payload: i32) -> gstreamer::Caps {
        match self.type_ {
            MediaStreamType::Audio => gstreamer::Caps::builder("application/x-rtp")
                .field("media", "audio")
                .field("encoding-name", "OPUS")
                .field("payload", payload)
                .build(),
            MediaStreamType::Video => gstreamer::Caps::builder("application/x-rtp")
                .field("media", "video")
                .field("encoding-name", "VP8")
                .field("payload", payload)
                .build(),
        }
    }

    /// Returns (a clone of) the last element of the chain, i.e. the element
    /// downstream consumers should link against.
    ///
    /// Panics if the stream was constructed with an empty element chain.
    pub fn src_element(&self) -> gstreamer::Element {
        self.elements.last().unwrap().clone()
    }

    /// Adds all elements to `pipeline`, links them in chain order and syncs
    /// each element's state with the pipeline.
    ///
    /// Panics if the stream is already attached to a pipeline.
    pub fn attach_to_pipeline(&mut self, pipeline: &gstreamer::Pipeline) {
        assert!(self.pipeline.is_none());
        let elements: Vec<_> = self.elements.iter().collect();
        pipeline.add_many(&elements[..]).unwrap();
        gstreamer::Element::link_many(&elements[..]).unwrap();
        for element in elements {
            element.sync_state_with_parent().unwrap();
        }
        self.pipeline = Some(pipeline.clone());
    }

    /// Returns the pipeline this stream is attached to, lazily creating a
    /// fresh one (sharing the backend-wide base time and the system clock)
    /// and attaching to it when the stream has none yet.
    pub fn pipeline_or_new(&mut self) -> gstreamer::Pipeline {
        match self.pipeline {
            Some(ref pipeline) => pipeline.clone(),
            _ => {
                let pipeline =
                    gstreamer::Pipeline::with_name("gstreamermediastream fresh pipeline");
                let clock = gstreamer::SystemClock::obtain();
                // Clearing the start time keeps running time continuous with
                // the shared backend base time set below.
                pipeline.set_start_time(gstreamer::ClockTime::NONE);
                pipeline.set_base_time(*BACKEND_BASE_TIME);
                pipeline.use_clock(Some(&clock));
                self.attach_to_pipeline(&pipeline);
                pipeline
            },
        }
    }

    /// Creates and registers a live test video stream (moving-ball pattern).
    pub fn create_video() -> MediaStreamId {
        let videotestsrc = gstreamer::ElementFactory::make("videotestsrc")
            .property_from_str("pattern", "ball")
            .property("is-live", true)
            .build()
            .unwrap();
        Self::create_video_from(videotestsrc)
    }

    /// Attaches encoding adapters to the stream, returning the source element
    ///
    /// For video: vp8enc -> rtpvp8pay -> queue -> capsfilter; for audio:
    /// opusenc -> rtpopuspay -> queue -> capsfilter. The returned capsfilter
    /// carries the stream's RTP caps.
    ///
    /// Panics if the stream is not attached to a pipeline yet.
    pub fn encoded(&mut self) -> gstreamer::Element {
        let pipeline = self
            .pipeline
            .as_ref()
            .expect("GStreamerMediaStream::encoded() should not be called without a pipeline");
        let src = self.src_element();

        let capsfilter = gstreamer::ElementFactory::make("capsfilter")
            .property("caps", self.caps())
            .build()
            .unwrap();
        match self.type_ {
            MediaStreamType::Video => {
                // Settings tuned for low-latency, real-time encoding.
                let vp8enc = gstreamer::ElementFactory::make("vp8enc")
                    .property("deadline", 1i64)
                    .property("error-resilient", "default")
                    .property("cpu-used", -16i32)
                    .property("lag-in-frames", 0i32)
                    .build()
                    .unwrap();

                let rtpvp8pay = gstreamer::ElementFactory::make("rtpvp8pay")
                    .property("picture-id-mode", "15-bit")
                    .property("mtu", 1200u32)
                    .build()
                    .unwrap();
                let queue2 = gstreamer::ElementFactory::make("queue").build().unwrap();

                pipeline
                    .add_many([&vp8enc, &rtpvp8pay, &queue2, &capsfilter])
                    .unwrap();
                gstreamer::Element::link_many([&src, &vp8enc, &rtpvp8pay, &queue2, &capsfilter])
                    .unwrap();
                vp8enc.sync_state_with_parent().unwrap();
                rtpvp8pay.sync_state_with_parent().unwrap();
                queue2.sync_state_with_parent().unwrap();
                capsfilter.sync_state_with_parent().unwrap();
                capsfilter
            },
            MediaStreamType::Audio => {
                let opusenc = gstreamer::ElementFactory::make("opusenc").build().unwrap();
                let rtpopuspay = gstreamer::ElementFactory::make("rtpopuspay")
                    .property("mtu", 1200u32)
                    .build()
                    .unwrap();
                let queue3 = gstreamer::ElementFactory::make("queue").build().unwrap();
                pipeline
                    .add_many([&opusenc, &rtpopuspay, &queue3, &capsfilter])
                    .unwrap();
                gstreamer::Element::link_many([&src, &opusenc, &rtpopuspay, &queue3, &capsfilter])
                    .unwrap();
                opusenc.sync_state_with_parent().unwrap();
                rtpopuspay.sync_state_with_parent().unwrap();
                queue3.sync_state_with_parent().unwrap();
                // NOTE(review): unlike the video branch above, capsfilter's
                // state is not synced here — confirm whether that is
                // intentional or an oversight.
                capsfilter
            },
        }
    }

    /// Wraps `source` into a registered video stream:
    /// source -> videoconvert -> queue.
    pub fn create_video_from(source: gstreamer::Element) -> MediaStreamId {
        let videoconvert = gstreamer::ElementFactory::make("videoconvert")
            .build()
            .unwrap();
        let queue = gstreamer::ElementFactory::make("queue").build().unwrap();

        register_stream(Arc::new(Mutex::new(GStreamerMediaStream::new(
            MediaStreamType::Video,
            vec![source, videoconvert, queue],
        ))))
    }

    /// Creates and registers a live test audio stream (sine wave).
    pub fn create_audio() -> MediaStreamId {
        let audiotestsrc = gstreamer::ElementFactory::make("audiotestsrc")
            .property_from_str("wave", "sine")
            .property("is-live", true)
            .build()
            .unwrap();

        Self::create_audio_from(audiotestsrc)
    }

    /// Wraps `source` into a registered audio stream:
    /// source -> queue -> audioconvert -> audioresample -> queue.
    pub fn create_audio_from(source: gstreamer::Element) -> MediaStreamId {
        let queue = gstreamer::ElementFactory::make("queue").build().unwrap();
        let audioconvert = gstreamer::ElementFactory::make("audioconvert")
            .build()
            .unwrap();
        let audioresample = gstreamer::ElementFactory::make("audioresample")
            .build()
            .unwrap();
        let queue2 = gstreamer::ElementFactory::make("queue").build().unwrap();

        register_stream(Arc::new(Mutex::new(GStreamerMediaStream::new(
            MediaStreamType::Audio,
            vec![source, queue, audioconvert, audioresample, queue2],
        ))))
    }

    /// Creates a proxy pair: a registered stream fed by a `proxysrc`, plus a
    /// socket wrapping the matching `proxysink` that a producer pipeline can
    /// push into.
    pub fn create_proxy(ty: MediaStreamType) -> (MediaStreamId, GstreamerMediaSocket) {
        let proxy_sink = gstreamer::ElementFactory::make("proxysink")
            .build()
            .unwrap();
        let proxy_src = gstreamer::ElementFactory::make("proxysrc")
            .property("proxysink", &proxy_sink)
            .build()
            .unwrap();
        let stream = match ty {
            MediaStreamType::Audio => Self::create_audio_from(proxy_src),
            MediaStreamType::Video => Self::create_video_from(proxy_src),
        };

        (stream, GstreamerMediaSocket { proxy_sink })
    }
}
|
||||
|
||||
impl Drop for GStreamerMediaStream {
|
||||
fn drop(&mut self) {
|
||||
if let Some(ref id) = self.id {
|
||||
unregister_stream(id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Simple output that renders registered streams via automatic audio/video
/// sinks, keeping each attached stream alive.
#[derive(Default)]
pub struct MediaSink {
    // Streams this sink has attached to; held to keep them registered/alive.
    streams: Vec<Arc<Mutex<dyn MediaStream>>>,
}
|
||||
|
||||
impl MediaOutput for MediaSink {
    /// Looks up `stream` in the registry, appends an `autoaudiosink`/
    /// `autovideosink` to its pipeline and starts playback.
    ///
    /// Panics if the id is not registered, the stream is not a
    /// `GStreamerMediaStream`, or any GStreamer operation fails.
    fn add_stream(&mut self, stream: &MediaStreamId) {
        let stream = get_stream(stream).expect("Media streams registry does not contain such ID");
        // Inner scope so the lock is released before we store the handle.
        {
            let mut stream = stream.lock().unwrap();
            let stream = stream
                .as_mut_any()
                .downcast_mut::<GStreamerMediaStream>()
                .unwrap();
            let pipeline = stream.pipeline_or_new();
            let last_element = stream.elements.last();
            let last_element = last_element.as_ref().unwrap();
            // Pick the platform's default sink for the stream's media type.
            let sink = match stream.type_ {
                MediaStreamType::Audio => "autoaudiosink",
                MediaStreamType::Video => "autovideosink",
            };
            let sink = gstreamer::ElementFactory::make(sink).build().unwrap();
            pipeline.add(&sink).unwrap();
            gstreamer::Element::link_many(&[last_element, &sink][..]).unwrap();

            pipeline.set_state(gstreamer::State::Playing).unwrap();
            sink.sync_state_with_parent().unwrap();
        }
        self.streams.push(stream.clone());
    }
}
|
||||
|
||||
/// Producer-side handle for a proxied stream: wraps the `proxysink` element
/// paired with a stream's `proxysrc` (see `GStreamerMediaStream::create_proxy`).
pub struct GstreamerMediaSocket {
    // The proxysink a producer pipeline links into.
    proxy_sink: gstreamer::Element,
}
|
||||
|
||||
impl GstreamerMediaSocket {
    /// The `proxysink` element to link a producer pipeline against.
    pub fn proxy_sink(&self) -> &gstreamer::Element {
        &self.proxy_sink
    }
}
|
||||
|
||||
impl MediaSocket for GstreamerMediaSocket {
    /// Allows callers to downcast back to the concrete GStreamer socket type.
    fn as_any(&self) -> &dyn Any {
        self
    }
}
|
||||
332
components/media/backends/gstreamer/media_stream_source.rs
Normal file
332
components/media/backends/gstreamer/media_stream_source.rs
Normal file
@@ -0,0 +1,332 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use glib::subclass::prelude::*;
|
||||
use gstreamer::prelude::*;
|
||||
use gstreamer::subclass::prelude::*;
|
||||
use gstreamer_base::UniqueFlowCombiner;
|
||||
use once_cell::sync::Lazy;
|
||||
use servo_media_streams::{MediaStream, MediaStreamType};
|
||||
use url::Url;
|
||||
|
||||
use crate::media_stream::{GStreamerMediaStream, RTP_CAPS_OPUS, RTP_CAPS_VP8};
|
||||
|
||||
// Implementation sub-module of the GObject
|
||||
mod imp {
|
||||
use super::*;
|
||||
|
||||
    /// Template for the sometimes-present audio source pad (Opus RTP caps).
    static AUDIO_SRC_PAD_TEMPLATE: Lazy<gstreamer::PadTemplate> = Lazy::new(|| {
        gstreamer::PadTemplate::new(
            "audio_src",
            gstreamer::PadDirection::Src,
            gstreamer::PadPresence::Sometimes,
            &RTP_CAPS_OPUS,
        )
        .expect("Could not create audio src pad template")
    });
|
||||
|
||||
    /// Template for the sometimes-present video source pad (VP8 RTP caps).
    static VIDEO_SRC_PAD_TEMPLATE: Lazy<gstreamer::PadTemplate> = Lazy::new(|| {
        gstreamer::PadTemplate::new(
            "video_src",
            gstreamer::PadDirection::Src,
            gstreamer::PadPresence::Sometimes,
            &RTP_CAPS_VP8,
        )
        .expect("Could not create video src pad template")
    });
|
||||
|
||||
    /// Private state of the `servomediastreamsrc` element: one proxysrc and
    /// ghost pad per media type, plus bookkeeping for flow aggregation.
    pub struct ServoMediaStreamSrc {
        // Debug category used by `gstreamer::log!` calls in this element.
        cat: gstreamer::DebugCategory,
        // proxysrc receiving the encoded audio from the stream's pipeline.
        audio_proxysrc: gstreamer::Element,
        // Ghost pad exposing the audio proxysrc's src pad on this bin.
        audio_srcpad: gstreamer::GhostPad,
        // proxysrc receiving the encoded video from the stream's pipeline.
        video_proxysrc: gstreamer::Element,
        // Ghost pad exposing the video proxysrc's src pad on this bin.
        video_srcpad: gstreamer::GhostPad,
        // Aggregates per-pad flow returns into one element-level result.
        flow_combiner: Arc<Mutex<UniqueFlowCombiner>>,
        // Set once an audio/video stream has been attached; used to decide
        // when to signal `no_more_pads`.
        has_audio_stream: Arc<AtomicBool>,
        has_video_stream: Arc<AtomicBool>,
    }
|
||||
|
||||
    impl ServoMediaStreamSrc {
        /// Plugs `stream` into this source element: appends a proxysink to the
        /// stream's own pipeline, wires the matching proxysrc/ghost pad on the
        /// `src` bin, and starts the stream's pipeline.
        ///
        /// `only_stream` indicates no further stream of the other type will be
        /// added, so `no_more_pads` can be signalled immediately.
        pub fn set_stream(
            &self,
            stream: &mut GStreamerMediaStream,
            src: &gstreamer::Element,
            only_stream: bool,
        ) {
            // XXXferjm the current design limits the number of streams to one
            // per type. This fulfills the basic use case for WebRTC, but we should
            // implement support for multiple streams per type at some point, which
            // likely involves encoding and muxing all streams of the same type
            // in a single stream.

            gstreamer::log!(self.cat, "Setting stream");

            // Append a proxysink to the media stream pipeline.
            let pipeline = stream.pipeline_or_new();
            let last_element = stream.encoded();
            let sink = gstreamer::ElementFactory::make("proxysink")
                .build()
                .unwrap();
            pipeline.add(&sink).unwrap();
            gstreamer::Element::link_many(&[&last_element, &sink][..]).unwrap();

            // Create the appropriate proxysrc depending on the stream type
            // and connect the media stream proxysink to it.
            self.setup_proxy_src(stream.ty(), &sink, src, only_stream);

            sink.sync_state_with_parent().unwrap();

            pipeline.set_state(gstreamer::State::Playing).unwrap();
        }

        /// Connects the proxysrc for `stream_type` to `sink`, exposes its pad
        /// on the `src` bin, and signals `no_more_pads` once both expected
        /// streams are present (or `only_stream` says no more are coming).
        fn setup_proxy_src(
            &self,
            stream_type: MediaStreamType,
            sink: &gstreamer::Element,
            src: &gstreamer::Element,
            only_stream: bool,
        ) {
            // Record that this type is now present; `no_more_pads` holds
            // whether the *other* type was already attached.
            let (proxysrc, src_pad, no_more_pads) = match stream_type {
                MediaStreamType::Audio => {
                    self.has_audio_stream.store(true, Ordering::Relaxed);
                    (
                        &self.audio_proxysrc,
                        &self.audio_srcpad,
                        self.has_video_stream.load(Ordering::Relaxed),
                    )
                },
                MediaStreamType::Video => {
                    self.has_video_stream.store(true, Ordering::Relaxed);
                    (
                        &self.video_proxysrc,
                        &self.video_srcpad,
                        self.has_audio_stream.load(Ordering::Relaxed),
                    )
                },
            };
            proxysrc.set_property("proxysink", sink);

            // Add proxysrc to bin
            let bin = src.downcast_ref::<gstreamer::Bin>().unwrap();
            bin.add(proxysrc)
                .expect("Could not add proxysrc element to bin");

            let target_pad = proxysrc
                .static_pad("src")
                .expect("Could not get proxysrc's static src pad");
            src_pad
                .set_target(Some(&target_pad))
                .expect("Could not set target pad");

            src.add_pad(src_pad)
                .expect("Could not add source pad to media stream src");
            src.set_element_flags(gstreamer::ElementFlags::SOURCE);

            // Track the ghost pad's internal proxy pad so per-pad flow
            // returns can be combined into one element-level result.
            let proxy_pad = src_pad.internal().unwrap();
            src_pad.set_active(true).expect("Could not active pad");
            self.flow_combiner.lock().unwrap().add_pad(&proxy_pad);

            src.sync_state_with_parent().unwrap();

            if no_more_pads || only_stream {
                src.no_more_pads();
            }
        }
    }
|
||||
|
||||
    // Basic declaration of our type for the GObject type system.
    #[glib::object_subclass]
    impl ObjectSubclass for ServoMediaStreamSrc {
        const NAME: &'static str = "ServoMediaStreamSrc";
        type Type = super::ServoMediaStreamSrc;
        type ParentType = gstreamer::Bin;
        type Interfaces = (gstreamer::URIHandler,);

        // Called once at the very beginning of instantiation of each instance and
        // creates the data structure that contains all our state
        fn with_class(_klass: &Self::Class) -> Self {
            let flow_combiner = Arc::new(Mutex::new(UniqueFlowCombiner::new()));

            // Builds a ghost pad from `pad_template` whose chain function
            // routes flow returns through the shared flow combiner.
            fn create_ghost_pad_with_template(
                name: &str,
                pad_template: &gstreamer::PadTemplate,
                flow_combiner: Arc<Mutex<UniqueFlowCombiner>>,
            ) -> gstreamer::GhostPad {
                gstreamer::GhostPad::builder_from_template(pad_template)
                    .name(name)
                    .chain_function({
                        move |pad, parent, buffer| {
                            let chain_result =
                                gstreamer::ProxyPad::chain_default(pad, parent, buffer);
                            let result = flow_combiner
                                .lock()
                                .unwrap()
                                .update_pad_flow(pad, chain_result);
                            // Propagate this pad's own result when the
                            // combined result is just "flushing".
                            if result == Err(gstreamer::FlowError::Flushing) {
                                return chain_result;
                            }
                            result
                        }
                    })
                    .build()
            }

            let audio_proxysrc = gstreamer::ElementFactory::make("proxysrc")
                .build()
                .expect("Could not create proxysrc element");
            let audio_srcpad = create_ghost_pad_with_template(
                "audio_src",
                &AUDIO_SRC_PAD_TEMPLATE,
                flow_combiner.clone(),
            );

            let video_proxysrc = gstreamer::ElementFactory::make("proxysrc")
                .build()
                .expect("Could not create proxysrc element");
            let video_srcpad = create_ghost_pad_with_template(
                "video_src",
                &VIDEO_SRC_PAD_TEMPLATE,
                flow_combiner.clone(),
            );

            Self {
                cat: gstreamer::DebugCategory::new(
                    "servomediastreamsrc",
                    gstreamer::DebugColorFlags::empty(),
                    Some("Servo media stream source"),
                ),
                audio_proxysrc,
                audio_srcpad,
                video_proxysrc,
                video_srcpad,
                flow_combiner,
                has_video_stream: Arc::new(AtomicBool::new(false)),
                has_audio_stream: Arc::new(AtomicBool::new(false)),
            }
        }
    }
|
||||
|
||||
    // The ObjectImpl trait provides the setters/getters for GObject properties.
    // Here we need to provide the values that are internally stored back to the
    // caller, or store whatever new value the caller is providing.
    //
    // This maps between the GObject properties and our internal storage of the
    // corresponding values of the properties.
    impl ObjectImpl for ServoMediaStreamSrc {
        fn properties() -> &'static [glib::ParamSpec] {
            static PROPERTIES: Lazy<Vec<glib::ParamSpec>> = Lazy::new(|| {
                vec![
                    // Let playbin3 know we are a live source.
                    glib::ParamSpecBoolean::builder("is-live")
                        .nick("Is Live")
                        .blurb("Let playbin3 know we are a live source")
                        .default_value(true)
                        .readwrite()
                        .build(),
                ]
            });

            &PROPERTIES
        }

        fn property(&self, _id: usize, pspec: &glib::ParamSpec) -> glib::Value {
            match pspec.name() {
                // This element is always live; the property is read-only in
                // practice (no `set_property` override is provided).
                "is-live" => true.to_value(),
                _ => unimplemented!(),
            }
        }
    }
|
||||
|
||||
    // No GstObject-specific behavior to override; parent defaults suffice.
    impl GstObjectImpl for ServoMediaStreamSrc {}
|
||||
|
||||
    // Implementation of gstreamer::Element virtual methods
    impl ElementImpl for ServoMediaStreamSrc {
        /// Static element metadata shown by gst-inspect and factories.
        fn metadata() -> Option<&'static gstreamer::subclass::ElementMetadata> {
            static ELEMENT_METADATA: Lazy<gstreamer::subclass::ElementMetadata> = Lazy::new(|| {
                gstreamer::subclass::ElementMetadata::new(
                    "Servo Media Stream Source",
                    "Source/Audio/Video",
                    "Feed player with media stream data",
                    "Servo developers",
                )
            });

            Some(&*ELEMENT_METADATA)
        }

        fn pad_templates() -> &'static [gstreamer::PadTemplate] {
            static PAD_TEMPLATES: Lazy<Vec<gstreamer::PadTemplate>> = Lazy::new(|| {
                // Add pad templates for our audio and video source pads.
                // These are later used for actually creating the pads and beforehand
                // already provide information to GStreamer about all possible
                // pads that could exist for this type.
                vec![
                    AUDIO_SRC_PAD_TEMPLATE.clone(),
                    VIDEO_SRC_PAD_TEMPLATE.clone(),
                ]
            });

            PAD_TEMPLATES.as_ref()
        }
    }
|
||||
|
||||
    // Implementation of gstreamer::Bin virtual methods
    // (nothing to override; the default Bin behavior is sufficient).
    impl BinImpl for ServoMediaStreamSrc {}
|
||||
|
||||
impl URIHandlerImpl for ServoMediaStreamSrc {
|
||||
const URI_TYPE: gstreamer::URIType = gstreamer::URIType::Src;
|
||||
|
||||
fn protocols() -> &'static [&'static str] {
|
||||
&["mediastream"]
|
||||
}
|
||||
|
||||
fn uri(&self) -> Option<String> {
|
||||
Some("mediastream://".to_string())
|
||||
}
|
||||
|
||||
fn set_uri(&self, uri: &str) -> Result<(), glib::Error> {
|
||||
if let Ok(uri) = Url::parse(uri) {
|
||||
if uri.scheme() == "mediastream" {
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
Err(glib::Error::new(
|
||||
gstreamer::URIError::BadUri,
|
||||
format!("Invalid URI '{:?}'", uri,).as_str(),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Public part of the ServoMediaStreamSrc type. This behaves like a normal
// GObject binding
glib::wrapper! {
    pub struct ServoMediaStreamSrc(ObjectSubclass<imp::ServoMediaStreamSrc>)
        @extends gstreamer::Bin, gstreamer::Element, gstreamer::Object, @implements gstreamer::URIHandler;
}
|
||||
|
||||
// SAFETY: NOTE(review): these impls assert the wrapped GObject may be moved
// and shared across threads. Nothing in this file proves that; it presumably
// relies on GStreamer elements being internally synchronized — confirm
// against the glib/gstreamer threading guarantees.
unsafe impl Send for ServoMediaStreamSrc {}
unsafe impl Sync for ServoMediaStreamSrc {}
|
||||
|
||||
impl ServoMediaStreamSrc {
    /// Attaches `stream` to this source element; see `imp::set_stream`.
    /// `only_stream` means no stream of the other media type will follow.
    pub fn set_stream(&self, stream: &mut GStreamerMediaStream, only_stream: bool) {
        self.imp()
            .set_stream(stream, self.upcast_ref::<gstreamer::Element>(), only_stream)
    }
}
|
||||
|
||||
// Registers the type for our element, and then registers in GStreamer
// under the name "servomediastreamsrc" for being able to instantiate it via e.g.
// gstreamer::ElementFactory::make().
pub fn register_servo_media_stream_src() -> Result<(), glib::BoolError> {
    gstreamer::Element::register(
        None,
        "servomediastreamsrc",
        gstreamer::Rank::NONE,
        ServoMediaStreamSrc::static_type(),
    )
}
|
||||
1028
components/media/backends/gstreamer/player.rs
Normal file
1028
components/media/backends/gstreamer/player.rs
Normal file
File diff suppressed because it is too large
Load Diff
266
components/media/backends/gstreamer/registry_scanner.rs
Normal file
266
components/media/backends/gstreamer/registry_scanner.rs
Normal file
@@ -0,0 +1,266 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::collections::HashSet;
|
||||
use std::str::FromStr;
|
||||
|
||||
use once_cell::sync::Lazy;
|
||||
|
||||
// The GStreamer registry holds the metadata of the set of plugins available in the host.
// This scanner is used to lazily analyze the registry and to provide information about
// the set of supported mime types and codecs that the backend is able to deal with.
pub static GSTREAMER_REGISTRY_SCANNER: Lazy<GStreamerRegistryScanner> =
    Lazy::new(GStreamerRegistryScanner::new);
|
||||
|
||||
/// Result of scanning the host's GStreamer plugin registry: which container
/// MIME types and codec strings this backend can handle.
pub struct GStreamerRegistryScanner {
    // Container/MIME types for which decode support was detected.
    supported_mime_types: HashSet<&'static str>,
    // Codec identifiers (some with `*` wildcards) detected as decodable.
    supported_codecs: HashSet<&'static str>,
}
|
||||
|
||||
impl GStreamerRegistryScanner {
|
||||
fn new() -> GStreamerRegistryScanner {
|
||||
let mut registry_scanner = GStreamerRegistryScanner {
|
||||
supported_mime_types: HashSet::new(),
|
||||
supported_codecs: HashSet::new(),
|
||||
};
|
||||
registry_scanner.initialize();
|
||||
registry_scanner
|
||||
}
|
||||
|
||||
pub fn is_container_type_supported(&self, container_type: &str) -> bool {
|
||||
self.supported_mime_types.contains(container_type)
|
||||
}
|
||||
|
||||
fn is_codec_supported(&self, codec: &str) -> bool {
|
||||
self.supported_codecs.contains(codec)
|
||||
}
|
||||
|
||||
pub fn are_all_codecs_supported(&self, codecs: &Vec<&str>) -> bool {
|
||||
codecs.iter().all(|&codec| self.is_codec_supported(codec))
|
||||
}
|
||||
|
||||
fn initialize(&mut self) {
|
||||
let audio_decoder_factories = gstreamer::ElementFactory::factories_with_type(
|
||||
gstreamer::ElementFactoryType::DECODER | gstreamer::ElementFactoryType::MEDIA_AUDIO,
|
||||
gstreamer::Rank::MARGINAL,
|
||||
);
|
||||
let audio_parser_factories = gstreamer::ElementFactory::factories_with_type(
|
||||
gstreamer::ElementFactoryType::PARSER | gstreamer::ElementFactoryType::MEDIA_AUDIO,
|
||||
gstreamer::Rank::NONE,
|
||||
);
|
||||
let video_decoder_factories = gstreamer::ElementFactory::factories_with_type(
|
||||
gstreamer::ElementFactoryType::DECODER | gstreamer::ElementFactoryType::MEDIA_VIDEO,
|
||||
gstreamer::Rank::MARGINAL,
|
||||
);
|
||||
let video_parser_factories = gstreamer::ElementFactory::factories_with_type(
|
||||
gstreamer::ElementFactoryType::PARSER | gstreamer::ElementFactoryType::MEDIA_VIDEO,
|
||||
gstreamer::Rank::MARGINAL,
|
||||
);
|
||||
let demux_factories = gstreamer::ElementFactory::factories_with_type(
|
||||
gstreamer::ElementFactoryType::DEMUXER,
|
||||
gstreamer::Rank::MARGINAL,
|
||||
);
|
||||
|
||||
if has_element_for_media_type(&audio_decoder_factories, "audio/mpeg, mpegversion=(int)4") {
|
||||
self.supported_mime_types.insert("audio/aac");
|
||||
self.supported_mime_types.insert("audio/mp4");
|
||||
self.supported_mime_types.insert("audio/x-m4a");
|
||||
self.supported_codecs.insert("mpeg");
|
||||
self.supported_codecs.insert("mp4a*");
|
||||
}
|
||||
|
||||
let is_opus_supported =
|
||||
has_element_for_media_type(&audio_decoder_factories, "audio/x-opus");
|
||||
if is_opus_supported && has_element_for_media_type(&audio_parser_factories, "audio/x-opus")
|
||||
{
|
||||
self.supported_mime_types.insert("audio/opus");
|
||||
self.supported_codecs.insert("opus");
|
||||
self.supported_codecs.insert("x-opus");
|
||||
}
|
||||
|
||||
let is_vorbis_supported =
|
||||
has_element_for_media_type(&audio_decoder_factories, "audio/x-vorbis");
|
||||
if is_vorbis_supported &&
|
||||
has_element_for_media_type(&audio_parser_factories, "audio/x-vorbis")
|
||||
{
|
||||
self.supported_codecs.insert("vorbis");
|
||||
self.supported_codecs.insert("x-vorbis");
|
||||
}
|
||||
|
||||
if has_element_for_media_type(&demux_factories, "video/x-matroska") {
|
||||
let is_vp8_decoder_available =
|
||||
has_element_for_media_type(&video_decoder_factories, "video/x-vp8");
|
||||
let is_vp9_decoder_available =
|
||||
has_element_for_media_type(&video_decoder_factories, "video/x-vp9");
|
||||
|
||||
if is_vp8_decoder_available || is_vp9_decoder_available {
|
||||
self.supported_mime_types.insert("video/webm");
|
||||
}
|
||||
|
||||
if is_vp8_decoder_available {
|
||||
self.supported_codecs.insert("vp8");
|
||||
self.supported_codecs.insert("x-vp8");
|
||||
self.supported_codecs.insert("vp8.0");
|
||||
}
|
||||
|
||||
if is_vp9_decoder_available {
|
||||
self.supported_codecs.insert("vp9");
|
||||
self.supported_codecs.insert("x-vp9");
|
||||
self.supported_codecs.insert("vp9.0");
|
||||
}
|
||||
|
||||
if is_opus_supported {
|
||||
self.supported_mime_types.insert("audio/webm");
|
||||
}
|
||||
}
|
||||
|
||||
let is_h264_decoder_available = has_element_for_media_type(
|
||||
&video_decoder_factories,
|
||||
"video/x-h264, profile=(string){ constrained-baseline, baseline, high }",
|
||||
);
|
||||
if is_h264_decoder_available &&
|
||||
has_element_for_media_type(&video_parser_factories, "video/x-h264")
|
||||
{
|
||||
self.supported_mime_types.insert("video/mp4");
|
||||
self.supported_mime_types.insert("video/x-m4v");
|
||||
self.supported_codecs.insert("x-h264");
|
||||
self.supported_codecs.insert("avc*");
|
||||
self.supported_codecs.insert("mp4v*");
|
||||
}
|
||||
|
||||
if has_element_for_media_type(&audio_decoder_factories, "audio/midi") {
|
||||
self.supported_mime_types.insert("audio/midi");
|
||||
self.supported_mime_types.insert("audio/riff-midi");
|
||||
}
|
||||
|
||||
if has_element_for_media_type(&audio_decoder_factories, "audio/x-ac3") {
|
||||
self.supported_mime_types.insert("audio/x-ac3");
|
||||
}
|
||||
|
||||
if has_element_for_media_type(&audio_decoder_factories, "audio/x-flac") {
|
||||
self.supported_mime_types.insert("audio/flac");
|
||||
self.supported_mime_types.insert("audio/x-flac");
|
||||
}
|
||||
|
||||
if has_element_for_media_type(&audio_decoder_factories, "audio/x-speex") {
|
||||
self.supported_mime_types.insert("audio/speex");
|
||||
self.supported_mime_types.insert("audio/x-speex");
|
||||
}
|
||||
|
||||
if has_element_for_media_type(&audio_decoder_factories, "audio/x-wavpack") {
|
||||
self.supported_mime_types.insert("audio/x-wavpack");
|
||||
}
|
||||
|
||||
if has_element_for_media_type(
|
||||
&video_decoder_factories,
|
||||
"video/mpeg, mpegversion=(int){1,2}, systemstream=(boolean)false",
|
||||
) {
|
||||
self.supported_mime_types.insert("video/mpeg");
|
||||
self.supported_codecs.insert("mpeg");
|
||||
}
|
||||
|
||||
if has_element_for_media_type(&video_decoder_factories, "video/x-flash-video") {
|
||||
self.supported_mime_types.insert("video/flv");
|
||||
self.supported_mime_types.insert("video/x-flv");
|
||||
}
|
||||
|
||||
if has_element_for_media_type(&video_decoder_factories, "video/x-msvideocodec") {
|
||||
self.supported_mime_types.insert("video/x-msvideo");
|
||||
}
|
||||
|
||||
if has_element_for_media_type(&demux_factories, "application/x-hls") {
|
||||
self.supported_mime_types
|
||||
.insert("application/vnd.apple.mpegurl");
|
||||
self.supported_mime_types.insert("application/x-mpegurl");
|
||||
}
|
||||
|
||||
if has_element_for_media_type(&demux_factories, "application/x-wav") ||
|
||||
has_element_for_media_type(&demux_factories, "audio/x-wav")
|
||||
{
|
||||
self.supported_mime_types.insert("audio/wav");
|
||||
self.supported_mime_types.insert("audio/vnd.wav");
|
||||
self.supported_mime_types.insert("audio/x-wav");
|
||||
self.supported_codecs.insert("1");
|
||||
}
|
||||
|
||||
if has_element_for_media_type(&demux_factories, "video/quicktime, variant=(string)3gpp") {
|
||||
self.supported_mime_types.insert("video/3gpp");
|
||||
}
|
||||
|
||||
if has_element_for_media_type(&demux_factories, "application/ogg") {
|
||||
self.supported_mime_types.insert("application/ogg");
|
||||
|
||||
if is_vorbis_supported {
|
||||
self.supported_mime_types.insert("audio/ogg");
|
||||
self.supported_mime_types.insert("audio/x-vorbis+ogg");
|
||||
}
|
||||
|
||||
if has_element_for_media_type(&audio_decoder_factories, "audio/x-speex") {
|
||||
self.supported_mime_types.insert("audio/ogg");
|
||||
self.supported_codecs.insert("speex");
|
||||
}
|
||||
|
||||
if has_element_for_media_type(&video_decoder_factories, "video/x-theora") {
|
||||
self.supported_mime_types.insert("video/ogg");
|
||||
self.supported_codecs.insert("theora");
|
||||
}
|
||||
}
|
||||
|
||||
let mut is_audio_mpeg_supported = false;
|
||||
if has_element_for_media_type(
|
||||
&audio_decoder_factories,
|
||||
"audio/mpeg, mpegversion=(int)1, layer=(int)[1, 3]",
|
||||
) {
|
||||
is_audio_mpeg_supported = true;
|
||||
self.supported_mime_types.insert("audio/mp1");
|
||||
self.supported_mime_types.insert("audio/mp3");
|
||||
self.supported_mime_types.insert("audio/x-mp3");
|
||||
self.supported_codecs.insert("audio/mp3");
|
||||
}
|
||||
|
||||
if has_element_for_media_type(&audio_decoder_factories, "audio/mpeg, mpegversion=(int)2") {
|
||||
is_audio_mpeg_supported = true;
|
||||
self.supported_mime_types.insert("audio/mp2");
|
||||
}
|
||||
|
||||
is_audio_mpeg_supported |= self.is_container_type_supported("video/mp4");
|
||||
if is_audio_mpeg_supported {
|
||||
self.supported_mime_types.insert("audio/mpeg");
|
||||
self.supported_mime_types.insert("audio/x-mpeg");
|
||||
}
|
||||
|
||||
let is_matroska_supported =
|
||||
has_element_for_media_type(&demux_factories, "video/x-matroska");
|
||||
if is_matroska_supported {
|
||||
self.supported_mime_types.insert("video/x-matroska");
|
||||
|
||||
if has_element_for_media_type(&video_decoder_factories, "video/x-vp10") {
|
||||
self.supported_mime_types.insert("video/webm");
|
||||
}
|
||||
}
|
||||
|
||||
if (is_matroska_supported || self.is_container_type_supported("video/mp4")) &&
|
||||
has_element_for_media_type(&video_decoder_factories, "video/x-av1")
|
||||
{
|
||||
self.supported_codecs.insert("av01*");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn has_element_for_media_type(
|
||||
factories: &glib::List<gstreamer::ElementFactory>,
|
||||
media_type: &str,
|
||||
) -> bool {
|
||||
match gstreamer::caps::Caps::from_str(media_type) {
|
||||
Ok(caps) => {
|
||||
for factory in factories {
|
||||
if factory.can_sink_all_caps(&caps) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
},
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,21 @@
|
||||
[package]
|
||||
name = "servo-media-gstreamer-render-android"
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
license.workspace = true
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
rust-version.workspace = true
|
||||
|
||||
[lib]
|
||||
name = "servo_media_gstreamer_render_android"
|
||||
path = "lib.rs"
|
||||
|
||||
[dependencies]
|
||||
glib = { workspace = true }
|
||||
gstreamer = { workspace = true }
|
||||
gstreamer-gl = { workspace = true }
|
||||
gstreamer-gl-egl = { workspace = true }
|
||||
gstreamer-video = { workspace = true }
|
||||
sm-player = { package = "servo-media-player", path = "../../../player" }
|
||||
sm-gst-render = { package = "servo-media-gstreamer-render", path = "../render" }
|
||||
256
components/media/backends/gstreamer/render-android/lib.rs
Normal file
256
components/media/backends/gstreamer/render-android/lib.rs
Normal file
@@ -0,0 +1,256 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
//! `RenderAndroid` is a `Render` implementation for Android
|
||||
//! platform. It only implements an OpenGLES mechanism.
|
||||
//!
|
||||
//! Internally it uses GStreamer's *glsinkbin* element as *videosink*
|
||||
//! wrapping the *appsink* from the Player. And the shared frames are
|
||||
//! mapped as texture IDs.
|
||||
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use gstreamer::prelude::*;
|
||||
use gstreamer_gl::prelude::*;
|
||||
use sm_gst_render::Render;
|
||||
use sm_player::PlayerError;
|
||||
use sm_player::context::{GlApi, GlContext, NativeDisplay, PlayerGLContext};
|
||||
use sm_player::video::{Buffer, VideoFrame, VideoFrameData};
|
||||
|
||||
struct GStreamerBuffer {
|
||||
is_external_oes: bool,
|
||||
frame: gstreamer_gl::GLVideoFrame<gstreamer_gl::gl_video_frame::Readable>,
|
||||
}
|
||||
|
||||
impl Buffer for GStreamerBuffer {
|
||||
fn to_vec(&self) -> Result<VideoFrameData, ()> {
|
||||
// packed formats are guaranteed to be in a single plane
|
||||
if self.frame.format() == gstreamer_video::VideoFormat::Rgba {
|
||||
let tex_id = self.frame.texture_id(0).map_err(|_| ())?;
|
||||
Ok(if self.is_external_oes {
|
||||
VideoFrameData::OESTexture(tex_id)
|
||||
} else {
|
||||
VideoFrameData::Texture(tex_id)
|
||||
})
|
||||
} else {
|
||||
Err(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct RenderAndroid {
|
||||
display: gstreamer_gl::GLDisplay,
|
||||
app_context: gstreamer_gl::GLContext,
|
||||
gst_context: Arc<Mutex<Option<gstreamer_gl::GLContext>>>,
|
||||
gl_upload: Arc<Mutex<Option<gstreamer::Element>>>,
|
||||
}
|
||||
|
||||
impl RenderAndroid {
|
||||
/// Tries to create a new intance of the `RenderAndroid`
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `context` - is the PlayerContext trait object from
|
||||
/// application.
|
||||
pub fn new(app_gl_context: Box<dyn PlayerGLContext>) -> Option<RenderAndroid> {
|
||||
// Check that we actually have the elements that we
|
||||
// need to make this work.
|
||||
if gstreamer::ElementFactory::find("glsinkbin").is_none() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let display_native = app_gl_context.get_native_display();
|
||||
let gl_context = app_gl_context.get_gl_context();
|
||||
let gl_api = match app_gl_context.get_gl_api() {
|
||||
GlApi::OpenGL => gstreamer_gl::GLAPI::OPENGL,
|
||||
GlApi::OpenGL3 => gstreamer_gl::GLAPI::OPENGL3,
|
||||
GlApi::Gles1 => gstreamer_gl::GLAPI::GLES1,
|
||||
GlApi::Gles2 => gstreamer_gl::GLAPI::GLES2,
|
||||
GlApi::None => return None,
|
||||
};
|
||||
|
||||
let (wrapped_context, display) = match gl_context {
|
||||
GlContext::Egl(context) => {
|
||||
let display = match display_native {
|
||||
NativeDisplay::Egl(display_native) => {
|
||||
unsafe { gstreamer_gl_egl::GLDisplayEGL::with_egl_display(display_native) }
|
||||
.and_then(|display| Ok(display.upcast()))
|
||||
.ok()
|
||||
},
|
||||
_ => None,
|
||||
};
|
||||
|
||||
match display {
|
||||
Some(display) => {
|
||||
let wrapped_context = unsafe {
|
||||
gstreamer_gl::GLContext::new_wrapped(
|
||||
&display,
|
||||
context,
|
||||
gstreamer_gl::GLPlatform::EGL,
|
||||
gl_api,
|
||||
)
|
||||
};
|
||||
(wrapped_context, Some(display))
|
||||
},
|
||||
_ => (None, None),
|
||||
}
|
||||
},
|
||||
_ => (None, None),
|
||||
};
|
||||
|
||||
match wrapped_context {
|
||||
Some(app_context) => Some(RenderAndroid {
|
||||
display: display.unwrap(),
|
||||
app_context,
|
||||
gst_context: Arc::new(Mutex::new(None)),
|
||||
gl_upload: Arc::new(Mutex::new(None)),
|
||||
}),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for RenderAndroid {
|
||||
fn is_gl(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn build_frame(&self, sample: gstreamer::Sample) -> Option<VideoFrame> {
|
||||
if self.gst_context.lock().unwrap().is_none() && self.gl_upload.lock().unwrap().is_some() {
|
||||
*self.gst_context.lock().unwrap() = match self.gl_upload.lock().unwrap().as_ref() {
|
||||
Some(glupload) => Some(glupload.property::<gstreamer_gl::GLContext>("context")),
|
||||
_ => None,
|
||||
};
|
||||
}
|
||||
|
||||
let buffer = sample.buffer_owned().ok_or_else(|| ())?;
|
||||
let caps = sample.caps().ok_or_else(|| ())?;
|
||||
|
||||
let is_external_oes = caps
|
||||
.structure(0)
|
||||
.and_then(|s| {
|
||||
s.get::<&str>("texture-target").ok().and_then(|target| {
|
||||
if target == "external-oes" {
|
||||
Some(s)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
})
|
||||
.is_some();
|
||||
|
||||
let info = gstreamer_video::VideoInfo::from_caps(caps).ok()?;
|
||||
|
||||
if self.gst_context.lock().unwrap().is_some() {
|
||||
if let Some(sync_meta) = buffer.meta::<gstreamer_gl::GLSyncMeta>() {
|
||||
sync_meta.set_sync_point(self.gst_context.lock().unwrap().as_ref().unwrap());
|
||||
}
|
||||
}
|
||||
|
||||
let frame =
|
||||
gstreamer_gl::GLVideoFrame::from_buffer_readable(buffer, &info).or_else(|_| Err(()))?;
|
||||
|
||||
if self.gst_context.lock().unwrap().is_some() {
|
||||
if let Some(sync_meta) = frame.buffer().meta::<gstreamer_gl::GLSyncMeta>() {
|
||||
// This should possibly be
|
||||
// sync_meta.wait(&self.app_context);
|
||||
// since we want the main app thread to sync it's GPU pipeline too,
|
||||
// but the main thread and the app context aren't managed by gstreamer,
|
||||
// so we can't do that directly.
|
||||
// https://github.com/servo/media/issues/309
|
||||
sync_meta.wait(self.gst_context.lock().unwrap().as_ref().unwrap());
|
||||
}
|
||||
}
|
||||
|
||||
VideoFrame::new(
|
||||
info.width() as i32,
|
||||
info.height() as i32,
|
||||
Arc::new(GStreamerBuffer {
|
||||
is_external_oes,
|
||||
frame,
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
fn build_video_sink(
|
||||
&self,
|
||||
appsink: &gstreamer::Element,
|
||||
pipeline: &gstreamer::Element,
|
||||
) -> Result<(), PlayerError> {
|
||||
if self.gl_upload.lock().unwrap().is_some() {
|
||||
return Err(PlayerError::Backend(
|
||||
"render unix already setup the video sink".to_owned(),
|
||||
));
|
||||
}
|
||||
|
||||
let caps = gstreamer::Caps::builder("video/x-raw")
|
||||
.features([gstreamer_gl::CAPS_FEATURE_MEMORY_GL_MEMORY])
|
||||
.field("format", gstreamer_video::VideoFormat::Rgba.to_str())
|
||||
.field(
|
||||
"texture-target",
|
||||
gstreamer::List::new(["2D", "external-oes"]),
|
||||
)
|
||||
.build();
|
||||
appsink.set_property("caps", &caps);
|
||||
|
||||
let vsinkbin = gstreamer::ElementFactory::make("glsinkbin")
|
||||
.name("servo-media-vsink")
|
||||
.property("sink", &appsink)
|
||||
.build()
|
||||
.map_err(|error| {
|
||||
PlayerError::Backend(format!("glupload creation failed: {error:?}"))
|
||||
})?;
|
||||
|
||||
pipeline.set_property("video-sink", &vsinkbin);
|
||||
|
||||
let bus = pipeline.bus().expect("pipeline with no bus");
|
||||
let display_ = self.display.clone();
|
||||
let context_ = self.app_context.clone();
|
||||
bus.set_sync_handler(move |_, msg| {
|
||||
match msg.view() {
|
||||
gstreamer::MessageView::NeedContext(ctxt) => {
|
||||
if let Some(el) = msg
|
||||
.src()
|
||||
.map(|s| s.clone().downcast::<gstreamer::Element>().unwrap())
|
||||
{
|
||||
let context_type = ctxt.context_type();
|
||||
if context_type == *gstreamer_gl::GL_DISPLAY_CONTEXT_TYPE {
|
||||
let ctxt = gstreamer::Context::new(context_type, true);
|
||||
ctxt.set_gl_display(&display_);
|
||||
el.set_context(&ctxt);
|
||||
} else if context_type == "gst.gl.app_context" {
|
||||
let mut ctxt = gstreamer::Context::new(context_type, true);
|
||||
{
|
||||
let s = ctxt.get_mut().unwrap().structure_mut();
|
||||
s.set_value("context", context_.to_send_value());
|
||||
}
|
||||
el.set_context(&ctxt);
|
||||
}
|
||||
}
|
||||
},
|
||||
_ => (),
|
||||
}
|
||||
|
||||
gstreamer::BusSyncReply::Pass
|
||||
});
|
||||
|
||||
let mut iter = vsinkbin
|
||||
.dynamic_cast::<gstreamer::Bin>()
|
||||
.unwrap()
|
||||
.iterate_elements();
|
||||
*self.gl_upload.lock().unwrap() = loop {
|
||||
match iter.next() {
|
||||
Ok(Some(element)) => {
|
||||
if Some(true) == element.factory().map(|f| f.name() == "glupload") {
|
||||
break Some(element);
|
||||
}
|
||||
},
|
||||
Err(gstreamer::IteratorError::Resync) => iter.resync(),
|
||||
_ => break None,
|
||||
}
|
||||
};
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
28
components/media/backends/gstreamer/render-unix/Cargo.toml
Normal file
28
components/media/backends/gstreamer/render-unix/Cargo.toml
Normal file
@@ -0,0 +1,28 @@
|
||||
[package]
|
||||
name = "servo-media-gstreamer-render-unix"
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
license.workspace = true
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
rust-version.workspace = true
|
||||
|
||||
[features]
|
||||
gl-egl = ["gstreamer-gl-egl"]
|
||||
gl-x11 = ["gstreamer-gl-x11"]
|
||||
gl-wayland = ["gstreamer-gl-wayland"]
|
||||
|
||||
[lib]
|
||||
name = "servo_media_gstreamer_render_unix"
|
||||
path = "lib.rs"
|
||||
|
||||
[dependencies]
|
||||
glib = { workspace = true }
|
||||
gstreamer = { workspace = true }
|
||||
gstreamer-gl = { workspace = true }
|
||||
gstreamer-gl-egl = { workspace = true, optional = true }
|
||||
gstreamer-gl-x11 = { workspace = true, optional = true }
|
||||
gstreamer-gl-wayland = { workspace = true, optional = true }
|
||||
gstreamer-video = { workspace = true }
|
||||
sm-player = { package = "servo-media-player", path = "../../../player" }
|
||||
sm-gst-render = { package = "servo-media-gstreamer-render", path = "../render" }
|
||||
288
components/media/backends/gstreamer/render-unix/lib.rs
Normal file
288
components/media/backends/gstreamer/render-unix/lib.rs
Normal file
@@ -0,0 +1,288 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
//! `RenderUnix` is a `Render` implementation for Unix-based
|
||||
//! platforms. It implements an OpenGL mechanism shared by Linux and
|
||||
//! many of the BSD flavors.
|
||||
//!
|
||||
//! Internally it uses GStreamer's *glsinkbin* element as *videosink*
|
||||
//! wrapping the *appsink* from the Player. And the shared frames are
|
||||
//! mapped as texture IDs.
|
||||
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use gstreamer_gl::prelude::*;
|
||||
use sm_gst_render::Render;
|
||||
use sm_player::PlayerError;
|
||||
use sm_player::context::{GlApi, GlContext, NativeDisplay, PlayerGLContext};
|
||||
use sm_player::video::{Buffer, VideoFrame, VideoFrameData};
|
||||
|
||||
struct GStreamerBuffer {
|
||||
is_external_oes: bool,
|
||||
frame: gstreamer_gl::GLVideoFrame<gstreamer_gl::gl_video_frame::Readable>,
|
||||
}
|
||||
|
||||
impl Buffer for GStreamerBuffer {
|
||||
fn to_vec(&self) -> Option<VideoFrameData> {
|
||||
// packed formats are guaranteed to be in a single plane
|
||||
if self.frame.format() == gstreamer_video::VideoFormat::Rgba {
|
||||
let tex_id = self.frame.texture_id(0).ok()?;
|
||||
Some(if self.is_external_oes {
|
||||
VideoFrameData::OESTexture(tex_id)
|
||||
} else {
|
||||
VideoFrameData::Texture(tex_id)
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct RenderUnix {
|
||||
display: gstreamer_gl::GLDisplay,
|
||||
app_context: gstreamer_gl::GLContext,
|
||||
gst_context: Arc<Mutex<Option<gstreamer_gl::GLContext>>>,
|
||||
gl_upload: Arc<Mutex<Option<gstreamer::Element>>>,
|
||||
}
|
||||
|
||||
impl RenderUnix {
|
||||
/// Tries to create a new intance of the `RenderUnix`
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `context` - is the PlayerContext trait object from application.
|
||||
pub fn new(app_gl_context: Box<dyn PlayerGLContext>) -> Option<RenderUnix> {
|
||||
// Check that we actually have the elements that we
|
||||
// need to make this work.
|
||||
gstreamer::ElementFactory::find("glsinkbin")?;
|
||||
|
||||
let display_native = app_gl_context.get_native_display();
|
||||
let gl_context = app_gl_context.get_gl_context();
|
||||
let gl_api = match app_gl_context.get_gl_api() {
|
||||
GlApi::OpenGL => gstreamer_gl::GLAPI::OPENGL,
|
||||
GlApi::OpenGL3 => gstreamer_gl::GLAPI::OPENGL3,
|
||||
GlApi::Gles1 => gstreamer_gl::GLAPI::GLES1,
|
||||
GlApi::Gles2 => gstreamer_gl::GLAPI::GLES2,
|
||||
GlApi::None => return None,
|
||||
};
|
||||
|
||||
let (wrapped_context, display) = match gl_context {
|
||||
GlContext::Egl(context) => {
|
||||
let display = match display_native {
|
||||
#[cfg(feature = "gl-egl")]
|
||||
NativeDisplay::Egl(display_native) => {
|
||||
unsafe { gstreamer_gl_egl::GLDisplayEGL::with_egl_display(display_native) }
|
||||
.map(|display| display.upcast())
|
||||
.ok()
|
||||
},
|
||||
#[cfg(feature = "gl-wayland")]
|
||||
NativeDisplay::Wayland(display_native) => unsafe {
|
||||
gstreamer_gl_wayland::GLDisplayWayland::with_display(display_native)
|
||||
}
|
||||
.map(|display| display.upcast())
|
||||
.ok(),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
RenderUnix::create_wrapped_context(
|
||||
display,
|
||||
context,
|
||||
gstreamer_gl::GLPlatform::EGL,
|
||||
gl_api,
|
||||
)
|
||||
},
|
||||
GlContext::Glx(context) => {
|
||||
let display = match display_native {
|
||||
#[cfg(feature = "gl-x11")]
|
||||
NativeDisplay::X11(display_native) => {
|
||||
unsafe { gstreamer_gl_x11::GLDisplayX11::with_display(display_native) }
|
||||
.map(|display| display.upcast())
|
||||
.ok()
|
||||
},
|
||||
_ => None,
|
||||
};
|
||||
|
||||
RenderUnix::create_wrapped_context(
|
||||
display,
|
||||
context,
|
||||
gstreamer_gl::GLPlatform::GLX,
|
||||
gl_api,
|
||||
)
|
||||
},
|
||||
GlContext::Unknown => (None, None),
|
||||
};
|
||||
|
||||
match wrapped_context {
|
||||
Some(app_context) => {
|
||||
let cat = gstreamer::DebugCategory::get("servoplayer").unwrap();
|
||||
let _: Result<(), ()> = app_context
|
||||
.activate(true)
|
||||
.and_then(|_| {
|
||||
app_context.fill_info().or_else(|err| {
|
||||
gstreamer::warning!(
|
||||
cat,
|
||||
"Couldn't fill the wrapped app GL context: {}",
|
||||
err.to_string()
|
||||
);
|
||||
Ok(())
|
||||
})
|
||||
})
|
||||
.or_else(|_| {
|
||||
gstreamer::warning!(cat, "Couldn't activate the wrapped app GL context");
|
||||
Ok(())
|
||||
});
|
||||
Some(RenderUnix {
|
||||
display: display.unwrap(),
|
||||
app_context,
|
||||
gst_context: Arc::new(Mutex::new(None)),
|
||||
gl_upload: Arc::new(Mutex::new(None)),
|
||||
})
|
||||
},
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn create_wrapped_context(
|
||||
display: Option<gstreamer_gl::GLDisplay>,
|
||||
handle: usize,
|
||||
platform: gstreamer_gl::GLPlatform,
|
||||
api: gstreamer_gl::GLAPI,
|
||||
) -> (
|
||||
Option<gstreamer_gl::GLContext>,
|
||||
Option<gstreamer_gl::GLDisplay>,
|
||||
) {
|
||||
match display {
|
||||
Some(display) => {
|
||||
let wrapped_context = unsafe {
|
||||
gstreamer_gl::GLContext::new_wrapped(&display, handle, platform, api)
|
||||
};
|
||||
(wrapped_context, Some(display))
|
||||
},
|
||||
_ => (None, None),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for RenderUnix {
|
||||
fn is_gl(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn build_frame(&self, sample: gstreamer::Sample) -> Option<VideoFrame> {
|
||||
if self.gst_context.lock().unwrap().is_none() && self.gl_upload.lock().unwrap().is_some() {
|
||||
*self.gst_context.lock().unwrap() = self
|
||||
.gl_upload
|
||||
.lock()
|
||||
.unwrap()
|
||||
.as_ref()
|
||||
.map(|glupload| glupload.property::<gstreamer_gl::GLContext>("context"));
|
||||
}
|
||||
|
||||
let buffer = sample.buffer_owned()?;
|
||||
let caps = sample.caps()?;
|
||||
|
||||
let is_external_oes = caps
|
||||
.structure(0)
|
||||
.and_then(|s| {
|
||||
s.get::<&str>("texture-target").ok().and_then(|target| {
|
||||
if target == "external-oes" {
|
||||
Some(s)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
})
|
||||
.is_some();
|
||||
|
||||
let info = gstreamer_video::VideoInfo::from_caps(caps).ok()?;
|
||||
let frame = gstreamer_gl::GLVideoFrame::from_buffer_readable(buffer, &info).ok()?;
|
||||
VideoFrame::new(
|
||||
info.width() as i32,
|
||||
info.height() as i32,
|
||||
Arc::new(GStreamerBuffer {
|
||||
is_external_oes,
|
||||
frame,
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
fn build_video_sink(
|
||||
&self,
|
||||
appsink: &gstreamer::Element,
|
||||
pipeline: &gstreamer::Element,
|
||||
) -> Result<(), PlayerError> {
|
||||
if self.gl_upload.lock().unwrap().is_some() {
|
||||
return Err(PlayerError::Backend(
|
||||
"render unix already setup the video sink".to_owned(),
|
||||
));
|
||||
}
|
||||
|
||||
let vsinkbin = gstreamer::ElementFactory::make("glsinkbin")
|
||||
.name("servo-media-vsink")
|
||||
.build()
|
||||
.map_err(|error| {
|
||||
PlayerError::Backend(format!("glupload creation failed: {error:?}"))
|
||||
})?;
|
||||
|
||||
let caps = gstreamer::Caps::builder("video/x-raw")
|
||||
.features([gstreamer_gl::CAPS_FEATURE_MEMORY_GL_MEMORY])
|
||||
.field("format", gstreamer_video::VideoFormat::Rgba.to_str())
|
||||
.field(
|
||||
"texture-target",
|
||||
gstreamer::List::new(["2D", "external-oes"]),
|
||||
)
|
||||
.build();
|
||||
appsink.set_property("caps", caps);
|
||||
|
||||
vsinkbin.set_property("sink", appsink);
|
||||
|
||||
pipeline.set_property("video-sink", &vsinkbin);
|
||||
|
||||
let bus = pipeline.bus().expect("pipeline with no bus");
|
||||
let display_ = self.display.clone();
|
||||
let context_ = self.app_context.clone();
|
||||
bus.set_sync_handler(move |_, msg| {
|
||||
if let gstreamer::MessageView::NeedContext(ctxt) = msg.view() {
|
||||
if let Some(el) = msg
|
||||
.src()
|
||||
.map(|s| s.clone().downcast::<gstreamer::Element>().unwrap())
|
||||
{
|
||||
let context_type = ctxt.context_type();
|
||||
if context_type == *gstreamer_gl::GL_DISPLAY_CONTEXT_TYPE {
|
||||
let ctxt = gstreamer::Context::new(context_type, true);
|
||||
ctxt.set_gl_display(&display_);
|
||||
el.set_context(&ctxt);
|
||||
} else if context_type == "gst.gl.app_context" {
|
||||
let mut ctxt = gstreamer::Context::new(context_type, true);
|
||||
{
|
||||
let s = ctxt.get_mut().unwrap().structure_mut();
|
||||
s.set_value("context", context_.to_send_value());
|
||||
}
|
||||
el.set_context(&ctxt);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
gstreamer::BusSyncReply::Pass
|
||||
});
|
||||
|
||||
let mut iter = vsinkbin
|
||||
.dynamic_cast::<gstreamer::Bin>()
|
||||
.unwrap()
|
||||
.iterate_elements();
|
||||
*self.gl_upload.lock().unwrap() = loop {
|
||||
match iter.next() {
|
||||
Ok(Some(element)) => {
|
||||
if "glupload" == element.factory().unwrap().name() {
|
||||
break Some(element);
|
||||
}
|
||||
},
|
||||
Err(gstreamer::IteratorError::Resync) => iter.resync(),
|
||||
_ => break None,
|
||||
}
|
||||
};
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
154
components/media/backends/gstreamer/render.rs
Normal file
154
components/media/backends/gstreamer/render.rs
Normal file
@@ -0,0 +1,154 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use glib::prelude::*;
|
||||
use servo_media_gstreamer_render::Render;
|
||||
use servo_media_player::PlayerError;
|
||||
use servo_media_player::context::PlayerGLContext;
|
||||
use servo_media_player::video::{Buffer, VideoFrame, VideoFrameData};
|
||||
|
||||
#[cfg(any(
|
||||
target_os = "linux",
|
||||
target_os = "dragonfly",
|
||||
target_os = "freebsd",
|
||||
target_os = "netbsd",
|
||||
target_os = "openbsd"
|
||||
))]
|
||||
mod platform {
|
||||
extern crate servo_media_gstreamer_render_unix;
|
||||
pub use self::servo_media_gstreamer_render_unix::RenderUnix as Render;
|
||||
use super::*;
|
||||
|
||||
pub fn create_render(gl_context: Box<dyn PlayerGLContext>) -> Option<Render> {
|
||||
Render::new(gl_context)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "android")]
|
||||
mod platform {
|
||||
extern crate servo_media_gstreamer_render_android;
|
||||
pub use self::servo_media_gstreamer_render_android::RenderAndroid as Render;
|
||||
use super::*;
|
||||
|
||||
pub fn create_render(gl_context: Box<dyn PlayerGLContext>) -> Option<Render> {
|
||||
Render::new(gl_context)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(any(
|
||||
target_os = "linux",
|
||||
target_os = "dragonfly",
|
||||
target_os = "freebsd",
|
||||
target_os = "netbsd",
|
||||
target_os = "openbsd",
|
||||
target_os = "android",
|
||||
)))]
|
||||
mod platform {
|
||||
use servo_media_gstreamer_render::Render as RenderTrait;
|
||||
use servo_media_player::PlayerError;
|
||||
use servo_media_player::context::PlayerGLContext;
|
||||
use servo_media_player::video::VideoFrame;
|
||||
|
||||
pub struct RenderDummy();
|
||||
pub type Render = RenderDummy;
|
||||
|
||||
pub fn create_render(_: Box<dyn PlayerGLContext>) -> Option<RenderDummy> {
|
||||
None
|
||||
}
|
||||
|
||||
impl RenderTrait for RenderDummy {
|
||||
fn is_gl(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn build_frame(&self, _: gstreamer::Sample) -> Option<VideoFrame> {
|
||||
None
|
||||
}
|
||||
|
||||
fn build_video_sink(
|
||||
&self,
|
||||
_: &gstreamer::Element,
|
||||
_: &gstreamer::Element,
|
||||
) -> Result<(), PlayerError> {
|
||||
Err(PlayerError::Backend(
|
||||
"Not available videosink decorator".to_owned(),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct GStreamerBuffer {
|
||||
frame: gstreamer_video::VideoFrame<gstreamer_video::video_frame::Readable>,
|
||||
}
|
||||
|
||||
impl Buffer for GStreamerBuffer {
|
||||
fn to_vec(&self) -> Option<VideoFrameData> {
|
||||
let data = self.frame.plane_data(0).ok()?;
|
||||
Some(VideoFrameData::Raw(Arc::new(data.to_vec())))
|
||||
}
|
||||
}
|
||||
|
||||
pub struct GStreamerRender {
|
||||
render: Option<platform::Render>,
|
||||
}
|
||||
|
||||
impl GStreamerRender {
|
||||
pub fn new(gl_context: Box<dyn PlayerGLContext>) -> Self {
|
||||
GStreamerRender {
|
||||
render: platform::create_render(gl_context),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_gl(&self) -> bool {
|
||||
if let Some(render) = self.render.as_ref() {
|
||||
render.is_gl()
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_frame_from_sample(&self, sample: gstreamer::Sample) -> Option<VideoFrame> {
|
||||
if let Some(render) = self.render.as_ref() {
|
||||
render.build_frame(sample)
|
||||
} else {
|
||||
let buffer = sample.buffer_owned()?;
|
||||
let caps = sample.caps()?;
|
||||
let info = gstreamer_video::VideoInfo::from_caps(caps).ok()?;
|
||||
let frame = gstreamer_video::VideoFrame::from_buffer_readable(buffer, &info).ok()?;
|
||||
|
||||
VideoFrame::new(
|
||||
info.width() as i32,
|
||||
info.height() as i32,
|
||||
Arc::new(GStreamerBuffer { frame }),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn setup_video_sink(
|
||||
&self,
|
||||
pipeline: &gstreamer::Element,
|
||||
) -> Result<gstreamer_app::AppSink, PlayerError> {
|
||||
let appsink = gstreamer::ElementFactory::make("appsink")
|
||||
.build()
|
||||
.map_err(|error| PlayerError::Backend(format!("appsink creation failed: {error:?}")))?
|
||||
.downcast::<gstreamer_app::AppSink>()
|
||||
.unwrap();
|
||||
|
||||
if let Some(render) = self.render.as_ref() {
|
||||
render.build_video_sink(appsink.upcast_ref::<gstreamer::Element>(), pipeline)?
|
||||
} else {
|
||||
let caps = gstreamer::Caps::builder("video/x-raw")
|
||||
.field("format", gstreamer_video::VideoFormat::Bgra.to_str())
|
||||
.field("pixel-aspect-ratio", gstreamer::Fraction::from((1, 1)))
|
||||
.build();
|
||||
|
||||
appsink.set_caps(Some(&caps));
|
||||
pipeline.set_property("video-sink", &appsink);
|
||||
};
|
||||
|
||||
Ok(appsink)
|
||||
}
|
||||
}
|
||||
20
components/media/backends/gstreamer/render/Cargo.toml
Normal file
20
components/media/backends/gstreamer/render/Cargo.toml
Normal file
@@ -0,0 +1,20 @@
|
||||
[package]
|
||||
name = "servo-media-gstreamer-render"
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
license.workspace = true
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
rust-version.workspace = true
|
||||
|
||||
[lib]
|
||||
name = "servo_media_gstreamer_render"
|
||||
path = "lib.rs"
|
||||
|
||||
[dependencies]
|
||||
gstreamer = { workspace = true }
|
||||
gstreamer-video = { workspace = true }
|
||||
|
||||
[dependencies.sm-player]
|
||||
package = "servo-media-player"
|
||||
path = "../../../player"
|
||||
49
components/media/backends/gstreamer/render/lib.rs
Normal file
49
components/media/backends/gstreamer/render/lib.rs
Normal file
@@ -0,0 +1,49 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
//! `Render` is a trait to be used by GStreamer's backend player
|
||||
//!
|
||||
//! The purpose of this trait is to provide different accelerated
|
||||
//! video renders.
|
||||
//!
|
||||
//! By default, the player will use a rendering mechanism based on
|
||||
//! mapping the raw video into CPU memory, but it might be other
|
||||
//! rendering mechanism. The main target for this trait are
|
||||
//! OpenGL-based render mechanisms.
|
||||
//!
|
||||
//! Each platform (Unix, MacOS, Windows) might offer an implementation
|
||||
//! of this trait, so the player could setup a proper GStreamer
|
||||
//! pipeline, and handle the produced buffers.
|
||||
//!
|
||||
|
||||
pub trait Render {
|
||||
/// Returns `True` if the render implementation uses any version
|
||||
/// or flavor of OpenGL
|
||||
fn is_gl(&self) -> bool;
|
||||
|
||||
/// Returns the Player's `Frame` to be consumed by the API user.
|
||||
///
|
||||
/// The implementation of this method will map the `sample`'s
|
||||
/// buffer to the rendering appropriate structure. In the case of
|
||||
/// OpenGL-based renders, the `Frame`, instead of the raw data,
|
||||
/// will transfer the texture ID.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `sample` - the GStreamer sample with the buffer to map
|
||||
fn build_frame(&self, sample: gstreamer::Sample) -> Option<sm_player::video::VideoFrame>;
|
||||
|
||||
/// Sets the proper *video-sink* to GStreamer's `pipeline`, this
|
||||
/// video sink is simply a decorator of the passed `appsink`.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `appsink` - the appsink GStreamer element to decorate
|
||||
/// * `pipeline` - the GStreamer pipeline to set the video sink
|
||||
fn build_video_sink(
|
||||
&self,
|
||||
appsink: &gstreamer::Element,
|
||||
pipeline: &gstreamer::Element,
|
||||
) -> Result<(), sm_player::PlayerError>;
|
||||
}
|
||||
426
components/media/backends/gstreamer/source.rs
Normal file
426
components/media/backends/gstreamer/source.rs
Normal file
@@ -0,0 +1,426 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::convert::TryFrom;
|
||||
use std::sync::Mutex;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
|
||||
use glib::subclass::prelude::*;
|
||||
use gstreamer::prelude::*;
|
||||
use gstreamer::subclass::prelude::*;
|
||||
use once_cell::sync::Lazy;
|
||||
use url::Url;
|
||||
|
||||
const MAX_SRC_QUEUE_SIZE: u64 = 50 * 1024 * 1024; // 50 MB.
|
||||
|
||||
// Implementation sub-module of the GObject
|
||||
mod imp {
|
||||
use super::*;
|
||||
|
||||
macro_rules! inner_appsrc_proxy {
|
||||
($fn_name:ident, $return_type:ty) => {
|
||||
pub fn $fn_name(&self) -> $return_type {
|
||||
self.appsrc.$fn_name()
|
||||
}
|
||||
};
|
||||
|
||||
($fn_name:ident, $arg1:ident, $arg1_type:ty, $return_type:ty) => {
|
||||
pub fn $fn_name(&self, $arg1: $arg1_type) -> $return_type {
|
||||
self.appsrc.$fn_name($arg1)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
struct Position {
|
||||
offset: u64,
|
||||
requested_offset: u64,
|
||||
}
|
||||
|
||||
// The actual data structure that stores our values. This is not accessible
|
||||
// directly from the outside.
|
||||
pub struct ServoSrc {
|
||||
cat: gstreamer::DebugCategory,
|
||||
appsrc: gstreamer_app::AppSrc,
|
||||
srcpad: gstreamer::GhostPad,
|
||||
position: Mutex<Position>,
|
||||
seeking: AtomicBool,
|
||||
size: Mutex<Option<i64>>,
|
||||
}
|
||||
|
||||
impl ServoSrc {
|
||||
pub fn set_size(&self, size: i64) {
|
||||
if self.seeking.load(Ordering::Relaxed) {
|
||||
// We ignore set_size requests if we are seeking.
|
||||
// The size value is temporarily stored so it
|
||||
// is properly set once we are done seeking.
|
||||
*self.size.lock().unwrap() = Some(size);
|
||||
return;
|
||||
}
|
||||
|
||||
if self.appsrc.size() == -1 {
|
||||
self.appsrc.set_size(size);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_seek_offset<O: IsA<gstreamer::Object>>(&self, parent: &O, offset: u64) -> bool {
|
||||
let mut pos = self.position.lock().unwrap();
|
||||
|
||||
if pos.offset == offset || pos.requested_offset != 0 {
|
||||
false
|
||||
} else {
|
||||
self.seeking.store(true, Ordering::Relaxed);
|
||||
pos.requested_offset = offset;
|
||||
gstreamer::debug!(
|
||||
self.cat,
|
||||
obj = parent,
|
||||
"seeking to offset: {}",
|
||||
pos.requested_offset
|
||||
);
|
||||
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_seek_done(&self) {
|
||||
self.seeking.store(false, Ordering::Relaxed);
|
||||
|
||||
if let Some(size) = self.size.lock().unwrap().take() {
|
||||
if self.appsrc.size() == -1 {
|
||||
self.appsrc.set_size(size);
|
||||
}
|
||||
}
|
||||
|
||||
let mut pos = self.position.lock().unwrap();
|
||||
pos.offset = pos.requested_offset;
|
||||
pos.requested_offset = 0;
|
||||
}
|
||||
|
||||
pub fn push_buffer<O: IsA<gstreamer::Object>>(
|
||||
&self,
|
||||
parent: &O,
|
||||
data: Vec<u8>,
|
||||
) -> Result<gstreamer::FlowSuccess, gstreamer::FlowError> {
|
||||
if self.seeking.load(Ordering::Relaxed) {
|
||||
gstreamer::debug!(self.cat, obj = parent, "seek in progress, ignored data");
|
||||
return Ok(gstreamer::FlowSuccess::Ok);
|
||||
}
|
||||
|
||||
let mut pos = self.position.lock().unwrap(); // will block seeking
|
||||
|
||||
let length = u64::try_from(data.len()).unwrap();
|
||||
let mut data_offset = 0;
|
||||
|
||||
let buffer_starting_offset = pos.offset;
|
||||
|
||||
// @TODO: optimization: update the element's blocksize by
|
||||
// X factor given current length
|
||||
|
||||
pos.offset += length;
|
||||
|
||||
gstreamer::trace!(self.cat, obj = parent, "offset: {}", pos.offset);
|
||||
|
||||
// set the stream size (in bytes) to current offset if
|
||||
// size is lesser than it
|
||||
if let Ok(size) = u64::try_from(self.appsrc.size()) {
|
||||
if pos.offset > size {
|
||||
gstreamer::debug!(
|
||||
self.cat,
|
||||
obj = parent,
|
||||
"Updating internal size from {} to {}",
|
||||
size,
|
||||
pos.offset
|
||||
);
|
||||
let new_size = i64::try_from(pos.offset).unwrap();
|
||||
self.appsrc.set_size(new_size);
|
||||
}
|
||||
}
|
||||
|
||||
// Split the received vec<> into buffers that are of a
|
||||
// size basesrc suggest. It is important not to push
|
||||
// buffers that are too large, otherwise incorrect
|
||||
// buffering messages can be sent from the pipeline
|
||||
let block_size = 4096;
|
||||
let num_blocks = ((length - data_offset) as f64 / block_size as f64).ceil() as u64;
|
||||
|
||||
gstreamer::log!(
|
||||
self.cat,
|
||||
obj = parent,
|
||||
"Splitting the received vec into {} blocks",
|
||||
num_blocks
|
||||
);
|
||||
|
||||
let mut ret: Result<gstreamer::FlowSuccess, gstreamer::FlowError> =
|
||||
Ok(gstreamer::FlowSuccess::Ok);
|
||||
for i in 0..num_blocks {
|
||||
let start = usize::try_from(i * block_size + data_offset).unwrap();
|
||||
data_offset = 0;
|
||||
let size = usize::try_from(block_size.min(length - start as u64)).unwrap();
|
||||
let end = start + size;
|
||||
|
||||
let buffer_offset = buffer_starting_offset + start as u64;
|
||||
let buffer_offset_end = buffer_offset + size as u64;
|
||||
|
||||
let subdata = Vec::from(&data[start..end]);
|
||||
let mut buffer = gstreamer::Buffer::from_slice(subdata);
|
||||
{
|
||||
let buffer = buffer.get_mut().unwrap();
|
||||
buffer.set_offset(buffer_offset);
|
||||
buffer.set_offset_end(buffer_offset_end);
|
||||
}
|
||||
|
||||
if self.seeking.load(Ordering::Relaxed) {
|
||||
gstreamer::trace!(
|
||||
self.cat,
|
||||
obj = parent,
|
||||
"stopping buffer appends due to seek"
|
||||
);
|
||||
ret = Ok(gstreamer::FlowSuccess::Ok);
|
||||
break;
|
||||
}
|
||||
|
||||
gstreamer::trace!(self.cat, obj = parent, "Pushing buffer {:?}", buffer);
|
||||
|
||||
ret = self.appsrc.push_buffer(buffer);
|
||||
match ret {
|
||||
Ok(_) => (),
|
||||
Err(gstreamer::FlowError::Eos) | Err(gstreamer::FlowError::Flushing) => {
|
||||
ret = Ok(gstreamer::FlowSuccess::Ok)
|
||||
},
|
||||
Err(_) => break,
|
||||
}
|
||||
}
|
||||
|
||||
ret
|
||||
}
|
||||
|
||||
// Generated one-line forwarders to the inner appsrc element.
inner_appsrc_proxy!(end_of_stream, Result<gstreamer::FlowSuccess, gstreamer::FlowError>);
inner_appsrc_proxy!(set_callbacks, callbacks, gstreamer_app::AppSrcCallbacks, ());
|
||||
|
||||
/// Handles queries arriving on the ghost source pad.
///
/// Scheduling queries are answered directly, reporting push-mode,
/// sequential, bandwidth-limited access (see the comment below for why);
/// every other query is forwarded to the default pad query handler.
fn query(&self, pad: &gstreamer::GhostPad, query: &mut gstreamer::QueryRef) -> bool {
    gstreamer::log!(self.cat, obj = pad, "Handling query {:?}", query);

    // In order to make buffering/downloading work as we want, apart from
    // setting the appropriate flags on the player playbin,
    // the source needs to either:
    //
    // 1. be an http, mms, etc. scheme
    // 2. report that it is "bandwidth limited".
    //
    // 1. is not straightforward because we are using a servosrc scheme for now.
    // This may change in the future if we end up handling http/https/data
    // URIs, which is what WebKit does.
    //
    // For 2. we need to make servosrc handle the scheduling properties query
    // to report that it "is bandwidth limited".
    let ret = match query.view_mut() {
        gstreamer::QueryViewMut::Scheduling(ref mut q) => {
            let flags = gstreamer::SchedulingFlags::SEQUENTIAL |
                gstreamer::SchedulingFlags::BANDWIDTH_LIMITED;
            q.set(flags, 1, -1, 0);
            q.add_scheduling_modes([gstreamer::PadMode::Push]);
            true
        },
        _ => gstreamer::Pad::query_default(pad, Some(&*self.obj()), query),
    };

    if ret {
        gstreamer::log!(self.cat, obj = pad, "Handled query {:?}", query);
    } else {
        gstreamer::info!(self.cat, obj = pad, "Didn't handle query {:?}", query);
    }
    ret
}
|
||||
}
|
||||
|
||||
// Basic declaration of our type for the GObject type system
|
||||
#[glib::object_subclass]
impl ObjectSubclass for ServoSrc {
    const NAME: &'static str = "ServoSrc";
    type Type = super::ServoSrc;
    type ParentType = gstreamer::Bin;
    type Interfaces = (gstreamer::URIHandler,);

    // Called once at the very beginning of instantiation of each instance and
    // creates the data structure that contains all our state
    fn with_class(klass: &Self::Class) -> Self {
        // The inner appsrc element that actually feeds data into the pipeline.
        let app_src = gstreamer::ElementFactory::make("appsrc")
            .build()
            .map(|elem| elem.downcast::<gstreamer_app::AppSrc>().unwrap())
            .expect("Could not create appsrc element");

        // Ghost pad that will proxy the appsrc's src pad; queries are
        // intercepted so scheduling queries can be answered by our own
        // query() implementation.
        let pad_templ = klass.pad_template("src").unwrap();
        let ghost_pad = gstreamer::GhostPad::builder_from_template(&pad_templ)
            .query_function(|pad, parent, query| {
                ServoSrc::catch_panic_pad_function(
                    parent,
                    // Report the query as unhandled if the handler panics.
                    || false,
                    |servosrc| servosrc.query(pad, query),
                )
            })
            .build();

        Self {
            cat: gstreamer::DebugCategory::new(
                "servosrc",
                gstreamer::DebugColorFlags::empty(),
                Some("Servo source"),
            ),
            appsrc: app_src,
            srcpad: ghost_pad,
            position: Mutex::new(Default::default()),
            seeking: AtomicBool::new(false),
            size: Mutex::new(None),
        }
    }
}
|
||||
|
||||
// The ObjectImpl trait provides the setters/getters for GObject properties.
|
||||
// Here we need to provide the values that are internally stored back to the
|
||||
// caller, or store whatever new value the caller is providing.
|
||||
//
|
||||
// This maps between the GObject properties and our internal storage of the
|
||||
// corresponding values of the properties.
|
||||
impl ObjectImpl for ServoSrc {
    // Called right after construction of a new instance
    fn constructed(&self) {
        // Call the parent class' ::constructed() implementation first
        self.parent_constructed();

        self.obj()
            .add(&self.appsrc)
            .expect("Could not add appsrc element to bin");

        // Expose the appsrc's src pad through our ghost pad.
        let target_pad = self.appsrc.static_pad("src");
        self.srcpad.set_target(target_pad.as_ref()).unwrap();

        self.obj()
            .add_pad(&self.srcpad)
            .expect("Could not add source pad to bin");

        // Configure the appsrc: caps unknown, byte-oriented, seekable,
        // non-blocking, with a bounded internal queue.
        self.appsrc.set_caps(None::<&gstreamer::Caps>);
        self.appsrc.set_max_bytes(MAX_SRC_QUEUE_SIZE);
        self.appsrc.set_block(false);
        self.appsrc.set_format(gstreamer::Format::Bytes);
        self.appsrc
            .set_stream_type(gstreamer_app::AppStreamType::Seekable);

        self.obj()
            .set_element_flags(gstreamer::ElementFlags::SOURCE);
    }
}
|
||||
|
||||
// No GstObject virtual methods need to be overridden.
impl GstObjectImpl for ServoSrc {}
|
||||
|
||||
// Implementation of gstreamer::Element virtual methods
|
||||
impl ElementImpl for ServoSrc {
    // Element metadata shown by introspection tools such as gst-inspect.
    fn metadata() -> Option<&'static gstreamer::subclass::ElementMetadata> {
        static ELEMENT_METADATA: Lazy<gstreamer::subclass::ElementMetadata> = Lazy::new(|| {
            gstreamer::subclass::ElementMetadata::new(
                "Servo Media Source",
                "Source/Audio/Video",
                "Feed player with media data",
                "Servo developers",
            )
        });

        Some(&*ELEMENT_METADATA)
    }

    // A single always-present "src" pad template accepting any caps.
    fn pad_templates() -> &'static [gstreamer::PadTemplate] {
        static PAD_TEMPLATES: Lazy<Vec<gstreamer::PadTemplate>> = Lazy::new(|| {
            let caps = gstreamer::Caps::new_any();
            let src_pad_template = gstreamer::PadTemplate::new(
                "src",
                gstreamer::PadDirection::Src,
                gstreamer::PadPresence::Always,
                &caps,
            )
            .unwrap();

            vec![src_pad_template]
        });

        PAD_TEMPLATES.as_ref()
    }
}
|
||||
|
||||
// Implementation of gstreamer::Bin virtual methods
|
||||
impl BinImpl for ServoSrc {} // default Bin behavior is sufficient
|
||||
|
||||
impl URIHandlerImpl for ServoSrc {
    const URI_TYPE: gstreamer::URIType = gstreamer::URIType::Src;

    // The only scheme this source element claims.
    fn protocols() -> &'static [&'static str] {
        &["servosrc"]
    }

    // The URI is fixed: there is a single servosrc "location".
    fn uri(&self) -> Option<String> {
        Some("servosrc://".to_string())
    }

    // Accepts any well-formed URI whose scheme is "servosrc"; the URI carries
    // no further information, so nothing is stored.
    fn set_uri(&self, uri: &str) -> Result<(), glib::Error> {
        if let Ok(uri) = Url::parse(uri) {
            if uri.scheme() == "servosrc" {
                return Ok(());
            }
        }
        Err(glib::Error::new(
            gstreamer::URIError::BadUri,
            format!("Invalid URI '{:?}'", uri,).as_str(),
        ))
    }
}
|
||||
}
|
||||
|
||||
// Public part of the ServoSrc type. This behaves like a normal
|
||||
// GObject binding
|
||||
glib::wrapper! {
    /// Public handle for the `servosrc` element; the actual implementation
    /// lives in `imp::ServoSrc`.
    pub struct ServoSrc(ObjectSubclass<imp::ServoSrc>)
        @extends gstreamer::Bin, gstreamer::Element, gstreamer::Object, @implements gstreamer::URIHandler;
}
|
||||
|
||||
// SAFETY: NOTE(review) — inherited from upstream servo/media without a
// justification. The implementation struct keeps its mutable state behind
// Mutex/AtomicBool (see imp::ServoSrc), which supports these impls, but
// confirm that no non-thread-safe GObject state is reachable from &ServoSrc
// before relying on them.
unsafe impl Send for ServoSrc {}
unsafe impl Sync for ServoSrc {}
|
||||
|
||||
impl ServoSrc {
    /// Sets the total stream size in bytes on the inner appsrc.
    pub fn set_size(&self, size: i64) {
        self.imp().set_size(size);
    }

    /// Requests a seek to `offset`; returns whether the request was accepted.
    pub fn set_seek_offset(&self, offset: u64) -> bool {
        self.imp().set_seek_offset(self, offset)
    }

    /// Signals that the in-flight seek has completed.
    pub fn set_seek_done(&self) {
        self.imp().set_seek_done();
    }

    /// Feeds media bytes into the source.
    pub fn push_buffer(
        &self,
        data: Vec<u8>,
    ) -> Result<gstreamer::FlowSuccess, gstreamer::FlowError> {
        self.imp().push_buffer(self, data)
    }

    /// Signals end-of-stream to the inner appsrc.
    pub fn push_end_of_stream(&self) -> Result<gstreamer::FlowSuccess, gstreamer::FlowError> {
        self.imp().end_of_stream()
    }

    /// Installs the appsrc callbacks (need-data / enough-data / seek).
    pub fn set_callbacks(&self, callbacks: gstreamer_app::AppSrcCallbacks) {
        self.imp().set_callbacks(callbacks)
    }
}
|
||||
|
||||
// Registers the type for our element, and then registers in GStreamer
|
||||
// under the name "servosrc" for being able to instantiate it via e.g.
|
||||
// gstreamer::ElementFactory::make().
|
||||
/// Registers the `servosrc` element with GStreamer (no plugin, rank NONE)
/// so it can be created via `gstreamer::ElementFactory::make("servosrc")`.
pub fn register_servo_src() -> Result<(), glib::BoolError> {
    gstreamer::Element::register(
        None,
        "servosrc",
        gstreamer::Rank::NONE,
        ServoSrc::static_type(),
    )
}
|
||||
815
components/media/backends/gstreamer/webrtc.rs
Normal file
815
components/media/backends/gstreamer/webrtc.rs
Normal file
@@ -0,0 +1,815 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::{cmp, mem};
|
||||
|
||||
use glib::prelude::*;
|
||||
use gstreamer::prelude::*;
|
||||
use log::warn;
|
||||
use servo_media_streams::MediaStreamType;
|
||||
use servo_media_streams::registry::{MediaStreamId, get_stream};
|
||||
use servo_media_webrtc::datachannel::DataChannelId;
|
||||
use servo_media_webrtc::thread::InternalEvent;
|
||||
use servo_media_webrtc::{WebRtcController as WebRtcThread, *};
|
||||
use {glib, gstreamer, gstreamer_sdp, gstreamer_webrtc};
|
||||
|
||||
use super::BACKEND_BASE_TIME;
|
||||
use crate::datachannel::GStreamerWebRtcDataChannel;
|
||||
use crate::media_stream::GStreamerMediaStream;
|
||||
|
||||
// TODO:
|
||||
// - figure out purpose of glib loop
|
||||
|
||||
/// Per-m-line bookkeeping extracted from a remote SDP, used to match local
/// streams to the correct webrtcbin sink pad.
#[derive(Debug, Clone)]
pub struct MLineInfo {
    /// The caps for the given m-line
    caps: gstreamer::Caps,
    /// Whether or not this sink pad has already been connected
    is_used: bool,
    /// The payload value of the given m-line
    payload: i32,
}
|
||||
|
||||
/// Destination for incoming data-channel events.
enum DataChannelEventTarget {
    /// Events that arrived before the channel object exists are buffered
    /// here until the channel is created.
    Buffered(Vec<DataChannelEvent>),
    /// The channel exists; events are delivered to the signaller directly.
    Created(GStreamerWebRtcDataChannel),
}
|
||||
|
||||
/// GStreamer-backed WebRTC controller wrapping a `webrtcbin` element hosted
/// in its own pipeline.
pub struct GStreamerWebRtcController {
    /// The webrtcbin element itself.
    webrtc: gstreamer::Element,
    /// The pipeline hosting `webrtc` and any linked media streams.
    pipeline: gstreamer::Pipeline,
    /// We can't trigger a negotiation-needed event until we have streams, or otherwise
    /// a createOffer() call will lead to bad SDP. Instead, we delay negotiation.
    delayed_negotiation: bool,
    /// A handle to the event loop abstraction surrounding the webrtc implementations,
    /// which lets gstreamer callbacks send events back to the event loop to run on this object
    thread: WebRtcThread,
    /// Embedder callbacks (negotiation, ICE candidates, stream/channel events).
    signaller: Box<dyn WebRtcSignaller>,
    /// All the streams that are actually connected to the webrtcbin (i.e., their presence has already
    /// been negotiated)
    streams: Vec<MediaStreamId>,
    /// Disconnected streams that are waiting to be linked. Streams are
    /// only linked when:
    ///
    /// - An offer is made (all pending streams are flushed)
    /// - An offer is received (all matching pending streams are flushed)
    /// - A stream is added when there is a so-far-disconnected remote-m-line
    ///
    /// In other words, these are all yet to be negotiated
    ///
    /// See link_stream
    pending_streams: Vec<MediaStreamId>,
    /// Each new webrtc stream should have a new payload/pt value, starting at 96
    ///
    /// This is maintained as a known yet-unused payload number, being incremented whenever
    /// we use it, and set to (remote_pt + 1) if the remote sends us a stream with a higher pt
    pt_counter: i32,
    /// We keep track of how many request pads have been created on webrtcbin
    /// so that we can request more to fill in the gaps and acquire a specific pad if necessary
    request_pad_counter: usize,
    /// Streams need to be connected to the relevant sink pad, and we figure this out
    /// by keeping track of the caps of each m-line in the SDP.
    remote_mline_info: Vec<MLineInfo>,
    /// Temporary storage for remote_mline_info until the remote description is applied
    ///
    /// Without this, a unluckily timed call to link_stream() may happen before the webrtcbin
    /// knows the remote description, but while we _think_ it does
    pending_remote_mline_info: Vec<MLineInfo>,
    /// In case we get multiple remote offers, this lets us keep track of which is the newest
    remote_offer_generation: u32,
    /// Held only to keep the glib main loop alive; the leading underscore
    /// marks it as otherwise unused.
    _main_loop: glib::MainLoop,
    /// Data channels by id, either fully created or still buffering early
    /// events; shared with gstreamer signal callbacks.
    data_channels: Arc<Mutex<HashMap<DataChannelId, DataChannelEventTarget>>>,
    /// Monotonic source of data-channel ids, shared with gstreamer callbacks.
    next_data_channel_id: Arc<AtomicUsize>,
}
|
||||
|
||||
impl WebRtcControllerBackend for GStreamerWebRtcController {
    /// Forwards a remote ICE candidate to webrtcbin.
    fn add_ice_candidate(&mut self, candidate: IceCandidate) -> WebRtcResult {
        self.webrtc.emit_by_name::<()>(
            "add-ice-candidate",
            &[&candidate.sdp_mline_index, &candidate.candidate],
        );
        Ok(())
    }

    fn set_remote_description(
        &mut self,
        desc: SessionDescription,
        cb: Box<dyn FnOnce() + Send + 'static>,
    ) -> WebRtcResult {
        self.set_description(desc, DescriptionType::Remote, cb)
    }

    fn set_local_description(
        &mut self,
        desc: SessionDescription,
        cb: Box<dyn FnOnce() + Send + 'static>,
    ) -> WebRtcResult {
        self.set_description(desc, DescriptionType::Local, cb)
    }

    /// Links all pending streams (so the offer covers them), starts the
    /// pipeline, and asks webrtcbin for an offer; `cb` fires asynchronously
    /// when the promise resolves.
    fn create_offer(
        &mut self,
        cb: Box<dyn FnOnce(SessionDescription) + Send + 'static>,
    ) -> WebRtcResult {
        self.flush_pending_streams(true)?;
        self.pipeline.set_state(gstreamer::State::Playing)?;
        let promise = gstreamer::Promise::with_change_func(move |res| {
            res.map(|s| on_offer_or_answer_created(SdpType::Offer, s.unwrap(), cb))
                .unwrap();
        });

        self.webrtc
            .emit_by_name::<()>("create-offer", &[&None::<gstreamer::Structure>, &promise]);
        Ok(())
    }

    /// Asks webrtcbin for an answer to a previously applied remote offer.
    fn create_answer(
        &mut self,
        cb: Box<dyn FnOnce(SessionDescription) + Send + 'static>,
    ) -> WebRtcResult {
        let promise = gstreamer::Promise::with_change_func(move |res| {
            res.map(|s| on_offer_or_answer_created(SdpType::Answer, s.unwrap(), cb))
                .unwrap();
        });

        self.webrtc
            .emit_by_name::<()>("create-answer", &[&None::<gstreamer::Structure>, &promise]);
        Ok(())
    }

    /// Looks up the stream in the registry and links (or queues) it; may
    /// also fire a previously delayed negotiation-needed signal now that
    /// streams exist.
    fn add_stream(&mut self, stream_id: &MediaStreamId) -> WebRtcResult {
        let stream =
            get_stream(stream_id).expect("Media streams registry does not contain such ID");
        let mut stream = stream.lock().unwrap();
        let stream = stream
            .as_mut_any()
            .downcast_mut::<GStreamerMediaStream>()
            .ok_or("Does not currently support non-gstreamer streams")?;
        self.link_stream(stream_id, stream, false)?;
        if self.delayed_negotiation && (self.streams.len() > 1 || self.pending_streams.len() > 1) {
            self.delayed_negotiation = false;
            self.signaller.on_negotiation_needed(&self.thread);
        }
        Ok(())
    }

    /// Creates a locally-initiated data channel with a freshly allocated id.
    fn create_data_channel(&mut self, init: &DataChannelInit) -> WebRtcDataChannelResult {
        let id = self.next_data_channel_id.fetch_add(1, Ordering::Relaxed);
        match GStreamerWebRtcDataChannel::new(&id, &self.webrtc, &self.thread, init) {
            Ok(channel) => register_data_channel(self.data_channels.clone(), id, channel),
            Err(error) => Err(WebRtcError::Backend(error)),
        }
    }

    fn close_data_channel(&mut self, id: &DataChannelId) -> WebRtcResult {
        // There is no need to unregister the channel here. It will be unregistered
        // when the data channel backend triggers the on closed event.
        let mut data_channels = self.data_channels.lock().unwrap();
        match data_channels.get(id) {
            Some(ref channel) => match channel {
                DataChannelEventTarget::Created(channel) => {
                    channel.close();
                    Ok(())
                },
                // A channel that only ever buffered events was never fully
                // created; just drop the buffer.
                DataChannelEventTarget::Buffered(_) => data_channels
                    .remove(id)
                    .ok_or(WebRtcError::Backend("Unknown data channel".to_owned()))
                    .map(|_| ()),
            },
            None => Err(WebRtcError::Backend("Unknown data channel".to_owned())),
        }
    }

    /// Sends `message` on the channel; messages to a not-yet-created
    /// (buffered) channel are silently dropped.
    fn send_data_channel_message(
        &mut self,
        id: &DataChannelId,
        message: &DataChannelMessage,
    ) -> WebRtcResult {
        match self.data_channels.lock().unwrap().get(id) {
            Some(ref channel) => match channel {
                DataChannelEventTarget::Created(channel) => {
                    channel.send(message);
                    Ok(())
                },
                _ => Ok(()),
            },
            None => Err(WebRtcError::Backend("Unknown data channel".to_owned())),
        }
    }

    fn configure(&mut self, stun_server: &str, policy: BundlePolicy) -> WebRtcResult {
        self.webrtc
            .set_property_from_str("stun-server", stun_server);
        self.webrtc
            .set_property_from_str("bundle-policy", policy.as_str());
        Ok(())
    }

    /// Dispatches events posted back from gstreamer callbacks onto this
    /// controller's thread.
    fn internal_event(&mut self, e: thread::InternalEvent) -> WebRtcResult {
        match e {
            InternalEvent::OnNegotiationNeeded => {
                if self.streams.is_empty() && self.pending_streams.is_empty() {
                    // we have no streams

                    // If the pipeline starts playing and on-negotiation-needed is present before there are any
                    // media streams, an invalid SDP offer will be created. Therefore, delay emitting the signal
                    self.delayed_negotiation = true;
                } else {
                    self.signaller.on_negotiation_needed(&self.thread);
                }
            },
            InternalEvent::OnIceCandidate(candidate) => {
                self.signaller.on_ice_candidate(&self.thread, candidate);
            },
            InternalEvent::OnAddStream(stream, ty) => {
                self.pipeline.set_state(gstreamer::State::Playing)?;
                self.signaller.on_add_stream(&stream, ty);
            },
            InternalEvent::OnDataChannelEvent(channel_id, event) => {
                let mut data_channels = self.data_channels.lock().unwrap();
                match data_channels.get_mut(&channel_id) {
                    // First event for an unknown channel: start buffering
                    // until the channel object is created.
                    None => {
                        data_channels
                            .insert(channel_id, DataChannelEventTarget::Buffered(vec![event]));
                    },
                    Some(ref mut channel) => match channel {
                        &mut &mut DataChannelEventTarget::Buffered(ref mut events) => {
                            events.push(event);
                            return Ok(());
                        },
                        DataChannelEventTarget::Created(_) => {
                            // Closing unregisters the channel before the
                            // event is forwarded to the signaller.
                            if let DataChannelEvent::Close = event {
                                data_channels.remove(&channel_id);
                            }
                            self.signaller
                                .on_data_channel_event(channel_id, event, &self.thread);
                        },
                    },
                }
            },
            InternalEvent::DescriptionAdded(cb, description_type, ty, remote_offer_generation) => {
                // Only the newest remote offer (matching generation) may
                // promote the pending m-line info and flush pending streams.
                if description_type == DescriptionType::Remote &&
                    ty == SdpType::Offer &&
                    remote_offer_generation == self.remote_offer_generation
                {
                    mem::swap(
                        &mut self.pending_remote_mline_info,
                        &mut self.remote_mline_info,
                    );
                    self.pending_remote_mline_info.clear();
                    self.flush_pending_streams(false)?;
                }
                self.pipeline.set_state(gstreamer::State::Playing)?;
                cb();
            },
            InternalEvent::UpdateSignalingState => {
                use gstreamer_webrtc::WebRTCSignalingState::*;
                let val = self
                    .webrtc
                    .property::<gstreamer_webrtc::WebRTCSignalingState>("signaling-state");
                let state = match val {
                    Stable => SignalingState::Stable,
                    HaveLocalOffer => SignalingState::HaveLocalOffer,
                    HaveRemoteOffer => SignalingState::HaveRemoteOffer,
                    HaveLocalPranswer => SignalingState::HaveLocalPranswer,
                    HaveRemotePranswer => SignalingState::HaveRemotePranswer,
                    Closed => SignalingState::Closed,
                    i => {
                        return Err(WebRtcError::Backend(format!(
                            "unknown signaling state: {:?}",
                            i
                        )));
                    },
                };
                self.signaller.update_signaling_state(state);
            },
            InternalEvent::UpdateGatheringState => {
                use gstreamer_webrtc::WebRTCICEGatheringState::*;
                let val = self
                    .webrtc
                    .property::<gstreamer_webrtc::WebRTCICEGatheringState>("ice-gathering-state");
                let state = match val {
                    New => GatheringState::New,
                    Gathering => GatheringState::Gathering,
                    Complete => GatheringState::Complete,
                    i => {
                        return Err(WebRtcError::Backend(format!(
                            "unknown gathering state: {:?}",
                            i
                        )));
                    },
                };
                self.signaller.update_gathering_state(state);
            },
            InternalEvent::UpdateIceConnectionState => {
                use gstreamer_webrtc::WebRTCICEConnectionState::*;
                let val = self
                    .webrtc
                    .property::<gstreamer_webrtc::WebRTCICEConnectionState>("ice-connection-state");
                let state = match val {
                    New => IceConnectionState::New,
                    Checking => IceConnectionState::Checking,
                    Connected => IceConnectionState::Connected,
                    Completed => IceConnectionState::Completed,
                    Disconnected => IceConnectionState::Disconnected,
                    Failed => IceConnectionState::Failed,
                    Closed => IceConnectionState::Closed,
                    i => {
                        return Err(WebRtcError::Backend(format!(
                            "unknown ICE connection state: {:?}",
                            i
                        )));
                    },
                };
                self.signaller.update_ice_connection_state(state);
            },
        }
        Ok(())
    }

    /// Closes the signaller and tears the pipeline down.
    fn quit(&mut self) {
        self.signaller.close();

        self.pipeline.set_state(gstreamer::State::Null).unwrap();
    }
}
|
||||
|
||||
impl GStreamerWebRtcController {
|
||||
/// Shared implementation of set-local-description / set-remote-description.
///
/// Remote descriptions additionally bump `remote_offer_generation` and stash
/// the SDP's m-line info, so that pending streams can be matched to sink
/// pads once webrtcbin resolves the returned promise (via the
/// `DescriptionAdded` internal event, which also invokes `cb`).
fn set_description(
    &mut self,
    desc: SessionDescription,
    description_type: DescriptionType,
    cb: Box<dyn FnOnce() + Send + 'static>,
) -> WebRtcResult {
    let ty = match desc.type_ {
        SdpType::Answer => gstreamer_webrtc::WebRTCSDPType::Answer,
        SdpType::Offer => gstreamer_webrtc::WebRTCSDPType::Offer,
        SdpType::Pranswer => gstreamer_webrtc::WebRTCSDPType::Pranswer,
        SdpType::Rollback => gstreamer_webrtc::WebRTCSDPType::Rollback,
    };

    // Which webrtcbin signal to emit.
    let kind = match description_type {
        DescriptionType::Local => "set-local-description",
        DescriptionType::Remote => "set-remote-description",
    };

    let sdp = gstreamer_sdp::SDPMessage::parse_buffer(desc.sdp.as_bytes()).unwrap();
    if description_type == DescriptionType::Remote {
        self.remote_offer_generation += 1;
        self.store_remote_mline_info(&sdp);
    }
    let answer = gstreamer_webrtc::WebRTCSessionDescription::new(ty, sdp);
    let thread = self.thread.clone();
    let remote_offer_generation = self.remote_offer_generation;
    let promise = gstreamer::Promise::with_change_func(move |_promise| {
        // remote_offer_generation here ensures that DescriptionAdded doesn't
        // flush pending_remote_mline_info for stale remote offer callbacks
        thread.internal_event(InternalEvent::DescriptionAdded(
            cb,
            description_type,
            desc.type_,
            remote_offer_generation,
        ));
    });
    self.webrtc.emit_by_name::<()>(kind, &[&answer, &promise]);
    Ok(())
}
|
||||
|
||||
/// Parses the remote SDP and records, per m-line, the caps and payload value
/// needed to later match local streams to webrtcbin sink pads.
///
/// The result is written to `pending_remote_mline_info` and only promoted to
/// `remote_mline_info` once the remote description has actually been applied.
fn store_remote_mline_info(&mut self, sdp: &gstreamer_sdp::SDPMessage) {
    self.pending_remote_mline_info.clear();
    for media in sdp.medias() {
        let mut caps = gstreamer::Caps::new_empty();
        let caps_mut = caps.get_mut().expect("Fresh caps should be uniquely owned");
        for format in media.formats() {
            // NOTE(review): this `return` exits the whole function, skipping
            // any m-lines after a datachannel one — confirm this is intended
            // rather than a `continue` over just this m-line.
            if format == "webrtc-datachannel" {
                return;
            }
            let pt = format
                .parse()
                .expect("Gstreamer provided noninteger format");
            caps_mut.append(
                media
                    .caps_from_media(pt)
                    .expect("get_format() did not return a format from the SDP"),
            );
            // Keep our own payload counter ahead of anything the remote uses.
            self.pt_counter = cmp::max(self.pt_counter, pt + 1);
        }
        for s in caps_mut.iter_mut() {
            // the caps are application/x-unknown by default, which will fail
            // to intersect
            //
            // see https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/blob/ba62917fbfd98ea76d4e066a6f18b4a14b847362/ext/webrtc/gstwebrtcbin.c#L2521
            s.set_name("application/x-rtp")
        }
        // This info is not current until the promise from set-remote-description is resolved,
        // to avoid any races where we attempt to link streams before the promise resolves we
        // queue this up in a pending buffer
        self.pending_remote_mline_info.push(MLineInfo {
            caps,
            // XXXManishearth in the (yet unsupported) case of dynamic stream addition and renegotiation
            // this will need to be checked against the current set of streams
            is_used: false,
            // XXXManishearth ideally, we keep track of all payloads and have the capability of picking
            // the appropriate decoder. For this, a bunch of the streams code will have to be moved into
            // a webrtc-specific abstraction.
            payload: media
                .format(0)
                .expect("Gstreamer reported incorrect formats_len()")
                .parse()
                .expect("Gstreamer provided noninteger format"),
        });
    }
}
|
||||
|
||||
/// Streams need to be linked to the correct pads, so we buffer them up until we know enough
/// to do this.
///
/// When we get a remote offer, we store the relevant m-line information so that we can
/// pick the correct sink pad and payload. Shortly after we look for any pending streams
/// and connect them to available compatible m-lines using link_stream.
///
/// When we create an offer, we're controlling the pad order, so we set request_new_pads
/// to true and forcefully link all pending streams before generating the offer.
///
/// When request_new_pads is false, we may still request new pads, however we only do this for
/// streams that have already been negotiated by the remote.
fn link_stream(
    &mut self,
    stream_id: &MediaStreamId,
    stream: &mut GStreamerMediaStream,
    request_new_pads: bool,
) -> WebRtcResult {
    // Find the first unused remote m-line whose caps can intersect ours.
    let caps = stream.caps();
    let idx = self
        .remote_mline_info
        .iter()
        .enumerate()
        .filter(|(_, x)| !x.is_used)
        .find(|(_, x)| x.caps.can_intersect(caps))
        .map(|x| x.0);
    if let Some(idx) = idx {
        if idx >= self.request_pad_counter {
            for i in self.request_pad_counter..=idx {
                // webrtcbin needs you to request pads (or use element.link(webrtcbin))
                // however, it also wants them to be connected in the correct order.
                //
                // Here, we make sure all the numbered sink pads have been created beforehand, up to
                // and including the one we need here.
                //
                // An alternate fix is to sort pending_streams according to the m-line index
                // and just do it in order. This also seems brittle.
                self.webrtc
                    .request_pad_simple(&format!("sink_{}", i))
                    .ok_or("Cannot request sink pad")?;
            }
            self.request_pad_counter = idx + 1;
        }
        // Link the stream's encoded output, carrying the remote's payload
        // number, to the matching numbered sink pad.
        stream.attach_to_pipeline(&self.pipeline);
        let element = stream.encoded();
        self.remote_mline_info[idx].is_used = true;
        let caps = stream.caps_with_payload(self.remote_mline_info[idx].payload);
        element.set_property("caps", &caps);
        let src = element.static_pad("src").ok_or("Cannot request src pad")?;
        let sink = self
            .webrtc
            .static_pad(&format!("sink_{}", idx))
            .ok_or("Cannot request sink pad")?;
        src.link(&sink)?;
        self.streams.push(*stream_id);
    } else if request_new_pads {
        // No matching m-line, but we are building the offer ourselves: take
        // the next free payload number and the next numbered sink pad.
        stream.attach_to_pipeline(&self.pipeline);
        let element = stream.encoded();
        let caps = stream.caps_with_payload(self.pt_counter);
        self.pt_counter += 1;
        element.set_property("caps", &caps);
        let src = element.static_pad("src").ok_or("Cannot request src pad")?;
        let sink = self
            .webrtc
            .request_pad_simple(&format!("sink_{}", self.request_pad_counter))
            .ok_or("Cannot request sink pad")?;
        self.request_pad_counter += 1;
        src.link(&sink)?;
        self.streams.push(*stream_id);
    } else {
        // Cannot link yet; queue the stream for a later flush.
        self.pending_streams.push(*stream_id);
    }
    Ok(())
}
|
||||
|
||||
/// link_stream, but for all pending streams
|
||||
fn flush_pending_streams(&mut self, request_new_pads: bool) -> WebRtcResult {
|
||||
let pending_streams = std::mem::take(&mut self.pending_streams);
|
||||
for stream_id in pending_streams {
|
||||
let stream =
|
||||
get_stream(&stream_id).expect("Media streams registry does not contain such ID");
|
||||
let mut stream = stream.lock().unwrap();
|
||||
let stream = stream
|
||||
.as_mut_any()
|
||||
.downcast_mut::<GStreamerMediaStream>()
|
||||
.ok_or("Does not currently support non-gstreamer streams")?;
|
||||
self.link_stream(&stream_id, stream, request_new_pads)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn start_pipeline(&mut self) -> WebRtcResult {
|
||||
self.pipeline.add(&self.webrtc)?;
|
||||
|
||||
// gstreamer needs Sync on these callbacks for some reason
|
||||
// https://github.com/sdroege/gstreamer-rs/issues/154
|
||||
let thread = Mutex::new(self.thread.clone());
|
||||
self.webrtc
|
||||
.connect("on-ice-candidate", false, move |values| {
|
||||
thread
|
||||
.lock()
|
||||
.unwrap()
|
||||
.internal_event(InternalEvent::OnIceCandidate(candidate(values)));
|
||||
None
|
||||
});
|
||||
|
||||
let thread = Arc::new(Mutex::new(self.thread.clone()));
|
||||
self.webrtc.connect_pad_added({
|
||||
let pipeline_weak = self.pipeline.downgrade();
|
||||
move |_element, pad| {
|
||||
let Some(pipe) = pipeline_weak.upgrade() else {
|
||||
warn!("Pipeline already deallocated");
|
||||
return;
|
||||
};
|
||||
process_new_stream(pad, &pipe, thread.clone());
|
||||
}
|
||||
});
|
||||
|
||||
// gstreamer needs Sync on these callbacks for some reason
|
||||
// https://github.com/sdroege/gstreamer-rs/issues/154
|
||||
let thread = Mutex::new(self.thread.clone());
|
||||
self.webrtc
|
||||
.connect("on-negotiation-needed", false, move |_values| {
|
||||
thread
|
||||
.lock()
|
||||
.unwrap()
|
||||
.internal_event(InternalEvent::OnNegotiationNeeded);
|
||||
None
|
||||
});
|
||||
|
||||
let thread = Mutex::new(self.thread.clone());
|
||||
self.webrtc
|
||||
.connect("notify::signaling-state", false, move |_values| {
|
||||
thread
|
||||
.lock()
|
||||
.unwrap()
|
||||
.internal_event(InternalEvent::UpdateSignalingState);
|
||||
None
|
||||
});
|
||||
let thread = Mutex::new(self.thread.clone());
|
||||
self.webrtc
|
||||
.connect("notify::ice-connection-state", false, move |_values| {
|
||||
thread
|
||||
.lock()
|
||||
.unwrap()
|
||||
.internal_event(InternalEvent::UpdateIceConnectionState);
|
||||
None
|
||||
});
|
||||
let thread = Mutex::new(self.thread.clone());
|
||||
self.webrtc
|
||||
.connect("notify::ice-gathering-state", false, move |_values| {
|
||||
thread
|
||||
.lock()
|
||||
.unwrap()
|
||||
.internal_event(InternalEvent::UpdateGatheringState);
|
||||
None
|
||||
});
|
||||
let thread = Mutex::new(self.thread.clone());
|
||||
let data_channels = self.data_channels.clone();
|
||||
let next_data_channel_id = self.next_data_channel_id.clone();
|
||||
self.webrtc
|
||||
.connect("on-data-channel", false, move |channel| {
|
||||
let channel = channel[1]
|
||||
.get::<gstreamer_webrtc::WebRTCDataChannel>()
|
||||
.map_err(|e| e.to_string())
|
||||
.expect("Invalid data channel");
|
||||
let id = next_data_channel_id.fetch_add(1, Ordering::Relaxed);
|
||||
let thread_ = thread.lock().unwrap().clone();
|
||||
match GStreamerWebRtcDataChannel::from(&id, channel, &thread_) {
|
||||
Ok(channel) => {
|
||||
let mut closed_channel = false;
|
||||
{
|
||||
thread_.internal_event(InternalEvent::OnDataChannelEvent(
|
||||
id,
|
||||
DataChannelEvent::NewChannel,
|
||||
));
|
||||
|
||||
let mut data_channels = data_channels.lock().unwrap();
|
||||
if let Some(ref mut channel) = data_channels.get_mut(&id) {
|
||||
match channel {
|
||||
&mut &mut DataChannelEventTarget::Buffered(ref mut events) => {
|
||||
for event in events.drain(0..) {
|
||||
if let DataChannelEvent::Close = event {
|
||||
closed_channel = true
|
||||
}
|
||||
thread_.internal_event(
|
||||
InternalEvent::OnDataChannelEvent(id, event),
|
||||
);
|
||||
}
|
||||
},
|
||||
_ => debug_assert!(
|
||||
false,
|
||||
"Trying to register a data channel with an existing ID"
|
||||
),
|
||||
}
|
||||
}
|
||||
data_channels.remove(&id);
|
||||
}
|
||||
if !closed_channel &&
|
||||
register_data_channel(data_channels.clone(), id, channel).is_err()
|
||||
{
|
||||
warn!("Could not register data channel {:?}", id);
|
||||
return None;
|
||||
}
|
||||
},
|
||||
Err(error) => {
|
||||
warn!("Could not create data channel {:?}", error);
|
||||
},
|
||||
}
|
||||
None
|
||||
});
|
||||
|
||||
self.pipeline.set_state(gstreamer::State::Ready)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn construct(
|
||||
signaller: Box<dyn WebRtcSignaller>,
|
||||
thread: WebRtcThread,
|
||||
) -> Result<GStreamerWebRtcController, WebRtcError> {
|
||||
let main_loop = glib::MainLoop::new(None, false);
|
||||
let pipeline = gstreamer::Pipeline::with_name("webrtc main");
|
||||
pipeline.set_start_time(gstreamer::ClockTime::NONE);
|
||||
pipeline.set_base_time(*BACKEND_BASE_TIME);
|
||||
pipeline.use_clock(Some(&gstreamer::SystemClock::obtain()));
|
||||
let webrtc = gstreamer::ElementFactory::make("webrtcbin")
|
||||
.name("sendrecv")
|
||||
.build()
|
||||
.map_err(|error| format!("webrtcbin element not found: {error:?}"))?;
|
||||
let mut controller = GStreamerWebRtcController {
|
||||
webrtc,
|
||||
pipeline,
|
||||
signaller,
|
||||
thread,
|
||||
remote_mline_info: vec![],
|
||||
pending_remote_mline_info: vec![],
|
||||
streams: vec![],
|
||||
pending_streams: vec![],
|
||||
pt_counter: 96,
|
||||
request_pad_counter: 0,
|
||||
remote_offer_generation: 0,
|
||||
delayed_negotiation: false,
|
||||
_main_loop: main_loop,
|
||||
data_channels: Arc::new(Mutex::new(HashMap::new())),
|
||||
next_data_channel_id: Arc::new(AtomicUsize::new(0)),
|
||||
};
|
||||
controller.start_pipeline()?;
|
||||
Ok(controller)
|
||||
}
|
||||
|
||||
fn on_offer_or_answer_created(
|
||||
ty: SdpType,
|
||||
reply: &gstreamer::StructureRef,
|
||||
cb: Box<dyn FnOnce(SessionDescription) + Send + 'static>,
|
||||
) {
|
||||
debug_assert!(ty == SdpType::Offer || ty == SdpType::Answer);
|
||||
let reply = reply
|
||||
.value(ty.as_str())
|
||||
.unwrap()
|
||||
.get::<gstreamer_webrtc::WebRTCSessionDescription>()
|
||||
.expect("Invalid argument");
|
||||
|
||||
let type_ = match reply.type_() {
|
||||
gstreamer_webrtc::WebRTCSDPType::Answer => SdpType::Answer,
|
||||
gstreamer_webrtc::WebRTCSDPType::Offer => SdpType::Offer,
|
||||
gstreamer_webrtc::WebRTCSDPType::Pranswer => SdpType::Pranswer,
|
||||
gstreamer_webrtc::WebRTCSDPType::Rollback => SdpType::Rollback,
|
||||
_ => panic!("unknown sdp response"),
|
||||
};
|
||||
|
||||
let desc = SessionDescription {
|
||||
sdp: reply.sdp().as_text().unwrap(),
|
||||
type_,
|
||||
};
|
||||
|
||||
cb(desc);
|
||||
}
|
||||
|
||||
fn on_incoming_stream(
|
||||
pipe: &gstreamer::Pipeline,
|
||||
thread: Arc<Mutex<WebRtcThread>>,
|
||||
pad: &gstreamer::Pad,
|
||||
) {
|
||||
let decodebin = gstreamer::ElementFactory::make("decodebin")
|
||||
.build()
|
||||
.unwrap();
|
||||
let caps = pad.query_caps(None);
|
||||
let name = caps
|
||||
.structure(0)
|
||||
.unwrap()
|
||||
.get::<String>("media")
|
||||
.expect("Invalid 'media' field");
|
||||
let decodebin2 = decodebin.clone();
|
||||
decodebin.connect_pad_added({
|
||||
let pipeline_weak = pipe.downgrade();
|
||||
move |_element, pad| {
|
||||
let Some(pipe) = pipeline_weak.upgrade() else {
|
||||
warn!("Pipeline already deallocated");
|
||||
return;
|
||||
};
|
||||
on_incoming_decodebin_stream(pad, &pipe, thread.clone(), &name);
|
||||
}
|
||||
});
|
||||
pipe.add(&decodebin).unwrap();
|
||||
|
||||
let decodepad = decodebin.static_pad("sink").unwrap();
|
||||
pad.link(&decodepad).unwrap();
|
||||
decodebin2.sync_state_with_parent().unwrap();
|
||||
}
|
||||
|
||||
fn on_incoming_decodebin_stream(
|
||||
pad: &gstreamer::Pad,
|
||||
pipe: &gstreamer::Pipeline,
|
||||
thread: Arc<Mutex<WebRtcThread>>,
|
||||
name: &str,
|
||||
) {
|
||||
let proxy_sink = gstreamer::ElementFactory::make("proxysink")
|
||||
.build()
|
||||
.unwrap();
|
||||
let proxy_src = gstreamer::ElementFactory::make("proxysrc")
|
||||
.property("proxysink", &proxy_sink)
|
||||
.build()
|
||||
.unwrap();
|
||||
pipe.add(&proxy_sink).unwrap();
|
||||
let sinkpad = proxy_sink.static_pad("sink").unwrap();
|
||||
|
||||
pad.link(&sinkpad).unwrap();
|
||||
proxy_sink.sync_state_with_parent().unwrap();
|
||||
|
||||
let (stream, ty) = if name == "video" {
|
||||
(
|
||||
GStreamerMediaStream::create_video_from(proxy_src),
|
||||
MediaStreamType::Video,
|
||||
)
|
||||
} else {
|
||||
(
|
||||
GStreamerMediaStream::create_audio_from(proxy_src),
|
||||
MediaStreamType::Audio,
|
||||
)
|
||||
};
|
||||
thread
|
||||
.lock()
|
||||
.unwrap()
|
||||
.internal_event(InternalEvent::OnAddStream(stream, ty));
|
||||
}
|
||||
|
||||
fn process_new_stream(
|
||||
pad: &gstreamer::Pad,
|
||||
pipe: &gstreamer::Pipeline,
|
||||
thread: Arc<Mutex<WebRtcThread>>,
|
||||
) {
|
||||
if pad.direction() != gstreamer::PadDirection::Src {
|
||||
// Ignore outgoing pad notifications.
|
||||
return;
|
||||
}
|
||||
on_incoming_stream(pipe, thread, pad)
|
||||
}
|
||||
|
||||
fn candidate(values: &[glib::Value]) -> IceCandidate {
|
||||
let _webrtc = values[0]
|
||||
.get::<gstreamer::Element>()
|
||||
.expect("Invalid argument");
|
||||
let sdp_mline_index = values[1].get::<u32>().expect("Invalid argument");
|
||||
let candidate = values[2].get::<String>().expect("Invalid argument");
|
||||
|
||||
IceCandidate {
|
||||
sdp_mline_index,
|
||||
candidate,
|
||||
}
|
||||
}
|
||||
|
||||
fn register_data_channel(
|
||||
registry: Arc<Mutex<HashMap<DataChannelId, DataChannelEventTarget>>>,
|
||||
id: DataChannelId,
|
||||
channel: GStreamerWebRtcDataChannel,
|
||||
) -> WebRtcDataChannelResult {
|
||||
if registry.lock().unwrap().contains_key(&id) {
|
||||
return Err(WebRtcError::Backend(
|
||||
"Could not register data channel. ID collision".to_owned(),
|
||||
));
|
||||
}
|
||||
registry
|
||||
.lock()
|
||||
.unwrap()
|
||||
.insert(id, DataChannelEventTarget::Created(channel));
|
||||
Ok(id)
|
||||
}
|
||||
24
components/media/backends/ohos/Cargo.toml
Normal file
24
components/media/backends/ohos/Cargo.toml
Normal file
@@ -0,0 +1,24 @@
|
||||
[package]
|
||||
name = "servo-media-ohos"
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
license.workspace = true
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
rust-version.workspace = true
|
||||
|
||||
[lib]
|
||||
name = "servo_media_ohos"
|
||||
path = "lib.rs"
|
||||
|
||||
[dependencies]
|
||||
servo-media = { path = "../../servo-media" }
|
||||
servo-media-audio = { path = "../../audio" }
|
||||
servo-media-player = { path = "../../player" }
|
||||
servo-media-streams = { path = "../../streams" }
|
||||
servo-media-traits = { path = "../../traits" }
|
||||
servo-media-webrtc = { path = "../../webrtc" }
|
||||
mime = "0.3.13"
|
||||
once_cell = "1.18.0"
|
||||
log = "0.4"
|
||||
ohos-media-sys = { version = "0.0.5" ,features = ["api-21"] }
|
||||
208
components/media/backends/ohos/lib.rs
Normal file
208
components/media/backends/ohos/lib.rs
Normal file
@@ -0,0 +1,208 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
sync::{
|
||||
atomic::AtomicUsize,
|
||||
mpsc::{self, Sender},
|
||||
Arc, Mutex, Weak,
|
||||
},
|
||||
thread,
|
||||
};
|
||||
|
||||
use log::warn;
|
||||
use mime::Mime;
|
||||
use servo_media::{
|
||||
Backend, BackendInit, BackendMsg, ClientContextId, MediaInstance, SupportsMediaType,
|
||||
};
|
||||
|
||||
use crate::{player::OhosAVPlayer, registry_scanner::OHOS_REGISTRY_SCANNER};
|
||||
mod player;
|
||||
mod registry_scanner;
|
||||
|
||||
pub struct OhosBackend {
|
||||
instances: Arc<Mutex<HashMap<ClientContextId, Vec<(usize, Weak<Mutex<dyn MediaInstance>>)>>>>,
|
||||
next_instance_id: AtomicUsize,
|
||||
backend_chan: Arc<Mutex<Sender<BackendMsg>>>,
|
||||
}
|
||||
|
||||
impl OhosBackend {
|
||||
fn media_instance_action(
|
||||
&self,
|
||||
id: &ClientContextId,
|
||||
cb: &dyn Fn(&dyn MediaInstance) -> Result<(), ()>,
|
||||
) {
|
||||
let mut instances = self.instances.lock().unwrap();
|
||||
match instances.get_mut(id) {
|
||||
Some(vec) => vec.retain(|(_, weak)| {
|
||||
if let Some(instance) = weak.upgrade() {
|
||||
if cb(&*(instance.lock().unwrap())).is_err() {
|
||||
warn!("Error executing media instance action");
|
||||
}
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}),
|
||||
None => {
|
||||
warn!("Trying to exec media action on an unknown client context");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl BackendInit for OhosBackend {
|
||||
fn init() -> Box<dyn Backend> {
|
||||
let instances: Arc<
|
||||
Mutex<HashMap<ClientContextId, Vec<(usize, Weak<Mutex<dyn MediaInstance>>)>>>,
|
||||
> = Arc::new(Mutex::new(HashMap::new()));
|
||||
|
||||
let instances_ = instances.clone();
|
||||
let (backend_chan, recvr) = mpsc::channel();
|
||||
thread::Builder::new()
|
||||
.name("OhosBackend ShutdownThread".to_owned())
|
||||
.spawn(move || {
|
||||
match recvr.recv().unwrap() {
|
||||
BackendMsg::Shutdown { context, id, tx_ack } => {
|
||||
let mut map = instances_.lock().unwrap();
|
||||
if let Some(vec) = map.get_mut(&context) {
|
||||
vec.retain(|m| m.0 != id);
|
||||
if vec.is_empty() {
|
||||
map.remove(&context);
|
||||
}
|
||||
}
|
||||
let _ = tx_ack.send(());
|
||||
}
|
||||
};
|
||||
})
|
||||
.unwrap();
|
||||
return Box::new(OhosBackend {
|
||||
next_instance_id: AtomicUsize::new(0),
|
||||
instances,
|
||||
backend_chan: Arc::new(Mutex::new(backend_chan)),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// https://developer.huawei.com/consumer/en/doc/harmonyos-guides/obtain-supported-codecs
|
||||
// https://developer.huawei.com/consumer/en/doc/harmonyos-guides/media-kit-intro-V5#supported-formats-and-protocols
|
||||
|
||||
impl Backend for OhosBackend {
|
||||
fn create_player(
|
||||
&self,
|
||||
id: &servo_media::ClientContextId,
|
||||
stream_type: servo_media_player::StreamType,
|
||||
sender: servo_media_player::ipc_channel::ipc::IpcSender<servo_media_player::PlayerEvent>,
|
||||
video_renderer: Option<
|
||||
std::sync::Arc<std::sync::Mutex<dyn servo_media_player::video::VideoFrameRenderer>>,
|
||||
>,
|
||||
audio_renderer: Option<
|
||||
std::sync::Arc<std::sync::Mutex<dyn servo_media_player::audio::AudioRenderer>>,
|
||||
>,
|
||||
gl_context: Box<dyn servo_media_player::context::PlayerGLContext>,
|
||||
) -> std::sync::Arc<std::sync::Mutex<dyn servo_media_player::Player>> {
|
||||
Arc::new(Mutex::new(OhosAVPlayer::new()))
|
||||
}
|
||||
|
||||
fn create_audiostream(&self) -> servo_media_streams::MediaStreamId {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn create_videostream(&self) -> servo_media_streams::MediaStreamId {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn create_stream_output(&self) -> Box<dyn servo_media_streams::MediaOutput> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn create_stream_and_socket(
|
||||
&self,
|
||||
ty: servo_media_streams::MediaStreamType,
|
||||
) -> (
|
||||
Box<dyn servo_media_streams::MediaSocket>,
|
||||
servo_media_streams::MediaStreamId,
|
||||
) {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn create_audioinput_stream(
|
||||
&self,
|
||||
set: servo_media_streams::capture::MediaTrackConstraintSet,
|
||||
) -> Option<servo_media_streams::MediaStreamId> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn create_videoinput_stream(
|
||||
&self,
|
||||
set: servo_media_streams::capture::MediaTrackConstraintSet,
|
||||
) -> Option<servo_media_streams::MediaStreamId> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn create_audio_context(
|
||||
&self,
|
||||
id: &servo_media::ClientContextId,
|
||||
options: servo_media_audio::context::AudioContextOptions,
|
||||
) -> Result<
|
||||
std::sync::Arc<std::sync::Mutex<servo_media_audio::context::AudioContext>>,
|
||||
servo_media_audio::sink::AudioSinkError,
|
||||
> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn create_webrtc(
|
||||
&self,
|
||||
signaller: Box<dyn servo_media_webrtc::WebRtcSignaller>,
|
||||
) -> servo_media_webrtc::WebRtcController {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn can_play_type(&self, media_type: &str) -> servo_media::SupportsMediaType {
|
||||
if let Ok(mime) = media_type.parse::<Mime>() {
|
||||
let mime_type = mime.type_().as_str().to_owned() + "/" + mime.subtype().as_str();
|
||||
let codecs = match mime.get_param("codecs") {
|
||||
Some(codecs) => codecs
|
||||
.as_str()
|
||||
.split(',')
|
||||
.map(|codec| codec.trim())
|
||||
.collect(),
|
||||
None => vec![],
|
||||
};
|
||||
|
||||
if OHOS_REGISTRY_SCANNER.are_mime_and_codecs_supported(&mime_type, &codecs) {
|
||||
if codecs.is_empty() {
|
||||
return SupportsMediaType::Maybe;
|
||||
}
|
||||
return SupportsMediaType::Probably;
|
||||
}
|
||||
}
|
||||
SupportsMediaType::No
|
||||
}
|
||||
|
||||
fn get_device_monitor(
|
||||
&self,
|
||||
) -> Box<dyn servo_media_streams::device_monitor::MediaDeviceMonitor> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn mute(&self, id: &ClientContextId, val: bool) {
|
||||
self.media_instance_action(
|
||||
id,
|
||||
&(move |instance: &dyn MediaInstance| instance.mute(val)),
|
||||
);
|
||||
}
|
||||
|
||||
fn resume(&self, id: &ClientContextId) {
|
||||
self.media_instance_action(id, &(move |instance: &dyn MediaInstance| instance.resume()));
|
||||
}
|
||||
|
||||
fn suspend(&self, id: &ClientContextId) {
|
||||
self.media_instance_action(
|
||||
id,
|
||||
&(move |instance: &dyn MediaInstance| instance.suspend()),
|
||||
);
|
||||
}
|
||||
}
|
||||
130
components/media/backends/ohos/player.rs
Normal file
130
components/media/backends/ohos/player.rs
Normal file
@@ -0,0 +1,130 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use servo_media::MediaInstance;
|
||||
use servo_media_player::Player;
|
||||
|
||||
pub struct OhosAVPlayer {}
|
||||
|
||||
impl OhosAVPlayer {
|
||||
pub fn new() -> OhosAVPlayer {
|
||||
OhosAVPlayer {}
|
||||
}
|
||||
}
|
||||
|
||||
impl MediaInstance for OhosAVPlayer {
|
||||
fn get_id(&self) -> usize {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn mute(&self, val: bool) -> Result<(), ()> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn suspend(&self) -> Result<(), ()> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn resume(&self) -> Result<(), ()> {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
impl Player for OhosAVPlayer {
|
||||
fn play(&self) -> Result<(), servo_media_player::PlayerError> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn pause(&self) -> Result<(), servo_media_player::PlayerError> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn paused(&self) -> bool {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn can_resume(&self) -> bool {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn stop(&self) -> Result<(), servo_media_player::PlayerError> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn seek(&self, time: f64) -> Result<(), servo_media_player::PlayerError> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn seekable(&self) -> Vec<std::ops::Range<f64>> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn set_mute(&self, muted: bool) -> Result<(), servo_media_player::PlayerError> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn muted(&self) -> bool {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn set_volume(&self, volume: f64) -> Result<(), servo_media_player::PlayerError> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn volume(&self) -> f64 {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn set_input_size(&self, size: u64) -> Result<(), servo_media_player::PlayerError> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn set_playback_rate(&self, playback_rate: f64) -> Result<(), servo_media_player::PlayerError> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn playback_rate(&self) -> f64 {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn push_data(&self, data: Vec<u8>) -> Result<(), servo_media_player::PlayerError> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn end_of_stream(&self) -> Result<(), servo_media_player::PlayerError> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn buffered(&self) -> Vec<std::ops::Range<f64>> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn set_stream(
|
||||
&self,
|
||||
stream: &servo_media_streams::MediaStreamId,
|
||||
only_stream: bool,
|
||||
) -> Result<(), servo_media_player::PlayerError> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn render_use_gl(&self) -> bool {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn set_audio_track(
|
||||
&self,
|
||||
stream_index: i32,
|
||||
enabled: bool,
|
||||
) -> Result<(), servo_media_player::PlayerError> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn set_video_track(
|
||||
&self,
|
||||
stream_index: i32,
|
||||
enabled: bool,
|
||||
) -> Result<(), servo_media_player::PlayerError> {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
75
components/media/backends/ohos/registry_scanner.rs
Normal file
75
components/media/backends/ohos/registry_scanner.rs
Normal file
@@ -0,0 +1,75 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
use once_cell::sync::Lazy;
|
||||
|
||||
pub static OHOS_REGISTRY_SCANNER: Lazy<OhosRegistryScanner> =
|
||||
Lazy::new(|| OhosRegistryScanner::new());
|
||||
|
||||
// Should be a combination of mime/codecs
|
||||
// If the type we are matching only contain mime, then we only match the container.
|
||||
//
|
||||
pub struct OhosRegistryScanner {
|
||||
av_player_supported_mime_codecs_type: HashMap<&'static str, &'static [&'static str]>,
|
||||
}
|
||||
|
||||
impl OhosRegistryScanner {
|
||||
fn new() -> OhosRegistryScanner {
|
||||
let mut registry_scanner = OhosRegistryScanner {
|
||||
av_player_supported_mime_codecs_type: HashMap::new(),
|
||||
};
|
||||
registry_scanner.initialize_av_player_container_and_codecs();
|
||||
registry_scanner
|
||||
}
|
||||
|
||||
pub fn are_mime_and_codecs_supported(&self, container_type: &str, codecs: &Vec<&str>) -> bool {
|
||||
let Some(supported_codecs) = self
|
||||
.av_player_supported_mime_codecs_type
|
||||
.get(container_type)
|
||||
else {
|
||||
return false;
|
||||
};
|
||||
codecs.iter().all(|codec| {
|
||||
supported_codecs.contains(codec) || {
|
||||
supported_codecs.iter().any(|supported_codec| {
|
||||
if let Some(stripped) = supported_codec.strip_suffix('*') {
|
||||
if codec.starts_with(stripped) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn initialize_av_player_container_and_codecs(&mut self) {
|
||||
// Video Container
|
||||
self.av_player_supported_mime_codecs_type
|
||||
.insert("video/mp4", &["hev1*", "hvc1*", "aac", "mp3", "avc*"]);
|
||||
self.av_player_supported_mime_codecs_type
|
||||
.insert("video/mkv", &["hev1*", "hvc1*", "aac", "mp3", "avc*"]);
|
||||
self.av_player_supported_mime_codecs_type
|
||||
.insert("video/ts", &["hev1*", "hvc1*", "aac", "mp3", "avc*"]);
|
||||
// Audio Container
|
||||
self.av_player_supported_mime_codecs_type
|
||||
.insert("audio/m4a", &["aac"]);
|
||||
self.av_player_supported_mime_codecs_type
|
||||
.insert("audio/aac", &["aac"]);
|
||||
self.av_player_supported_mime_codecs_type
|
||||
.insert("audio/mp3", &["mp3"]);
|
||||
self.av_player_supported_mime_codecs_type
|
||||
.insert("audio/ogg", &["vorbis"]);
|
||||
self.av_player_supported_mime_codecs_type
|
||||
.insert("audio/wav", &["1", "audio/pcm"]);
|
||||
self.av_player_supported_mime_codecs_type
|
||||
.insert("audio/flac", &["flac"]);
|
||||
self.av_player_supported_mime_codecs_type
|
||||
.insert("audio/amr", &["amr"]);
|
||||
self.av_player_supported_mime_codecs_type
|
||||
.insert("audio/ape", &["ape"]);
|
||||
}
|
||||
}
|
||||
16
components/media/examples/Cargo.toml
Normal file
16
components/media/examples/Cargo.toml
Normal file
@@ -0,0 +1,16 @@
|
||||
[package]
|
||||
name = "media-examples"
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
license.workspace = true
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
rust-version.workspace = true
|
||||
|
||||
[dependencies]
|
||||
euclid = { workspace = true }
|
||||
rand = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
servo-media = { workspace = true }
|
||||
servo-media-auto = { path = "../backends/auto" }
|
||||
servo-media-dummy = { path = "../backends/dummy" }
|
||||
27
components/media/examples/examples/audioinput_stream.rs
Normal file
27
components/media/examples/examples/audioinput_stream.rs
Normal file
@@ -0,0 +1,27 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::ServoMedia;
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
if let Some(stream) = servo_media.create_audioinput_stream(Default::default()) {
|
||||
let mut output = servo_media.create_stream_output();
|
||||
output.add_stream(&stream);
|
||||
thread::sleep(time::Duration::from_millis(6000));
|
||||
} else {
|
||||
print!("No audio input elements available");
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
81
components/media/examples/examples/biquad.rs
Normal file
81
components/media/examples/examples/biquad.rs
Normal file
@@ -0,0 +1,81 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::audio::biquad_filter_node::{
|
||||
BiquadFilterNodeMessage, BiquadFilterNodeOptions, FilterType,
|
||||
};
|
||||
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioScheduledSourceNodeMessage};
|
||||
use servo_media::audio::oscillator_node::OscillatorNodeOptions;
|
||||
use servo_media::audio::param::{ParamType, RampKind, UserAutomationEvent};
|
||||
use servo_media::{ClientContextId, ServoMedia};
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 1), Default::default())
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let dest = context.dest_node();
|
||||
let mut options = OscillatorNodeOptions::default();
|
||||
options.freq = 100.;
|
||||
let osc1 = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(options.clone()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
options.freq = 800.;
|
||||
let osc2 = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(options.clone()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
let mut options = BiquadFilterNodeOptions::default();
|
||||
options.frequency = 50.;
|
||||
options.filter = FilterType::LowPass;
|
||||
let biquad = context
|
||||
.create_node(AudioNodeInit::BiquadFilterNode(options), Default::default())
|
||||
.expect("Failed to create biquad filter node");
|
||||
context.connect_ports(osc1.output(0), biquad.input(0));
|
||||
context.connect_ports(osc2.output(0), biquad.input(0));
|
||||
context.connect_ports(biquad.output(0), dest.input(0));
|
||||
let _ = context.resume();
|
||||
context.message_node(
|
||||
osc1,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
context.message_node(
|
||||
osc2,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
context.message_node(
|
||||
biquad,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Frequency,
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 1000., 2.),
|
||||
),
|
||||
);
|
||||
|
||||
thread::sleep(time::Duration::from_millis(2200));
|
||||
context.message_node(
|
||||
biquad,
|
||||
AudioNodeMessage::BiquadFilterNode(BiquadFilterNodeMessage::SetFilterType(
|
||||
FilterType::BandPass,
|
||||
)),
|
||||
);
|
||||
|
||||
thread::sleep(time::Duration::from_millis(1000));
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
70
components/media/examples/examples/channels.rs
Normal file
70
components/media/examples/examples/channels.rs
Normal file
@@ -0,0 +1,70 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::audio::channel_node::ChannelNodeOptions;
|
||||
use servo_media::audio::gain_node::GainNodeOptions;
|
||||
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioScheduledSourceNodeMessage};
|
||||
use servo_media::audio::oscillator_node::OscillatorNodeOptions;
|
||||
use servo_media::{ClientContextId, ServoMedia};
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 1), Default::default())
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let mut options = OscillatorNodeOptions::default();
|
||||
let osc = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(options.clone()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
options.freq = 213.;
|
||||
let osc2 = context
|
||||
.create_node(AudioNodeInit::OscillatorNode(options), Default::default())
|
||||
.expect("Failed to create oscillator node");
|
||||
let mut options = GainNodeOptions::default();
|
||||
options.gain = 0.7;
|
||||
let gain = context
|
||||
.create_node(AudioNodeInit::GainNode(options.clone()), Default::default())
|
||||
.expect("Failed to create gain node");
|
||||
let options = ChannelNodeOptions { channels: 2 };
|
||||
let merger = context
|
||||
.create_node(
|
||||
AudioNodeInit::ChannelMergerNode(options),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create channel merger node");
|
||||
|
||||
let dest = context.dest_node();
|
||||
context.connect_ports(osc.output(0), gain.input(0));
|
||||
context.connect_ports(gain.output(0), merger.input(0));
|
||||
context.connect_ports(osc2.output(0), merger.input(1));
|
||||
context.connect_ports(merger.output(0), dest.input(0));
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
context.message_node(
|
||||
osc2,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
let _ = context.resume();
|
||||
|
||||
thread::sleep(time::Duration::from_millis(2000));
|
||||
context.message_node(dest, AudioNodeMessage::SetChannelCount(1));
|
||||
thread::sleep(time::Duration::from_millis(2000));
|
||||
let _ = context.close();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
84
components/media/examples/examples/channelsum.rs
Normal file
84
components/media/examples/examples/channelsum.rs
Normal file
@@ -0,0 +1,84 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::audio::channel_node::ChannelNodeOptions;
|
||||
use servo_media::audio::gain_node::GainNodeOptions;
|
||||
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioScheduledSourceNodeMessage};
|
||||
use servo_media::audio::oscillator_node::OscillatorNodeOptions;
|
||||
use servo_media::{ClientContextId, ServoMedia};
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 1), Default::default())
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let mut options = OscillatorNodeOptions::default();
|
||||
let osc = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(options.clone()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
options.freq = 213.;
|
||||
let osc2 = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(options.clone()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
options.freq = 100.;
|
||||
let osc3 = context
|
||||
.create_node(AudioNodeInit::OscillatorNode(options), Default::default())
|
||||
.expect("Failed to create oscillator node");
|
||||
let mut options = GainNodeOptions::default();
|
||||
options.gain = 0.7;
|
||||
let gain = context
|
||||
.create_node(AudioNodeInit::GainNode(options.clone()), Default::default())
|
||||
.expect("Failed to create gain node");
|
||||
|
||||
let options = ChannelNodeOptions { channels: 2 };
|
||||
let merger = context
|
||||
.create_node(
|
||||
AudioNodeInit::ChannelMergerNode(options),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create channel merger node");
|
||||
|
||||
let dest = context.dest_node();
|
||||
context.connect_ports(osc.output(0), merger.input(0));
|
||||
context.connect_ports(osc2.output(0), merger.input(1));
|
||||
context.connect_ports(merger.output(0), gain.input(0));
|
||||
context.connect_ports(osc3.output(0), gain.input(0));
|
||||
context.connect_ports(gain.output(0), dest.input(0));
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
context.message_node(
|
||||
osc2,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
context.message_node(
|
||||
osc3,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
let _ = context.resume();
|
||||
|
||||
thread::sleep(time::Duration::from_millis(2000));
|
||||
context.message_node(dest, AudioNodeMessage::SetChannelCount(1));
|
||||
thread::sleep(time::Duration::from_millis(2000));
|
||||
let _ = context.close();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
107
components/media/examples/examples/constant_source.rs
Normal file
107
components/media/examples/examples/constant_source.rs
Normal file
@@ -0,0 +1,107 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::audio::constant_source_node::ConstantSourceNodeOptions;
|
||||
use servo_media::audio::gain_node::GainNodeOptions;
|
||||
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioScheduledSourceNodeMessage};
|
||||
use servo_media::audio::param::{ParamType, RampKind, UserAutomationEvent};
|
||||
use servo_media::{ClientContextId, ServoMedia};
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 1), Default::default())
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let dest = context.dest_node();
|
||||
|
||||
let cs = context
|
||||
.create_node(
|
||||
AudioNodeInit::ConstantSourceNode(ConstantSourceNodeOptions::default()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create ConstantSourceNode node");
|
||||
|
||||
let mut gain_options = GainNodeOptions::default();
|
||||
gain_options.gain = 0.1;
|
||||
let gain = context
|
||||
.create_node(
|
||||
AudioNodeInit::GainNode(gain_options.clone()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create gain node");
|
||||
|
||||
let osc = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(Default::default()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
|
||||
context.connect_ports(osc.output(0), gain.input(0));
|
||||
context.connect_ports(cs.output(0), gain.param(ParamType::Gain));
|
||||
context.connect_ports(gain.output(0), dest.input(0));
|
||||
|
||||
let _ = context.resume();
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
|
||||
context.message_node(
|
||||
gain,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
|
||||
context.message_node(
|
||||
cs,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
|
||||
context.message_node(
|
||||
cs,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Offset,
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 1., 1.5),
|
||||
),
|
||||
);
|
||||
|
||||
context.message_node(
|
||||
cs,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Offset,
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 0.1, 3.0),
|
||||
),
|
||||
);
|
||||
|
||||
context.message_node(
|
||||
cs,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Offset,
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 1., 4.5),
|
||||
),
|
||||
);
|
||||
|
||||
context.message_node(
|
||||
cs,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Offset,
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 0.1, 6.0),
|
||||
),
|
||||
);
|
||||
|
||||
thread::sleep(time::Duration::from_millis(9000));
|
||||
let _ = context.close();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
14
components/media/examples/examples/dummy.rs
Normal file
14
components/media/examples/examples/dummy.rs
Normal file
@@ -0,0 +1,14 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_dummy;
|
||||
|
||||
use servo_media::ServoMedia;
|
||||
use servo_media_dummy::DummyBackend;
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<DummyBackend>();
|
||||
ServoMedia::get();
|
||||
}
|
||||
89
components/media/examples/examples/iir_filter.rs
Normal file
89
components/media/examples/examples/iir_filter.rs
Normal file
@@ -0,0 +1,89 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::audio::iir_filter_node::{IIRFilterNode, IIRFilterNodeOptions};
|
||||
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioScheduledSourceNodeMessage};
|
||||
use servo_media::audio::oscillator_node::OscillatorNodeOptions;
|
||||
use servo_media::{ClientContextId, ServoMedia};
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
let id = ClientContextId::build(1, 1);
|
||||
let context = servo_media
|
||||
.create_audio_context(&id, Default::default())
|
||||
.unwrap();
|
||||
|
||||
{
|
||||
let context = context.lock().unwrap();
|
||||
|
||||
let dest = context.dest_node();
|
||||
let osc = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(OscillatorNodeOptions::default()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
|
||||
let feedback = Arc::new(vec![7.0, 1.0, 1.0]);
|
||||
let feedforward = Arc::new(vec![1.0, 1.0, 1.0]);
|
||||
|
||||
let iir = context
|
||||
.create_node(
|
||||
AudioNodeInit::IIRFilterNode(IIRFilterNodeOptions {
|
||||
feedback: feedback.clone(),
|
||||
feedforward: feedforward.clone(),
|
||||
}),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create IIR filter node");
|
||||
|
||||
context.connect_ports(osc.output(0), dest.input(0));
|
||||
let _ = context.resume();
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
|
||||
println!("raw oscillator");
|
||||
thread::sleep(time::Duration::from_millis(2000));
|
||||
|
||||
println!("oscillator through iir filter");
|
||||
context.disconnect_output(osc.output(0));
|
||||
context.connect_ports(osc.output(0), iir.input(0));
|
||||
context.connect_ports(iir.output(0), dest.input(0));
|
||||
thread::sleep(time::Duration::from_millis(2000));
|
||||
|
||||
println!("raw oscillator");
|
||||
context.disconnect_output(osc.output(0));
|
||||
context.disconnect_output(iir.output(0));
|
||||
context.connect_ports(osc.output(0), dest.input(0));
|
||||
thread::sleep(time::Duration::from_millis(2000));
|
||||
|
||||
let freqs = vec![0.0, 0.3, 0.5];
|
||||
let mut mag = vec![0.0; 3];
|
||||
let mut phase = vec![0.0; 3];
|
||||
IIRFilterNode::get_frequency_response(
|
||||
&feedforward,
|
||||
&feedback,
|
||||
&freqs,
|
||||
&mut mag,
|
||||
&mut phase,
|
||||
);
|
||||
print!(
|
||||
"GetFrequencyResponse for freqs: {:?}\n mag: {:?}\n phase: {:?}",
|
||||
&freqs, &mag, &phase
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
95
components/media/examples/examples/muted_audiocontext.rs
Normal file
95
components/media/examples/examples/muted_audiocontext.rs
Normal file
@@ -0,0 +1,95 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioScheduledSourceNodeMessage};
|
||||
use servo_media::audio::oscillator_node::OscillatorNodeOptions;
|
||||
use servo_media::audio::oscillator_node::OscillatorType::Sawtooth;
|
||||
use servo_media::{ClientContextId, ServoMedia};
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
let context_id1 = &ClientContextId::build(1, 1);
|
||||
let context1 = servo_media.create_audio_context(&context_id1, Default::default());
|
||||
{
|
||||
let context1 = context1.unwrap();
|
||||
let context = context1.lock().unwrap();
|
||||
let dest = context.dest_node();
|
||||
let options = OscillatorNodeOptions::default();
|
||||
let osc1 = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(options.clone()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
context.connect_ports(osc1.output(0), dest.input(0));
|
||||
let _ = context.resume();
|
||||
context.message_node(
|
||||
osc1,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
}
|
||||
|
||||
let context_id2 = &ClientContextId::build(1, 3);
|
||||
let context2 = servo_media.create_audio_context(&context_id2, Default::default());
|
||||
{
|
||||
let mut options = OscillatorNodeOptions::default();
|
||||
options.oscillator_type = Sawtooth;
|
||||
let context2 = context2.unwrap();
|
||||
let context = context2.lock().unwrap();
|
||||
let dest = context.dest_node();
|
||||
let osc3 = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(options.clone()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
context.connect_ports(osc3.output(0), dest.input(0));
|
||||
|
||||
let _ = context.resume();
|
||||
context.message_node(
|
||||
osc3,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
}
|
||||
|
||||
println!("servo_media raw s1");
|
||||
servo_media.mute(&context_id2, true);
|
||||
thread::sleep(time::Duration::from_millis(2000));
|
||||
|
||||
println!("servo_media raw s2");
|
||||
servo_media.mute(&context_id1, true);
|
||||
servo_media.mute(&context_id2, false);
|
||||
thread::sleep(time::Duration::from_millis(2000));
|
||||
|
||||
println!("servo_media s1+s2");
|
||||
servo_media.mute(&context_id1, false);
|
||||
thread::sleep(time::Duration::from_millis(2000));
|
||||
|
||||
println!("servo_media muting s1");
|
||||
servo_media.mute(&context_id1, true);
|
||||
thread::sleep(time::Duration::from_millis(2000));
|
||||
|
||||
println!("servo_media muting s2");
|
||||
servo_media.mute(&context_id2, true);
|
||||
thread::sleep(time::Duration::from_millis(2000));
|
||||
|
||||
println!("servo_media unmuting s2");
|
||||
servo_media.mute(&context_id2, false);
|
||||
thread::sleep(time::Duration::from_millis(2000));
|
||||
|
||||
println!("servo_media unmuting s1");
|
||||
servo_media.mute(&context_id1, false);
|
||||
thread::sleep(time::Duration::from_millis(2000));
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
89
components/media/examples/examples/offline_context.rs
Normal file
89
components/media/examples/examples/offline_context.rs
Normal file
@@ -0,0 +1,89 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::{Arc, Mutex, mpsc};
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::audio::block::FRAMES_PER_BLOCK_USIZE;
|
||||
use servo_media::audio::buffer_source_node::{AudioBuffer, AudioBufferSourceNodeMessage};
|
||||
use servo_media::audio::context::{AudioContextOptions, OfflineAudioContextOptions};
|
||||
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioScheduledSourceNodeMessage};
|
||||
use servo_media::{ClientContextId, ServoMedia};
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
// Create offline context to process 1024 blocks of a oscillator node produced
|
||||
// sine wave.
|
||||
let mut options = <OfflineAudioContextOptions>::default();
|
||||
options.channels = 2;
|
||||
options.length = 1024 * FRAMES_PER_BLOCK_USIZE;
|
||||
let sample_rate = options.sample_rate;
|
||||
let options = AudioContextOptions::OfflineAudioContext(options);
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 1), options)
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let processed_audio = Arc::new(Mutex::new(Vec::new()));
|
||||
let processed_audio_ = processed_audio.clone();
|
||||
let (sender, receiver) = mpsc::channel();
|
||||
let sender = Mutex::new(sender);
|
||||
context.set_eos_callback(Box::new(move |buffer| {
|
||||
processed_audio
|
||||
.lock()
|
||||
.unwrap()
|
||||
.extend_from_slice((*buffer).as_ref());
|
||||
sender.lock().unwrap().send(()).unwrap();
|
||||
}));
|
||||
let osc = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(Default::default()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
let dest = context.dest_node();
|
||||
context.connect_ports(osc.output(0), dest.input(0));
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
let _ = context.resume();
|
||||
// Block until we processed the data.
|
||||
receiver.recv().unwrap();
|
||||
// Close offline context.
|
||||
let _ = context.close();
|
||||
// Create audio context to play the processed audio.
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 2), Default::default())
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let buffer_source = context
|
||||
.create_node(
|
||||
AudioNodeInit::AudioBufferSourceNode(Default::default()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
let dest = context.dest_node();
|
||||
context.connect_ports(buffer_source.output(0), dest.input(0));
|
||||
context.message_node(
|
||||
buffer_source,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
context.message_node(
|
||||
buffer_source,
|
||||
AudioNodeMessage::AudioBufferSourceNode(AudioBufferSourceNodeMessage::SetBuffer(Some(
|
||||
AudioBuffer::from_buffer(processed_audio_.lock().unwrap().to_vec(), sample_rate),
|
||||
))),
|
||||
);
|
||||
let _ = context.resume();
|
||||
thread::sleep(time::Duration::from_millis(5000));
|
||||
let _ = context.close();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
143
components/media/examples/examples/oscillator.rs
Normal file
143
components/media/examples/examples/oscillator.rs
Normal file
@@ -0,0 +1,143 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioScheduledSourceNodeMessage};
|
||||
use servo_media::audio::oscillator_node::OscillatorNodeOptions;
|
||||
use servo_media::audio::oscillator_node::OscillatorType::{Custom, Sawtooth, Square, Triangle};
|
||||
use servo_media::{ClientContextId, ServoMedia};
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 1), Default::default())
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let dest = context.dest_node();
|
||||
let mut options = OscillatorNodeOptions::default();
|
||||
let osc1 = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(options.clone()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
context.connect_ports(osc1.output(0), dest.input(0));
|
||||
let _ = context.resume();
|
||||
context.message_node(
|
||||
osc1,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
|
||||
thread::sleep(time::Duration::from_millis(3000));
|
||||
let _ = context.close();
|
||||
thread::sleep(time::Duration::from_millis(3000));
|
||||
|
||||
options.oscillator_type = Square;
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 2), Default::default())
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let dest = context.dest_node();
|
||||
let osc2 = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(options.clone()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
context.connect_ports(osc2.output(0), dest.input(0));
|
||||
let _ = context.resume();
|
||||
context.message_node(
|
||||
osc2,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
|
||||
thread::sleep(time::Duration::from_millis(3000));
|
||||
let _ = context.close();
|
||||
thread::sleep(time::Duration::from_millis(1000));
|
||||
|
||||
options.oscillator_type = Sawtooth;
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 3), Default::default())
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let dest = context.dest_node();
|
||||
let osc3 = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(options.clone()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
context.connect_ports(osc3.output(0), dest.input(0));
|
||||
thread::sleep(time::Duration::from_millis(3000));
|
||||
|
||||
let _ = context.resume();
|
||||
context.message_node(
|
||||
osc3,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
thread::sleep(time::Duration::from_millis(3000));
|
||||
let _ = context.close();
|
||||
thread::sleep(time::Duration::from_millis(1000));
|
||||
|
||||
options.oscillator_type = Triangle;
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 4), Default::default())
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let dest = context.dest_node();
|
||||
let osc4 = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(options.clone()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
context.connect_ports(osc4.output(0), dest.input(0));
|
||||
thread::sleep(time::Duration::from_millis(3000));
|
||||
|
||||
let _ = context.resume();
|
||||
context.message_node(
|
||||
osc4,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
thread::sleep(time::Duration::from_millis(3000));
|
||||
let _ = context.close();
|
||||
|
||||
thread::sleep(time::Duration::from_millis(3000));
|
||||
|
||||
options.oscillator_type = Custom;
|
||||
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 5), Default::default())
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let dest = context.dest_node();
|
||||
let osc5 = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(options.clone()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
context.connect_ports(osc5.output(0), dest.input(0));
|
||||
thread::sleep(time::Duration::from_millis(3000));
|
||||
|
||||
let _ = context.resume();
|
||||
context.message_node(
|
||||
osc4,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
|
||||
thread::sleep(time::Duration::from_millis(3000));
|
||||
let _ = context.close();
|
||||
thread::sleep(time::Duration::from_millis(1000));
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
190
components/media/examples/examples/panner.rs
Normal file
190
components/media/examples/examples/panner.rs
Normal file
@@ -0,0 +1,190 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioScheduledSourceNodeMessage};
|
||||
use servo_media::audio::panner_node::PannerNodeOptions;
|
||||
use servo_media::audio::param::{ParamDir, ParamType, RampKind, UserAutomationEvent};
|
||||
use servo_media::{ClientContextId, ServoMedia};
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 1), Default::default())
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let dest = context.dest_node();
|
||||
let listener = context.listener();
|
||||
let osc = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(Default::default()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
let mut options = PannerNodeOptions::default();
|
||||
options.cone_outer_angle = 0.;
|
||||
options.position_x = 100.;
|
||||
options.position_y = 0.;
|
||||
options.position_z = 100.;
|
||||
options.ref_distance = 100.;
|
||||
options.rolloff_factor = 0.01;
|
||||
let panner = context
|
||||
.create_node(AudioNodeInit::PannerNode(options), Default::default())
|
||||
.expect("Failed to create panner node");
|
||||
context.connect_ports(osc.output(0), panner.input(0));
|
||||
context.connect_ports(panner.output(0), dest.input(0));
|
||||
let _ = context.resume();
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
// trace a square around your head twice
|
||||
context.message_node(
|
||||
panner,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Position(ParamDir::X),
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, -100., 0.2),
|
||||
),
|
||||
);
|
||||
context.message_node(
|
||||
panner,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Position(ParamDir::Z),
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 100., 0.2),
|
||||
),
|
||||
);
|
||||
context.message_node(
|
||||
panner,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Position(ParamDir::X),
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, -100., 0.4),
|
||||
),
|
||||
);
|
||||
context.message_node(
|
||||
panner,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Position(ParamDir::Z),
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, -100., 0.4),
|
||||
),
|
||||
);
|
||||
context.message_node(
|
||||
panner,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Position(ParamDir::X),
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 100., 0.6),
|
||||
),
|
||||
);
|
||||
context.message_node(
|
||||
panner,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Position(ParamDir::Z),
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, -100., 0.6),
|
||||
),
|
||||
);
|
||||
context.message_node(
|
||||
panner,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Position(ParamDir::X),
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 100., 0.8),
|
||||
),
|
||||
);
|
||||
context.message_node(
|
||||
panner,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Position(ParamDir::Z),
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 100., 0.8),
|
||||
),
|
||||
);
|
||||
|
||||
context.message_node(
|
||||
panner,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Position(ParamDir::X),
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, -100., 1.0),
|
||||
),
|
||||
);
|
||||
context.message_node(
|
||||
panner,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Position(ParamDir::Z),
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 100., 1.0),
|
||||
),
|
||||
);
|
||||
context.message_node(
|
||||
panner,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Position(ParamDir::X),
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, -100., 1.2),
|
||||
),
|
||||
);
|
||||
context.message_node(
|
||||
panner,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Position(ParamDir::Z),
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, -100., 1.2),
|
||||
),
|
||||
);
|
||||
context.message_node(
|
||||
panner,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Position(ParamDir::X),
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 100., 1.4),
|
||||
),
|
||||
);
|
||||
context.message_node(
|
||||
panner,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Position(ParamDir::Z),
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, -100., 1.4),
|
||||
),
|
||||
);
|
||||
context.message_node(
|
||||
panner,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Position(ParamDir::X),
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 100., 1.6),
|
||||
),
|
||||
);
|
||||
context.message_node(
|
||||
panner,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Position(ParamDir::Z),
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 100., 1.6),
|
||||
),
|
||||
);
|
||||
// now it runs away
|
||||
context.message_node(
|
||||
panner,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Position(ParamDir::Z),
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 10000., 3.),
|
||||
),
|
||||
);
|
||||
context.message_node(
|
||||
listener,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Position(ParamDir::Z),
|
||||
UserAutomationEvent::SetValueAtTime(0., 3.),
|
||||
),
|
||||
);
|
||||
// chase it
|
||||
context.message_node(
|
||||
listener,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Position(ParamDir::Z),
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 10000., 4.),
|
||||
),
|
||||
);
|
||||
thread::sleep(time::Duration::from_millis(4000));
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
112
components/media/examples/examples/params.rs
Normal file
112
components/media/examples/examples/params.rs
Normal file
@@ -0,0 +1,112 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::audio::gain_node::GainNodeOptions;
|
||||
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioScheduledSourceNodeMessage};
|
||||
use servo_media::audio::param::{ParamType, RampKind, UserAutomationEvent};
|
||||
use servo_media::{ClientContextId, ServoMedia};
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 1), Default::default())
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let dest = context.dest_node();
|
||||
let osc = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(Default::default()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
let mut options = GainNodeOptions::default();
|
||||
options.gain = 0.5;
|
||||
let gain = context
|
||||
.create_node(AudioNodeInit::GainNode(options), Default::default())
|
||||
.expect("Failed to create gain node");
|
||||
context.connect_ports(osc.output(0), gain.input(0));
|
||||
context.connect_ports(gain.output(0), dest.input(0));
|
||||
let _ = context.resume();
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
// 0.5s: Set frequency to 110Hz
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Frequency,
|
||||
UserAutomationEvent::SetValueAtTime(110., 0.5),
|
||||
),
|
||||
);
|
||||
// 1s: Set frequency to 220Hz
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Frequency,
|
||||
UserAutomationEvent::SetValueAtTime(220., 1.),
|
||||
),
|
||||
);
|
||||
// 0.75s: Set gain to 0.25
|
||||
context.message_node(
|
||||
gain,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Gain,
|
||||
UserAutomationEvent::SetValueAtTime(0.25, 0.75),
|
||||
),
|
||||
);
|
||||
// 0.75s - 1.5s: Exponentially ramp gain to 1
|
||||
context.message_node(
|
||||
gain,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Gain,
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Exponential, 1., 1.5),
|
||||
),
|
||||
);
|
||||
// 0.75s - 1.75s: Linearly ramp frequency to 880Hz
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Frequency,
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 880., 1.75),
|
||||
),
|
||||
);
|
||||
// 1.75s - 2.5s: Exponentially ramp frequency to 110Hz
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Frequency,
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Exponential, 110., 2.5),
|
||||
),
|
||||
);
|
||||
|
||||
// 2.75s: Exponentially approach 110Hz
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Frequency,
|
||||
UserAutomationEvent::SetTargetAtTime(1100., 2.75, 1.1),
|
||||
),
|
||||
);
|
||||
// 3.3s: But actually stop at 3.3Hz and hold
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Frequency,
|
||||
UserAutomationEvent::CancelAndHoldAtTime(3.3),
|
||||
),
|
||||
);
|
||||
thread::sleep(time::Duration::from_millis(5000));
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
69
components/media/examples/examples/params_connect.rs
Normal file
69
components/media/examples/examples/params_connect.rs
Normal file
@@ -0,0 +1,69 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioScheduledSourceNodeMessage};
|
||||
use servo_media::audio::oscillator_node::OscillatorNodeOptions;
|
||||
use servo_media::audio::param::{ParamType, RampKind, UserAutomationEvent};
|
||||
use servo_media::{ClientContextId, ServoMedia};
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 1), Default::default())
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let mut options = OscillatorNodeOptions::default();
|
||||
options.freq = 2.0;
|
||||
let lfo = context
|
||||
.create_node(AudioNodeInit::OscillatorNode(options), Default::default())
|
||||
.expect("Failed to create oscillator node");
|
||||
let osc = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(Default::default()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
let gain = context
|
||||
.create_node(
|
||||
AudioNodeInit::GainNode(Default::default()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create gain node");
|
||||
let dest = context.dest_node();
|
||||
context.connect_ports(lfo.output(0), gain.param(ParamType::Gain));
|
||||
context.connect_ports(gain.output(0), dest.input(0));
|
||||
context.connect_ports(osc.output(0), gain.input(0));
|
||||
let _ = context.resume();
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
context.message_node(
|
||||
lfo,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
thread::sleep(time::Duration::from_millis(3000));
|
||||
// 0.75s - 1.75s: Linearly ramp frequency to 880Hz
|
||||
context.message_node(
|
||||
gain,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Gain,
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 0., 6.),
|
||||
),
|
||||
);
|
||||
|
||||
thread::sleep(time::Duration::from_millis(3000));
|
||||
let _ = context.close();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
69
components/media/examples/examples/params_connect2.rs
Normal file
69
components/media/examples/examples/params_connect2.rs
Normal file
@@ -0,0 +1,69 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::audio::gain_node::GainNodeOptions;
|
||||
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioScheduledSourceNodeMessage};
|
||||
use servo_media::audio::oscillator_node::OscillatorNodeOptions;
|
||||
use servo_media::audio::param::{ParamType, RampKind, UserAutomationEvent};
|
||||
use servo_media::{ClientContextId, ServoMedia};
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 1), Default::default())
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let mut options = OscillatorNodeOptions::default();
|
||||
options.freq = 2.0;
|
||||
let lfo = context
|
||||
.create_node(AudioNodeInit::OscillatorNode(options), Default::default())
|
||||
.expect("Failed to create oscillator node");
|
||||
let osc = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(Default::default()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
let mut options = GainNodeOptions::default();
|
||||
options.gain = 100.;
|
||||
let gain = context
|
||||
.create_node(AudioNodeInit::GainNode(options), Default::default())
|
||||
.expect("Failed to create gain node");
|
||||
let dest = context.dest_node();
|
||||
context.connect_ports(lfo.output(0), gain.input(0));
|
||||
context.connect_ports(gain.output(0), osc.param(ParamType::Frequency));
|
||||
context.connect_ports(osc.output(0), dest.input(0));
|
||||
let _ = context.resume();
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
context.message_node(
|
||||
lfo,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
thread::sleep(time::Duration::from_millis(3000));
|
||||
// 0.75s - 1.75s: Linearly ramp frequency to 880Hz
|
||||
context.message_node(
|
||||
gain,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Gain,
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 0., 6.),
|
||||
),
|
||||
);
|
||||
|
||||
thread::sleep(time::Duration::from_millis(3000));
|
||||
let _ = context.close();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
74
components/media/examples/examples/params_settarget.rs
Normal file
74
components/media/examples/examples/params_settarget.rs
Normal file
@@ -0,0 +1,74 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioScheduledSourceNodeMessage};
|
||||
use servo_media::audio::param::{ParamType, RampKind, UserAutomationEvent};
|
||||
use servo_media::{ClientContextId, ServoMedia};
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 1), Default::default())
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let dest = context.dest_node();
|
||||
let osc = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(Default::default()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
context.connect_ports(osc.output(0), dest.input(0));
|
||||
let _ = context.resume();
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
// 0.1s: Set frequency to 110Hz
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Frequency,
|
||||
UserAutomationEvent::SetValueAtTime(110., 0.1),
|
||||
),
|
||||
);
|
||||
// 0.3s: Start increasing frequency to 440Hz exponentially with a time constant of 1
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Frequency,
|
||||
UserAutomationEvent::SetTargetAtTime(440., 0.3, 1.),
|
||||
),
|
||||
);
|
||||
// 1.5s: Start increasing frequency to 1760Hz exponentially
|
||||
// this event effectively doesn't happen, but instead sets a starting point
|
||||
// for the next ramp event
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Frequency,
|
||||
UserAutomationEvent::SetTargetAtTime(1760., 1.5, 0.1),
|
||||
),
|
||||
);
|
||||
// 1.5s - 3s Linearly ramp down from the previous event (1.5s) to 110Hz
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Frequency,
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 110., 3.0),
|
||||
),
|
||||
);
|
||||
thread::sleep(time::Duration::from_millis(5000));
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
100
components/media/examples/examples/play.rs
Normal file
100
components/media/examples/examples/play.rs
Normal file
@@ -0,0 +1,100 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::audio::gain_node::GainNodeOptions;
|
||||
use servo_media::audio::node::{
|
||||
AudioNodeInit, AudioNodeMessage, AudioScheduledSourceNodeMessage, OnEndedCallback,
|
||||
};
|
||||
use servo_media::audio::param::{ParamType, UserAutomationEvent};
|
||||
use servo_media::{ClientContextId, ServoMedia};
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 1), Default::default())
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let osc = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(Default::default()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
let mut options = GainNodeOptions::default();
|
||||
options.gain = 0.5;
|
||||
let gain = context
|
||||
.create_node(AudioNodeInit::GainNode(options), Default::default())
|
||||
.expect("Failed to create gain node");
|
||||
let dest = context.dest_node();
|
||||
context.connect_ports(osc.output(0), gain.input(0));
|
||||
context.connect_ports(gain.output(0), dest.input(0));
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Stop(3.)),
|
||||
);
|
||||
let callback = OnEndedCallback::new(|| {
|
||||
println!("Playback ended");
|
||||
});
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(
|
||||
AudioScheduledSourceNodeMessage::RegisterOnEndedCallback(callback),
|
||||
),
|
||||
);
|
||||
assert_eq!(context.current_time(), 0.);
|
||||
let _ = context.resume();
|
||||
// 0.5s: Set frequency to 110Hz
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Frequency,
|
||||
UserAutomationEvent::SetValueAtTime(110., 0.5),
|
||||
),
|
||||
);
|
||||
// 1s: Set frequency to 220Hz
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Frequency,
|
||||
UserAutomationEvent::SetValueAtTime(220., 1.),
|
||||
),
|
||||
);
|
||||
// 0.75s: Set gain to 0.25
|
||||
context.message_node(
|
||||
gain,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Gain,
|
||||
UserAutomationEvent::SetValueAtTime(0.25, 0.75),
|
||||
),
|
||||
);
|
||||
thread::sleep(time::Duration::from_millis(1200));
|
||||
// 1.2s: Suspend processing
|
||||
let _ = context.suspend();
|
||||
thread::sleep(time::Duration::from_millis(500));
|
||||
// 1.7s: Resume processing
|
||||
let _ = context.resume();
|
||||
let current_time = context.current_time();
|
||||
assert!(current_time > 0.);
|
||||
// Leave some time to enjoy the silence after stopping the
|
||||
// oscillator node.
|
||||
thread::sleep(time::Duration::from_millis(5000));
|
||||
// And check that we keep incrementing playback time.
|
||||
assert!(current_time < context.current_time());
|
||||
let _ = context.close();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
64
components/media/examples/examples/play_noise.rs
Normal file
64
components/media/examples/examples/play_noise.rs
Normal file
@@ -0,0 +1,64 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate rand;
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::audio::buffer_source_node::{AudioBuffer, AudioBufferSourceNodeMessage};
|
||||
use servo_media::audio::node::{
|
||||
AudioNodeInit, AudioNodeMessage, AudioScheduledSourceNodeMessage, OnEndedCallback,
|
||||
};
|
||||
use servo_media::{ClientContextId, ServoMedia};
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 1), Default::default())
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let buffer_source = context
|
||||
.create_node(
|
||||
AudioNodeInit::AudioBufferSourceNode(Default::default()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create buffer source node");
|
||||
let dest = context.dest_node();
|
||||
context.connect_ports(buffer_source.output(0), dest.input(0));
|
||||
let mut buffers = vec![Vec::with_capacity(4096), Vec::with_capacity(4096)];
|
||||
for _ in 0..4096 {
|
||||
buffers[0].push(rand::random::<f32>());
|
||||
buffers[1].push(rand::random::<f32>());
|
||||
}
|
||||
context.message_node(
|
||||
buffer_source,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
context.message_node(
|
||||
buffer_source,
|
||||
AudioNodeMessage::AudioBufferSourceNode(AudioBufferSourceNodeMessage::SetBuffer(Some(
|
||||
AudioBuffer::from_buffers(buffers, 44100.),
|
||||
))),
|
||||
);
|
||||
let callback = OnEndedCallback::new(|| {
|
||||
println!("Playback ended");
|
||||
});
|
||||
context.message_node(
|
||||
buffer_source,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(
|
||||
AudioScheduledSourceNodeMessage::RegisterOnEndedCallback(callback),
|
||||
),
|
||||
);
|
||||
let _ = context.resume();
|
||||
thread::sleep(time::Duration::from_millis(5000));
|
||||
let _ = context.close();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
BIN
components/media/examples/examples/resources/mov_bbb.mp4
Normal file
BIN
components/media/examples/examples/resources/mov_bbb.mp4
Normal file
Binary file not shown.
BIN
components/media/examples/examples/resources/viper_cut.ogg
Normal file
BIN
components/media/examples/examples/resources/viper_cut.ogg
Normal file
Binary file not shown.
100
components/media/examples/examples/set_value_curve.rs
Normal file
100
components/media/examples/examples/set_value_curve.rs
Normal file
@@ -0,0 +1,100 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::audio::constant_source_node::ConstantSourceNodeOptions;
|
||||
use servo_media::audio::gain_node::GainNodeOptions;
|
||||
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioScheduledSourceNodeMessage};
|
||||
use servo_media::audio::param::{ParamType, UserAutomationEvent};
|
||||
use servo_media::{ClientContextId, ServoMedia};
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 1), Default::default())
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let dest = context.dest_node();
|
||||
|
||||
// Initializing the values vector for SetValueCurve function
|
||||
let values = vec![
|
||||
0., 0., 0., 0., 1., 1., 1., 1., 0., 0., 0., 0., 1., 1., 1., 1., 0., 0., 0., 0.,
|
||||
];
|
||||
let start_time = 0.;
|
||||
let end_time = 5.;
|
||||
let n = values.len() as f32;
|
||||
let value_next = values[(n - 1.) as usize];
|
||||
|
||||
let cs = context
|
||||
.create_node(
|
||||
AudioNodeInit::ConstantSourceNode(ConstantSourceNodeOptions::default()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create ConstantSourceNode node");
|
||||
|
||||
let mut gain_options = GainNodeOptions::default();
|
||||
gain_options.gain = 0.0;
|
||||
let gain = context
|
||||
.create_node(
|
||||
AudioNodeInit::GainNode(gain_options.clone()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create gain node");
|
||||
|
||||
let osc = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(Default::default()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
|
||||
context.connect_ports(osc.output(0), gain.input(0));
|
||||
context.connect_ports(cs.output(0), gain.param(ParamType::Gain));
|
||||
context.connect_ports(gain.output(0), dest.input(0));
|
||||
|
||||
let _ = context.resume();
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
|
||||
context.message_node(
|
||||
gain,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
|
||||
context.message_node(
|
||||
cs,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
|
||||
context.message_node(
|
||||
cs,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Offset,
|
||||
UserAutomationEvent::SetValueCurveAtTime(values, start_time, end_time),
|
||||
),
|
||||
);
|
||||
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Frequency,
|
||||
UserAutomationEvent::SetValueAtTime(value_next, end_time),
|
||||
),
|
||||
);
|
||||
|
||||
thread::sleep(time::Duration::from_millis(7000));
|
||||
let _ = context.close();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
68
components/media/examples/examples/stereo_panner.rs
Normal file
68
components/media/examples/examples/stereo_panner.rs
Normal file
@@ -0,0 +1,68 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioScheduledSourceNodeMessage};
|
||||
use servo_media::audio::param::{ParamType, RampKind, UserAutomationEvent};
|
||||
use servo_media::audio::stereo_panner::StereoPannerOptions;
|
||||
use servo_media::{ClientContextId, ServoMedia};
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 1), Default::default())
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let dest = context.dest_node();
|
||||
let osc = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(Default::default()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
let mut options = StereoPannerOptions::default();
|
||||
options.pan = 0.;
|
||||
let pan = context
|
||||
.create_node(AudioNodeInit::StereoPannerNode(options), Default::default())
|
||||
.expect("Failed to create stereo panner node");
|
||||
context.connect_ports(osc.output(0), pan.input(0));
|
||||
context.connect_ports(pan.output(0), dest.input(0));
|
||||
let _ = context.resume();
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
// 2s: Set pan to -1
|
||||
context.message_node(
|
||||
pan,
|
||||
AudioNodeMessage::SetParam(ParamType::Pan, UserAutomationEvent::SetValueAtTime(-1., 2.)),
|
||||
);
|
||||
// 4s: Linearly ramp pan to 0
|
||||
context.message_node(
|
||||
pan,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Pan,
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 0., 4.),
|
||||
),
|
||||
);
|
||||
// 6s: Linearly ramp pan to 1
|
||||
context.message_node(
|
||||
pan,
|
||||
AudioNodeMessage::SetParam(
|
||||
ParamType::Pan,
|
||||
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, 1., 6.),
|
||||
),
|
||||
);
|
||||
thread::sleep(time::Duration::from_millis(5000));
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
55
components/media/examples/examples/stream_dest_node.rs
Normal file
55
components/media/examples/examples/stream_dest_node.rs
Normal file
@@ -0,0 +1,55 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioScheduledSourceNodeMessage};
|
||||
use servo_media::audio::oscillator_node::OscillatorNodeOptions;
|
||||
use servo_media::streams::MediaStreamType;
|
||||
use servo_media::{ClientContextId, ServoMedia};
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
let context = servo_media
|
||||
.create_audio_context(&ClientContextId::build(1, 1), Default::default())
|
||||
.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let options = OscillatorNodeOptions::default();
|
||||
let osc1 = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(options.clone()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
|
||||
let (socket, id) = servo_media.create_stream_and_socket(MediaStreamType::Audio);
|
||||
let dest = context
|
||||
.create_node(
|
||||
AudioNodeInit::MediaStreamDestinationNode(socket),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create stream destination node");
|
||||
context.connect_ports(osc1.output(0), dest.input(0));
|
||||
|
||||
let mut output = servo_media.create_stream_output();
|
||||
output.add_stream(&id);
|
||||
let _ = context.resume();
|
||||
context.message_node(
|
||||
osc1,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
|
||||
thread::sleep(time::Duration::from_millis(3000));
|
||||
let _ = context.close();
|
||||
thread::sleep(time::Duration::from_millis(1000));
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
38
components/media/examples/examples/stream_reader_node.rs
Normal file
38
components/media/examples/examples/stream_reader_node.rs
Normal file
@@ -0,0 +1,38 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::audio::node::AudioNodeInit;
|
||||
use servo_media::{ClientContextId, ServoMedia};
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
let context =
|
||||
servo_media.create_audio_context(&ClientContextId::build(1, 1), Default::default());
|
||||
let input = servo_media.create_audiostream();
|
||||
let context = context.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let dest = context.dest_node();
|
||||
let osc1 = context
|
||||
.create_node(
|
||||
AudioNodeInit::MediaStreamSourceNode(input),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create stream source node");
|
||||
context.connect_ports(osc1.output(0), dest.input(0));
|
||||
let _ = context.resume();
|
||||
|
||||
thread::sleep(time::Duration::from_millis(6000));
|
||||
let _ = context.close();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
27
components/media/examples/examples/videoinput_stream.rs
Normal file
27
components/media/examples/examples/videoinput_stream.rs
Normal file
@@ -0,0 +1,27 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::ServoMedia;
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
if let Some(stream) = servo_media.create_videoinput_stream(Default::default()) {
|
||||
let mut output = servo_media.create_stream_output();
|
||||
output.add_stream(&stream);
|
||||
thread::sleep(time::Duration::from_millis(6000));
|
||||
} else {
|
||||
print!("No video input elements available");
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
ServoMedia::init::<servo_media_auto::Backend>();
|
||||
let servo_media = ServoMedia::get();
|
||||
run_example(servo_media);
|
||||
}
|
||||
112
components/media/examples/examples/wave_shaper.rs
Normal file
112
components/media/examples/examples/wave_shaper.rs
Normal file
@@ -0,0 +1,112 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
extern crate servo_media;
|
||||
extern crate servo_media_auto;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{thread, time};
|
||||
|
||||
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioScheduledSourceNodeMessage};
|
||||
use servo_media::audio::oscillator_node::OscillatorNodeOptions;
|
||||
use servo_media::audio::wave_shaper_node::{
|
||||
OverSampleType, WaveShaperNodeMessage, WaveShaperNodeOptions,
|
||||
};
|
||||
use servo_media::{ClientContextId, ServoMedia};
|
||||
|
||||
fn run_example(servo_media: Arc<ServoMedia>) {
|
||||
let id = ClientContextId::build(1, 1);
|
||||
let context = servo_media.create_audio_context(&id, Default::default());
|
||||
|
||||
{
|
||||
let context = context.unwrap();
|
||||
let context = context.lock().unwrap();
|
||||
let curve = vec![1., 0., 0., 0.75, 0.5];
|
||||
|
||||
let dest = context.dest_node();
|
||||
let osc = context
|
||||
.create_node(
|
||||
AudioNodeInit::OscillatorNode(OscillatorNodeOptions::default()),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create oscillator node");
|
||||
let wsh = context
|
||||
.create_node(
|
||||
AudioNodeInit::WaveShaperNode(WaveShaperNodeOptions {
|
||||
curve: Some(curve.clone()),
|
||||
oversample: OverSampleType::None,
|
||||
}),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create waveshaper node");
|
||||
let wshx2 = context
|
||||
.create_node(
|
||||
AudioNodeInit::WaveShaperNode(WaveShaperNodeOptions {
|
||||
curve: Some(curve.clone()),
|
||||
oversample: OverSampleType::Double,
|
||||
}),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create waveshaper node");
|
||||
let wshx4 = context
|
||||
.create_node(
|
||||
AudioNodeInit::WaveShaperNode(WaveShaperNodeOptions {
|
||||
curve: Some(curve.clone()),
|
||||
oversample: OverSampleType::Quadruple,
|
||||
}),
|
||||
Default::default(),
|
||||
)
|
||||
.expect("Failed to create waveshaper node");
|
||||
|
||||
context.connect_ports(osc.output(0), dest.input(0));
|
||||
let _ = context.resume();
|
||||
context.message_node(
|
||||
osc,
|
||||
AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(0.)),
|
||||
);
|
||||
|
||||
println!("raw oscillator");
|
||||
thread::sleep(time::Duration::from_millis(2000));
|
||||
|
||||
println!("oscillator through waveshaper with no oversampling");
|
||||
context.disconnect_output(osc.output(0));
|
||||
context.connect_ports(osc.output(0), wsh.input(0));
|
||||
context.connect_ports(wsh.output(0), dest.input(0));
|
||||
thread::sleep(time::Duration::from_millis(2000));
|
||||
|
||||
println!("oscillator through waveshaper with 2x oversampling");
|
||||
context.disconnect_output(osc.output(0));
|
||||
context.disconnect_output(wsh.output(0));
|
||||
context.connect_ports(osc.output(0), wshx2.input(0));
|
||||
context.connect_ports(wshx2.output(0), dest.input(0));
|
||||
thread::sleep(time::Duration::from_millis(2000));
|
||||
|
||||
println!("oscillator through waveshaper with 4x oversampling");
|
||||
context.disconnect_output(osc.output(0));
|
||||
context.disconnect_output(wshx2.output(0));
|
||||
context.connect_ports(osc.output(0), wshx4.input(0));
|
||||
context.connect_ports(wshx4.output(0), dest.input(0));
|
||||
thread::sleep(time::Duration::from_millis(2000));
|
||||
|
||||
println!("oscillator through waveshaper with no oversampling");
|
||||
context.disconnect_output(osc.output(0));
|
||||
context.disconnect_output(wshx4.output(0));
|
||||
context.connect_ports(osc.output(0), wsh.input(0));
|
||||
context.connect_ports(wsh.output(0), dest.input(0));
|
||||
thread::sleep(time::Duration::from_millis(2000));
|
||||
|
||||
println!("oscillator through waveshaper with no curve (should be same as raw oscillator)");
|
||||
context.message_node(
|
||||
wsh,
|
||||
AudioNodeMessage::WaveShaperNode(WaveShaperNodeMessage::SetCurve(None)),
|
||||
);
|
||||
thread::sleep(time::Duration::from_millis(2000));
|
||||
}
|
||||
}
|
||||
|
||||
/// Example entry point: initializes the platform media backend, fetches the
/// singleton instance and runs the example against it.
fn main() {
    // The backend must be registered before `ServoMedia::get` is called.
    ServoMedia::init::<servo_media_auto::Backend>();
    let servo_media = ServoMedia::get();
    run_example(servo_media);
}
|
||||
@@ -19,5 +19,5 @@ paint_api = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
servo-media = { workspace = true }
|
||||
servo_config = { path = "../config" }
|
||||
servo_config = { path = "../../config" }
|
||||
webrender_api = { workspace = true }
|
||||
23
components/media/player/Cargo.toml
Normal file
23
components/media/player/Cargo.toml
Normal file
@@ -0,0 +1,23 @@
|
||||
[package]
|
||||
name = "servo-media-player"
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
license.workspace = true
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
rust-version.workspace = true
|
||||
|
||||
[lib]
|
||||
name = "servo_media_player"
|
||||
path = "lib.rs"
|
||||
|
||||
[dependencies]
|
||||
serde = "1.0.66"
|
||||
serde_derive = "1.0.66"
|
||||
ipc-channel = { workspace = true }
|
||||
|
||||
[dependencies.servo-media-streams]
|
||||
path = "../streams"
|
||||
|
||||
[dependencies.servo-media-traits]
|
||||
path = "../traits"
|
||||
7
components/media/player/audio.rs
Normal file
7
components/media/player/audio.rs
Normal file
@@ -0,0 +1,7 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
/// Sink for decoded audio samples.
///
/// `Send + 'static` so an implementor can be handed off to another thread for
/// the lifetime of playback.
pub trait AudioRenderer: Send + 'static {
    /// Render a buffer of `f32` samples for the given `channel` index.
    /// NOTE(review): sample layout (interleaving, value range) is not visible
    /// here — confirm against the backend before relying on it.
    fn render(&mut self, sample: Box<dyn AsRef<[f32]>>, channel: u32);
}
|
||||
53
components/media/player/context.rs
Normal file
53
components/media/player/context.rs
Normal file
@@ -0,0 +1,53 @@
|
||||
// This Source Code Form is subject to the terms of the Mozilla Public
|
||||
// License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
//! `PlayerGLContext` is a trait to be used to pass the GL context for
|
||||
//! rendering purposes.
|
||||
//!
|
||||
//! The current consumer of this trait is the GL rendering mechanism
|
||||
//! for the GStreamer backend.
|
||||
//!
|
||||
//! The client application should implement this trait and pass the
|
||||
//! trait object to its `player` instance.
|
||||
|
||||
/// A GL context handle, tagged with the platform it belongs to.
///
/// The wrapped `usize` is the living pointer/handle value for the context;
/// the consumer casts it back to the platform-specific type.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum GlContext {
    /// The EGL platform used primarily with the X11, Wayland and
    /// Android window systems as well as on embedded Linux.
    Egl(usize),
    /// The GLX platform used primarily with the X11 window system.
    Glx(usize),
    /// No context is available or the platform could not be determined.
    Unknown,
}
|
||||
|
||||
/// A native display handle, tagged with the window system it belongs to.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum NativeDisplay {
    /// The EGLDisplay memory address
    Egl(usize),
    /// XDisplay memory address
    X11(usize),
    /// wl_display memory address
    Wayland(usize),
    /// No display is used (headless rendering).
    Headless,
    /// The display type could not be determined.
    Unknown,
}
|
||||
|
||||
/// The GL API flavor implemented by a `PlayerGLContext`.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum GlApi {
    /// Desktop OpenGL (pre-3.x / compatibility).
    OpenGL,
    /// Desktop OpenGL 3 or newer.
    OpenGL3,
    /// OpenGL ES 1.x.
    Gles1,
    /// OpenGL ES 2.x.
    Gles2,
    /// No GL API is available.
    None,
}
|
||||
|
||||
/// Client-provided access to the GL state used for rendering.
///
/// The client application implements this trait and passes the trait object
/// to its `player` instance (see the module docs above).
pub trait PlayerGLContext {
    /// Returns the GL context living pointer wrapped by `GlContext`
    fn get_gl_context(&self) -> GlContext;
    /// Returns the living pointer to the native display structure
    /// wrapped by `NativeDisplay`.
    fn get_native_display(&self) -> NativeDisplay;
    /// Returns the GL API of the context
    fn get_gl_api(&self) -> GlApi;
}
|
||||
133
components/media/player/lib.rs
Normal file
133
components/media/player/lib.rs
Normal file
@@ -0,0 +1,133 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::ops::Range;
|
||||
use std::time::Duration;
|
||||
|
||||
pub extern crate ipc_channel;
|
||||
#[macro_use]
|
||||
extern crate serde_derive;
|
||||
extern crate servo_media_streams as streams;
|
||||
extern crate servo_media_traits;
|
||||
|
||||
pub mod audio;
|
||||
pub mod context;
|
||||
pub mod metadata;
|
||||
pub mod video;
|
||||
|
||||
use ipc_channel::ipc::{self, IpcSender};
|
||||
use servo_media_traits::MediaInstance;
|
||||
use streams::registry::MediaStreamId;
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub enum PlaybackState {
|
||||
Stopped,
|
||||
Buffering,
|
||||
Paused,
|
||||
Playing,
|
||||
}
|
||||
|
||||
/// Errors reported by `Player` operations.
#[derive(Debug, PartialEq)]
pub enum PlayerError {
    /// Backend specific error.
    Backend(String),
    /// Could not push buffer contents to the player.
    BufferPushFailed,
    /// The player cannot consume more data.
    EnoughData,
    /// Setting End Of Stream failed.
    EOSFailed,
    /// The media stream is not seekable.
    NonSeekableStream,
    /// Tried to seek out of range.
    SeekOutOfRange,
    /// Setting an audio or video stream failed.
    /// Possibly because the type of source is not PlayerSource::Stream.
    SetStreamFailed,
    /// Setting an audio or video track failed.
    SetTrackFailed,
}
|
||||
|
||||
/// Message sent over a `SeekLock` channel: `(seek accepted, ack sender)`.
/// The receiver replies with `()` on the ack sender to confirm receipt.
pub type SeekLockMsg = (bool, IpcSender<()>);
|
||||
|
||||
/// Handle used to unblock a player waiting on a `PlayerEvent::SeekData`
/// request; see `SeekLock::unlock`.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct SeekLock {
    /// Channel over which the unlock result (and an ack sender) is delivered.
    pub lock_channel: IpcSender<SeekLockMsg>,
}
|
||||
|
||||
impl SeekLock {
    /// Unblocks the player after a `PlayerEvent::SeekData` request.
    ///
    /// `result` indicates whether the client accepted the seek. Blocks until
    /// the other side acknowledges having observed the result.
    ///
    /// # Panics
    /// Panics if the ack channel cannot be created or if the IPC send/receive
    /// fails (e.g. the other end is gone).
    pub fn unlock(&self, result: bool) {
        // Fresh one-shot channel so the receiver can acknowledge `result`.
        let (ack_sender, ack_recv) = ipc::channel::<()>().expect("Could not create IPC channel");
        self.lock_channel.send((result, ack_sender)).unwrap();
        // Wait for the acknowledgment before returning.
        ack_recv.recv().unwrap()
    }
}
|
||||
|
||||
/// Asynchronous notifications emitted by a player to its client.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum PlayerEvent {
    /// Playback reached the end of the stream.
    EndOfStream,
    /// The player has enough data. The client should stop pushing data into.
    EnoughData,
    /// A backend error, described by the wrapped message.
    Error(String),
    /// A new video frame is available.
    VideoFrameUpdated,
    /// Media metadata was parsed or updated.
    MetadataUpdated(metadata::Metadata),
    /// The `None` value means the duration is unknown, in which case this is likely a live stream.
    DurationChanged(Option<Duration>),
    /// The internal player queue is running out of data. The client should start
    /// pushing more data.
    NeedData,
    /// The playback position changed.
    /// NOTE(review): units are not stated here — `SeekDone` uses seconds, so
    /// seconds is likely; confirm with the backend.
    PositionChanged(f64),
    /// The player needs the data to perform a seek to the given offset in bytes.
    /// The next push_data should get the buffers from the new offset.
    /// The player will be blocked until the user unlocks it through
    /// the given SeekLock instance.
    /// This event is only received for seekable stream types.
    SeekData(u64, SeekLock),
    /// The player has performed a seek to the given time offset in seconds.
    SeekDone(f64),
    /// The playback state changed.
    StateChanged(PlaybackState),
}
|
||||
|
||||
/// How a player's input behaves with respect to seeking; chosen when the
/// player is constructed (see `Player::set_stream`).
#[derive(Clone, Copy, Debug, Deserialize, PartialEq, Serialize)]
pub enum StreamType {
    /// No seeking is supported in the stream, such as a live stream.
    Stream,
    /// The stream is seekable.
    Seekable,
}
|
||||
|
||||
/// Control surface implemented by every media player backend.
///
/// Operations that can fail in the backend return `Result<(), PlayerError>`;
/// plain queries return the current value directly.
pub trait Player: Send + MediaInstance {
    /// Start or resume playback.
    fn play(&self) -> Result<(), PlayerError>;
    /// Pause playback.
    fn pause(&self) -> Result<(), PlayerError>;
    /// Whether playback is currently paused.
    fn paused(&self) -> bool;
    /// Whether playback can currently be resumed.
    /// NOTE(review): exact backend semantics are not visible here — confirm
    /// with implementors.
    fn can_resume(&self) -> bool;
    /// Stop playback altogether.
    fn stop(&self) -> Result<(), PlayerError>;
    /// Seek to the given time offset (presumably seconds, matching
    /// `seekable`/`buffered`). See `PlayerError::NonSeekableStream` and
    /// `PlayerError::SeekOutOfRange`.
    fn seek(&self, time: f64) -> Result<(), PlayerError>;
    /// Time ranges that can be seeked to.
    fn seekable(&self) -> Vec<Range<f64>>;
    /// Mute or unmute audio output.
    fn set_mute(&self, muted: bool) -> Result<(), PlayerError>;
    /// Whether audio output is muted.
    fn muted(&self) -> bool;
    /// Set the output volume.
    fn set_volume(&self, volume: f64) -> Result<(), PlayerError>;
    /// Current output volume.
    fn volume(&self) -> f64;
    /// Declare the total input size, when known.
    /// NOTE(review): presumably bytes (cf. `PlayerEvent::SeekData`'s byte
    /// offset) — confirm with the backend.
    fn set_input_size(&self, size: u64) -> Result<(), PlayerError>;
    /// Set the playback rate.
    fn set_playback_rate(&self, playback_rate: f64) -> Result<(), PlayerError>;
    /// Current playback rate.
    fn playback_rate(&self) -> f64;
    /// Push media data into the player. See `PlayerError::BufferPushFailed`
    /// and `PlayerError::EnoughData`.
    fn push_data(&self, data: Vec<u8>) -> Result<(), PlayerError>;
    /// Signal that no further data will be pushed.
    fn end_of_stream(&self) -> Result<(), PlayerError>;
    /// Get the list of time ranges in seconds that have been buffered.
    fn buffered(&self) -> Vec<Range<f64>>;
    /// Set the stream to be played by the player.
    /// Only a single stream of the same type (audio or video) can be set.
    /// Subsequent calls with a stream of the same type will override the previously
    /// set stream.
    /// This method requires the player to be constructed with StreamType::Stream.
    /// It is important to give the correct value of `only_stream` indicating
    /// that the audio or video stream being set is the only one expected.
    /// Subsequent calls to `set_stream` after the `only_stream` flag has been
    /// set to true will fail.
    fn set_stream(&self, stream: &MediaStreamId, only_stream: bool) -> Result<(), PlayerError>;
    /// If player's rendering draws using GL textures
    fn render_use_gl(&self) -> bool;
    /// Enable or disable the audio track at `stream_index`.
    fn set_audio_track(&self, stream_index: i32, enabled: bool) -> Result<(), PlayerError>;
    /// Enable or disable the video track at `stream_index`.
    fn set_video_track(&self, stream_index: i32, enabled: bool) -> Result<(), PlayerError>;
}
|
||||
20
components/media/player/metadata.rs
Normal file
20
components/media/player/metadata.rs
Normal file
@@ -0,0 +1,20 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::time;
|
||||
|
||||
/// Metadata describing the media being played, delivered to clients through
/// `PlayerEvent::MetadataUpdated`.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
pub struct Metadata {
    /// Total duration, when known (`None` likely means a live stream).
    pub duration: Option<time::Duration>,
    /// Video width. NOTE(review): presumably pixels — confirm with the backend.
    pub width: u32,
    /// Video height.
    pub height: u32,
    /// Description of the media format.
    pub format: String,
    /// Whether the stream supports seeking.
    pub is_seekable: bool,
    // TODO: Might be nice to move width and height along with each video track.
    /// The available video tracks.
    pub video_tracks: Vec<String>,
    /// The available audio tracks.
    pub audio_tracks: Vec<String>,
    /// Whether the media comes from a live source or not.
    pub is_live: bool,
    /// Media title, when available.
    pub title: Option<String>,
}
|
||||
73
components/media/player/video.rs
Normal file
73
components/media/player/video.rs
Normal file
@@ -0,0 +1,73 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
/// Backing storage for a `VideoFrame`.
#[derive(Clone)]
pub enum VideoFrameData {
    /// Raw frame bytes.
    Raw(Arc<Vec<u8>>),
    /// A GL texture id.
    Texture(u32),
    /// An OES (external) texture id.
    OESTexture(u32),
}
|
||||
|
||||
/// A backend-owned buffer that can expose its contents as `VideoFrameData`.
pub trait Buffer: Send + Sync {
    /// Returns the frame data, or `None` when it cannot be provided.
    fn to_vec(&self) -> Option<VideoFrameData>;
}
|
||||
|
||||
/// A single decoded video frame.
///
/// Cheap to clone: the payload and backing buffer are behind `Arc`s.
#[derive(Clone)]
pub struct VideoFrame {
    // Frame width.
    width: i32,
    // Frame height.
    height: i32,
    // The frame payload: raw bytes or a GL texture id.
    data: VideoFrameData,
    // Keeps the originating buffer alive for the lifetime of the frame;
    // never read directly (hence the underscore).
    _buffer: Arc<dyn Buffer>,
}
|
||||
|
||||
impl VideoFrame {
|
||||
pub fn new(width: i32, height: i32, buffer: Arc<dyn Buffer>) -> Option<Self> {
|
||||
let data = buffer.to_vec()?;
|
||||
Some(VideoFrame {
|
||||
width,
|
||||
height,
|
||||
data,
|
||||
_buffer: buffer,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn get_width(&self) -> i32 {
|
||||
self.width
|
||||
}
|
||||
|
||||
pub fn get_height(&self) -> i32 {
|
||||
self.height
|
||||
}
|
||||
|
||||
pub fn get_data(&self) -> Arc<Vec<u8>> {
|
||||
match self.data {
|
||||
VideoFrameData::Raw(ref data) => data.clone(),
|
||||
_ => unreachable!("invalid raw data request for texture frame"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_texture_id(&self) -> u32 {
|
||||
match self.data {
|
||||
VideoFrameData::Texture(data) | VideoFrameData::OESTexture(data) => data,
|
||||
_ => unreachable!("invalid texture id request for raw data frame"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_gl_texture(&self) -> bool {
|
||||
matches!(
|
||||
self.data,
|
||||
VideoFrameData::Texture(_) | VideoFrameData::OESTexture(_)
|
||||
)
|
||||
}
|
||||
|
||||
pub fn is_external_oes(&self) -> bool {
|
||||
matches!(self.data, VideoFrameData::OESTexture(_))
|
||||
}
|
||||
}
|
||||
|
||||
/// Sink for decoded video frames.
///
/// `Send + 'static` so an implementor can be handed off to another thread for
/// the lifetime of playback.
pub trait VideoFrameRenderer: Send + 'static {
    /// Render the given frame.
    fn render(&mut self, frame: VideoFrame);
}
|
||||
18
components/media/servo-media-derive/Cargo.toml
Normal file
18
components/media/servo-media-derive/Cargo.toml
Normal file
@@ -0,0 +1,18 @@
|
||||
[package]
|
||||
name = "servo-media-derive"
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
license.workspace = true
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
rust-version.workspace = true
|
||||
|
||||
[lib]
|
||||
name = "servo_media_derive"
|
||||
proc-macro = true
|
||||
path = "lib.rs"
|
||||
|
||||
[dependencies]
|
||||
syn = "2"
|
||||
quote = "1"
|
||||
proc-macro2 = "1"
|
||||
136
components/media/servo-media-derive/lib.rs
Normal file
136
components/media/servo-media-derive/lib.rs
Normal file
@@ -0,0 +1,136 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
||||
|
||||
#![recursion_limit = "128"]
|
||||
|
||||
extern crate proc_macro;
|
||||
extern crate proc_macro2;
|
||||
extern crate syn;
|
||||
#[macro_use]
|
||||
extern crate quote;
|
||||
|
||||
use proc_macro::TokenStream;
|
||||
|
||||
#[proc_macro_derive(AudioScheduledSourceNode)]
|
||||
pub fn audio_scheduled_source_node(input: TokenStream) -> TokenStream {
|
||||
let ast: syn::DeriveInput = syn::parse(input).unwrap();
|
||||
let r#gen = impl_audio_scheduled_source_node(&ast);
|
||||
r#gen.into()
|
||||
}
|
||||
|
||||
/// Generates the shared scheduling logic for an `AudioScheduledSourceNode`
/// implementor: `should_play_at`, `start`, `stop`,
/// `maybe_trigger_onended_callback` and `handle_source_node_message`.
///
/// The generated code assumes the annotated type has `start_at: Option<Tick>`,
/// `stop_at: Option<Tick>` and an `onended_callback` field, and that `Tick`,
/// `ShouldPlay` and `AudioScheduledSourceNodeMessage` are in scope at the
/// expansion site. (Comments below inside `quote!` are dropped during
/// tokenization and do not appear in the generated code.)
fn impl_audio_scheduled_source_node(ast: &syn::DeriveInput) -> proc_macro2::TokenStream {
    let name = &ast.ident;
    quote! {
        impl #name {
            // Decides which slice of the current render block (if any) this
            // source should produce, given the block's starting `tick`.
            fn should_play_at(&mut self, tick: Tick) -> ShouldPlay {
                let start = if let Some(start) = self.start_at {
                    start
                } else {
                    // `start()` was never called: nothing to play.
                    return ShouldPlay::No;
                };

                let frame_end = tick + Tick::FRAMES_PER_BLOCK;
                if tick < start {
                    if frame_end < start {
                        // Playback begins after this block entirely.
                        ShouldPlay::No
                    } else {
                        // Playback begins somewhere inside this block.
                        let delta_start = start - tick;
                        if let Some(stop) = self.stop_at {
                            if stop <= start {
                                // Stop scheduled at or before start: never plays.
                                self.maybe_trigger_onended_callback();
                                return ShouldPlay::No;
                            }
                            if stop > frame_end {
                                ShouldPlay::Between(delta_start, Tick::FRAMES_PER_BLOCK)
                            } else {
                                // Both start and stop fall inside this block.
                                self.maybe_trigger_onended_callback();
                                ShouldPlay::Between(delta_start, stop - tick)
                            }
                        } else {
                            ShouldPlay::Between(delta_start, Tick::FRAMES_PER_BLOCK)
                        }
                    }
                } else {
                    // Already started at or before this block.
                    let stop = if let Some(stop) = self.stop_at {
                        stop
                    } else {
                        return ShouldPlay::Between(Tick(0), Tick::FRAMES_PER_BLOCK);
                    };
                    if stop > frame_end {
                        ShouldPlay::Between(Tick(0), Tick::FRAMES_PER_BLOCK)
                    } else if stop < tick {
                        // Stopped before this block began.
                        self.maybe_trigger_onended_callback();
                        ShouldPlay::No
                    } else {
                        // Stops inside this block.
                        self.maybe_trigger_onended_callback();
                        ShouldPlay::Between(Tick(0), stop - tick)
                    }
                }
            }

            // Schedules playback; returns false when rejected.
            fn start(&mut self, tick: Tick) -> bool {
                // We can only allow a single call to `start` and always before
                // any `stop` calls.
                if self.start_at.is_some() || self.stop_at.is_some() {
                    return false;
                }
                self.start_at = Some(tick);
                true
            }

            // Schedules the end of playback; returns false when rejected.
            fn stop(&mut self, tick: Tick) -> bool {
                // We can only allow calls to `stop` after `start` is called.
                if self.start_at.is_none() {
                    return false;
                }
                // If `stop` is called again after already having been called,
                // the last invocation will be the only one applied.
                self.stop_at = Some(tick);
                true
            }

            // Fires the registered onended callback at most once (`take`
            // empties the slot), and only for sources that actually started.
            fn maybe_trigger_onended_callback(&mut self) {
                // We cannot have an end without a start.
                if self.start_at.is_none() {
                    return;
                }
                if let Some(cb) = self.onended_callback.take() {
                    cb.0()
                }
            }

            // Translates control messages into the scheduling calls above.
            fn handle_source_node_message(&mut self, message: AudioScheduledSourceNodeMessage, sample_rate: f32) {
                match message {
                    AudioScheduledSourceNodeMessage::Start(when) => {
                        self.start(Tick::from_time(when, sample_rate));
                    }
                    AudioScheduledSourceNodeMessage::Stop(when) => {
                        self.stop(Tick::from_time(when, sample_rate));
                    }
                    AudioScheduledSourceNodeMessage::RegisterOnEndedCallback(callback) => {
                        self.onended_callback = Some(callback);
                    }
                }
            }
        }
    }
}
|
||||
|
||||
#[proc_macro_derive(AudioNodeCommon)]
|
||||
pub fn channel_info(input: TokenStream) -> TokenStream {
|
||||
let ast: syn::DeriveInput = syn::parse(input).unwrap();
|
||||
let name = &ast.ident;
|
||||
let r#gen = quote! {
|
||||
impl crate::node::AudioNodeCommon for #name {
|
||||
fn channel_info(&self) -> &crate::node::ChannelInfo {
|
||||
&self.channel_info
|
||||
}
|
||||
|
||||
fn channel_info_mut(&mut self) -> &mut crate::node::ChannelInfo {
|
||||
&mut self.channel_info
|
||||
}
|
||||
}
|
||||
};
|
||||
r#gen.into()
|
||||
}
|
||||
30
components/media/servo-media/Cargo.toml
Normal file
30
components/media/servo-media/Cargo.toml
Normal file
@@ -0,0 +1,30 @@
|
||||
[package]
|
||||
name = "servo-media"
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
license.workspace = true
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
rust-version.workspace = true
|
||||
|
||||
[lib]
|
||||
name = "servo_media"
|
||||
path = "lib.rs"
|
||||
|
||||
[dependencies]
|
||||
once_cell = "1.18.0"
|
||||
|
||||
[dependencies.servo-media-audio]
|
||||
path = "../audio"
|
||||
|
||||
[dependencies.servo-media-player]
|
||||
path = "../player"
|
||||
|
||||
[dependencies.servo-media-streams]
|
||||
path = "../streams"
|
||||
|
||||
[dependencies.servo-media-traits]
|
||||
path = "../traits"
|
||||
|
||||
[dependencies.servo-media-webrtc]
|
||||
path = "../webrtc"
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user