Skip to content

Commit

Permalink
Auto merge of #321 - ferjm:mediaelementsourcenode, r=Manishearth
Browse files Browse the repository at this point in the history
MediaElementAudioSourceNode
  • Loading branch information
bors-servo authored Nov 15, 2019
2 parents 5380170 + e70664c commit 220ed13
Show file tree
Hide file tree
Showing 25 changed files with 611 additions and 80 deletions.
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 2 additions & 1 deletion audio/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,9 @@ euclid = "0.20"
serde_derive = "1.0.66"
serde = "1.0.66"
servo_media_derive = { path = "../servo-media-derive" }
smallvec = "0.6.1"
servo-media-player = { path = "../player" }
servo-media-traits = { path = "../traits" }
smallvec = "0.6.1"

[dependencies.petgraph]
version = "0.4.12"
Expand Down
3 changes: 3 additions & 0 deletions audio/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@ extern crate serde_derive;
#[macro_use]
extern crate servo_media_derive;

extern crate servo_media_player as player;

extern crate boxfnonce;
extern crate byte_slice_cast;
extern crate euclid;
Expand All @@ -26,6 +28,7 @@ pub mod destination_node;
pub mod gain_node;
pub mod graph;
pub mod listener;
pub mod media_element_source_node;
pub mod node;
pub mod offline_sink;
pub mod oscillator_node;
Expand Down
130 changes: 130 additions & 0 deletions audio/media_element_source_node.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
use block::{Block, Chunk, FRAMES_PER_BLOCK};
use node::{AudioNodeEngine, AudioNodeType, BlockInfo, ChannelInfo};
use player::audio::AudioRenderer;
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::sync::mpsc::Sender;
use std::sync::{Arc, Mutex};

#[derive(Debug, Clone)]
/// Control messages accepted by a `MediaElementSourceNode`.
pub enum MediaElementSourceNodeMessage {
    /// Request the node's shared `AudioRenderer`; the node replies by
    /// sending a clone of its `Arc<Mutex<dyn AudioRenderer>>` on the channel.
    GetAudioRenderer(Sender<Arc<Mutex<dyn AudioRenderer>>>),
}

#[derive(AudioNodeCommon)]
/// Audio graph source node fed by a media element's decoded audio.
/// Samples arrive asynchronously via the renderer and are drained
/// block-by-block in `process`.
pub(crate) struct MediaElementSourceNode {
    channel_info: ChannelInfo,
    // Renderer handed out to the player; it appends decoded samples
    // into `buffers`.
    renderer: Arc<Mutex<dyn AudioRenderer>>,
    // One sample buffer per channel, shared with the renderer.
    buffers: Arc<Mutex<Vec<Vec<f32>>>>,
    // Index of the next unread frame within each channel buffer.
    playback_offset: usize,
}

impl MediaElementSourceNode {
    /// Creates a node with empty channel buffers and a renderer that
    /// shares those buffers with the node.
    pub fn new(channel_info: ChannelInfo) -> Self {
        let shared_buffers: Arc<Mutex<Vec<Vec<f32>>>> = Arc::new(Mutex::new(Vec::new()));
        let renderer = Arc::new(Mutex::new(MediaElementSourceNodeRenderer::new(
            shared_buffers.clone(),
        )));
        Self {
            channel_info,
            renderer,
            buffers: shared_buffers,
            playback_offset: 0,
        }
    }

    /// Handles control messages sent to this node. The reply channel may
    /// be gone, so the send result is deliberately ignored.
    pub fn handle_message(&mut self, message: MediaElementSourceNodeMessage, _: f32) {
        // Single-variant enum: destructure directly instead of matching.
        let MediaElementSourceNodeMessage::GetAudioRenderer(sender) = message;
        let _ = sender.send(self.renderer.clone());
    }
}

impl AudioNodeEngine for MediaElementSourceNode {
    fn node_type(&self) -> AudioNodeType {
        AudioNodeType::MediaElementSourceNode
    }

    /// Produces one render quantum by copying samples from the shared
    /// per-channel buffers starting at `playback_offset`, then advancing
    /// the offset past what was consumed.
    fn process(&mut self, mut inputs: Chunk, _info: &BlockInfo) -> Chunk {
        // Source nodes take no inputs; the chunk arrives empty.
        debug_assert!(inputs.len() == 0);

        let buffers = self.buffers.lock().unwrap();
        let chans = buffers.len() as u8;

        // Nothing rendered yet: emit a silent default block.
        if chans == 0 {
            inputs.blocks.push(Default::default());
            return inputs;
        }

        // Channel 0's length is used as the amount of data available.
        // NOTE(review): assumes the renderer fills all channels evenly —
        // confirm against the renderer's call pattern.
        let len = buffers[0].len();

        let frames_per_block = FRAMES_PER_BLOCK.0 as usize;
        // Clamp the copy to the data still available past the offset.
        // (`playback_offset <= len` is an invariant, so no underflow.)
        let samples_to_copy = frames_per_block.min(len - self.playback_offset);
        let next_offset = self.playback_offset + samples_to_copy;
        if samples_to_copy == frames_per_block {
            // A full block is available: copy each channel wholesale.
            let mut block = Block::empty();
            for chan in 0..chans {
                block.push_chan(&buffers[chan as usize][self.playback_offset..next_offset]);
            }
            inputs.blocks.push(block)
        } else {
            // Not enough data for a whole block: start from silence and
            // copy the partial data into the front of each channel.
            let mut block = Block::default();
            block.repeat(chans);
            block.explicit_repeat();
            for chan in 0..chans {
                let data = block.data_chan_mut(chan);
                // Direct subslice copy replaces the old no-op
                // split_at_mut(0) + split_at_mut(samples_to_copy) pair.
                data[..samples_to_copy]
                    .copy_from_slice(&buffers[chan as usize][self.playback_offset..next_offset]);
            }
            inputs.blocks.push(block)
        }

        self.playback_offset = next_offset;

        inputs
    }

    fn input_count(&self) -> u32 {
        0
    }

    make_message_handler!(MediaElementSourceNode: handle_message);
}

/// `AudioRenderer` implementation handed to the media player; it receives
/// decoded audio and appends it to the buffers shared with the node.
struct MediaElementSourceNodeRenderer {
    // Per-channel sample buffers, shared with `MediaElementSourceNode`.
    buffers: Arc<Mutex<Vec<Vec<f32>>>>,
    // Maps a player channel position to its index in `buffers`.
    channels: HashMap<u32, usize>,
}

impl MediaElementSourceNodeRenderer {
pub fn new(buffers: Arc<Mutex<Vec<Vec<f32>>>>) -> Self {
Self {
buffers,
channels: HashMap::new(),
}
}
}

impl AudioRenderer for MediaElementSourceNodeRenderer {
    /// Appends rendered samples for `channel_pos` to that channel's shared
    /// buffer, allocating the buffer the first time the channel is seen.
    fn render(&mut self, sample: Box<dyn AsRef<[f32]>>, channel_pos: u32) {
        // Lock once for the whole call; the original locked in the vacant
        // arm and then a second time for the write.
        let mut buffers = self.buffers.lock().unwrap();
        // Map the channel position to a 0-based buffer index, replacing the
        // previous 1-based index + `channel - 1` arithmetic at the use site.
        let channel = match self.channels.entry(channel_pos) {
            Entry::Occupied(entry) => *entry.get(),
            Entry::Vacant(entry) => {
                let index = buffers.len();
                buffers.push(Vec::new());
                *entry.insert(index)
            }
        };
        buffers[channel].extend_from_slice((*sample).as_ref());
    }
}
6 changes: 5 additions & 1 deletion audio/node.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ use buffer_source_node::{AudioBufferSourceNodeMessage, AudioBufferSourceNodeOpti
use channel_node::ChannelNodeOptions;
use constant_source_node::ConstantSourceNodeOptions;
use gain_node::GainNodeOptions;
use media_element_source_node::MediaElementSourceNodeMessage;
use oscillator_node::{OscillatorNodeMessage, OscillatorNodeOptions};
use panner_node::{PannerNodeMessage, PannerNodeOptions};
use param::{Param, ParamRate, ParamType, UserAutomationEvent};
Expand All @@ -26,6 +27,7 @@ pub enum AudioNodeInit {
DynamicsCompressionNode,
GainNode(GainNodeOptions),
IIRFilterNode,
MediaElementSourceNode,
OscillatorNode(OscillatorNodeOptions),
PannerNode(PannerNodeOptions),
PeriodicWave,
Expand All @@ -52,6 +54,7 @@ pub enum AudioNodeType {
DynamicsCompressionNode,
GainNode,
IIRFilterNode,
MediaElementSourceNode,
OscillatorNode,
PannerNode,
PeriodicWave,
Expand Down Expand Up @@ -183,9 +186,10 @@ pub enum AudioNodeMessage {
AudioBufferSourceNode(AudioBufferSourceNodeMessage),
AudioScheduledSourceNode(AudioScheduledSourceNodeMessage),
BiquadFilterNode(BiquadFilterNodeMessage),
GetParamValue(ParamType, Sender<f32>),
MediaElementSourceNode(MediaElementSourceNodeMessage),
OscillatorNode(OscillatorNodeMessage),
PannerNode(PannerNodeMessage),
GetParamValue(ParamType, Sender<f32>),
SetChannelCount(u8),
SetChannelMode(ChannelCountMode),
SetChannelInterpretation(ChannelInterpretation),
Expand Down
2 changes: 2 additions & 0 deletions audio/render_thread.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ use constant_source_node::ConstantSourceNode;
use context::{AudioContextOptions, ProcessingState, StateChangeResult};
use gain_node::GainNode;
use graph::{AudioGraph, InputPort, NodeId, OutputPort, PortId};
use media_element_source_node::MediaElementSourceNode;
use node::{AudioNodeEngine, AudioNodeInit, AudioNodeMessage};
use node::{BlockInfo, ChannelInfo};
use offline_sink::OfflineAudioSink;
Expand Down Expand Up @@ -189,6 +190,7 @@ impl AudioRenderThread {
}
AudioNodeInit::ChannelSplitterNode => Box::new(ChannelSplitterNode::new(ch)),
AudioNodeInit::WaveShaperNode(options) => Box::new(WaveShaperNode::new(options, ch)),
AudioNodeInit::MediaElementSourceNode => Box::new(MediaElementSourceNode::new(ch)),
_ => unimplemented!(),
};
let id = self.graph.add_node(node);
Expand Down
5 changes: 3 additions & 2 deletions backends/dummy/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ use servo_media_audio::render_thread::AudioRenderThreadMsg;
use servo_media_audio::sink::{AudioSink, AudioSinkError};
use servo_media_audio::AudioBackend;
use servo_media_player::context::PlayerGLContext;
use servo_media_player::{frame, Player, PlayerError, PlayerEvent, StreamType};
use servo_media_player::{audio, video, Player, PlayerError, PlayerEvent, StreamType};
use servo_media_streams::capture::MediaTrackConstraintSet;
use servo_media_streams::registry::{register_stream, unregister_stream, MediaStreamId};
use servo_media_streams::{MediaOutput, MediaStream, MediaStreamType};
Expand Down Expand Up @@ -73,7 +73,8 @@ impl Backend for DummyBackend {
_id: &ClientContextId,
_: StreamType,
_: IpcSender<PlayerEvent>,
_: Option<Arc<Mutex<dyn frame::FrameRenderer>>>,
_: Option<Arc<Mutex<dyn video::VideoFrameRenderer>>>,
_: Option<Arc<Mutex<dyn audio::AudioRenderer>>>,
_: Box<dyn PlayerGLContext>,
) -> Arc<Mutex<dyn Player>> {
Arc::new(Mutex::new(DummyPlayer))
Expand Down
19 changes: 11 additions & 8 deletions backends/gstreamer/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,9 @@ use servo_media_audio::context::{AudioContext, AudioContextOptions};
use servo_media_audio::decoder::AudioDecoder;
use servo_media_audio::sink::AudioSinkError;
use servo_media_audio::AudioBackend;
use servo_media_player::audio::AudioRenderer;
use servo_media_player::context::PlayerGLContext;
use servo_media_player::frame::FrameRenderer;
use servo_media_player::video::VideoFrameRenderer;
use servo_media_player::{Player, PlayerEvent, StreamType};
use servo_media_streams::capture::MediaTrackConstraintSet;
use servo_media_streams::registry::MediaStreamId;
Expand All @@ -78,7 +79,7 @@ lazy_static! {
pub struct GStreamerBackend {
capture_mocking: AtomicBool,
instances: Arc<Mutex<HashMap<ClientContextId, Vec<(usize, Weak<Mutex<dyn MediaInstance>>)>>>>,
next_muteable_id: AtomicUsize,
next_instance_id: AtomicUsize,
/// Channel to communicate media instances with its owner Backend.
backend_chan: Arc<Mutex<Sender<BackendMsg>>>,
}
Expand Down Expand Up @@ -120,9 +121,9 @@ impl GStreamerBackend {
.name("GStreamerBackend ShutdownThread".to_owned())
.spawn(move || {
match recvr.recv().unwrap() {
BackendMsg::Shutdown(context_id, muteable_id) => {
BackendMsg::Shutdown(context_id, instance_id) => {
if let Some(vec) = instances_.lock().unwrap().get_mut(&context_id) {
vec.retain(|m| m.0 != muteable_id);
vec.retain(|m| m.0 != instance_id);
if vec.is_empty() {
instances_.lock().unwrap().remove(&context_id);
}
Expand All @@ -135,7 +136,7 @@ impl GStreamerBackend {
Ok(Box::new(GStreamerBackend {
capture_mocking: AtomicBool::new(false),
instances,
next_muteable_id: AtomicUsize::new(0),
next_instance_id: AtomicUsize::new(0),
backend_chan: Arc::new(Mutex::new(backend_chan)),
}))
}
Expand Down Expand Up @@ -170,17 +171,19 @@ impl Backend for GStreamerBackend {
context_id: &ClientContextId,
stream_type: StreamType,
sender: IpcSender<PlayerEvent>,
renderer: Option<Arc<Mutex<dyn FrameRenderer>>>,
renderer: Option<Arc<Mutex<dyn VideoFrameRenderer>>>,
audio_renderer: Option<Arc<Mutex<dyn AudioRenderer>>>,
gl_context: Box<dyn PlayerGLContext>,
) -> Arc<Mutex<dyn Player>> {
let id = self.next_muteable_id.fetch_add(1, Ordering::Relaxed);
let id = self.next_instance_id.fetch_add(1, Ordering::Relaxed);
let player = Arc::new(Mutex::new(player::GStreamerPlayer::new(
id,
context_id,
self.backend_chan.clone(),
stream_type,
sender,
renderer,
audio_renderer,
gl_context,
)));
let mut instances = self.instances.lock().unwrap();
Expand All @@ -194,7 +197,7 @@ impl Backend for GStreamerBackend {
client_context_id: &ClientContextId,
options: AudioContextOptions,
) -> Arc<Mutex<AudioContext>> {
let id = self.next_muteable_id.fetch_add(1, Ordering::Relaxed);
let id = self.next_instance_id.fetch_add(1, Ordering::Relaxed);
let context = Arc::new(Mutex::new(AudioContext::new::<Self>(
id,
client_context_id,
Expand Down
Loading

0 comments on commit 220ed13

Please sign in to comment.