use decoder::{AudioDecoder, AudioDecoderCallbacks, AudioDecoderOptions};
use graph::{AudioGraph, InputPort, NodeId, OutputPort, PortId};
use node::{AudioNodeInit, AudioNodeMessage, ChannelInfo};
use render_thread::AudioRenderThread;
use render_thread::AudioRenderThreadMsg;
use servo_media_traits::{BackendMsg, ClientContextId, MediaInstance};
use std::cell::Cell;
use std::sync::mpsc::{self, Sender};
use std::sync::{Arc, Mutex};
use std::thread::Builder;
use AudioBackend;
use crate::sink::AudioSinkError;
/// Lifecycle state of an `AudioContext`, mirroring the Web Audio API
/// `AudioContextState` values.
///
/// `Eq` is derived alongside `PartialEq` since equality over these
/// fieldless variants is total.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum ProcessingState {
    /// Not processing audio: the initial state, or after a suspend request.
    Suspended,
    /// Actively processing audio.
    Running,
    /// Permanently closed; no further processing is possible.
    Closed,
}
/// Result of a state-change request (`resume`/`suspend`/`close`, generated
/// by `make_state_change!`); carries no payload or error detail.
pub type StateChangeResult = Result<(), ()>;
/// Latency hint for a real-time audio context, mirroring the Web Audio API
/// `AudioContextLatencyCategory`.
///
/// `Debug`/`PartialEq`/`Eq` are derived so the hint can be logged and
/// compared like the other public option types.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum LatencyCategory {
    /// Balance audio latency against power consumption.
    Balanced,
    /// Lowest latency possible; used for interactive applications.
    Interactive,
    /// Favor uninterrupted playback over low latency.
    Playback,
}
/// Construction options for a real-time (non-offline) audio context.
#[derive(Copy, Clone)]
pub struct RealTimeAudioContextOptions {
    /// Output sample rate in Hz (defaults to 44100).
    pub sample_rate: f32,
    /// Desired latency/power trade-off for the audio sink.
    pub latency_hint: LatencyCategory,
}
impl Default for RealTimeAudioContextOptions {
fn default() -> Self {
Self {
sample_rate: 44100.,
latency_hint: LatencyCategory::Interactive,
}
}
}
/// Construction options for an offline audio context.
///
/// `Debug` and `PartialEq` are derived (all fields are plain std types) so
/// the options can be logged and compared; `Eq` is omitted because of the
/// `f32` field.
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct OfflineAudioContextOptions {
    /// Number of output channels (defaults to 1).
    pub channels: u8,
    /// Length of the rendered output (defaults to 0).
    /// NOTE(review): presumably measured in sample frames, matching the Web
    /// Audio `OfflineAudioContext.length` — confirm against the render thread.
    pub length: usize,
    /// Sample rate in Hz (defaults to 44100).
    pub sample_rate: f32,
}
impl Default for OfflineAudioContextOptions {
fn default() -> Self {
Self {
channels: 1,
length: 0,
sample_rate: 44100.,
}
}
}
impl From<RealTimeAudioContextOptions> for AudioContextOptions {
fn from(options: RealTimeAudioContextOptions) -> Self {
AudioContextOptions::RealTimeAudioContext(options)
}
}
impl From<OfflineAudioContextOptions> for AudioContextOptions {
fn from(options: OfflineAudioContextOptions) -> Self {
AudioContextOptions::OfflineAudioContext(options)
}
}
/// Options selecting the flavor of context to create: real-time (renders to
/// an audio sink) or offline (renders into a buffer).
#[derive(Copy, Clone)]
pub enum AudioContextOptions {
    RealTimeAudioContext(RealTimeAudioContextOptions),
    OfflineAudioContext(OfflineAudioContextOptions),
}
impl Default for AudioContextOptions {
fn default() -> Self {
AudioContextOptions::RealTimeAudioContext(Default::default())
}
}
/// Handle to an audio graph processed on a dedicated render thread.
/// Dropping the context asks the render thread to close and notifies the
/// backend (see the `Drop` impl).
pub struct AudioContext {
    // Identifier of this media instance, reported via `MediaInstance::get_id`.
    id: usize,
    // Identifier of the owning client context, sent with the shutdown message.
    client_context_id: ClientContextId,
    // Channel to the backend, used to signal shutdown on drop.
    backend_chan: Arc<Mutex<Sender<BackendMsg>>>,
    // Control channel to the `AudioRenderThread`.
    sender: Sender<AudioRenderThreadMsg>,
    // Last requested processing state (updated by the `make_state_change!`
    // generated methods).
    state: Cell<ProcessingState>,
    // Sample rate in Hz, chosen from the construction options; also used to
    // configure decoding in `decode_audio_data`.
    sample_rate: f32,
    // Id of the graph's destination node.
    dest_node: NodeId,
    // Id of the graph's listener node.
    listener: NodeId,
    // Factory producing a fresh decoder per `decode_audio_data` call.
    make_decoder: Arc<(dyn Fn() -> Box<dyn AudioDecoder> + Sync + Send)>,
}
impl AudioContext {
    /// Creates a new audio context, spawning the `AudioRenderThread` that
    /// will process the audio graph with backend `B`.
    ///
    /// # Errors
    /// Returns the `AudioSinkError` reported by the render thread if it
    /// fails to initialize its audio sink.
    ///
    /// # Panics
    /// Panics if the render thread cannot be spawned or dies before
    /// reporting its initialization result.
    pub fn new<B: AudioBackend>(
        id: usize,
        client_context_id: &ClientContextId,
        backend_chan: Arc<Mutex<Sender<BackendMsg>>>,
        options: AudioContextOptions,
    ) -> Result<Self, AudioSinkError> {
        // Real-time contexts always render to a stereo sink; offline
        // contexts honor the caller-provided channel count.
        let (sample_rate, channels) = match options {
            AudioContextOptions::RealTimeAudioContext(ref options) => (options.sample_rate, 2),
            AudioContextOptions::OfflineAudioContext(ref options) => {
                (options.sample_rate, options.channels)
            }
        };
        let (sender, receiver) = mpsc::channel();
        let sender_ = sender.clone();
        let graph = AudioGraph::new(channels);
        let dest_node = graph.dest_id();
        let listener = graph.listener_id();
        // One-shot channel over which the render thread reports whether its
        // sink was created successfully, before we return the context.
        let (init_sender, init_receiver) = mpsc::channel();
        Builder::new()
            .name("AudioRenderThread".to_owned())
            .spawn(move || {
                AudioRenderThread::start::<B>(
                    receiver,
                    sender_,
                    sample_rate,
                    graph,
                    options,
                    init_sender,
                )
            })
            .expect("Failed to spawn AudioRenderThread");
        // Propagate any sink-creation error with `?` instead of an explicit
        // `if let Err` / `return`.
        init_receiver
            .recv()
            .expect("Failed to receive result from AudioRenderThread")?;
        Ok(Self {
            id,
            client_context_id: *client_context_id,
            backend_chan,
            sender,
            state: Cell::new(ProcessingState::Suspended),
            sample_rate,
            dest_node,
            listener,
            make_decoder: Arc::new(|| B::make_decoder()),
        })
    }

    /// Returns the last requested processing state.
    pub fn state(&self) -> ProcessingState {
        self.state.get()
    }

    /// Returns the id of the graph's destination node.
    pub fn dest_node(&self) -> NodeId {
        self.dest_node
    }

    /// Returns the id of the graph's listener node.
    pub fn listener(&self) -> NodeId {
        self.listener
    }

    /// Queries the render thread for the current playback time.
    ///
    /// # Panics
    /// Panics if the render thread is gone and never answers.
    pub fn current_time(&self) -> f64 {
        let (tx, rx) = mpsc::channel();
        let _ = self.sender.send(AudioRenderThreadMsg::GetCurrentTime(tx));
        rx.recv().unwrap()
    }

    /// Asks the render thread to create a node of the given type and
    /// returns its id.
    ///
    /// # Panics
    /// Panics if the render thread is gone and never answers.
    pub fn create_node(&self, node_type: AudioNodeInit, ch: ChannelInfo) -> NodeId {
        let (tx, rx) = mpsc::channel();
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::CreateNode(node_type, tx, ch));
        rx.recv().unwrap()
    }

    // State-change methods generated by `make_state_change!`:
    // `resume()`, `suspend()` and `close()`.
    make_state_change!(resume, Running, Resume);
    make_state_change!(suspend, Suspended, Suspend);
    make_state_change!(close, Closed, Close);

    /// Forwards a message to a specific node on the render thread.
    pub fn message_node(&self, id: NodeId, msg: AudioNodeMessage) {
        let _ = self.sender.send(AudioRenderThreadMsg::MessageNode(id, msg));
    }

    /// Connects an output port to an input port in the graph.
    pub fn connect_ports(&self, from: PortId<OutputPort>, to: PortId<InputPort>) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::ConnectPorts(from, to));
    }

    /// Disconnects every outgoing edge of `node`.
    pub fn disconnect_all_from(&self, node: NodeId) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectAllFrom(node));
    }

    /// Disconnects everything attached to the given output port.
    pub fn disconnect_output(&self, out: PortId<OutputPort>) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectOutput(out));
    }

    /// Disconnects all edges between the two given nodes.
    pub fn disconnect_between(&self, from: NodeId, to: NodeId) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectBetween(from, to));
    }

    /// Disconnects all edges from `from` to the given input port.
    pub fn disconnect_to(&self, from: NodeId, to: PortId<InputPort>) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectTo(from, to));
    }

    /// Disconnects all edges from the given output port to node `to`.
    pub fn disconnect_output_between(&self, out: PortId<OutputPort>, to: NodeId) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectOutputBetween(out, to));
    }

    /// Disconnects the edge between a specific output and input port pair.
    pub fn disconnect_output_between_to(&self, out: PortId<OutputPort>, inp: PortId<InputPort>) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectOutputBetweenTo(out, inp));
    }

    /// Decodes `data` asynchronously on a dedicated decoder thread at this
    /// context's sample rate, reporting progress and results through
    /// `callbacks`.
    ///
    /// # Panics
    /// Panics if the decoder thread cannot be spawned.
    pub fn decode_audio_data(&self, data: Vec<u8>, callbacks: AudioDecoderCallbacks) {
        // Struct-update literal instead of default-then-reassign
        // (clippy::field_reassign_with_default).
        let options = AudioDecoderOptions {
            sample_rate: self.sample_rate,
            ..Default::default()
        };
        let make_decoder = self.make_decoder.clone();
        Builder::new()
            .name("AudioDecoder".to_owned())
            .spawn(move || {
                let audio_decoder = make_decoder();
                audio_decoder.decode(data, callbacks, Some(options));
            })
            .expect("Failed to spawn AudioDecoder");
    }

    /// Installs a callback invoked by the sink at end of stream.
    pub fn set_eos_callback(
        &self,
        callback: Box<dyn Fn(Box<dyn AsRef<[f32]>>) + Send + Sync + 'static>,
    ) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::SetSinkEosCallback(callback));
    }

    /// Asks the render thread to mute (`true`) or unmute (`false`) output.
    fn set_mute(&self, val: bool) {
        let _ = self.sender.send(AudioRenderThreadMsg::SetMute(val));
    }
}
impl Drop for AudioContext {
    /// Asks the render thread to close and unregisters this context from
    /// the backend.
    fn drop(&mut self) {
        // The ack receiver is dropped immediately; we only care that the
        // Close message is delivered.
        let (tx, _) = mpsc::channel();
        let _ = self.sender.send(AudioRenderThreadMsg::Close(tx));
        // Do not unwrap the lock here: panicking inside drop (e.g. on a
        // poisoned mutex while already unwinding) would abort the process.
        if let Ok(chan) = self.backend_chan.lock() {
            let _ = chan.send(BackendMsg::Shutdown(self.client_context_id, self.id));
        }
    }
}
impl MediaInstance for AudioContext {
    /// Returns the identifier of this media instance.
    fn get_id(&self) -> usize {
        self.id
    }

    /// Mutes or unmutes the audio output; delivery is fire-and-forget.
    fn mute(&self, val: bool) -> Result<(), ()> {
        self.set_mute(val);
        Ok(())
    }

    /// Asks the render thread to suspend processing. The ack receiver is
    /// dropped right away; only message delivery is reported.
    fn suspend(&self) -> Result<(), ()> {
        let (tx, _rx) = mpsc::channel();
        match self.sender.send(AudioRenderThreadMsg::Suspend(tx)) {
            Ok(()) => Ok(()),
            Err(_) => Err(()),
        }
    }

    /// Asks the render thread to resume processing. The ack receiver is
    /// dropped right away; only message delivery is reported.
    fn resume(&self) -> Result<(), ()> {
        let (tx, _rx) = mpsc::channel();
        match self.sender.send(AudioRenderThreadMsg::Resume(tx)) {
            Ok(()) => Ok(()),
            Err(_) => Err(()),
        }
    }
}