servo_media_audio/context.rs

/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/. */

use std::cell::Cell;
use std::sync::mpsc::{self, Sender};
use std::sync::{Arc, Mutex};
use std::thread::Builder;

use servo_media_traits::{BackendMsg, ClientContextId, MediaInstance, MediaInstanceError};

use crate::AudioBackend;
use crate::decoder::{AudioDecoder, AudioDecoderCallbacks, AudioDecoderOptions};
use crate::graph::{AudioGraph, InputPort, NodeId, OutputPort, PortId};
use crate::node::{AudioNodeInit, AudioNodeMessage, ChannelInfo};
use crate::render_thread::{AudioRenderThread, AudioRenderThreadMsg, SinkEosCallback};
use crate::sink::AudioSinkError;

/// Describes the state of the audio context on the control thread.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ProcessingState {
    /// The audio context is suspended (context time is not proceeding,
    /// audio hardware may be powered down/released).
    Suspended,
    /// Audio is being processed.
    Running,
    /// The audio context has been released, and can no longer be used
    /// to process audio.
    Closed,
}

pub type StateChangeResult = Option<()>;

/// Identify the type of playback, which affects tradeoffs between audio output
/// and power consumption.
#[derive(Copy, Clone)]
pub enum LatencyCategory {
    /// Balance audio output latency and power consumption.
    Balanced,
    /// Provide the lowest audio output latency possible without glitching.
    Interactive,
    /// Prioritize sustained playback without interruption over audio output latency.
    /// Lowest power consumption.
    Playback,
}

/// User-specified options for a real time audio context.
#[derive(Copy, Clone)]
pub struct RealTimeAudioContextOptions {
    /// Number of samples that will play in one second, measured in Hz.
    pub sample_rate: f32,
    /// Type of playback.
    pub latency_hint: LatencyCategory,
}

impl Default for RealTimeAudioContextOptions {
    fn default() -> Self {
        Self {
            sample_rate: 44100.,
            latency_hint: LatencyCategory::Interactive,
        }
    }
}

/// User-specified options for an offline audio context.
#[derive(Copy, Clone)]
pub struct OfflineAudioContextOptions {
    /// The number of channels for this offline audio context.
    pub channels: u8,
    /// The length of the rendered audio buffer in sample-frames.
    pub length: usize,
    /// Number of samples that will be rendered in one second, measured in Hz.
    pub sample_rate: f32,
}

impl Default for OfflineAudioContextOptions {
    fn default() -> Self {
        Self {
            channels: 1,
            length: 0,
            sample_rate: 44100.,
        }
    }
}

impl From<RealTimeAudioContextOptions> for AudioContextOptions {
    fn from(options: RealTimeAudioContextOptions) -> Self {
        AudioContextOptions::RealTimeAudioContext(options)
    }
}

impl From<OfflineAudioContextOptions> for AudioContextOptions {
    fn from(options: OfflineAudioContextOptions) -> Self {
        AudioContextOptions::OfflineAudioContext(options)
    }
}

/// User-specified options for a real time or offline audio context.
#[derive(Copy, Clone)]
pub enum AudioContextOptions {
    RealTimeAudioContext(RealTimeAudioContextOptions),
    OfflineAudioContext(OfflineAudioContextOptions),
}

impl Default for AudioContextOptions {
    fn default() -> Self {
        AudioContextOptions::RealTimeAudioContext(Default::default())
    }
}
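
// Illustrative sketch: building `AudioContextOptions` values from the option
// structs above via their `Default` and `From` impls. The concrete numbers are
// arbitrary example values, not requirements of this module.
//
//     // Real-time context with the default 44.1 kHz sample rate and
//     // `LatencyCategory::Interactive` latency hint.
//     let realtime: AudioContextOptions = RealTimeAudioContextOptions::default().into();
//
//     // Offline context rendering one second of stereo audio at 44.1 kHz.
//     let offline: AudioContextOptions = OfflineAudioContextOptions {
//         channels: 2,
//         length: 44100,
//         sample_rate: 44100.,
//     }
//     .into();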

/// Representation of an audio context on the control thread.
pub struct AudioContext {
    /// Media instance ID.
    id: usize,
    /// Client context ID.
    client_context_id: ClientContextId,
    /// Owner backend communication channel.
    backend_chan: Arc<Mutex<Sender<BackendMsg>>>,
    /// Rendering thread communication channel.
    sender: Sender<AudioRenderThreadMsg>,
    /// State of the audio context on the control thread.
    state: Cell<ProcessingState>,
    /// Number of samples that will be played in one second.
    sample_rate: f32,
    /// The identifier of an AudioDestinationNode with a single input
    /// representing the final destination for all audio.
    dest_node: NodeId,
    /// The identifier of the listener node of this context.
    listener: NodeId,
    /// Factory used to create backend-specific audio decoders.
    make_decoder: Arc<dyn Fn() -> Box<dyn AudioDecoder> + Sync + Send>,
}

/// Generic error type for audio context operations.
#[derive(Debug)]
pub struct AudioContextError;

impl std::fmt::Display for AudioContextError {
    fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(formatter, "AudioContextError")
    }
}

impl std::error::Error for AudioContextError {}

impl AudioContext {
    /// Constructs a new audio context, spawning its render thread.
    ///
    /// Returns an `AudioSinkError` if the render thread fails to initialize
    /// its audio sink.
    pub fn new<B: AudioBackend>(
        id: usize,
        client_context_id: &ClientContextId,
        backend_chan: Arc<Mutex<Sender<BackendMsg>>>,
        options: AudioContextOptions,
    ) -> Result<Self, AudioSinkError> {
        let (sample_rate, channels) = match options {
            AudioContextOptions::RealTimeAudioContext(ref options) => (options.sample_rate, 2),
            AudioContextOptions::OfflineAudioContext(ref options) => {
                (options.sample_rate, options.channels)
            },
        };

        let (sender, receiver) = mpsc::channel();
        let sender_ = sender.clone();
        let graph = AudioGraph::new(channels);
        let dest_node = graph.dest_id();
        let listener = graph.listener_id();

        let (init_sender, init_receiver) = mpsc::channel();
        Builder::new()
            .name("AudioRenderThread".to_owned())
            .spawn(move || {
                AudioRenderThread::start::<B>(
                    receiver,
                    sender_,
                    sample_rate,
                    graph,
                    options,
                    init_sender,
                )
            })
            .expect("Failed to spawn AudioRenderThread");

        init_receiver
            .recv()
            .expect("Failed to receive result from AudioRenderThread")?;
        Ok(Self {
            id,
            client_context_id: *client_context_id,
            backend_chan,
            sender,
            state: Cell::new(ProcessingState::Suspended),
            sample_rate,
            dest_node,
            listener,
            make_decoder: Arc::new(|| B::make_decoder()),
        })
    }
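
    // Illustrative sketch of constructing a context. `MyBackend`, `client_id`
    // and `backend_chan` are placeholders supplied by the embedder; any type
    // implementing `AudioBackend` can be used as `B`.
    //
    //     let context = AudioContext::new::<MyBackend>(
    //         0,
    //         &client_id,
    //         backend_chan.clone(),
    //         RealTimeAudioContextOptions::default().into(),
    //     )?;
    //     // The context starts out suspended; call `resume()` to begin processing.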

    /// Returns the state of the audio context on the control thread.
    pub fn state(&self) -> ProcessingState {
        self.state.get()
    }

    /// Returns the identifier of this context's destination node.
    pub fn dest_node(&self) -> NodeId {
        self.dest_node
    }

    /// Returns the identifier of this context's listener node.
    pub fn listener(&self) -> NodeId {
        self.listener
    }

    /// Queries the render thread for the current context time.
    pub fn current_time(&self) -> f64 {
        let (tx, rx) = mpsc::channel();
        let _ = self.sender.send(AudioRenderThreadMsg::GetCurrentTime(tx));
        rx.recv().unwrap()
    }

    /// Creates a new audio node in the render thread's graph and returns its
    /// identifier, or `None` if the render thread is no longer reachable.
    pub fn create_node(&self, node_type: AudioNodeInit, ch: ChannelInfo) -> Option<NodeId> {
        let (tx, rx) = mpsc::channel();
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::CreateNode(node_type, tx, ch));
        rx.recv().ok()
    }

    // Resume audio processing.
    make_state_change!(resume, Running, Resume);

    // Suspend audio processing.
    make_state_change!(suspend, Suspended, Suspend);

    // Stop audio processing and close the render thread.
    make_state_change!(close, Closed, Close);
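
    // Illustrative sketch: `make_state_change!` (defined elsewhere in this
    // crate) expands to `resume()`, `suspend()` and `close()` methods that
    // request the corresponding transition from the render thread and return
    // a `StateChangeResult`.
    //
    //     let _ = context.resume();  // ProcessingState::Running
    //     let _ = context.suspend(); // ProcessingState::Suspended
    //     let _ = context.close();   // ProcessingState::Closed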

    /// Sends a message to the given node on the render thread.
    pub fn message_node(&self, id: NodeId, msg: AudioNodeMessage) {
        let _ = self.sender.send(AudioRenderThreadMsg::MessageNode(id, msg));
    }

    /// Connects a node's output port to another node's input port.
    pub fn connect_ports(&self, from: PortId<OutputPort>, to: PortId<InputPort>) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::ConnectPorts(from, to));
    }
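
    // Illustrative sketch of building a small graph: create a node and wire it
    // to the destination. Assumes an `OscillatorNode` variant on
    // `AudioNodeInit`, `Default` impls for its options and `ChannelInfo`, and
    // the `output(..)`/`input(..)` port helpers from the graph module.
    //
    //     let osc = context
    //         .create_node(
    //             AudioNodeInit::OscillatorNode(Default::default()),
    //             Default::default(),
    //         )
    //         .expect("render thread is gone");
    //     context.connect_ports(osc.output(0), context.dest_node().input(0));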

    /// Disconnect all outgoing connections from a node
    ///
    /// <https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect>
    pub fn disconnect_all_from(&self, node: NodeId) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectAllFrom(node));
    }

    /// Disconnect all outgoing connections from a node's output
    ///
    /// <https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-output>
    pub fn disconnect_output(&self, out: PortId<OutputPort>) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectOutput(out));
    }

    /// Disconnect connections from a node to another node
    ///
    /// <https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationnode>
    pub fn disconnect_between(&self, from: NodeId, to: NodeId) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectBetween(from, to));
    }

    /// Disconnect connections from a node to another node's input
    ///
    /// <https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationparam>
    pub fn disconnect_to(&self, from: NodeId, to: PortId<InputPort>) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectTo(from, to));
    }

    /// Disconnect all outgoing connections from a node's output to another node
    ///
    /// <https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationnode-output>
    pub fn disconnect_output_between(&self, out: PortId<OutputPort>, to: NodeId) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectOutputBetween(out, to));
    }

    /// Disconnect all outgoing connections from a node's output to another node's input
    ///
    /// <https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationnode-output-input>
    pub fn disconnect_output_between_to(&self, out: PortId<OutputPort>, inp: PortId<InputPort>) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectOutputBetweenTo(out, inp));
    }
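
    // Illustrative sketch of the disconnect variants, from coarsest to finest
    // grained, reusing the hypothetical `osc` node and port helpers from the
    // sketch above.
    //
    //     context.disconnect_all_from(osc);         // drop every outgoing edge
    //     context.disconnect_output(osc.output(0)); // drop everything from one output
    //     context.disconnect_between(osc, context.dest_node());
    //     context.disconnect_output_between_to(osc.output(0), context.dest_node().input(0));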

    /// Asynchronously decodes the audio file data contained in the given
    /// buffer, reporting results and errors through `callbacks` from a
    /// dedicated decoder thread.
    pub fn decode_audio_data(&self, data: Vec<u8>, callbacks: AudioDecoderCallbacks) {
        let options = AudioDecoderOptions {
            sample_rate: self.sample_rate,
        };
        let make_decoder = self.make_decoder.clone();
        Builder::new()
            .name("AudioDecoder".to_owned())
            .spawn(move || {
                let audio_decoder = make_decoder();

                audio_decoder.decode(data, callbacks, Some(options));
            })
            .expect("Failed to spawn AudioDecoder");
    }
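
    // Illustrative sketch: decoding a byte buffer. `clip.ogg` is an arbitrary
    // example path, and the construction of `AudioDecoderCallbacks` (defined
    // in the decoder module) is elided here.
    //
    //     let bytes = std::fs::read("clip.ogg").expect("failed to read file");
    //     let callbacks: AudioDecoderCallbacks = /* built from the decoder module */;
    //     context.decode_audio_data(bytes, callbacks);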

    /// Sets a callback to be invoked when the audio sink reaches end of stream.
    pub fn set_eos_callback(&self, callback: SinkEosCallback) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::SetSinkEosCallback(callback));
    }

    /// Mutes or unmutes the audio output sink.
    fn set_mute(&self, val: bool) {
        let _ = self.sender.send(AudioRenderThreadMsg::SetMute(val));
    }
}

impl Drop for AudioContext {
    fn drop(&mut self) {
        // Tell the render thread to shut down; we do not wait for its reply.
        let (tx, _) = mpsc::channel();
        let _ = self.sender.send(AudioRenderThreadMsg::Close(tx));

        // Ask the backend to unregister this instance and wait for the ACK.
        let (tx_ack, rx_ack) = mpsc::channel();
        let _ = self
            .backend_chan
            .lock()
            .unwrap()
            .send(BackendMsg::Shutdown {
                context: self.client_context_id,
                id: self.id,
                tx_ack,
            });
        let _ = rx_ack.recv();
    }
}

impl MediaInstance for AudioContext {
    fn get_id(&self) -> usize {
        self.id
    }

    fn mute(&self, val: bool) -> Result<(), MediaInstanceError> {
        self.set_mute(val);
        Ok(())
    }

    fn suspend(&self) -> Result<(), MediaInstanceError> {
        let (tx, _) = mpsc::channel();
        self.sender
            .send(AudioRenderThreadMsg::Suspend(tx))
            .map_err(|_| MediaInstanceError)
    }

    fn resume(&self) -> Result<(), MediaInstanceError> {
        let (tx, _) = mpsc::channel();
        self.sender
            .send(AudioRenderThreadMsg::Resume(tx))
            .map_err(|_| MediaInstanceError)
    }
}