use std::cell::Cell;
use std::sync::mpsc::{self, Sender};
use std::sync::{Arc, Mutex};
use std::thread::Builder;

use servo_media_traits::{BackendMsg, ClientContextId, MediaInstance, MediaInstanceError};

use crate::AudioBackend;
use crate::decoder::{AudioDecoder, AudioDecoderCallbacks, AudioDecoderOptions};
use crate::graph::{AudioGraph, InputPort, NodeId, OutputPort, PortId};
use crate::node::{AudioNodeInit, AudioNodeMessage, ChannelInfo};
use crate::render_thread::{AudioRenderThread, AudioRenderThreadMsg, SinkEosCallback};
use crate::sink::AudioSinkError;

/// The processing state of an `AudioContext`, mirroring the Web Audio API's
/// `AudioContextState` values.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ProcessingState {
    /// The context is paused: time does not advance and no audio is processed.
    Suspended,
    /// The context is actively processing audio.
    Running,
    /// The context has been shut down and can no longer process audio.
    Closed,
}

/// Result type of the state change methods (`resume`, `suspend` and `close`)
/// generated by `make_state_change!` below.
pub type StateChangeResult = Option<()>;

/// Latency/stability trade-off requested for a real-time context, mirroring
/// the Web Audio API's `AudioContextLatencyCategory`.
#[derive(Copy, Clone)]
pub enum LatencyCategory {
    /// Balance output latency against power consumption.
    Balanced,
    /// Lowest output latency possible without glitching
    /// (the default for `RealTimeAudioContextOptions`).
    Interactive,
    /// Prioritize uninterrupted, sustained playback over low latency.
    Playback,
}

/// Options for constructing a real-time (audio hardware backed) context.
#[derive(Copy, Clone)]
pub struct RealTimeAudioContextOptions {
    /// Sample rate to run the context at, in Hz. Defaults to 44100.
    pub sample_rate: f32,
    /// Preferred latency/stability trade-off for the audio output.
    pub latency_hint: LatencyCategory,
}

impl Default for RealTimeAudioContextOptions {
    fn default() -> Self {
        Self {
            sample_rate: 44100.,
            latency_hint: LatencyCategory::Interactive,
        }
    }
}

/// Options for constructing an offline (rendered in memory) context.
#[derive(Copy, Clone)]
pub struct OfflineAudioContextOptions {
    /// Number of output channels to render.
    pub channels: u8,
    /// Length of the rendering, in sample frames.
    pub length: usize,
    /// Sample rate to render at, in Hz.
    pub sample_rate: f32,
}

impl Default for OfflineAudioContextOptions {
    fn default() -> Self {
        Self {
            channels: 1,
            length: 0,
            sample_rate: 44100.,
        }
    }
}

impl From<RealTimeAudioContextOptions> for AudioContextOptions {
    fn from(options: RealTimeAudioContextOptions) -> Self {
        AudioContextOptions::RealTimeAudioContext(options)
    }
}

impl From<OfflineAudioContextOptions> for AudioContextOptions {
    fn from(options: OfflineAudioContextOptions) -> Self {
        AudioContextOptions::OfflineAudioContext(options)
    }
}

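/// User-supplied options for creating an [`AudioContext`], covering both the
/// real-time and offline cases.
///
/// A minimal construction sketch (not compiled as a doctest); the `From`
/// impls above allow passing either options struct via `.into()`:
///
/// ```ignore
/// let options: AudioContextOptions = RealTimeAudioContextOptions {
///     sample_rate: 48000.,
///     latency_hint: LatencyCategory::Playback,
/// }
/// .into();
/// ```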
#[derive(Copy, Clone)]
pub enum AudioContextOptions {
    RealTimeAudioContext(RealTimeAudioContextOptions),
    OfflineAudioContext(OfflineAudioContextOptions),
}

impl Default for AudioContextOptions {
    fn default() -> Self {
        AudioContextOptions::RealTimeAudioContext(Default::default())
    }
}

/// An audio rendering context backed by a dedicated render thread.
pub struct AudioContext {
    /// Media instance id, unique within the owning client context.
    id: usize,
    /// Id of the client context that owns this media instance.
    client_context_id: ClientContextId,
    /// Channel used to notify the backend about lifecycle events such as shutdown.
    backend_chan: Arc<Mutex<Sender<BackendMsg>>>,
    /// Channel for sending control messages to the render thread.
    sender: Sender<AudioRenderThreadMsg>,
    /// Current processing state of this context.
    state: Cell<ProcessingState>,
    /// Sample rate the context runs at, in Hz.
    sample_rate: f32,
    /// Id of the destination node of the audio graph.
    dest_node: NodeId,
    /// Id of the listener node of the audio graph.
    listener: NodeId,
    /// Factory for audio decoders, provided by the backend.
    make_decoder: Arc<dyn Fn() -> Box<dyn AudioDecoder> + Sync + Send>,
}

#[derive(Debug)]
pub struct AudioContextError;

impl std::fmt::Display for AudioContextError {
    fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(formatter, "AudioContextError")
    }
}

impl std::error::Error for AudioContextError {}

impl AudioContext {
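    /// Creates an `AudioContext` and spawns its dedicated render thread,
    /// returning an error if the audio sink could not be set up.
    ///
    /// A minimal usage sketch (not compiled as a doctest; `Backend`,
    /// `client_id` and `backend_chan` are placeholders for a concrete
    /// `AudioBackend` implementation and the values supplied by the backend):
    ///
    /// ```ignore
    /// let context = AudioContext::new::<Backend>(
    ///     0,
    ///     &client_id,
    ///     backend_chan,
    ///     RealTimeAudioContextOptions::default().into(),
    /// )?;
    /// assert_eq!(context.state(), ProcessingState::Suspended);
    /// ```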
    pub fn new<B: AudioBackend>(
        id: usize,
        client_context_id: &ClientContextId,
        backend_chan: Arc<Mutex<Sender<BackendMsg>>>,
        options: AudioContextOptions,
    ) -> Result<Self, AudioSinkError> {
        let (sample_rate, channels) = match options {
            AudioContextOptions::RealTimeAudioContext(ref options) => (options.sample_rate, 2),
            AudioContextOptions::OfflineAudioContext(ref options) => {
                (options.sample_rate, options.channels)
            },
        };

        let (sender, receiver) = mpsc::channel();
        let sender_ = sender.clone();
        let graph = AudioGraph::new(channels);
        let dest_node = graph.dest_id();
        let listener = graph.listener_id();

        // The render thread does the actual audio processing; it takes
        // ownership of the graph and reports back through `init_sender` once
        // the sink has been set up.
        let (init_sender, init_receiver) = mpsc::channel();
        Builder::new()
            .name("AudioRenderThread".to_owned())
            .spawn(move || {
                AudioRenderThread::start::<B>(
                    receiver,
                    sender_,
                    sample_rate,
                    graph,
                    options,
                    init_sender,
                )
            })
            .expect("Failed to spawn AudioRenderThread");

        // Block until the render thread reports whether sink creation succeeded.
        init_receiver
            .recv()
            .expect("Failed to receive result from AudioRenderThread")?;
        Ok(Self {
            id,
            client_context_id: *client_context_id,
            backend_chan,
            sender,
            state: Cell::new(ProcessingState::Suspended),
            sample_rate,
            dest_node,
            listener,
            make_decoder: Arc::new(|| B::make_decoder()),
        })
    }

    pub fn state(&self) -> ProcessingState {
        self.state.get()
    }

    pub fn dest_node(&self) -> NodeId {
        self.dest_node
    }

    pub fn listener(&self) -> NodeId {
        self.listener
    }

    /// Returns the current playback time of the context, in seconds, as
    /// reported by the render thread.
    pub fn current_time(&self) -> f64 {
        let (tx, rx) = mpsc::channel();
        let _ = self.sender.send(AudioRenderThreadMsg::GetCurrentTime(tx));
        rx.recv().unwrap()
    }

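    /// Asks the render thread to create a new node, returning its id, or
    /// `None` if the render thread is gone.
    ///
    /// A sketch of building a small graph (not compiled as a doctest; it
    /// assumes a `GainNode` variant of `AudioNodeInit` with default options
    /// and the `output(_)`/`input(_)` port helpers on `NodeId` from the graph
    /// module):
    ///
    /// ```ignore
    /// let gain = context
    ///     .create_node(
    ///         AudioNodeInit::GainNode(Default::default()),
    ///         ChannelInfo::default(),
    ///     )
    ///     .unwrap();
    /// context.connect_ports(gain.output(0), context.dest_node().input(0));
    /// ```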
    pub fn create_node(&self, node_type: AudioNodeInit, ch: ChannelInfo) -> Option<NodeId> {
        let (tx, rx) = mpsc::channel();
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::CreateNode(node_type, tx, ch));
        rx.recv().ok()
    }

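    // `make_state_change!` is a helper macro defined elsewhere in this crate;
    // each invocation below is expected to expand to a public method (e.g.
    // `resume()`) that updates `self.state` and forwards the matching
    // `AudioRenderThreadMsg` to the render thread, returning a
    // `StateChangeResult`.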
    make_state_change!(resume, Running, Resume);

    make_state_change!(suspend, Suspended, Suspend);

    make_state_change!(close, Closed, Close);

    pub fn message_node(&self, id: NodeId, msg: AudioNodeMessage) {
        let _ = self.sender.send(AudioRenderThreadMsg::MessageNode(id, msg));
    }

    pub fn connect_ports(&self, from: PortId<OutputPort>, to: PortId<InputPort>) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::ConnectPorts(from, to));
    }

    /// Disconnects all outgoing connections from `node`.
    pub fn disconnect_all_from(&self, node: NodeId) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectAllFrom(node));
    }

    /// Disconnects all outgoing connections from the given output port.
    pub fn disconnect_output(&self, out: PortId<OutputPort>) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectOutput(out));
    }

    /// Disconnects all connections from node `from` to node `to`.
    pub fn disconnect_between(&self, from: NodeId, to: NodeId) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectBetween(from, to));
    }

    /// Disconnects all connections from node `from` to the given input port.
    pub fn disconnect_to(&self, from: NodeId, to: PortId<InputPort>) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectTo(from, to));
    }

    /// Disconnects all connections from the given output port to node `to`.
    pub fn disconnect_output_between(&self, out: PortId<OutputPort>, to: NodeId) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectOutputBetween(out, to));
    }

    /// Disconnects the connection from the given output port to the given
    /// input port.
    pub fn disconnect_output_between_to(&self, out: PortId<OutputPort>, inp: PortId<InputPort>) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::DisconnectOutputBetweenTo(out, inp));
    }

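    /// Asynchronously decodes the given encoded audio data on a dedicated
    /// thread, reporting progress and completion through `callbacks`.
    ///
    /// A usage sketch (not compiled as a doctest; it assumes the builder-style
    /// API exposed by `AudioDecoderCallbacks` in the decoder module):
    ///
    /// ```ignore
    /// let callbacks = AudioDecoderCallbacks::new()
    ///     .eos(|| println!("decoding finished"))
    ///     .error(|e| eprintln!("decoding failed: {:?}", e))
    ///     .build();
    /// context.decode_audio_data(encoded_bytes, callbacks);
    /// ```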
    pub fn decode_audio_data(&self, data: Vec<u8>, callbacks: AudioDecoderCallbacks) {
        let options = AudioDecoderOptions {
            sample_rate: self.sample_rate,
        };
        let make_decoder = self.make_decoder.clone();
        // Decoding can be slow, so it runs on its own thread and reports back
        // through `callbacks`.
        Builder::new()
            .name("AudioDecoder".to_owned())
            .spawn(move || {
                let audio_decoder = make_decoder();

                audio_decoder.decode(data, callbacks, Some(options));
            })
            .unwrap();
    }

    pub fn set_eos_callback(&self, callback: SinkEosCallback) {
        let _ = self
            .sender
            .send(AudioRenderThreadMsg::SetSinkEosCallback(callback));
    }

    fn set_mute(&self, val: bool) {
        let _ = self.sender.send(AudioRenderThreadMsg::SetMute(val));
    }
}

impl Drop for AudioContext {
    fn drop(&mut self) {
        let (tx, _) = mpsc::channel();
        let _ = self.sender.send(AudioRenderThreadMsg::Close(tx));

        // Unregister this instance from the backend and wait for it to
        // acknowledge the shutdown.
        let (tx_ack, rx_ack) = mpsc::channel();
        let _ = self
            .backend_chan
            .lock()
            .unwrap()
            .send(BackendMsg::Shutdown {
                context: self.client_context_id,
                id: self.id,
                tx_ack,
            });
        let _ = rx_ack.recv();
    }
}

impl MediaInstance for AudioContext {
    fn get_id(&self) -> usize {
        self.id
    }

    fn mute(&self, val: bool) -> Result<(), MediaInstanceError> {
        self.set_mute(val);
        Ok(())
    }

    fn suspend(&self) -> Result<(), MediaInstanceError> {
        let (tx, _) = mpsc::channel();
        self.sender
            .send(AudioRenderThreadMsg::Suspend(tx))
            .map_err(|_| MediaInstanceError)
    }

    fn resume(&self) -> Result<(), MediaInstanceError> {
        let (tx, _) = mpsc::channel();
        self.sender
            .send(AudioRenderThreadMsg::Resume(tx))
            .map_err(|_| MediaInstanceError)
    }
}