1use crate::decoder::{AudioDecoder, AudioDecoderCallbacks, AudioDecoderOptions};
2use crate::graph::{AudioGraph, InputPort, NodeId, OutputPort, PortId};
3use crate::node::{AudioNodeInit, AudioNodeMessage, ChannelInfo};
4use crate::render_thread::AudioRenderThread;
5use crate::render_thread::AudioRenderThreadMsg;
6use servo_media_traits::{BackendMsg, ClientContextId, MediaInstance};
7use std::cell::Cell;
8use std::sync::mpsc::{self, Sender};
9use std::sync::{Arc, Mutex};
10use std::thread::Builder;
11use crate::AudioBackend;
12
13use crate::sink::AudioSinkError;
14
/// Processing state of an [`AudioContext`].
///
/// Mirrors the Web Audio `AudioContextState` values. New contexts start in
/// `Suspended` (see `AudioContext::new`); the state-change methods generated
/// by `make_state_change!` move between the variants.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ProcessingState {
    /// The context is not processing audio. Initial state; also entered via
    /// `suspend()`.
    Suspended,
    /// The context is actively processing audio. Entered via `resume()`.
    Running,
    /// The context has been shut down. Entered via `close()`.
    Closed,
}
27
/// Outcome of a context state-change request — presumably the return type of
/// the methods generated by `make_state_change!` (macro defined elsewhere;
/// confirm against its definition).
pub type StateChangeResult = Result<(), ()>;
29
/// Latency hint for a real-time audio context, used as the `latency_hint`
/// field of [`RealTimeAudioContextOptions`].
///
/// NOTE(review): these appear to mirror the Web Audio
/// `AudioContextLatencyCategory` values — confirm how the render thread /
/// sink interprets them.
#[derive(Copy, Clone)]
pub enum LatencyCategory {
    /// Balance latency against power consumption.
    Balanced,
    /// Prioritize low latency (the default, see the `Default` impl of
    /// `RealTimeAudioContextOptions`).
    Interactive,
    /// Prioritize sustained playback over latency.
    Playback,
}
42
/// Construction options for a real-time (non-offline) audio context.
///
/// Real-time contexts always render in stereo: `AudioContext::new` fixes the
/// channel count at 2 for this variant.
#[derive(Copy, Clone)]
pub struct RealTimeAudioContextOptions {
    /// Sample rate in Hz (defaults to 44100).
    pub sample_rate: f32,
    /// Desired latency/power tradeoff (defaults to `Interactive`).
    pub latency_hint: LatencyCategory,
}
51
52impl Default for RealTimeAudioContextOptions {
53 fn default() -> Self {
54 Self {
55 sample_rate: 44100.,
56 latency_hint: LatencyCategory::Interactive,
57 }
58 }
59}
60
/// Construction options for an offline audio context.
#[derive(Copy, Clone)]
pub struct OfflineAudioContextOptions {
    /// Number of output channels (defaults to 1).
    pub channels: u8,
    /// Length of the rendering — presumably in sample frames; confirm
    /// against the render thread's handling of offline contexts.
    pub length: usize,
    /// Sample rate in Hz (defaults to 44100).
    pub sample_rate: f32,
}
71
72impl Default for OfflineAudioContextOptions {
73 fn default() -> Self {
74 Self {
75 channels: 1,
76 length: 0,
77 sample_rate: 44100.,
78 }
79 }
80}
81
82impl From<RealTimeAudioContextOptions> for AudioContextOptions {
83 fn from(options: RealTimeAudioContextOptions) -> Self {
84 AudioContextOptions::RealTimeAudioContext(options)
85 }
86}
87
88impl From<OfflineAudioContextOptions> for AudioContextOptions {
89 fn from(options: OfflineAudioContextOptions) -> Self {
90 AudioContextOptions::OfflineAudioContext(options)
91 }
92}
93
/// Options used to construct an [`AudioContext`], selecting between a
/// real-time and an offline context.
#[derive(Copy, Clone)]
pub enum AudioContextOptions {
    /// A context that renders live to an audio sink.
    RealTimeAudioContext(RealTimeAudioContextOptions),
    /// A context that renders offline into a buffer.
    OfflineAudioContext(OfflineAudioContextOptions),
}
100
101impl Default for AudioContextOptions {
102 fn default() -> Self {
103 AudioContextOptions::RealTimeAudioContext(Default::default())
104 }
105}
106
/// Client-side handle to an audio processing graph running on a dedicated
/// render thread. Communicates with the render thread via message passing
/// and notifies the owning backend on drop.
pub struct AudioContext {
    // Unique id of this instance, reported through `MediaInstance::get_id`
    // and in the backend `Shutdown` message.
    id: usize,
    // Id of the client context that owns this audio context; included in the
    // backend `Shutdown` message on drop.
    client_context_id: ClientContextId,
    // Channel to the backend, used to announce shutdown when dropped.
    backend_chan: Arc<Mutex<Sender<BackendMsg>>>,
    // Channel to the `AudioRenderThread` that owns the audio graph.
    sender: Sender<AudioRenderThreadMsg>,
    // Cached processing state; starts as `Suspended`.
    state: Cell<ProcessingState>,
    // Sample rate in Hz, also used to configure decoders.
    sample_rate: f32,
    // Id of the graph's destination node.
    dest_node: NodeId,
    // Id of the graph's listener node.
    listener: NodeId,
    // Factory producing backend-specific audio decoders; cloned into the
    // decoder thread by `decode_audio_data`.
    make_decoder: Arc<(dyn Fn() -> Box<dyn AudioDecoder> + Sync + Send)>,
}
127
128impl AudioContext {
129 pub fn new<B: AudioBackend>(
131 id: usize,
132 client_context_id: &ClientContextId,
133 backend_chan: Arc<Mutex<Sender<BackendMsg>>>,
134 options: AudioContextOptions,
135 ) -> Result<Self, AudioSinkError> {
136 let (sample_rate, channels) = match options {
137 AudioContextOptions::RealTimeAudioContext(ref options) => (options.sample_rate, 2),
138 AudioContextOptions::OfflineAudioContext(ref options) => {
139 (options.sample_rate, options.channels)
140 }
141 };
142
143 let (sender, receiver) = mpsc::channel();
144 let sender_ = sender.clone();
145 let graph = AudioGraph::new(channels);
146 let dest_node = graph.dest_id();
147 let listener = graph.listener_id();
148
149 let (init_sender, init_receiver) = mpsc::channel();
150 Builder::new()
151 .name("AudioRenderThread".to_owned())
152 .spawn(move || {
153 AudioRenderThread::start::<B>(
154 receiver,
155 sender_,
156 sample_rate,
157 graph,
158 options,
159 init_sender,
160 )
161 })
162 .expect("Failed to spawn AudioRenderThread");
163
164 let init_thread_result = init_receiver
165 .recv()
166 .expect("Failed to receive result from AudioRenderThread");
167
168 if let Err(e) = init_thread_result {
169 return Err(e);
170 }
171
172 Ok(Self {
173 id,
174 client_context_id: *client_context_id,
175 backend_chan,
176 sender,
177 state: Cell::new(ProcessingState::Suspended),
178 sample_rate,
179 dest_node,
180 listener,
181 make_decoder: Arc::new(|| B::make_decoder()),
182 })
183 }
184
185 pub fn state(&self) -> ProcessingState {
186 self.state.get()
187 }
188
189 pub fn dest_node(&self) -> NodeId {
190 self.dest_node
191 }
192
193 pub fn listener(&self) -> NodeId {
194 self.listener
195 }
196
197 pub fn current_time(&self) -> f64 {
198 let (tx, rx) = mpsc::channel();
199 let _ = self.sender.send(AudioRenderThreadMsg::GetCurrentTime(tx));
200 rx.recv().unwrap()
201 }
202
203 pub fn create_node(&self, node_type: AudioNodeInit, ch: ChannelInfo) -> NodeId {
204 let (tx, rx) = mpsc::channel();
205 let _ = self
206 .sender
207 .send(AudioRenderThreadMsg::CreateNode(node_type, tx, ch));
208 rx.recv().unwrap()
209 }
210
211 make_state_change!(resume, Running, Resume);
213
214 make_state_change!(suspend, Suspended, Suspend);
216
217 make_state_change!(close, Closed, Close);
219
220 pub fn message_node(&self, id: NodeId, msg: AudioNodeMessage) {
221 let _ = self.sender.send(AudioRenderThreadMsg::MessageNode(id, msg));
222 }
223
224 pub fn connect_ports(&self, from: PortId<OutputPort>, to: PortId<InputPort>) {
225 let _ = self
226 .sender
227 .send(AudioRenderThreadMsg::ConnectPorts(from, to));
228 }
229
230 pub fn disconnect_all_from(&self, node: NodeId) {
231 let _ = self
232 .sender
233 .send(AudioRenderThreadMsg::DisconnectAllFrom(node));
234 }
235
236 pub fn disconnect_output(&self, out: PortId<OutputPort>) {
240 let _ = self
241 .sender
242 .send(AudioRenderThreadMsg::DisconnectOutput(out));
243 }
244
245 pub fn disconnect_between(&self, from: NodeId, to: NodeId) {
249 let _ = self
250 .sender
251 .send(AudioRenderThreadMsg::DisconnectBetween(from, to));
252 }
253
254 pub fn disconnect_to(&self, from: NodeId, to: PortId<InputPort>) {
258 let _ = self
259 .sender
260 .send(AudioRenderThreadMsg::DisconnectTo(from, to));
261 }
262
263 pub fn disconnect_output_between(&self, out: PortId<OutputPort>, to: NodeId) {
267 let _ = self
268 .sender
269 .send(AudioRenderThreadMsg::DisconnectOutputBetween(out, to));
270 }
271
272 pub fn disconnect_output_between_to(&self, out: PortId<OutputPort>, inp: PortId<InputPort>) {
276 let _ = self
277 .sender
278 .send(AudioRenderThreadMsg::DisconnectOutputBetweenTo(out, inp));
279 }
280
281 pub fn decode_audio_data(&self, data: Vec<u8>, callbacks: AudioDecoderCallbacks) {
284 let mut options = AudioDecoderOptions::default();
285 options.sample_rate = self.sample_rate;
286 let make_decoder = self.make_decoder.clone();
287 Builder::new()
288 .name("AudioDecoder".to_owned())
289 .spawn(move || {
290 let audio_decoder = make_decoder();
291
292 audio_decoder.decode(data, callbacks, Some(options));
293 })
294 .unwrap();
295 }
296
297 pub fn set_eos_callback(
298 &self,
299 callback: Box<dyn Fn(Box<dyn AsRef<[f32]>>) + Send + Sync + 'static>,
300 ) {
301 let _ = self
302 .sender
303 .send(AudioRenderThreadMsg::SetSinkEosCallback(callback));
304 }
305
306 fn set_mute(&self, val: bool) {
307 let _ = self.sender.send(AudioRenderThreadMsg::SetMute(val));
308 }
309}
310
impl Drop for AudioContext {
    fn drop(&mut self) {
        // Ask the render thread to close. The receiver half of `tx` is
        // dropped immediately: we don't wait for close confirmation.
        let (tx, _) = mpsc::channel();
        let _ = self.sender.send(AudioRenderThreadMsg::Close(tx));

        // Tell the backend this instance is gone, then block until it
        // acknowledges so shutdown is fully processed before this context's
        // resources are released. Send/recv errors are ignored: if the
        // backend is already gone there is nothing left to clean up.
        let (tx_ack, rx_ack) = mpsc::channel();
        let _ = self
            .backend_chan
            .lock()
            .unwrap()
            .send(BackendMsg::Shutdown {
                context: self.client_context_id,
                id: self.id,
                tx_ack,
            });
        let _ = rx_ack.recv();
    }
}
330
331impl MediaInstance for AudioContext {
332 fn get_id(&self) -> usize {
333 self.id
334 }
335
336 fn mute(&self, val: bool) -> Result<(), ()> {
337 self.set_mute(val);
338 Ok(())
339 }
340
341 fn suspend(&self) -> Result<(), ()> {
342 let (tx, _) = mpsc::channel();
343 self.sender
344 .send(AudioRenderThreadMsg::Suspend(tx))
345 .map_err(|_| ())
346 }
347
348 fn resume(&self) -> Result<(), ()> {
349 let (tx, _) = mpsc::channel();
350 self.sender
351 .send(AudioRenderThreadMsg::Resume(tx))
352 .map_err(|_| ())
353 }
354}