use std::cell::Cell;
use std::collections::hash_map::Entry;
use std::collections::{HashMap, VecDeque};
use std::rc::Rc;
use std::sync::{Arc, Mutex};

use base::id::PipelineId;
use dom_struct::dom_struct;
use js::rust::CustomAutoRooterGuard;
use js::typedarray::ArrayBuffer;
use servo_media::audio::context::{
    AudioContext, AudioContextOptions, OfflineAudioContextOptions, ProcessingState,
    RealTimeAudioContextOptions,
};
use servo_media::audio::decoder::AudioDecoderCallbacks;
use servo_media::audio::graph::NodeId;
use servo_media::{ClientContextId, ServoMedia};
use uuid::Uuid;

use crate::conversions::Convert;
use crate::dom::audio::analysernode::AnalyserNode;
use crate::dom::audio::audiobuffer::AudioBuffer;
use crate::dom::audio::audiobuffersourcenode::AudioBufferSourceNode;
use crate::dom::audio::audiodestinationnode::AudioDestinationNode;
use crate::dom::audio::audiolistener::AudioListener;
use crate::dom::audio::audionode::MAX_CHANNEL_COUNT;
use crate::dom::audio::biquadfilternode::BiquadFilterNode;
use crate::dom::audio::channelmergernode::ChannelMergerNode;
use crate::dom::audio::channelsplitternode::ChannelSplitterNode;
use crate::dom::audio::constantsourcenode::ConstantSourceNode;
use crate::dom::audio::gainnode::GainNode;
use crate::dom::audio::iirfilternode::IIRFilterNode;
use crate::dom::audio::oscillatornode::OscillatorNode;
use crate::dom::audio::pannernode::PannerNode;
use crate::dom::audio::stereopannernode::StereoPannerNode;
use crate::dom::bindings::callback::ExceptionHandling;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::AnalyserNodeBinding::AnalyserOptions;
use crate::dom::bindings::codegen::Bindings::AudioBufferSourceNodeBinding::AudioBufferSourceOptions;
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::{
    AudioNodeOptions, ChannelCountMode, ChannelInterpretation,
};
use crate::dom::bindings::codegen::Bindings::BaseAudioContextBinding::{
    AudioContextState, BaseAudioContextMethods, DecodeErrorCallback, DecodeSuccessCallback,
};
use crate::dom::bindings::codegen::Bindings::BiquadFilterNodeBinding::BiquadFilterOptions;
use crate::dom::bindings::codegen::Bindings::ChannelMergerNodeBinding::ChannelMergerOptions;
use crate::dom::bindings::codegen::Bindings::ChannelSplitterNodeBinding::ChannelSplitterOptions;
use crate::dom::bindings::codegen::Bindings::ConstantSourceNodeBinding::ConstantSourceOptions;
use crate::dom::bindings::codegen::Bindings::GainNodeBinding::GainOptions;
use crate::dom::bindings::codegen::Bindings::IIRFilterNodeBinding::IIRFilterOptions;
use crate::dom::bindings::codegen::Bindings::OscillatorNodeBinding::OscillatorOptions;
use crate::dom::bindings::codegen::Bindings::PannerNodeBinding::PannerOptions;
use crate::dom::bindings::codegen::Bindings::StereoPannerNodeBinding::StereoPannerOptions;
use crate::dom::bindings::error::{Error, ErrorResult, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::DomGlobal;
use crate::dom::bindings::root::{DomRoot, MutNullableDom};
use crate::dom::domexception::{DOMErrorName, DOMException};
use crate::dom::eventtarget::EventTarget;
use crate::dom::promise::Promise;
use crate::realms::InRealm;
use crate::script_runtime::CanGc;

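/// The options used to create a [`BaseAudioContext`]: either the real-time
/// options of an `AudioContext` or the options of an `OfflineAudioContext`.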
#[allow(dead_code)]
pub(crate) enum BaseAudioContextOptions {
    AudioContext(RealTimeAudioContextOptions),
    OfflineAudioContext(OfflineAudioContextOptions),
}

/// State needed to settle a pending `decodeAudioData` call: the promise to
/// resolve or reject plus the optional success and error callbacks.
#[derive(JSTraceable)]
struct DecodeResolver {
    pub(crate) promise: Rc<Promise>,
    pub(crate) success_callback: Option<Rc<DecodeSuccessCallback>>,
    pub(crate) error_callback: Option<Rc<DecodeErrorCallback>>,
}

type BoxedSliceOfPromises = Box<[Rc<Promise>]>;

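/// Shared implementation of the `BaseAudioContext` DOM interface, backed by a
/// `servo_media` [`AudioContext`].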
#[dom_struct]
pub(crate) struct BaseAudioContext {
    eventtarget: EventTarget,
    #[ignore_malloc_size_of = "servo_media"]
    #[no_trace]
    audio_context_impl: Arc<Mutex<AudioContext>>,
    destination: MutNullableDom<AudioDestinationNode>,
    listener: MutNullableDom<AudioListener>,
    /// Resume promises that have been moved into a queued task, together with
    /// the result they should be settled with.
    #[ignore_malloc_size_of = "promises are hard"]
    in_flight_resume_promises_queue: DomRefCell<VecDeque<(BoxedSliceOfPromises, ErrorResult)>>,
    /// Resume promises that are waiting to be moved into the in-flight queue.
    #[ignore_malloc_size_of = "promises are hard"]
    pending_resume_promises: DomRefCell<Vec<Rc<Promise>>>,
    /// Pending `decodeAudioData` resolvers, keyed by a per-call UUID.
    #[ignore_malloc_size_of = "promises are hard"]
    decode_resolvers: DomRefCell<HashMap<String, DecodeResolver>>,
    /// The sample rate of the audio graph, in Hz.
    sample_rate: f32,
    /// The current state, reflected by the `state` attribute.
    state: Cell<AudioContextState>,
    /// The number of channels of the destination node.
    channel_count: u32,
}

impl BaseAudioContext {
    #[cfg_attr(crown, allow(crown::unrooted_must_root))]
    pub(crate) fn new_inherited(
        options: BaseAudioContextOptions,
        pipeline_id: PipelineId,
    ) -> Fallible<BaseAudioContext> {
        let (sample_rate, channel_count) = match options {
            BaseAudioContextOptions::AudioContext(ref opt) => (opt.sample_rate, 2),
            BaseAudioContextOptions::OfflineAudioContext(ref opt) => {
                (opt.sample_rate, opt.channels)
            },
        };

        let client_context_id =
            ClientContextId::build(pipeline_id.namespace_id.0, pipeline_id.index.0.get());
        let audio_context_impl = ServoMedia::get()
            .create_audio_context(&client_context_id, options.convert())
            .map_err(|_| Error::NotSupported)?;

        Ok(BaseAudioContext {
            eventtarget: EventTarget::new_inherited(),
            audio_context_impl,
            destination: Default::default(),
            listener: Default::default(),
            in_flight_resume_promises_queue: Default::default(),
            pending_resume_promises: Default::default(),
            decode_resolvers: Default::default(),
            sample_rate,
            state: Cell::new(AudioContextState::Suspended),
            channel_count: channel_count.into(),
        })
    }

    /// Whether this context is an offline context; the base implementation
    /// always returns `false`.
    pub(crate) fn is_offline(&self) -> bool {
        false
    }

    pub(crate) fn audio_context_impl(&self) -> Arc<Mutex<AudioContext>> {
        self.audio_context_impl.clone()
    }

    pub(crate) fn destination_node(&self) -> NodeId {
        self.audio_context_impl.lock().unwrap().dest_node()
    }

    pub(crate) fn listener(&self) -> NodeId {
        self.audio_context_impl.lock().unwrap().listener()
    }

    /// A context is only allowed to start rendering if it is still suspended.
    pub(crate) fn is_allowed_to_start(&self) -> bool {
        self.state.get() == AudioContextState::Suspended
    }

    fn push_pending_resume_promise(&self, promise: &Rc<Promise>) {
        self.pending_resume_promises
            .borrow_mut()
            .push(promise.clone());
    }

    /// Takes the pending resume promises and pushes them, along with the
    /// result they should be settled with, onto the in-flight queue consumed
    /// by `fulfill_in_flight_resume_promises`.
    fn take_pending_resume_promises(&self, result: ErrorResult) {
        let pending_resume_promises =
            std::mem::take(&mut *self.pending_resume_promises.borrow_mut());
        self.in_flight_resume_promises_queue
            .borrow_mut()
            .push_back((pending_resume_promises.into(), result));
    }

    /// Pops the next batch of in-flight resume promises, runs `f`, and then
    /// settles each promise with the stored result.
    #[cfg_attr(crown, allow(crown::unrooted_must_root))]
    fn fulfill_in_flight_resume_promises<F>(&self, f: F)
    where
        F: FnOnce(),
    {
        let (promises, result) = self
            .in_flight_resume_promises_queue
            .borrow_mut()
            .pop_front()
            .expect("there should be at least one list of in flight resume promises");
        f();
        for promise in &*promises {
            match result {
                Ok(ref value) => promise.resolve_native(value, CanGc::note()),
                Err(ref error) => promise.reject_error(error.clone(), CanGc::note()),
            }
        }
    }

    /// Returns the processing state reported by the underlying media backend.
    pub(crate) fn control_thread_state(&self) -> ProcessingState {
        self.audio_context_impl.lock().unwrap().state()
    }

    pub(crate) fn set_state_attribute(&self, state: AudioContextState) {
        self.state.set(state);
    }

    /// Resumes the underlying context and queues tasks to settle the pending
    /// resume promises and fire `statechange` if the state actually changed.
    pub(crate) fn resume(&self) {
        let this = Trusted::new(self);
        match self.audio_context_impl.lock().unwrap().resume() {
            Ok(()) => {
                self.take_pending_resume_promises(Ok(()));
                self.global().task_manager().dom_manipulation_task_source().queue(
                    task!(resume_success: move || {
                        let this = this.root();
                        this.fulfill_in_flight_resume_promises(|| {
                            if this.state.get() != AudioContextState::Running {
                                this.state.set(AudioContextState::Running);
                                this.global().task_manager().dom_manipulation_task_source().queue_simple_event(
                                    this.upcast(),
                                    atom!("statechange"),
                                );
                            }
                        });
                    })
                );
            },
            Err(()) => {
                self.take_pending_resume_promises(Err(Error::Type(
                    "Failed to resume the audio context".to_owned(),
                )));
                self.global()
                    .task_manager()
                    .dom_manipulation_task_source()
                    .queue(task!(resume_error: move || {
                        this.root().fulfill_in_flight_resume_promises(|| {})
                    }));
            },
        }
    }

    pub(crate) fn channel_count(&self) -> u32 {
        self.channel_count
    }
}

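// Implementation of the `BaseAudioContextMethods` trait generated from the
// BaseAudioContext WebIDL interface.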
impl BaseAudioContextMethods<crate::DomTypeHolder> for BaseAudioContext {
    /// The sample rate, in Hz, at which the context renders audio.
    fn SampleRate(&self) -> Finite<f32> {
        Finite::wrap(self.sample_rate)
    }

    /// The time, in seconds, that the context has been rendering for.
    fn CurrentTime(&self) -> Finite<f64> {
        let current_time = self.audio_context_impl.lock().unwrap().current_time();
        Finite::wrap(current_time)
    }

    /// The current state of the context.
    fn State(&self) -> AudioContextState {
        self.state.get()
    }

    /// Resumes a previously suspended context, returning a promise that
    /// settles once the underlying backend has resumed rendering.
    fn Resume(&self, comp: InRealm, can_gc: CanGc) -> Rc<Promise> {
        let promise = Promise::new_in_current_realm(comp, can_gc);

        // A closed context cannot be resumed.
        if self.audio_context_impl.lock().unwrap().state() == ProcessingState::Closed {
            promise.reject_error(Error::InvalidState, can_gc);
            return promise;
        }

        // Resolve immediately if the context is already running.
        if self.state.get() == AudioContextState::Running {
            promise.resolve_native(&(), can_gc);
            return promise;
        }

        self.push_pending_resume_promise(&promise);

        if !self.is_allowed_to_start() {
            return promise;
        }

        self.resume();

        promise
    }

    /// The `AudioDestinationNode` representing the final destination of the
    /// audio graph.
    fn Destination(&self, can_gc: CanGc) -> DomRoot<AudioDestinationNode> {
        let global = self.global();
        self.destination.or_init(|| {
            let mut options = AudioNodeOptions::empty();
            options.channelCount = Some(self.channel_count);
            options.channelCountMode = Some(ChannelCountMode::Explicit);
            options.channelInterpretation = Some(ChannelInterpretation::Speakers);
            AudioDestinationNode::new(&global, self, &options, can_gc)
        })
    }

    /// The `AudioListener` used for 3D spatialization.
    fn Listener(&self, can_gc: CanGc) -> DomRoot<AudioListener> {
        let global = self.global();
        let window = global.as_window();
        self.listener
            .or_init(|| AudioListener::new(window, self, can_gc))
    }

    // The onstatechange event handler.
    event_handler!(statechange, GetOnstatechange, SetOnstatechange);

    /// Creates an `OscillatorNode` with default options.
    fn CreateOscillator(&self, can_gc: CanGc) -> Fallible<DomRoot<OscillatorNode>> {
        OscillatorNode::new(
            self.global().as_window(),
            self,
            &OscillatorOptions::empty(),
            can_gc,
        )
    }

    /// Creates a `GainNode` with default options.
    fn CreateGain(&self, can_gc: CanGc) -> Fallible<DomRoot<GainNode>> {
        GainNode::new(
            self.global().as_window(),
            self,
            &GainOptions::empty(),
            can_gc,
        )
    }

    /// Creates a `PannerNode` with default options.
    fn CreatePanner(&self, can_gc: CanGc) -> Fallible<DomRoot<PannerNode>> {
        PannerNode::new(
            self.global().as_window(),
            self,
            &PannerOptions::empty(),
            can_gc,
        )
    }

    /// Creates an `AnalyserNode` with default options.
    fn CreateAnalyser(&self, can_gc: CanGc) -> Fallible<DomRoot<AnalyserNode>> {
        AnalyserNode::new(
            self.global().as_window(),
            self,
            &AnalyserOptions::empty(),
            can_gc,
        )
    }

    /// Creates a `BiquadFilterNode` with default options.
    fn CreateBiquadFilter(&self, can_gc: CanGc) -> Fallible<DomRoot<BiquadFilterNode>> {
        BiquadFilterNode::new(
            self.global().as_window(),
            self,
            &BiquadFilterOptions::empty(),
            can_gc,
        )
    }

    /// Creates a `StereoPannerNode` with default options.
    fn CreateStereoPanner(&self, can_gc: CanGc) -> Fallible<DomRoot<StereoPannerNode>> {
        StereoPannerNode::new(
            self.global().as_window(),
            self,
            &StereoPannerOptions::empty(),
            can_gc,
        )
    }

    /// Creates a `ConstantSourceNode` with default options.
    fn CreateConstantSource(&self, can_gc: CanGc) -> Fallible<DomRoot<ConstantSourceNode>> {
        ConstantSourceNode::new(
            self.global().as_window(),
            self,
            &ConstantSourceOptions::empty(),
            can_gc,
        )
    }

    /// Creates a `ChannelMergerNode` with the given number of inputs.
    fn CreateChannelMerger(
        &self,
        count: u32,
        can_gc: CanGc,
    ) -> Fallible<DomRoot<ChannelMergerNode>> {
        let mut opts = ChannelMergerOptions::empty();
        opts.numberOfInputs = count;
        ChannelMergerNode::new(self.global().as_window(), self, &opts, can_gc)
    }

    /// Creates a `ChannelSplitterNode` with the given number of outputs.
    fn CreateChannelSplitter(
        &self,
        count: u32,
        can_gc: CanGc,
    ) -> Fallible<DomRoot<ChannelSplitterNode>> {
        let mut opts = ChannelSplitterOptions::empty();
        opts.numberOfOutputs = count;
        ChannelSplitterNode::new(self.global().as_window(), self, &opts, can_gc)
    }

    /// Creates an `AudioBuffer` of the given shape, rejecting unsupported
    /// channel counts, lengths and sample rates.
    fn CreateBuffer(
        &self,
        number_of_channels: u32,
        length: u32,
        sample_rate: Finite<f32>,
        can_gc: CanGc,
    ) -> Fallible<DomRoot<AudioBuffer>> {
        if number_of_channels == 0 ||
            number_of_channels > MAX_CHANNEL_COUNT ||
            length == 0 ||
            *sample_rate <= 0.
        {
            return Err(Error::NotSupported);
        }
        Ok(AudioBuffer::new(
            self.global().as_window(),
            number_of_channels,
            length,
            *sample_rate,
            None,
            can_gc,
        ))
    }

    /// Creates an `AudioBufferSourceNode` with default options.
    fn CreateBufferSource(&self, can_gc: CanGc) -> Fallible<DomRoot<AudioBufferSourceNode>> {
        AudioBufferSourceNode::new(
            self.global().as_window(),
            self,
            &AudioBufferSourceOptions::empty(),
            can_gc,
        )
    }

    /// Asynchronously decodes `audio_data` into an `AudioBuffer`, settling the
    /// returned promise and invoking the optional callbacks when done.
    fn DecodeAudioData(
        &self,
        audio_data: CustomAutoRooterGuard<ArrayBuffer>,
        decode_success_callback: Option<Rc<DecodeSuccessCallback>>,
        decode_error_callback: Option<Rc<DecodeErrorCallback>>,
        comp: InRealm,
        can_gc: CanGc,
    ) -> Rc<Promise> {
        let promise = Promise::new_in_current_realm(comp, can_gc);

        if audio_data.len() > 0 {
            // Register a resolver for this decode operation under a fresh UUID.
            let uuid = Uuid::new_v4().simple().to_string();
            let uuid_ = uuid.clone();
            self.decode_resolvers.borrow_mut().insert(
                uuid.clone(),
                DecodeResolver {
                    promise: promise.clone(),
                    success_callback: decode_success_callback,
                    error_callback: decode_error_callback,
                },
            );
            let audio_data = audio_data.to_vec();
            let decoded_audio = Arc::new(Mutex::new(Vec::new()));
            let decoded_audio_ = decoded_audio.clone();
            let decoded_audio__ = decoded_audio.clone();
            // Maps channel position bitmasks to indices in `decoded_audio`.
            let channels = Arc::new(Mutex::new(HashMap::new()));
            let this = Trusted::new(self);
            let this_ = this.clone();
            let task_source = self
                .global()
                .task_manager()
                .dom_manipulation_task_source()
                .to_sendable();
            let task_source_clone = task_source.clone();
            let callbacks = AudioDecoderCallbacks::new()
                .ready(move |channel_count| {
                    decoded_audio
                        .lock()
                        .unwrap()
                        .resize(channel_count as usize, Vec::new());
                })
                .progress(move |buffer, channel_pos_mask| {
                    let mut decoded_audio = decoded_audio_.lock().unwrap();
                    let mut channels = channels.lock().unwrap();
                    let channel = match channels.entry(channel_pos_mask) {
                        Entry::Occupied(entry) => *entry.get(),
                        Entry::Vacant(entry) => {
                            let x = (channel_pos_mask as f32).log2() as usize;
                            *entry.insert(x)
                        },
                    };
                    decoded_audio[channel].extend_from_slice((*buffer).as_ref());
                })
                .eos(move || {
                    task_source.queue(task!(audio_decode_eos: move || {
                        let this = this.root();
                        let decoded_audio = decoded_audio__.lock().unwrap();
                        let length = if !decoded_audio.is_empty() {
                            decoded_audio[0].len()
                        } else {
                            0
                        };
                        let buffer = AudioBuffer::new(
                            this.global().as_window(),
                            decoded_audio.len() as u32,
                            length as u32,
                            this.sample_rate,
                            Some(decoded_audio.as_slice()),
                            CanGc::note());
                        let mut resolvers = this.decode_resolvers.borrow_mut();
                        assert!(resolvers.contains_key(&uuid_));
                        let resolver = resolvers.remove(&uuid_).unwrap();
                        if let Some(callback) = resolver.success_callback {
                            let _ = callback.Call__(&buffer, ExceptionHandling::Report, CanGc::note());
                        }
                        resolver.promise.resolve_native(&buffer, CanGc::note());
                    }));
                })
                .error(move |error| {
                    task_source_clone.queue(task!(audio_decode_error: move || {
                        let this = this_.root();
                        let mut resolvers = this.decode_resolvers.borrow_mut();
                        assert!(resolvers.contains_key(&uuid));
                        let resolver = resolvers.remove(&uuid).unwrap();
                        if let Some(callback) = resolver.error_callback {
                            let _ = callback.Call__(
                                &DOMException::new(&this.global(), DOMErrorName::DataCloneError, CanGc::note()),
                                ExceptionHandling::Report, CanGc::note());
                        }
                        let error = format!("Audio decode error {:?}", error);
                        resolver.promise.reject_error(Error::Type(error), CanGc::note());
                    }));
                })
                .build();
            self.audio_context_impl
                .lock()
                .unwrap()
                .decode_audio_data(audio_data, callbacks);
        } else {
            // An empty buffer cannot be decoded.
            promise.reject_error(Error::DataClone(None), can_gc);
            return promise;
        }

        promise
    }

    /// Creates an `IIRFilterNode` from the given feedforward and feedback
    /// coefficients.
    fn CreateIIRFilter(
        &self,
        feedforward: Vec<Finite<f64>>,
        feedback: Vec<Finite<f64>>,
        can_gc: CanGc,
    ) -> Fallible<DomRoot<IIRFilterNode>> {
        let opts = IIRFilterOptions {
            parent: AudioNodeOptions::empty(),
            feedback,
            feedforward,
        };
        IIRFilterNode::new(self.global().as_window(), self, &opts, can_gc)
    }
}

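// Conversions between the DOM-facing option and state types and their
// servo_media counterparts.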
impl Convert<AudioContextOptions> for BaseAudioContextOptions {
    fn convert(self) -> AudioContextOptions {
        match self {
            BaseAudioContextOptions::AudioContext(options) => {
                AudioContextOptions::RealTimeAudioContext(options)
            },
            BaseAudioContextOptions::OfflineAudioContext(options) => {
                AudioContextOptions::OfflineAudioContext(options)
            },
        }
    }
}

impl Convert<AudioContextState> for ProcessingState {
    fn convert(self) -> AudioContextState {
        match self {
            ProcessingState::Suspended => AudioContextState::Suspended,
            ProcessingState::Running => AudioContextState::Running,
            ProcessingState::Closed => AudioContextState::Closed,
        }
    }
}