use std::rc::Rc;

use base::id::PipelineId;
use dom_struct::dom_struct;
use js::rust::HandleObject;
use servo_media::audio::context::{LatencyCategory, ProcessingState, RealTimeAudioContextOptions};

use crate::conversions::Convert;
use crate::dom::audio::baseaudiocontext::{BaseAudioContext, BaseAudioContextOptions};
use crate::dom::audio::mediaelementaudiosourcenode::MediaElementAudioSourceNode;
use crate::dom::audio::mediastreamaudiodestinationnode::MediaStreamAudioDestinationNode;
use crate::dom::audio::mediastreamaudiosourcenode::MediaStreamAudioSourceNode;
use crate::dom::audio::mediastreamtrackaudiosourcenode::MediaStreamTrackAudioSourceNode;
use crate::dom::bindings::codegen::Bindings::AudioContextBinding::{
    AudioContextLatencyCategory, AudioContextMethods, AudioContextOptions, AudioTimestamp,
};
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::AudioNodeOptions;
use crate::dom::bindings::codegen::Bindings::BaseAudioContextBinding::AudioContextState;
use crate::dom::bindings::codegen::Bindings::BaseAudioContextBinding::BaseAudioContext_Binding::BaseAudioContextMethods;
use crate::dom::bindings::codegen::UnionTypes::AudioContextLatencyCategoryOrDouble;
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::refcounted::{Trusted, TrustedPromise};
use crate::dom::bindings::reflector::{DomGlobal, reflect_dom_object_with_proto};
use crate::dom::bindings::root::DomRoot;
use crate::dom::html::htmlmediaelement::HTMLMediaElement;
use crate::dom::mediastream::MediaStream;
use crate::dom::mediastreamtrack::MediaStreamTrack;
use crate::dom::promise::Promise;
use crate::dom::window::Window;
use crate::realms::InRealm;
use crate::script_runtime::CanGc;

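/// The realtime `AudioContext` DOM interface, built on top of
/// `BaseAudioContext`. The latency hint chosen at construction time is kept
/// here; `base_latency` and `output_latency` are plain fields that are not
/// yet derived from the audio backend.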
#[dom_struct]
pub(crate) struct AudioContext {
    context: BaseAudioContext,
    latency_hint: AudioContextLatencyCategory,
    base_latency: f64,
    output_latency: f64,
}

impl AudioContext {
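    /// Sets up the shared `BaseAudioContext` state for a realtime context and
    /// records the latency hint. A numeric `latencyHint` is currently treated
    /// the same as `AudioContextLatencyCategory::Interactive`.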
    #[cfg_attr(crown, allow(crown::unrooted_must_root))]
    fn new_inherited(
        options: &AudioContextOptions,
        pipeline_id: PipelineId,
    ) -> Fallible<AudioContext> {
        let context = BaseAudioContext::new_inherited(
            BaseAudioContextOptions::AudioContext(options.convert()),
            pipeline_id,
        )?;

        let latency_hint = match options.latencyHint {
            AudioContextLatencyCategoryOrDouble::AudioContextLatencyCategory(category) => category,
            AudioContextLatencyCategoryOrDouble::Double(_) => {
                AudioContextLatencyCategory::Interactive
            },
        };

        Ok(AudioContext {
            context,
            latency_hint,
            base_latency: 0.,
            output_latency: 0.,
        })
    }

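    /// Creates and reflects the DOM object, then immediately tries to start
    /// rendering via `resume`.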
    #[cfg_attr(crown, allow(crown::unrooted_must_root))]
    fn new(
        window: &Window,
        proto: Option<HandleObject>,
        options: &AudioContextOptions,
        can_gc: CanGc,
    ) -> Fallible<DomRoot<AudioContext>> {
        let pipeline_id = window.pipeline_id();
        let context = AudioContext::new_inherited(options, pipeline_id)?;
        let context = reflect_dom_object_with_proto(Box::new(context), window, proto, can_gc);
        context.resume();
        Ok(context)
    }

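    /// Starts rendering, but only if `BaseAudioContext::is_allowed_to_start`
    /// reports that the context may begin processing.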
    fn resume(&self) {
        if self.context.is_allowed_to_start() {
            self.context.resume();
        }
    }

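    /// Returns a rooted reference to the underlying `BaseAudioContext`.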
    pub(crate) fn base(&self) -> DomRoot<BaseAudioContext> {
        DomRoot::from_ref(&self.context)
    }
}

impl AudioContextMethods<crate::DomTypeHolder> for AudioContext {
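    /// The `new AudioContext(options)` constructor exposed to script.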
    fn Constructor(
        window: &Window,
        proto: Option<HandleObject>,
        can_gc: CanGc,
        options: &AudioContextOptions,
    ) -> Fallible<DomRoot<AudioContext>> {
        AudioContext::new(window, proto, options, can_gc)
    }

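    /// Returns the stored base latency; it is never updated after
    /// construction, so this is currently always 0.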
    fn BaseLatency(&self) -> Finite<f64> {
        Finite::wrap(self.base_latency)
    }

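    /// Returns the stored output latency; like `BaseLatency`, this is
    /// currently always 0.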
    fn OutputLatency(&self) -> Finite<f64> {
        Finite::wrap(self.output_latency)
    }

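    /// Returns an `AudioTimestamp` whose fields are placeholder zeros rather
    /// than values sampled from the rendering thread.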
    fn GetOutputTimestamp(&self) -> AudioTimestamp {
        AudioTimestamp {
            contextTime: Some(Finite::wrap(0.)),
            performanceTime: Some(Finite::wrap(0.)),
        }
    }

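    /// Suspends rendering: rejects with `InvalidState` when the control
    /// thread is already closed, resolves immediately when the context is
    /// already suspended, and otherwise asks the backend to suspend, with the
    /// promise resolution and the `statechange` event queued as DOM
    /// manipulation tasks.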
    fn Suspend(&self, comp: InRealm, can_gc: CanGc) -> Rc<Promise> {
        let promise = Promise::new_in_current_realm(comp, can_gc);

        if self.context.control_thread_state() == ProcessingState::Closed {
            promise.reject_error(Error::InvalidState, can_gc);
            return promise;
        }

        if self.context.State() == AudioContextState::Suspended {
            promise.resolve_native(&(), can_gc);
            return promise;
        }

        let trusted_promise = TrustedPromise::new(promise.clone());
        match self.context.audio_context_impl().lock().unwrap().suspend() {
            Ok(_) => {
                let base_context = Trusted::new(&self.context);
                let context = Trusted::new(self);
                self.global().task_manager().dom_manipulation_task_source().queue(
                    task!(suspend_ok: move || {
                        let base_context = base_context.root();
                        let context = context.root();
                        let promise = trusted_promise.root();
                        promise.resolve_native(&(), CanGc::note());
                        if base_context.State() != AudioContextState::Suspended {
                            base_context.set_state_attribute(AudioContextState::Suspended);
                            context.global().task_manager().dom_manipulation_task_source().queue_simple_event(
                                context.upcast(),
                                atom!("statechange"),
                            );
                        }
                    })
                );
            },
            Err(_) => {
                self.global()
                    .task_manager()
                    .dom_manipulation_task_source()
                    .queue(task!(suspend_error: move || {
                        let promise = trusted_promise.root();
                        promise.reject_error(Error::Type("Something went wrong".to_owned()), CanGc::note());
                    }));
            },
        };

        promise
    }

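    /// Closes the context: rejects with `InvalidState` when the control
    /// thread is already closed, resolves immediately when the state is
    /// already `Closed`, and otherwise asks the backend to close, queueing
    /// the promise resolution and the `statechange` event as DOM manipulation
    /// tasks.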
    fn Close(&self, comp: InRealm, can_gc: CanGc) -> Rc<Promise> {
        let promise = Promise::new_in_current_realm(comp, can_gc);

        if self.context.control_thread_state() == ProcessingState::Closed {
            promise.reject_error(Error::InvalidState, can_gc);
            return promise;
        }

        if self.context.State() == AudioContextState::Closed {
            promise.resolve_native(&(), can_gc);
            return promise;
        }

        let trusted_promise = TrustedPromise::new(promise.clone());
        match self.context.audio_context_impl().lock().unwrap().close() {
            Ok(_) => {
                let base_context = Trusted::new(&self.context);
                let context = Trusted::new(self);
                self.global().task_manager().dom_manipulation_task_source().queue(
                    task!(close_ok: move || {
                        let base_context = base_context.root();
                        let context = context.root();
                        let promise = trusted_promise.root();
                        promise.resolve_native(&(), CanGc::note());
                        if base_context.State() != AudioContextState::Closed {
                            base_context.set_state_attribute(AudioContextState::Closed);
                            context.global().task_manager().dom_manipulation_task_source().queue_simple_event(
                                context.upcast(),
                                atom!("statechange"),
                            );
                        }
                    })
                );
            },
            Err(_) => {
                self.global()
                    .task_manager()
                    .dom_manipulation_task_source()
                    .queue(task!(close_error: move || {
                        let promise = trusted_promise.root();
                        promise.reject_error(Error::Type("Something went wrong".to_owned()), CanGc::note());
                    }));
            },
        };

        promise
    }

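    /// Creates a `MediaElementAudioSourceNode` that captures the output of
    /// the given media element.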
    fn CreateMediaElementSource(
        &self,
        media_element: &HTMLMediaElement,
        can_gc: CanGc,
    ) -> Fallible<DomRoot<MediaElementAudioSourceNode>> {
        let global = self.global();
        let window = global.as_window();
        MediaElementAudioSourceNode::new(window, self, media_element, can_gc)
    }

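    /// Creates a `MediaStreamAudioSourceNode` for the given media stream.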
    fn CreateMediaStreamSource(
        &self,
        stream: &MediaStream,
        can_gc: CanGc,
    ) -> Fallible<DomRoot<MediaStreamAudioSourceNode>> {
        let global = self.global();
        let window = global.as_window();
        MediaStreamAudioSourceNode::new(window, self, stream, can_gc)
    }

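    /// Creates a `MediaStreamTrackAudioSourceNode` for a single media stream
    /// track.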
    fn CreateMediaStreamTrackSource(
        &self,
        track: &MediaStreamTrack,
        can_gc: CanGc,
    ) -> Fallible<DomRoot<MediaStreamTrackAudioSourceNode>> {
        let global = self.global();
        let window = global.as_window();
        MediaStreamTrackAudioSourceNode::new(window, self, track, can_gc)
    }

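    /// Creates a `MediaStreamAudioDestinationNode` with default
    /// `AudioNodeOptions`.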
    fn CreateMediaStreamDestination(
        &self,
        can_gc: CanGc,
    ) -> Fallible<DomRoot<MediaStreamAudioDestinationNode>> {
        let global = self.global();
        let window = global.as_window();
        MediaStreamAudioDestinationNode::new(window, self, &AudioNodeOptions::empty(), can_gc)
    }
}

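/// Maps the DOM latency category onto the corresponding servo-media
/// `LatencyCategory`.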
impl Convert<LatencyCategory> for AudioContextLatencyCategory {
    fn convert(self) -> LatencyCategory {
        match self {
            AudioContextLatencyCategory::Balanced => LatencyCategory::Balanced,
            AudioContextLatencyCategory::Interactive => LatencyCategory::Interactive,
            AudioContextLatencyCategory::Playback => LatencyCategory::Playback,
        }
    }
}

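/// Builds the servo-media realtime options: the sample rate defaults to
/// 44100 Hz when none is provided, and a numeric latency hint currently falls
/// back to `LatencyCategory::Interactive`.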
impl Convert<RealTimeAudioContextOptions> for &AudioContextOptions {
    fn convert(self) -> RealTimeAudioContextOptions {
        RealTimeAudioContextOptions {
            sample_rate: *self.sampleRate.unwrap_or(Finite::wrap(44100.)),
            latency_hint: match self.latencyHint {
                AudioContextLatencyCategoryOrDouble::AudioContextLatencyCategory(category) => {
                    category.convert()
                },
                AudioContextLatencyCategoryOrDouble::Double(_) => LatencyCategory::Interactive,
            },
        }
    }
}