use std::rc::Rc;
use base::id::PipelineId;
use dom_struct::dom_struct;
use js::rust::HandleObject;
use servo_media::audio::context::{LatencyCategory, ProcessingState, RealTimeAudioContextOptions};
use crate::conversions::Convert;
use crate::dom::baseaudiocontext::{BaseAudioContext, BaseAudioContextOptions};
use crate::dom::bindings::codegen::Bindings::AudioContextBinding::{
AudioContextLatencyCategory, AudioContextMethods, AudioContextOptions, AudioTimestamp,
};
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::AudioNodeOptions;
use crate::dom::bindings::codegen::Bindings::BaseAudioContextBinding::AudioContextState;
use crate::dom::bindings::codegen::Bindings::BaseAudioContextBinding::BaseAudioContext_Binding::BaseAudioContextMethods;
use crate::dom::bindings::codegen::UnionTypes::AudioContextLatencyCategoryOrDouble;
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::refcounted::{Trusted, TrustedPromise};
use crate::dom::bindings::reflector::{reflect_dom_object_with_proto, DomObject};
use crate::dom::bindings::root::DomRoot;
use crate::dom::htmlmediaelement::HTMLMediaElement;
use crate::dom::mediaelementaudiosourcenode::MediaElementAudioSourceNode;
use crate::dom::mediastream::MediaStream;
use crate::dom::mediastreamaudiodestinationnode::MediaStreamAudioDestinationNode;
use crate::dom::mediastreamaudiosourcenode::MediaStreamAudioSourceNode;
use crate::dom::mediastreamtrack::MediaStreamTrack;
use crate::dom::mediastreamtrackaudiosourcenode::MediaStreamTrackAudioSourceNode;
use crate::dom::promise::Promise;
use crate::dom::window::Window;
use crate::realms::InRealm;
use crate::script_runtime::CanGc;
use crate::task_source::TaskSource;
/// The `AudioContext` DOM interface: a real-time audio-processing graph.
///
/// <https://webaudio.github.io/web-audio-api/#dom-audiocontext>
#[dom_struct]
pub struct AudioContext {
    // The shared base-context state this interface inherits from.
    context: BaseAudioContext,
    // Latency category requested at construction time; a numeric
    // `latencyHint` is collapsed to `Interactive` (see `new_inherited`).
    latency_hint: AudioContextLatencyCategory,
    // https://webaudio.github.io/web-audio-api/#dom-audiocontext-baselatency
    // NOTE(review): currently always 0.0 — latency reporting is not wired up.
    base_latency: f64,
    // https://webaudio.github.io/web-audio-api/#dom-audiocontext-outputlatency
    // NOTE(review): currently always 0.0 — latency reporting is not wired up.
    output_latency: f64,
}
impl AudioContext {
    /// Builds the non-reflected part of a new `AudioContext`.
    ///
    /// <https://webaudio.github.io/web-audio-api/#AudioContext-constructors>
    #[allow(crown::unrooted_must_root)]
    fn new_inherited(
        options: &AudioContextOptions,
        pipeline_id: PipelineId,
    ) -> Fallible<AudioContext> {
        // The bulk of the construction steps live in the base context.
        let base = BaseAudioContext::new_inherited(
            BaseAudioContextOptions::AudioContext(options.convert()),
            pipeline_id,
        )?;

        // A numeric `latencyHint` is not supported yet; fall back to the
        // default "interactive" category in that case.
        let hint = if let AudioContextLatencyCategoryOrDouble::AudioContextLatencyCategory(
            category,
        ) = options.latencyHint
        {
            category
        } else {
            AudioContextLatencyCategory::Interactive
        };

        Ok(AudioContext {
            context: base,
            latency_hint: hint,
            // Latency reporting is not implemented, so both start at zero.
            base_latency: 0.,
            output_latency: 0.,
        })
    }

    /// Reflects a freshly constructed `AudioContext` into `window`'s realm
    /// and starts rendering if the autoplay policy allows it.
    #[allow(crown::unrooted_must_root)]
    fn new(
        window: &Window,
        proto: Option<HandleObject>,
        options: &AudioContextOptions,
        can_gc: CanGc,
    ) -> Fallible<DomRoot<AudioContext>> {
        let inherited = AudioContext::new_inherited(options, window.pipeline_id())?;
        let rooted = reflect_dom_object_with_proto(Box::new(inherited), window, proto, can_gc);
        rooted.resume();
        Ok(rooted)
    }

    /// Resumes rendering, but only when the context is allowed to start
    /// (i.e. the autoplay policy permits it).
    fn resume(&self) {
        if self.context.is_allowed_to_start() {
            self.context.resume();
        }
    }

    /// Returns a rooted handle to the underlying `BaseAudioContext`.
    pub fn base(&self) -> DomRoot<BaseAudioContext> {
        DomRoot::from_ref(&self.context)
    }
}
impl AudioContextMethods<crate::DomTypeHolder> for AudioContext {
    /// <https://webaudio.github.io/web-audio-api/#dom-audiocontext-audiocontext>
    fn Constructor(
        window: &Window,
        proto: Option<HandleObject>,
        can_gc: CanGc,
        options: &AudioContextOptions,
    ) -> Fallible<DomRoot<AudioContext>> {
        AudioContext::new(window, proto, options, can_gc)
    }

    /// <https://webaudio.github.io/web-audio-api/#dom-audiocontext-baselatency>
    fn BaseLatency(&self) -> Finite<f64> {
        Finite::wrap(self.base_latency)
    }

    /// <https://webaudio.github.io/web-audio-api/#dom-audiocontext-outputlatency>
    fn OutputLatency(&self) -> Finite<f64> {
        Finite::wrap(self.output_latency)
    }

    /// <https://webaudio.github.io/web-audio-api/#dom-audiocontext-getoutputtimestamp>
    fn GetOutputTimestamp(&self) -> AudioTimestamp {
        // TODO: this is a stub — both timestamps are hard-coded to zero.
        AudioTimestamp {
            contextTime: Some(Finite::wrap(0.)),
            performanceTime: Some(Finite::wrap(0.)),
        }
    }

    /// <https://webaudio.github.io/web-audio-api/#dom-audiocontext-suspend>
    fn Suspend(&self, comp: InRealm, can_gc: CanGc) -> Rc<Promise> {
        // Step 1: the promise to be returned.
        let promise = Promise::new_in_current_realm(comp, can_gc);

        // Step 2: a closed context can no longer be suspended.
        if self.context.control_thread_state() == ProcessingState::Closed {
            promise.reject_error(Error::InvalidState);
            return promise;
        }

        // Step 3: nothing to do if already suspended.
        if self.context.State() == AudioContextState::Suspended {
            promise.resolve_native(&());
            return promise;
        }

        // Steps 4 and 5: ask the backend to suspend and settle the promise
        // from a queued DOM-manipulation task.
        let window = DomRoot::downcast::<Window>(self.global()).unwrap();
        let task_source = window.task_manager().dom_manipulation_task_source();
        let trusted_promise = TrustedPromise::new(promise.clone());
        match self.context.audio_context_impl().lock().unwrap().suspend() {
            Ok(_) => {
                let base_context = Trusted::new(&self.context);
                let context = Trusted::new(self);
                let _ = task_source.queue(
                    task!(suspend_ok: move || {
                        let base_context = base_context.root();
                        let context = context.root();
                        let promise = trusted_promise.root();
                        promise.resolve_native(&());
                        // Only fire "statechange" on an actual transition.
                        if base_context.State() != AudioContextState::Suspended {
                            base_context.set_state_attribute(AudioContextState::Suspended);
                            let window = DomRoot::downcast::<Window>(context.global()).unwrap();
                            window.task_manager().dom_manipulation_task_source().queue_simple_event(
                                context.upcast(),
                                atom!("statechange"),
                                &window
                            );
                        }
                    }),
                    window.upcast(),
                );
            },
            Err(_) => {
                let _ = task_source.queue(
                    task!(suspend_error: move || {
                        let promise = trusted_promise.root();
                        promise.reject_error(Error::Type("Something went wrong".to_owned()));
                    }),
                    window.upcast(),
                );
            },
        };

        // Step 6: return the promise.
        promise
    }

    /// <https://webaudio.github.io/web-audio-api/#dom-audiocontext-close>
    fn Close(&self, comp: InRealm, can_gc: CanGc) -> Rc<Promise> {
        // Step 1: the promise to be returned.
        let promise = Promise::new_in_current_realm(comp, can_gc);

        // Step 2: closing twice is an error.
        if self.context.control_thread_state() == ProcessingState::Closed {
            promise.reject_error(Error::InvalidState);
            return promise;
        }

        // Step 3: nothing to do if already closed.
        if self.context.State() == AudioContextState::Closed {
            promise.resolve_native(&());
            return promise;
        }

        // Steps 4 and 5: ask the backend to close and settle the promise
        // from a queued DOM-manipulation task.
        let window = DomRoot::downcast::<Window>(self.global()).unwrap();
        let task_source = window.task_manager().dom_manipulation_task_source();
        let trusted_promise = TrustedPromise::new(promise.clone());
        match self.context.audio_context_impl().lock().unwrap().close() {
            Ok(_) => {
                let base_context = Trusted::new(&self.context);
                let context = Trusted::new(self);
                // Fixed: these tasks were mislabeled `suspend_ok`/`suspend_error`
                // (copy-pasted from `Suspend` above).
                let _ = task_source.queue(
                    task!(close_ok: move || {
                        let base_context = base_context.root();
                        let context = context.root();
                        let promise = trusted_promise.root();
                        promise.resolve_native(&());
                        // Only fire "statechange" on an actual transition.
                        if base_context.State() != AudioContextState::Closed {
                            base_context.set_state_attribute(AudioContextState::Closed);
                            let window = DomRoot::downcast::<Window>(context.global()).unwrap();
                            window.task_manager().dom_manipulation_task_source().queue_simple_event(
                                context.upcast(),
                                atom!("statechange"),
                                &window
                            );
                        }
                    }),
                    window.upcast(),
                );
            },
            Err(_) => {
                let _ = task_source.queue(
                    task!(close_error: move || {
                        let promise = trusted_promise.root();
                        promise.reject_error(Error::Type("Something went wrong".to_owned()));
                    }),
                    window.upcast(),
                );
            },
        };

        // Step 6: return the promise.
        promise
    }

    /// <https://webaudio.github.io/web-audio-api/#dom-audiocontext-createmediaelementsource>
    fn CreateMediaElementSource(
        &self,
        media_element: &HTMLMediaElement,
        can_gc: CanGc,
    ) -> Fallible<DomRoot<MediaElementAudioSourceNode>> {
        let global = self.global();
        let window = global.as_window();
        MediaElementAudioSourceNode::new(window, self, media_element, can_gc)
    }

    /// <https://webaudio.github.io/web-audio-api/#dom-audiocontext-createmediastreamsource>
    fn CreateMediaStreamSource(
        &self,
        stream: &MediaStream,
        can_gc: CanGc,
    ) -> Fallible<DomRoot<MediaStreamAudioSourceNode>> {
        let global = self.global();
        let window = global.as_window();
        MediaStreamAudioSourceNode::new(window, self, stream, can_gc)
    }

    /// <https://webaudio.github.io/web-audio-api/#dom-audiocontext-createmediastreamtracksource>
    fn CreateMediaStreamTrackSource(
        &self,
        track: &MediaStreamTrack,
        can_gc: CanGc,
    ) -> Fallible<DomRoot<MediaStreamTrackAudioSourceNode>> {
        let global = self.global();
        let window = global.as_window();
        MediaStreamTrackAudioSourceNode::new(window, self, track, can_gc)
    }

    /// <https://webaudio.github.io/web-audio-api/#dom-audiocontext-createmediastreamdestination>
    fn CreateMediaStreamDestination(
        &self,
        can_gc: CanGc,
    ) -> Fallible<DomRoot<MediaStreamAudioDestinationNode>> {
        let global = self.global();
        let window = global.as_window();
        MediaStreamAudioDestinationNode::new(window, self, &AudioNodeOptions::empty(), can_gc)
    }
}
impl Convert<LatencyCategory> for AudioContextLatencyCategory {
    /// Maps the DOM-level latency category onto its servo-media counterpart.
    /// The mapping is one-to-one and exhaustive.
    fn convert(self) -> LatencyCategory {
        match self {
            AudioContextLatencyCategory::Interactive => LatencyCategory::Interactive,
            AudioContextLatencyCategory::Balanced => LatencyCategory::Balanced,
            AudioContextLatencyCategory::Playback => LatencyCategory::Playback,
        }
    }
}
impl<'a> Convert<RealTimeAudioContextOptions> for &'a AudioContextOptions {
    /// Builds the servo-media real-time context options from the DOM
    /// `AudioContextOptions` dictionary.
    fn convert(self) -> RealTimeAudioContextOptions {
        // Default to 44.1 kHz when the page does not request a sample rate.
        let sample_rate = *self.sampleRate.unwrap_or(Finite::wrap(44100.));

        // A numeric `latencyHint` is not supported yet; treat it as the
        // default "interactive" category.
        let latency_hint = match self.latencyHint {
            AudioContextLatencyCategoryOrDouble::AudioContextLatencyCategory(category) => {
                category.convert()
            },
            AudioContextLatencyCategoryOrDouble::Double(_) => LatencyCategory::Interactive,
        };

        RealTimeAudioContextOptions {
            sample_rate,
            latency_hint,
        }
    }
}