script/dom/html/htmlmediaelement.rs

/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/. */

use std::cell::Cell;
use std::collections::VecDeque;
use std::rc::Rc;
use std::sync::{Arc, Mutex};
use std::time::{Duration, Instant};
use std::{f64, mem};

use compositing_traits::{CrossProcessCompositorApi, ImageUpdate, SerializableImageData};
use dom_struct::dom_struct;
use embedder_traits::{MediaPositionState, MediaSessionEvent, MediaSessionPlaybackState};
use euclid::default::Size2D;
use headers::{ContentLength, ContentRange, HeaderMapExt};
use html5ever::{LocalName, Prefix, local_name, ns};
use http::StatusCode;
use http::header::{self, HeaderMap, HeaderValue};
use ipc_channel::ipc::{self, IpcSharedMemory, channel};
use ipc_channel::router::ROUTER;
use js::jsapi::JSAutoRealm;
use layout_api::MediaFrame;
use media::{GLPlayerMsg, GLPlayerMsgForward, WindowGLContext};
use net_traits::request::{Destination, RequestId};
use net_traits::{
    CoreResourceThread, FetchMetadata, FetchResponseListener, FilteredMetadata, Metadata,
    NetworkError, ResourceFetchTiming, ResourceTimingType,
};
use pixels::RasterImage;
use script_bindings::codegen::GenericBindings::TimeRangesBinding::TimeRangesMethods;
use script_bindings::codegen::InheritTypes::{
    ElementTypeId, HTMLElementTypeId, HTMLMediaElementTypeId, NodeTypeId,
};
use servo_config::pref;
use servo_media::player::audio::AudioRenderer;
use servo_media::player::video::{VideoFrame, VideoFrameRenderer};
use servo_media::player::{PlaybackState, Player, PlayerError, PlayerEvent, SeekLock, StreamType};
use servo_media::{ClientContextId, ServoMedia, SupportsMediaType};
use servo_url::ServoUrl;
use webrender_api::{
    ExternalImageData, ExternalImageId, ExternalImageType, ImageBufferKind, ImageDescriptor,
    ImageDescriptorFlags, ImageFormat, ImageKey,
};

use crate::document_loader::{LoadBlocker, LoadType};
use crate::dom::attr::Attr;
use crate::dom::audio::audiotrack::AudioTrack;
use crate::dom::audio::audiotracklist::AudioTrackList;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::AttrBinding::AttrMethods;
use crate::dom::bindings::codegen::Bindings::HTMLMediaElementBinding::{
    CanPlayTypeResult, HTMLMediaElementConstants, HTMLMediaElementMethods,
};
use crate::dom::bindings::codegen::Bindings::MediaErrorBinding::MediaErrorConstants::*;
use crate::dom::bindings::codegen::Bindings::MediaErrorBinding::MediaErrorMethods;
use crate::dom::bindings::codegen::Bindings::NavigatorBinding::Navigator_Binding::NavigatorMethods;
use crate::dom::bindings::codegen::Bindings::NodeBinding::Node_Binding::NodeMethods;
use crate::dom::bindings::codegen::Bindings::TextTrackBinding::{TextTrackKind, TextTrackMode};
use crate::dom::bindings::codegen::Bindings::URLBinding::URLMethods;
use crate::dom::bindings::codegen::Bindings::WindowBinding::Window_Binding::WindowMethods;
use crate::dom::bindings::codegen::UnionTypes::{
    MediaStreamOrBlob, VideoTrackOrAudioTrackOrTextTrack,
};
use crate::dom::bindings::error::{Error, ErrorResult, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::DomGlobal;
use crate::dom::bindings::root::{Dom, DomRoot, MutNullableDom};
use crate::dom::bindings::str::{DOMString, USVString};
use crate::dom::blob::Blob;
use crate::dom::csp::{GlobalCspReporting, Violation};
use crate::dom::document::Document;
use crate::dom::element::{
    AttributeMutation, Element, ElementCreator, cors_setting_for_element,
    reflect_cross_origin_attribute, set_cross_origin_attribute,
};
use crate::dom::event::Event;
use crate::dom::eventtarget::EventTarget;
use crate::dom::globalscope::GlobalScope;
use crate::dom::html::htmlelement::HTMLElement;
use crate::dom::html::htmlscriptelement::HTMLScriptElement;
use crate::dom::html::htmlsourceelement::HTMLSourceElement;
use crate::dom::html::htmlstyleelement::HTMLStyleElement;
use crate::dom::html::htmlvideoelement::HTMLVideoElement;
use crate::dom::mediaerror::MediaError;
use crate::dom::mediafragmentparser::MediaFragmentParser;
use crate::dom::mediastream::MediaStream;
use crate::dom::node::{Node, NodeDamage, NodeTraits, UnbindContext};
use crate::dom::performanceresourcetiming::InitiatorType;
use crate::dom::promise::Promise;
use crate::dom::texttrack::TextTrack;
use crate::dom::texttracklist::TextTrackList;
use crate::dom::timeranges::{TimeRanges, TimeRangesContainer};
use crate::dom::trackevent::TrackEvent;
use crate::dom::url::URL;
use crate::dom::videotrack::VideoTrack;
use crate::dom::videotracklist::VideoTrackList;
use crate::dom::virtualmethods::VirtualMethods;
use crate::fetch::{FetchCanceller, create_a_potential_cors_request};
use crate::microtask::{Microtask, MicrotaskRunnable};
use crate::network_listener::{self, PreInvoke, ResourceTimingListener};
use crate::realms::{InRealm, enter_realm};
use crate::script_runtime::CanGc;
use crate::script_thread::ScriptThread;

/// A CSS file to style the media controls.
static MEDIA_CONTROL_CSS: &str = include_str!("../../resources/media-controls.css");

/// A JS file to control the media controls.
static MEDIA_CONTROL_JS: &str = include_str!("../../resources/media-controls.js");

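/// Lock state of the video frame held by a [`FrameHolder`]: while `Locked`,
/// the frame cannot be replaced and its texture information may be read.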
#[derive(MallocSizeOf, PartialEq)]
enum FrameStatus {
    Locked,
    Unlocked,
}

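/// The most recent video frame together with its [`FrameStatus`]. `set` only
/// replaces the frame while it is unlocked, and `get` exposes the texture id
/// and size only while it is locked.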
#[derive(MallocSizeOf)]
struct FrameHolder(
    FrameStatus,
    #[ignore_malloc_size_of = "defined in servo-media"] VideoFrame,
);

impl FrameHolder {
    fn new(frame: VideoFrame) -> FrameHolder {
        FrameHolder(FrameStatus::Unlocked, frame)
    }

    fn lock(&mut self) {
        if self.0 == FrameStatus::Unlocked {
            self.0 = FrameStatus::Locked;
        };
    }

    fn unlock(&mut self) {
        if self.0 == FrameStatus::Locked {
            self.0 = FrameStatus::Unlocked;
        };
    }

    fn set(&mut self, new_frame: VideoFrame) {
        if self.0 == FrameStatus::Unlocked {
            self.1 = new_frame
        };
    }

    fn get(&self) -> (u32, Size2D<i32>, usize) {
        if self.0 == FrameStatus::Locked {
            (
                self.1.get_texture_id(),
                Size2D::new(self.1.get_width(), self.1.get_height()),
                0,
            )
        } else {
            unreachable!();
        }
    }

    fn get_frame(&self) -> VideoFrame {
        self.1.clone()
    }
}

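/// Renders the video frames produced by the player as WebRender images,
/// publishing image updates through the cross-process compositor API.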
#[derive(MallocSizeOf)]
pub(crate) struct MediaFrameRenderer {
    player_id: Option<u64>,
    compositor_api: CrossProcessCompositorApi,
    current_frame: Option<MediaFrame>,
    old_frame: Option<ImageKey>,
    very_old_frame: Option<ImageKey>,
    current_frame_holder: Option<FrameHolder>,
    /// <https://html.spec.whatwg.org/multipage/#poster-frame>
    poster_frame: Option<MediaFrame>,
}

impl MediaFrameRenderer {
    fn new(compositor_api: CrossProcessCompositorApi) -> Self {
        Self {
            player_id: None,
            compositor_api,
            current_frame: None,
            old_frame: None,
            very_old_frame: None,
            current_frame_holder: None,
            poster_frame: None,
        }
    }

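    /// Stores the poster image, if any, as a [`MediaFrame`] built from the
    /// image's existing image key and dimensions.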
    fn set_poster_frame(&mut self, image: Option<Arc<RasterImage>>) {
        self.poster_frame = image.and_then(|image| {
            image.id.map(|image_key| MediaFrame {
                image_key,
                width: image.metadata.width as i32,
                height: image.metadata.height as i32,
            })
        });
    }
}

impl VideoFrameRenderer for MediaFrameRenderer {
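    /// Pushes `frame` to the compositor: the current image key is updated in
    /// place when the frame size is unchanged, otherwise a new image key is
    /// generated; image keys from earlier frames are deleted on later calls.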
    fn render(&mut self, frame: VideoFrame) {
        let mut updates = smallvec::smallvec![];

        if let Some(old_image_key) = mem::replace(&mut self.very_old_frame, self.old_frame.take()) {
            updates.push(ImageUpdate::DeleteImage(old_image_key));
        }

        let descriptor = ImageDescriptor::new(
            frame.get_width(),
            frame.get_height(),
            ImageFormat::BGRA8,
            ImageDescriptorFlags::empty(),
        );

        match &mut self.current_frame {
            Some(current_frame)
                if current_frame.width == frame.get_width() &&
                    current_frame.height == frame.get_height() =>
            {
                if !frame.is_gl_texture() {
                    updates.push(ImageUpdate::UpdateImage(
                        current_frame.image_key,
                        descriptor,
                        SerializableImageData::Raw(IpcSharedMemory::from_bytes(&frame.get_data())),
                        None,
                    ));
                }

                self.current_frame_holder
                    .get_or_insert_with(|| FrameHolder::new(frame.clone()))
                    .set(frame);

                if let Some(old_image_key) = self.old_frame.take() {
                    updates.push(ImageUpdate::DeleteImage(old_image_key));
                }
            },
            Some(current_frame) => {
                self.old_frame = Some(current_frame.image_key);

                let Some(new_image_key) = self.compositor_api.generate_image_key_blocking() else {
                    return;
                };

                // Update current_frame.
                current_frame.image_key = new_image_key;
                current_frame.width = frame.get_width();
                current_frame.height = frame.get_height();

                let image_data = if frame.is_gl_texture() && self.player_id.is_some() {
                    let texture_target = if frame.is_external_oes() {
                        ImageBufferKind::TextureExternal
                    } else {
                        ImageBufferKind::Texture2D
                    };

                    SerializableImageData::External(ExternalImageData {
                        id: ExternalImageId(self.player_id.unwrap()),
                        channel_index: 0,
                        image_type: ExternalImageType::TextureHandle(texture_target),
                        normalized_uvs: false,
                    })
                } else {
                    SerializableImageData::Raw(IpcSharedMemory::from_bytes(&frame.get_data()))
                };

                self.current_frame_holder
                    .get_or_insert_with(|| FrameHolder::new(frame.clone()))
                    .set(frame);

                updates.push(ImageUpdate::AddImage(new_image_key, descriptor, image_data));
            },
            None => {
                let Some(image_key) = self.compositor_api.generate_image_key_blocking() else {
                    return;
                };

                self.current_frame = Some(MediaFrame {
                    image_key,
                    width: frame.get_width(),
                    height: frame.get_height(),
                });

                let image_data = if frame.is_gl_texture() && self.player_id.is_some() {
                    let texture_target = if frame.is_external_oes() {
                        ImageBufferKind::TextureExternal
                    } else {
                        ImageBufferKind::Texture2D
                    };

                    SerializableImageData::External(ExternalImageData {
                        id: ExternalImageId(self.player_id.unwrap()),
                        channel_index: 0,
                        image_type: ExternalImageType::TextureHandle(texture_target),
                        normalized_uvs: false,
                    })
                } else {
                    SerializableImageData::Raw(IpcSharedMemory::from_bytes(&frame.get_data()))
                };

                self.current_frame_holder = Some(FrameHolder::new(frame));

                updates.push(ImageUpdate::AddImage(image_key, descriptor, image_data));
            },
        }
        self.compositor_api.update_images(updates);
    }
}

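/// A media provider object assigned through the `srcObject` IDL attribute,
/// either a [`MediaStream`] or a [`Blob`].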
#[cfg_attr(crown, crown::unrooted_must_root_lint::must_root)]
#[derive(JSTraceable, MallocSizeOf)]
enum SrcObject {
    MediaStream(Dom<MediaStream>),
    Blob(Dom<Blob>),
}

impl From<MediaStreamOrBlob> for SrcObject {
    #[cfg_attr(crown, allow(crown::unrooted_must_root))]
    fn from(src_object: MediaStreamOrBlob) -> SrcObject {
        match src_object {
            MediaStreamOrBlob::Blob(blob) => SrcObject::Blob(Dom::from_ref(&*blob)),
            MediaStreamOrBlob::MediaStream(stream) => {
                SrcObject::MediaStream(Dom::from_ref(&*stream))
            },
        }
    }
}

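/// State that needs explicit cleanup when the element is dropped: the `Drop`
/// implementation unregisters the player from the GL player thread.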
#[derive(JSTraceable, MallocSizeOf)]
struct DroppableHtmlMediaElement {
    /// Player Id reported by the player thread
    player_id: Cell<u64>,
    #[ignore_malloc_size_of = "Defined in other crates"]
    #[no_trace]
    player_context: WindowGLContext,
}

impl DroppableHtmlMediaElement {
    fn new(player_id: Cell<u64>, player_context: WindowGLContext) -> Self {
        Self {
            player_id,
            player_context,
        }
    }

    pub(crate) fn set_player_id(&self, id: u64) {
        self.player_id.set(id);
    }
}

impl Drop for DroppableHtmlMediaElement {
    fn drop(&mut self) {
        self.player_context
            .send(GLPlayerMsg::UnregisterPlayer(self.player_id.get()));
    }
}

#[dom_struct]
#[allow(non_snake_case)]
pub(crate) struct HTMLMediaElement {
    htmlelement: HTMLElement,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
    network_state: Cell<NetworkState>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
    ready_state: Cell<ReadyState>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
    src_object: DomRefCell<Option<SrcObject>>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-currentsrc>
    current_src: DomRefCell<String>,
    /// Incremented whenever tasks associated with this element are cancelled.
    generation_id: Cell<u32>,
    /// <https://html.spec.whatwg.org/multipage/#fire-loadeddata>
    ///
    /// Reset to false every time the load algorithm is invoked.
    fired_loadeddata_event: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-error>
    error: MutNullableDom<MediaError>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-paused>
    paused: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
    defaultPlaybackRate: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
    playbackRate: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#attr-media-autoplay>
    autoplaying: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#delaying-the-load-event-flag>
    delaying_the_load_event_flag: DomRefCell<Option<LoadBlocker>>,
    /// <https://html.spec.whatwg.org/multipage/#list-of-pending-play-promises>
    #[ignore_malloc_size_of = "promises are hard"]
    pending_play_promises: DomRefCell<Vec<Rc<Promise>>>,
    /// Play promises which are soon to be fulfilled by a queued task.
    #[allow(clippy::type_complexity)]
    #[ignore_malloc_size_of = "promises are hard"]
    in_flight_play_promises_queue: DomRefCell<VecDeque<(Box<[Rc<Promise>]>, ErrorResult)>>,
    #[ignore_malloc_size_of = "servo_media"]
    #[no_trace]
    player: DomRefCell<Option<Arc<Mutex<dyn Player>>>>,
    #[conditional_malloc_size_of]
    #[no_trace]
    video_renderer: Arc<Mutex<MediaFrameRenderer>>,
    #[ignore_malloc_size_of = "Arc"]
    #[no_trace]
    audio_renderer: DomRefCell<Option<Arc<Mutex<dyn AudioRenderer>>>>,
    /// <https://html.spec.whatwg.org/multipage/#show-poster-flag>
    show_poster: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-duration>
    duration: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#official-playback-position>
    playback_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#default-playback-start-position>
    default_playback_start_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-volume>
    volume: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-seeking>
    seeking: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-muted>
    muted: Cell<bool>,
    /// URL of the media resource, if any.
    #[no_trace]
    resource_url: DomRefCell<Option<ServoUrl>>,
    /// URL of the media resource, if the resource is set through the src_object attribute and it
    /// is a blob.
    #[no_trace]
    blob_url: DomRefCell<Option<ServoUrl>>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-played>
    played: DomRefCell<TimeRangesContainer>,
    // https://html.spec.whatwg.org/multipage/#dom-media-audiotracks
    audio_tracks_list: MutNullableDom<AudioTrackList>,
    // https://html.spec.whatwg.org/multipage/#dom-media-videotracks
    video_tracks_list: MutNullableDom<VideoTrackList>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-texttracks>
    text_tracks_list: MutNullableDom<TextTrackList>,
    /// Time of the last timeupdate notification.
    #[ignore_malloc_size_of = "Defined in std::time"]
    next_timeupdate_event: Cell<Instant>,
    /// Latest fetch request context.
    current_fetch_context: DomRefCell<Option<HTMLMediaElementFetchContext>>,
    /// Media controls id.
    /// In order to work around the lack of a privileged JS context, we secure
    /// access to the "privileged" document.servoGetMediaControls(id) API by
    /// keeping a whitelist of media controls identifiers.
    media_controls_id: DomRefCell<Option<String>>,
    droppable: DroppableHtmlMediaElement,
}

/// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
#[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq)]
#[repr(u8)]
pub(crate) enum NetworkState {
    Empty = HTMLMediaElementConstants::NETWORK_EMPTY as u8,
    Idle = HTMLMediaElementConstants::NETWORK_IDLE as u8,
    Loading = HTMLMediaElementConstants::NETWORK_LOADING as u8,
    NoSource = HTMLMediaElementConstants::NETWORK_NO_SOURCE as u8,
}

/// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
#[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf, PartialEq, PartialOrd)]
#[repr(u8)]
#[allow(clippy::enum_variant_names)] // Clippy warning silenced here because these names are from the specification.
pub(crate) enum ReadyState {
    HaveNothing = HTMLMediaElementConstants::HAVE_NOTHING as u8,
    HaveMetadata = HTMLMediaElementConstants::HAVE_METADATA as u8,
    HaveCurrentData = HTMLMediaElementConstants::HAVE_CURRENT_DATA as u8,
    HaveFutureData = HTMLMediaElementConstants::HAVE_FUTURE_DATA as u8,
    HaveEnoughData = HTMLMediaElementConstants::HAVE_ENOUGH_DATA as u8,
}

impl HTMLMediaElement {
    pub(crate) fn new_inherited(
        tag_name: LocalName,
        prefix: Option<Prefix>,
        document: &Document,
    ) -> Self {
        Self {
            htmlelement: HTMLElement::new_inherited(tag_name, prefix, document),
            network_state: Cell::new(NetworkState::Empty),
            ready_state: Cell::new(ReadyState::HaveNothing),
            src_object: Default::default(),
            current_src: DomRefCell::new("".to_owned()),
            generation_id: Cell::new(0),
            fired_loadeddata_event: Cell::new(false),
            error: Default::default(),
            paused: Cell::new(true),
            defaultPlaybackRate: Cell::new(1.0),
            playbackRate: Cell::new(1.0),
            muted: Cell::new(false),
            // FIXME(nox): Why is this initialised to true?
            autoplaying: Cell::new(true),
            delaying_the_load_event_flag: Default::default(),
            pending_play_promises: Default::default(),
            in_flight_play_promises_queue: Default::default(),
            player: Default::default(),
            video_renderer: Arc::new(Mutex::new(MediaFrameRenderer::new(
                document.window().compositor_api().clone(),
            ))),
            audio_renderer: Default::default(),
            show_poster: Cell::new(true),
            duration: Cell::new(f64::NAN),
            playback_position: Cell::new(0.),
            default_playback_start_position: Cell::new(0.),
            volume: Cell::new(1.0),
            seeking: Cell::new(false),
            resource_url: DomRefCell::new(None),
            blob_url: DomRefCell::new(None),
            played: DomRefCell::new(TimeRangesContainer::default()),
            audio_tracks_list: Default::default(),
            video_tracks_list: Default::default(),
            text_tracks_list: Default::default(),
            next_timeupdate_event: Cell::new(Instant::now() + Duration::from_millis(250)),
            current_fetch_context: DomRefCell::new(None),
            media_controls_id: DomRefCell::new(None),
            droppable: DroppableHtmlMediaElement::new(
                Cell::new(0),
                document.window().get_player_context(),
            ),
        }
    }

    pub(crate) fn network_state(&self) -> NetworkState {
        self.network_state.get()
    }

    pub(crate) fn get_ready_state(&self) -> ReadyState {
        self.ready_state.get()
    }

    fn media_type_id(&self) -> HTMLMediaElementTypeId {
        match self.upcast::<Node>().type_id() {
            NodeTypeId::Element(ElementTypeId::HTMLElement(
                HTMLElementTypeId::HTMLMediaElement(media_type_id),
            )) => media_type_id,
            _ => unreachable!(),
        }
    }

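    /// Applies the current playback rate to the player and asks it to start
    /// playing, logging a warning if either operation fails.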
    fn play_media(&self) {
        if let Some(ref player) = *self.player.borrow() {
            if let Err(e) = player.lock().unwrap().set_rate(self.playbackRate.get()) {
                warn!("Could not set the playback rate {:?}", e);
            }
            if let Err(e) = player.lock().unwrap().play() {
                warn!("Could not play media {:?}", e);
            }
        }
    }

    /// Marks this element as delaying the load event or not.
    ///
    /// Nothing happens if the element was already delaying the load event and
    /// we pass true to this method again.
    ///
    /// <https://html.spec.whatwg.org/multipage/#delaying-the-load-event-flag>
    pub(crate) fn delay_load_event(&self, delay: bool, can_gc: CanGc) {
        let blocker = &self.delaying_the_load_event_flag;
        if delay && blocker.borrow().is_none() {
            *blocker.borrow_mut() = Some(LoadBlocker::new(&self.owner_document(), LoadType::Media));
        } else if !delay && blocker.borrow().is_some() {
            LoadBlocker::terminate(blocker, can_gc);
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#time-marches-on>
    fn time_marches_on(&self) {
        // Step 6.
        if Instant::now() > self.next_timeupdate_event.get() {
            self.owner_global()
                .task_manager()
                .media_element_task_source()
                .queue_simple_event(self.upcast(), atom!("timeupdate"));
            self.next_timeupdate_event
                .set(Instant::now() + Duration::from_millis(350));
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#internal-pause-steps>
    fn internal_pause_steps(&self) {
        // Step 1.
        self.autoplaying.set(false);

        // Step 2.
        if !self.Paused() {
            // Step 2.1.
            self.paused.set(true);

            // Step 2.2.
            self.take_pending_play_promises(Err(Error::Abort));

            // Step 2.3.
            let this = Trusted::new(self);
            let generation_id = self.generation_id.get();
            self.owner_global()
                .task_manager()
                .media_element_task_source()
                .queue(task!(internal_pause_steps: move || {
                    let this = this.root();
                    if generation_id != this.generation_id.get() {
                        return;
                    }

                    this.fulfill_in_flight_play_promises(|| {
                        // Step 2.3.1.
                        this.upcast::<EventTarget>().fire_event(atom!("timeupdate"), CanGc::note());

                        // Step 2.3.2.
                        this.upcast::<EventTarget>().fire_event(atom!("pause"), CanGc::note());

                        if let Some(ref player) = *this.player.borrow() {
                            if let Err(e) = player.lock().unwrap().pause() {
                                eprintln!("Could not pause player {:?}", e);
                            }
                        }

                        // Step 2.3.3.
                        // Done after running this closure in
                        // `fulfill_in_flight_play_promises`.
                    });
                }));

            // Step 2.4.
            // FIXME(nox): Set the official playback position to the current
            // playback position.
        }
    }

    // https://html.spec.whatwg.org/multipage/#allowed-to-play
    fn is_allowed_to_play(&self) -> bool {
        true
    }

    // https://html.spec.whatwg.org/multipage/#notify-about-playing
    fn notify_about_playing(&self) {
        // Step 1.
        self.take_pending_play_promises(Ok(()));

        // Step 2.
        let this = Trusted::new(self);
        let generation_id = self.generation_id.get();
        self.owner_global()
            .task_manager()
            .media_element_task_source()
            .queue(task!(notify_about_playing: move || {
                let this = this.root();
                if generation_id != this.generation_id.get() {
                    return;
                }

                this.fulfill_in_flight_play_promises(|| {
                    // Step 2.1.
                    this.upcast::<EventTarget>().fire_event(atom!("playing"), CanGc::note());
                    this.play_media();

                    // Step 2.2.
                    // Done after running this closure in
                    // `fulfill_in_flight_play_promises`.
                });

            }));
    }

    // https://html.spec.whatwg.org/multipage/#ready-states
    fn change_ready_state(&self, ready_state: ReadyState) {
        let old_ready_state = self.ready_state.get();
        self.ready_state.set(ready_state);

        if self.network_state.get() == NetworkState::Empty {
            return;
        }

        if old_ready_state == ready_state {
            return;
        }

        let owner_global = self.owner_global();
        let task_manager = owner_global.task_manager();
        let task_source = task_manager.media_element_task_source();

        // Step 1.
        match (old_ready_state, ready_state) {
            (ReadyState::HaveNothing, ReadyState::HaveMetadata) => {
                task_source.queue_simple_event(self.upcast(), atom!("loadedmetadata"));
                // No other steps are applicable in this case.
                return;
            },
            (ReadyState::HaveMetadata, new) if new >= ReadyState::HaveCurrentData => {
                if !self.fired_loadeddata_event.get() {
                    self.fired_loadeddata_event.set(true);
                    let this = Trusted::new(self);
                    task_source.queue(task!(media_reached_current_data: move || {
                        let this = this.root();
                        this.upcast::<EventTarget>().fire_event(atom!("loadeddata"), CanGc::note());
                        this.delay_load_event(false, CanGc::note());
                    }));
                }

                // Steps for the transition from HaveMetadata to HaveCurrentData
                // or HaveFutureData also apply here, as per the next match
                // expression.
            },
            (ReadyState::HaveFutureData, new) if new <= ReadyState::HaveCurrentData => {
                // FIXME(nox): Queue a task to fire timeupdate and waiting
                // events if the conditions called for by the spec are met.

                // No other steps are applicable in this case.
                return;
            },

            _ => (),
        }

        if old_ready_state <= ReadyState::HaveCurrentData &&
            ready_state >= ReadyState::HaveFutureData
        {
            task_source.queue_simple_event(self.upcast(), atom!("canplay"));

            if !self.Paused() {
                self.notify_about_playing();
            }
        }

        if ready_state == ReadyState::HaveEnoughData {
            // TODO: Check sandboxed automatic features browsing context flag.
            // FIXME(nox): I have no idea what this TODO is about.

            // FIXME(nox): Review this block.
            if self.autoplaying.get() && self.Paused() && self.Autoplay() {
                // Step 1
                self.paused.set(false);
                // Step 2
                if self.show_poster.get() {
                    self.show_poster.set(false);
                    self.time_marches_on();
                }
                // Step 3
                task_source.queue_simple_event(self.upcast(), atom!("play"));
                // Step 4
                self.notify_about_playing();
                // Step 5
                self.autoplaying.set(false);
            }

            // FIXME(nox): According to the spec, this should come *before* the
            // "play" event.
            task_source.queue_simple_event(self.upcast(), atom!("canplaythrough"));
        }
    }

    // https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm
    fn invoke_resource_selection_algorithm(&self, can_gc: CanGc) {
        // Step 1.
        self.network_state.set(NetworkState::NoSource);

        // Step 2.
        self.show_poster.set(true);

        // Step 3.
        self.delay_load_event(true, can_gc);

        // Step 4.
        // If the resource selection mode in the synchronous section is
        // "attribute", the URL of the resource to fetch is relative to the
        // media element's node document when the src attribute was last
        // changed, which is why we need to pass the base URL in the task
        // right here.
        let doc = self.owner_document();
        let task = MediaElementMicrotask::ResourceSelection {
            elem: DomRoot::from_ref(self),
            generation_id: self.generation_id.get(),
            base_url: doc.base_url(),
        };

        // FIXME(nox): This will later call the resource_selection_algorithm_sync
        // method from below; if microtasks were trait objects, we would be able
        // to put the code directly in this method, without the boilerplate
        // indirections.
        ScriptThread::await_stable_state(Microtask::MediaElement(task));
    }

    // https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm
    fn resource_selection_algorithm_sync(&self, base_url: ServoUrl, can_gc: CanGc) {
        // Step 5.
        // FIXME(ferjm): Implement blocked_on_parser logic
        // https://html.spec.whatwg.org/multipage/#blocked-on-parser
        // FIXME(nox): Maybe populate the list of pending text tracks.

        // Step 6.
        enum Mode {
            Object,
            Attribute(String),
            Children(DomRoot<HTMLSourceElement>),
        }
        fn mode(media: &HTMLMediaElement) -> Option<Mode> {
            if media.src_object.borrow().is_some() {
                return Some(Mode::Object);
            }
            if let Some(attr) = media
                .upcast::<Element>()
                .get_attribute(&ns!(), &local_name!("src"))
            {
                return Some(Mode::Attribute(attr.Value().into()));
            }
            let source_child_element = media
                .upcast::<Node>()
                .children()
                .filter_map(DomRoot::downcast::<HTMLSourceElement>)
                .next();
            if let Some(element) = source_child_element {
                return Some(Mode::Children(element));
            }
            None
        }
        let mode = if let Some(mode) = mode(self) {
            mode
        } else {
            self.network_state.set(NetworkState::Empty);
            // https://github.com/whatwg/html/issues/3065
            self.delay_load_event(false, can_gc);
            return;
        };

        // Step 7.
        self.network_state.set(NetworkState::Loading);

        // Step 8.
        self.owner_global()
            .task_manager()
            .media_element_task_source()
            .queue_simple_event(self.upcast(), atom!("loadstart"));

        // Step 9.
        match mode {
            // Step 9.obj.
            Mode::Object => {
                // Step 9.obj.1.
                "".clone_into(&mut self.current_src.borrow_mut());

                // Step 9.obj.2.
                // FIXME(nox): The rest of the steps should be run in parallel.

                // Step 9.obj.3.
                // Note that the resource fetch algorithm itself takes care
                // of the cleanup in case of failure.
                self.resource_fetch_algorithm(Resource::Object);
            },
            Mode::Attribute(src) => {
                // Step 9.attr.1.
                if src.is_empty() {
                    self.queue_dedicated_media_source_failure_steps();
                    return;
                }

                // Step 9.attr.2.
                let url_record = match base_url.join(&src) {
                    Ok(url) => url,
                    Err(_) => {
                        self.queue_dedicated_media_source_failure_steps();
                        return;
                    },
                };

                // Step 9.attr.3.
                *self.current_src.borrow_mut() = url_record.as_str().into();

                // Step 9.attr.4.
                // Note that the resource fetch algorithm itself takes care
                // of the cleanup in case of failure.
                self.resource_fetch_algorithm(Resource::Url(url_record));
            },
            // Step 9.children.
            Mode::Children(source) => {
                // This is only a partial implementation
                // FIXME: https://github.com/servo/servo/issues/21481
                let src = source
                    .upcast::<Element>()
                    .get_attribute(&ns!(), &local_name!("src"));
                // Step 9.attr.2.
                let src: String = match src {
                    Some(src) if !src.Value().is_empty() => src.Value().into(),
                    _ => {
                        source
                            .upcast::<EventTarget>()
                            .fire_event(atom!("error"), can_gc);
                        self.queue_dedicated_media_source_failure_steps();
                        return;
                    },
                };
                // Step 9.attr.3.
                let url_record = match base_url.join(&src) {
                    Ok(url) => url,
                    Err(_) => {
                        source
                            .upcast::<EventTarget>()
                            .fire_event(atom!("error"), can_gc);
                        self.queue_dedicated_media_source_failure_steps();
                        return;
                    },
                };
                // Step 9.attr.7
                *self.current_src.borrow_mut() = url_record.as_str().into();
                // Step 9.attr.8.
                self.resource_fetch_algorithm(Resource::Url(url_record));
            },
        }
    }

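    /// Fetches the current resource or blob URL, optionally starting from a
    /// byte offset (used when seeking). Any previous fetch is cancelled before
    /// the new one is registered, and the seek lock, if any, is released once
    /// the new fetch has been initiated.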
904    fn fetch_request(&self, offset: Option<u64>, seek_lock: Option<SeekLock>) {
905        if self.resource_url.borrow().is_none() && self.blob_url.borrow().is_none() {
906            eprintln!("Missing request url");
907            if let Some(seek_lock) = seek_lock {
908                seek_lock.unlock(/* successful seek */ false);
909            }
910            self.queue_dedicated_media_source_failure_steps();
911            return;
912        }
913
914        let document = self.owner_document();
915        let destination = match self.media_type_id() {
916            HTMLMediaElementTypeId::HTMLAudioElement => Destination::Audio,
917            HTMLMediaElementTypeId::HTMLVideoElement => Destination::Video,
918        };
919        let mut headers = HeaderMap::new();
920        // FIXME(eijebong): Use typed headers once we have a constructor for the range header
921        headers.insert(
922            header::RANGE,
923            HeaderValue::from_str(&format!("bytes={}-", offset.unwrap_or(0))).unwrap(),
924        );
925        let url = match self.resource_url.borrow().as_ref() {
926            Some(url) => url.clone(),
927            None => self.blob_url.borrow().as_ref().unwrap().clone(),
928        };
929
930        let cors_setting = cors_setting_for_element(self.upcast());
931        let global = self.global();
932        let request = create_a_potential_cors_request(
933            Some(document.webview_id()),
934            url.clone(),
935            destination,
936            cors_setting,
937            None,
938            global.get_referrer(),
939            document.insecure_requests_policy(),
940            document.has_trustworthy_ancestor_or_current_origin(),
941            global.policy_container(),
942        )
943        .headers(headers)
944        .origin(document.origin().immutable().clone())
945        .pipeline_id(Some(self.global().pipeline_id()))
946        .referrer_policy(document.get_referrer_policy());
947
948        let mut current_fetch_context = self.current_fetch_context.borrow_mut();
949        if let Some(ref mut current_fetch_context) = *current_fetch_context {
950            current_fetch_context.cancel(CancelReason::Overridden);
951        }
952
953        *current_fetch_context = Some(HTMLMediaElementFetchContext::new(
954            request.id,
955            global.core_resource_thread(),
956        ));
957        let listener =
958            HTMLMediaElementFetchListener::new(self, request.id, url.clone(), offset.unwrap_or(0));
959
960        self.owner_document().fetch_background(request, listener);
961
962        // Since we cancelled the previous fetch, from now on the media element
963        // will only receive response data from the new fetch that's been
964        // initiated. This means the player can resume operation, since all subsequent data
965        // pushes will originate from the new seek offset.
966        if let Some(seek_lock) = seek_lock {
967            seek_lock.unlock(/* successful seek */ true);
968        }
969    }
970
971    // https://html.spec.whatwg.org/multipage/#concept-media-load-resource
972    fn resource_fetch_algorithm(&self, resource: Resource) {
973        if let Err(e) = self.setup_media_player(&resource) {
974            eprintln!("Setup media player error {:?}", e);
975            self.queue_dedicated_media_source_failure_steps();
976            return;
977        }
978
979        // Steps 1-2.
980        // Unapplicable, the `resource` variable already conveys which mode
981        // is in use.
982
983        // Step 3.
984        // FIXME(nox): Remove all media-resource-specific text tracks.
985
986        // Step 4.
987        match resource {
988            Resource::Url(url) => {
989                // Step 4.remote.1.
990                if self.Preload() == "none" && !self.autoplaying.get() {
991                    // Step 4.remote.1.1.
992                    self.network_state.set(NetworkState::Idle);
993
994                    // Step 4.remote.1.2.
995                    let owner_global = self.owner_global();
996                    let task_manager = owner_global.task_manager();
997                    let task_source = task_manager.media_element_task_source();
998                    task_source.queue_simple_event(self.upcast(), atom!("suspend"));
999
1000                    // Step 4.remote.1.3.
1001                    let this = Trusted::new(self);
1002                    task_source.queue(task!(set_media_delay_load_event_flag_to_false: move || {
1003                        this.root().delay_load_event(false, CanGc::note());
1004                    }));
1005
1006                    // Steps 4.remote.1.4.
1007                    // FIXME(nox): Somehow we should wait for the task from previous
1008                    // step to be ran before continuing.
1009
1010                    // Steps 4.remote.1.5-4.remote.1.7.
1011                    // FIXME(nox): Wait for an implementation-defined event and
1012                    // then continue with the normal set of steps instead of just
1013                    // returning.
1014                    return;
1015                }
1016
1017                // Step 4.remote.2.
1018                *self.resource_url.borrow_mut() = Some(url);
1019                self.fetch_request(None, None);
1020            },
1021            Resource::Object => {
1022                if let Some(ref src_object) = *self.src_object.borrow() {
1023                    match src_object {
1024                        SrcObject::Blob(blob) => {
1025                            let blob_url = URL::CreateObjectURL(&self.global(), blob);
1026                            *self.blob_url.borrow_mut() =
1027                                Some(ServoUrl::parse(&blob_url).expect("infallible"));
1028                            self.fetch_request(None, None);
1029                        },
1030                        SrcObject::MediaStream(stream) => {
1031                            let tracks = &*stream.get_tracks();
1032                            for (pos, track) in tracks.iter().enumerate() {
1033                                if self
1034                                    .player
1035                                    .borrow()
1036                                    .as_ref()
1037                                    .unwrap()
1038                                    .lock()
1039                                    .unwrap()
1040                                    .set_stream(&track.id(), pos == tracks.len() - 1)
1041                                    .is_err()
1042                                {
1043                                    self.queue_dedicated_media_source_failure_steps();
1044                                }
1045                            }
1046                        },
1047                    }
1048                }
1049            },
1050        }
1051    }
1052
1053    /// Queues a task to run the [dedicated media source failure steps][steps].
1054    ///
1055    /// [steps]: https://html.spec.whatwg.org/multipage/#dedicated-media-source-failure-steps
1056    fn queue_dedicated_media_source_failure_steps(&self) {
1057        let this = Trusted::new(self);
1058        let generation_id = self.generation_id.get();
1059        self.take_pending_play_promises(Err(Error::NotSupported));
1060        self.owner_global()
1061            .task_manager()
1062            .media_element_task_source()
1063            .queue(task!(dedicated_media_source_failure_steps: move || {
1064                let this = this.root();
1065                if generation_id != this.generation_id.get() {
1066                    return;
1067                }
1068
1069                this.fulfill_in_flight_play_promises(|| {
1070                    // Step 1.
1071                    this.error.set(Some(&*MediaError::new(
1072                        &this.owner_window(),
1073                        MEDIA_ERR_SRC_NOT_SUPPORTED, CanGc::note())));
1074
1075                    // Step 2.
1076                    this.AudioTracks().clear();
1077                    this.VideoTracks().clear();
1078
1079                    // Step 3.
1080                    this.network_state.set(NetworkState::NoSource);
1081
1082                    // Step 4.
1083                    this.show_poster.set(true);
1084
1085                    // Step 5.
1086                    this.upcast::<EventTarget>().fire_event(atom!("error"), CanGc::note());
1087
1088                    if let Some(ref player) = *this.player.borrow() {
1089                        if let Err(e) = player.lock().unwrap().stop() {
1090                            eprintln!("Could not stop player {:?}", e);
1091                        }
1092                    }
1093
1094                    // Step 6.
1095                    // Done after running this closure in
1096                    // `fulfill_in_flight_play_promises`.
1097                });
1098
1099                // Step 7.
1100                this.delay_load_event(false, CanGc::note());
1101            }));
1102    }
1103
1104    fn queue_ratechange_event(&self) {
1105        self.owner_global()
1106            .task_manager()
1107            .media_element_task_source()
1108            .queue_simple_event(self.upcast(), atom!("ratechange"));
1109    }
1110
1111    fn in_error_state(&self) -> bool {
1112        self.error.get().is_some()
1113    }
1114
1115    /// <https://html.spec.whatwg.org/multipage/#potentially-playing>
1116    fn is_potentially_playing(&self) -> bool {
1117        !self.paused.get() &&
1118            !self.Ended() &&
1119            self.error.get().is_none() &&
1120            !self.is_blocked_media_element()
1121    }
1122
1123    // https://html.spec.whatwg.org/multipage/#blocked-media-element
1124    fn is_blocked_media_element(&self) -> bool {
1125        self.ready_state.get() <= ReadyState::HaveCurrentData ||
1126            self.is_paused_for_user_interaction() ||
1127            self.is_paused_for_in_band_content()
1128    }
1129
1130    // https://html.spec.whatwg.org/multipage/#paused-for-user-interaction
1131    fn is_paused_for_user_interaction(&self) -> bool {
1132        // FIXME: we will likely be able to fill this placeholder once (if) we
1133        //        implement the MediaSession API.
1134        false
1135    }
1136
1137    // https://html.spec.whatwg.org/multipage/#paused-for-in-band-content
1138    fn is_paused_for_in_band_content(&self) -> bool {
1139        // FIXME: we will likely be able to fill this placeholder once (if) we
1140        //        implement https://github.com/servo/servo/issues/22314
1141        false
1142    }
1143
1144    // https://html.spec.whatwg.org/multipage/#media-element-load-algorithm
1145    fn media_element_load_algorithm(&self, can_gc: CanGc) {
1146        // Reset the flag that signals whether loadeddata was ever fired for
1147        // this invokation of the load algorithm.
1148        self.fired_loadeddata_event.set(false);
1149
1150        // Step 1-2.
1151        self.generation_id.set(self.generation_id.get() + 1);
1152
1153        // Steps 3-4.
1154        while !self.in_flight_play_promises_queue.borrow().is_empty() {
1155            self.fulfill_in_flight_play_promises(|| ());
1156        }
1157
1158        let global = self.owner_global();
1159        let task_manager = global.task_manager();
1160        let task_source = task_manager.media_element_task_source();
1161
1162        // Step 5.
1163        let network_state = self.network_state.get();
1164        if network_state == NetworkState::Loading || network_state == NetworkState::Idle {
1165            task_source.queue_simple_event(self.upcast(), atom!("abort"));
1166        }
1167
1168        // Step 6.
1169        if network_state != NetworkState::Empty {
1170            // Step 6.1.
1171            task_source.queue_simple_event(self.upcast(), atom!("emptied"));
1172
1173            // Step 6.2.
1174            if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
1175                current_fetch_context.cancel(CancelReason::Error);
1176            }
1177
1178            // Step 6.3.
1179            // FIXME(nox): Detach MediaSource media provider object.
1180
1181            // Step 6.4.
1182            self.AudioTracks().clear();
1183            self.VideoTracks().clear();
1184
1185            // Step 6.5.
1186            if self.ready_state.get() != ReadyState::HaveNothing {
1187                self.change_ready_state(ReadyState::HaveNothing);
1188            }
1189
1190            // Step 6.6.
1191            if !self.Paused() {
1192                // Step 6.6.1.
1193                self.paused.set(true);
1194
1195                // Step 6.6.2.
1196                self.take_pending_play_promises(Err(Error::Abort));
1197                self.fulfill_in_flight_play_promises(|| ());
1198            }
1199
1200            // Step 6.7. If seeking is true, set it to false.
1201            if self.seeking.get() {
1202                self.seeking.set(false);
1203            }
1204
1205            // Step 6.8.
1206            let queue_timeupdate_event = self.playback_position.get() != 0.;
1207            self.playback_position.set(0.);
1208            if queue_timeupdate_event {
1209                task_source.queue_simple_event(self.upcast(), atom!("timeupdate"));
1210            }
1211
1212            // Step 6.9.
1213            // FIXME(nox): Set timeline offset to NaN.
1214
1215            // Step 6.10.
1216            self.duration.set(f64::NAN);
1217        }
1218
1219        // Step 7.
1220        self.playbackRate.set(self.defaultPlaybackRate.get());
1221
1222        // Step 8.
1223        self.error.set(None);
1224        self.autoplaying.set(true);
1225
1226        // Step 9.
1227        self.invoke_resource_selection_algorithm(can_gc);
1228
1229        // Step 10.
1230        // FIXME(nox): Stop playback of any previously running media resource.
1231    }
1232
1233    /// Appends a promise to the list of pending play promises.
1234    fn push_pending_play_promise(&self, promise: &Rc<Promise>) {
1235        self.pending_play_promises
1236            .borrow_mut()
1237            .push(promise.clone());
1238    }
1239
1240    /// Takes the pending play promises.
1241    ///
1242    /// The result with which these promises will be fulfilled is passed here
1243    /// and this method returns nothing because we actually just move the
1244    /// current list of pending play promises to the
1245    /// `in_flight_play_promises_queue` field.
1246    ///
1247    /// Each call to this method must be followed by a call to
1248    /// `fulfill_in_flight_play_promises`, to actually fulfill the promises
1249    /// which were taken and moved to the in-flight queue.
1250    fn take_pending_play_promises(&self, result: ErrorResult) {
1251        let pending_play_promises = std::mem::take(&mut *self.pending_play_promises.borrow_mut());
1252        self.in_flight_play_promises_queue
1253            .borrow_mut()
1254            .push_back((pending_play_promises.into(), result));
1255    }
1256
1257    /// Fulfills the next in-flight play promises queue after running a closure.
1258    ///
1259    /// See the comment on `take_pending_play_promises` for why this method
1260    /// does not take a list of promises to fulfill. Callers cannot just pop
1261    /// the front list off of `in_flight_play_promises_queue` and later fulfill
1262    /// the promises because that would mean putting
1263    /// `#[cfg_attr(crown, allow(crown::unrooted_must_root))]` on even more functions, potentially
1264    /// hiding actual safety bugs.
1265    #[cfg_attr(crown, allow(crown::unrooted_must_root))]
1266    fn fulfill_in_flight_play_promises<F>(&self, f: F)
1267    where
1268        F: FnOnce(),
1269    {
1270        let (promises, result) = self
1271            .in_flight_play_promises_queue
1272            .borrow_mut()
1273            .pop_front()
1274            .expect("there should be at least one list of in flight play promises");
1275        f();
1276        for promise in &*promises {
1277            match result {
1278                Ok(ref value) => promise.resolve_native(value, CanGc::note()),
1279                Err(ref error) => promise.reject_error(error.clone(), CanGc::note()),
1280            }
1281        }
1282    }
1283
1284    /// Handles insertion of `source` children.
1285    ///
1286    /// <https://html.spec.whatwg.org/multipage/#the-source-element:nodes-are-inserted>
1287    pub(crate) fn handle_source_child_insertion(&self, can_gc: CanGc) {
1288        if self.upcast::<Element>().has_attribute(&local_name!("src")) {
1289            return;
1290        }
1291        if self.network_state.get() != NetworkState::Empty {
1292            return;
1293        }
1294        self.media_element_load_algorithm(can_gc);
1295    }
1296
1297    // https://html.spec.whatwg.org/multipage/#dom-media-seek
1298    fn seek(&self, time: f64, _approximate_for_speed: bool) {
1299        // Step 1.
1300        self.show_poster.set(false);
1301
1302        // Step 2.
1303        if self.ready_state.get() == ReadyState::HaveNothing {
1304            return;
1305        }
1306
1307        // Step 3.
1308        // The fetch request associated with this seek already takes
1309        // care of cancelling any previous requests.
1310
1311        // Step 4.
1312        // The flag will be cleared when the media engine tells us the seek was done.
1313        self.seeking.set(true);
1314
1315        // Step 5.
1316        // XXX(ferjm) The rest of the steps should be run in parallel, so seeking cancelation
1317        //            can be done properly. No other browser does it yet anyway.
1318
1319        // Step 6.
1320        let time = f64::min(time, self.Duration());
1321
1322        // Step 7.
1323        let time = f64::max(time, 0.);
1324
1325        // Step 8.
1326        let seekable = self.Seekable();
1327        if seekable.Length() == 0 {
1328            self.seeking.set(false);
1329            return;
1330        }
1331        let mut nearest_seekable_position = 0.0;
1332        let mut in_seekable_range = false;
1333        let mut nearest_seekable_distance = f64::MAX;
1334        for i in 0..seekable.Length() {
1335            let start = seekable.Start(i).unwrap().abs();
1336            let end = seekable.End(i).unwrap().abs();
1337            if time >= start && time <= end {
1338                nearest_seekable_position = time;
1339                in_seekable_range = true;
1340                break;
1341            } else if time < start {
1342                let distance = start - time;
1343                if distance < nearest_seekable_distance {
1344                    nearest_seekable_distance = distance;
1345                    nearest_seekable_position = start;
1346                }
1347            } else {
1348                let distance = time - end;
1349                if distance < nearest_seekable_distance {
1350                    nearest_seekable_distance = distance;
1351                    nearest_seekable_position = end;
1352                }
1353            }
1354        }
1355        let time = if in_seekable_range {
1356            time
1357        } else {
1358            nearest_seekable_position
1359        };
1360
1361        // Step 9.
1362        // servo-media with gstreamer does not support inaccurate seeking for now.
1363
1364        // Step 10.
1365        self.owner_global()
1366            .task_manager()
1367            .media_element_task_source()
1368            .queue_simple_event(self.upcast(), atom!("seeking"));
1369
1370        // Step 11.
1371        if let Some(ref player) = *self.player.borrow() {
1372            if let Err(e) = player.lock().unwrap().seek(time) {
1373                eprintln!("Seek error {:?}", e);
1374            }
1375        }
1376
1377        // The rest of the steps are handled when the media engine signals a
1378        // ready state change or otherwise completes the seek and signals a
1379        // position change.
1380    }
1381
1382    // https://html.spec.whatwg.org/multipage/#dom-media-seek
1383    fn seek_end(&self) {
1384        // Step 14.
1385        self.seeking.set(false);
1386
1387        // Step 15.
1388        self.time_marches_on();
1389
1390        // Step 16.
1391        let global = self.owner_global();
1392        let task_manager = global.task_manager();
1393        let task_source = task_manager.media_element_task_source();
1394        task_source.queue_simple_event(self.upcast(), atom!("timeupdate"));
1395
1396        // Step 17.
1397        task_source.queue_simple_event(self.upcast(), atom!("seeked"));
1398    }
1399
1400    fn set_player_id(&self, player_id: u64) {
1401        self.droppable.set_player_id(player_id);
1402    }
1403
1404    /// <https://html.spec.whatwg.org/multipage/#poster-frame>
1405    pub(crate) fn set_poster_frame(&self, image: Option<Arc<RasterImage>>) {
1406        let queue_postershown_event = pref!(media_testing_enabled) && image.is_some();
1407
1408        self.video_renderer.lock().unwrap().set_poster_frame(image);
1409
1410        self.upcast::<Node>().dirty(NodeDamage::Other);
1411
1412        if queue_postershown_event {
1413            self.owner_global()
1414                .task_manager()
1415                .media_element_task_source()
1416                .queue_simple_event(self.upcast(), atom!("postershown"));
1417        }
1418    }
1419
1420    fn setup_media_player(&self, resource: &Resource) -> Result<(), ()> {
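        // A MediaStream `srcObject` is a live, non-seekable stream; any other
        // resource (a Blob `srcObject` or a URL) is treated as seekable.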
1421        let stream_type = match *resource {
1422            Resource::Object => {
1423                if let Some(ref src_object) = *self.src_object.borrow() {
1424                    match src_object {
1425                        SrcObject::MediaStream(_) => StreamType::Stream,
1426                        _ => StreamType::Seekable,
1427                    }
1428                } else {
1429                    return Err(());
1430                }
1431            },
1432            _ => StreamType::Seekable,
1433        };
1434
1435        let window = self.owner_window();
1436        let (action_sender, action_receiver) = ipc::channel::<PlayerEvent>().unwrap();
1437        let video_renderer: Option<Arc<Mutex<dyn VideoFrameRenderer>>> = match self.media_type_id()
1438        {
1439            HTMLMediaElementTypeId::HTMLAudioElement => None,
1440            HTMLMediaElementTypeId::HTMLVideoElement => Some(self.video_renderer.clone()),
1441        };
1442
1443        let audio_renderer = self.audio_renderer.borrow().as_ref().cloned();
1444
1445        let pipeline_id = window.pipeline_id();
1446        let client_context_id =
1447            ClientContextId::build(pipeline_id.namespace_id.0, pipeline_id.index.0.get());
1448        let player = ServoMedia::get().create_player(
1449            &client_context_id,
1450            stream_type,
1451            action_sender,
1452            video_renderer,
1453            audio_renderer,
1454            Box::new(window.get_player_context()),
1455        );
1456        let player_id = {
1457            let player_guard = player.lock().unwrap();
1458
1459            if let Err(e) = player_guard.set_mute(self.muted.get()) {
1460                log::warn!("Could not set mute state: {:?}", e);
1461            }
1462
1463            player_guard.get_id()
1464        };
1465
1466        *self.player.borrow_mut() = Some(player);
1467
1468        let trusted_node = Trusted::new(self);
1469        let task_source = self
1470            .owner_global()
1471            .task_manager()
1472            .media_element_task_source()
1473            .to_sendable();
1474        ROUTER.add_typed_route(
1475            action_receiver,
1476            Box::new(move |message| {
1477                let event = message.unwrap();
1478                trace!("Player event {:?}", event);
1479                let this = trusted_node.clone();
1480                task_source.queue(task!(handle_player_event: move || {
1481                    this.root().handle_player_event(player_id, &event, CanGc::note());
1482                }));
1483            }),
1484        );
1485
1486        // GLPlayer thread setup
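        // If a GLPlayer thread is available, register this player with it: the
        // thread replies with a player id, and its Lock/Unlock messages for the
        // current frame holder are routed back onto the element's task source.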
1487        let (player_id, image_receiver) = window
1488            .get_player_context()
1489            .glplayer_thread_sender
1490            .map(|pipeline| {
1491                let (image_sender, image_receiver) = channel().unwrap();
1492                pipeline
1493                    .send(GLPlayerMsg::RegisterPlayer(image_sender))
1494                    .unwrap();
1495                match image_receiver.recv().unwrap() {
1496                    GLPlayerMsgForward::PlayerId(id) => (id, Some(image_receiver)),
1497                    _ => unreachable!(),
1498                }
1499            })
1500            .unwrap_or((0, None));
1501
1502        self.set_player_id(player_id);
1503        self.video_renderer.lock().unwrap().player_id = Some(player_id);
1504
1505        if let Some(image_receiver) = image_receiver {
1506            let trusted_node = Trusted::new(self);
1507            let task_source = self
1508                .owner_global()
1509                .task_manager()
1510                .media_element_task_source()
1511                .to_sendable();
1512            ROUTER.add_typed_route(
1513                image_receiver,
1514                Box::new(move |message| {
1515                    let msg = message.unwrap();
1516                    let this = trusted_node.clone();
1517                    task_source.queue(task!(handle_glplayer_message: move || {
1518                        trace!("GLPlayer message {:?}", msg);
1519                        let video_renderer = this.root().video_renderer.clone();
1520
1521                        match msg {
1522                            GLPlayerMsgForward::Lock(sender) => {
1523                                if let Some(holder) = video_renderer
1524                                    .lock()
1525                                    .unwrap()
1526                                    .current_frame_holder
1527                                    .as_mut() {
1528                                        holder.lock();
1529                                        sender.send(holder.get()).unwrap();
1530                                    };
1531                            },
1532                            GLPlayerMsgForward::Unlock() => {
1533                                if let Some(holder) = video_renderer
1534                                    .lock()
1535                                    .unwrap()
1536                                    .current_frame_holder
1537                                    .as_mut() { holder.unlock() }
1538                            },
1539                            _ => (),
1540                        }
1541                    }));
1542                }),
1543            );
1544        }
1545
1546        Ok(())
1547    }
1548
1549    pub(crate) fn set_audio_track(&self, idx: usize, enabled: bool) {
1550        if let Some(ref player) = *self.player.borrow() {
1551            if let Err(err) = player.lock().unwrap().set_audio_track(idx as i32, enabled) {
1552                warn!("Could not set audio track {:#?}", err);
1553            }
1554        }
1555    }
1556
1557    pub(crate) fn set_video_track(&self, idx: usize, enabled: bool) {
1558        if let Some(ref player) = *self.player.borrow() {
1559            if let Err(err) = player.lock().unwrap().set_video_track(idx as i32, enabled) {
1560                warn!("Could not set video track {:#?}", err);
1561            }
1562        }
1563    }
1564
1565    fn end_of_playback_in_forwards_direction(&self) {
1566        // Step 1. If the media element has a loop attribute specified, then seek to the earliest
1567        // possible position of the media resource and return.
1568        if self.Loop() {
1569            self.seek(
1570                self.earliest_possible_position(),
1571                /* approximate_for_speed*/ false,
1572            );
1573            return;
1574        }
1575        // Step 2. The ended IDL attribute starts returning true once the event loop returns to
1576        // step 1.
1577        // The **ended playback** condition is implemented inside the
1578        // HTMLMediaElementMethods::Ended method.
1579
1580        // Step 3. Queue a media element task given the media element and the following steps:
1581        let this = Trusted::new(self);
1582
1583        self.owner_global()
1584            .task_manager()
1585            .media_element_task_source()
1586            .queue(task!(reaches_the_end_steps: move || {
1587                let this = this.root();
1588                // Step 3.1. Fire an event named timeupdate at the media element
1589                this.upcast::<EventTarget>().fire_event(atom!("timeupdate"), CanGc::note());
1590
1591                // Step 3.2. If the media element has ended playback, the direction of playback is
1592                // forwards, and paused is false, then:
1593                if this.Ended() && !this.Paused() {
1594                    // Step 3.2.1. Set the paused attribute to true
1595                    this.paused.set(true);
1596
1597                    // Step 3.2.2. Fire an event named pause at the media element
1598                    this.upcast::<EventTarget>().fire_event(atom!("pause"), CanGc::note());
1599
1600                    // Step 3.2.3. Take pending play promises and reject pending play promises with
1601                    // the result and an "AbortError" DOMException
1602                    this.take_pending_play_promises(Err(Error::Abort));
1603                    this.fulfill_in_flight_play_promises(|| ());
1604                }
1605
1606                // Step 3.3. Fire an event named ended at the media element.
1607                this.upcast::<EventTarget>().fire_event(atom!("ended"), CanGc::note());
1608            }));
1609
1610        // https://html.spec.whatwg.org/multipage/#dom-media-have_current_data
1611        self.change_ready_state(ReadyState::HaveCurrentData);
1612    }
1613
1614    fn playback_end(&self) {
1615        // https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list
1616        // => "If the media data can be fetched but is found by inspection to be in
1617        //    an unsupported format, or can otherwise not be rendered at all"
1618        if self.ready_state.get() < ReadyState::HaveMetadata {
1619            self.queue_dedicated_media_source_failure_steps();
1620            return;
1621        }
1622
1623        // https://html.spec.whatwg.org/multipage/#reaches-the-end
1624        match self.direction_of_playback() {
1625            PlaybackDirection::Forwards => self.end_of_playback_in_forwards_direction(),
1626
1627            PlaybackDirection::Backwards => {
1628                if self.playback_position.get() <= self.earliest_possible_position() {
1629                    self.owner_global()
1630                        .task_manager()
1631                        .media_element_task_source()
1632                        .queue_simple_event(self.upcast(), atom!("ended"));
1633                }
1634            },
1635        }
1636    }
1637
1638    fn playback_error(&self, error: &str, can_gc: CanGc) {
1639        error!("Player error: {:?}", error);
1640
1641        // If we have already flagged an error condition while processing
1642        // the network response, we should silently skip any observable
1643        // errors originating while decoding the erroneous response.
1644        if self.in_error_state() {
1645            return;
1646        }
1647
1648        // https://html.spec.whatwg.org/multipage/#loading-the-media-resource:media-data-13
1649        // 1. The user agent should cancel the fetching process.
1650        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
1651            current_fetch_context.cancel(CancelReason::Error);
1652        }
1653        // 2. Set the error attribute to the result of creating a MediaError with MEDIA_ERR_DECODE.
1654        self.error.set(Some(&*MediaError::new(
1655            &self.owner_window(),
1656            MEDIA_ERR_DECODE,
1657            can_gc,
1658        )));
1659
1660        // 3. Set the element's networkState attribute to the NETWORK_IDLE value.
1661        self.network_state.set(NetworkState::Idle);
1662
1663        // 4. Set the element's delaying-the-load-event flag to false. This stops delaying the load event.
1664        self.delay_load_event(false, can_gc);
1665
1666        // 5. Fire an event named error at the media element.
1667        self.upcast::<EventTarget>()
1668            .fire_event(atom!("error"), can_gc);
1669
1670        // TODO: 6. Abort the overall resource selection algorithm.
1671    }
1672
1673    fn playback_metadata_updated(
1674        &self,
1675        metadata: &servo_media::player::metadata::Metadata,
1676        can_gc: CanGc,
1677    ) {
1678        // https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list
1679        // => If the media resource is found to have an audio track
1680        if !metadata.audio_tracks.is_empty() {
1681            for (i, _track) in metadata.audio_tracks.iter().enumerate() {
1682                // Step 1.
1683                let kind = match i {
1684                    0 => DOMString::from("main"),
1685                    _ => DOMString::new(),
1686                };
1687                let window = self.owner_window();
1688                let audio_track = AudioTrack::new(
1689                    &window,
1690                    DOMString::new(),
1691                    kind,
1692                    DOMString::new(),
1693                    DOMString::new(),
1694                    Some(&*self.AudioTracks()),
1695                    can_gc,
1696                );
1697
1698                // Steps 2. & 3.
1699                self.AudioTracks().add(&audio_track);
1700
1701                // Step 4
1702                if let Some(servo_url) = self.resource_url.borrow().as_ref() {
1703                    let fragment = MediaFragmentParser::from(servo_url);
1704                    if let Some(id) = fragment.id() {
1705                        if audio_track.id() == DOMString::from(id) {
1706                            self.AudioTracks()
1707                                .set_enabled(self.AudioTracks().len() - 1, true);
1708                        }
1709                    }
1710
1711                    if fragment.tracks().contains(&audio_track.kind().into()) {
1712                        self.AudioTracks()
1713                            .set_enabled(self.AudioTracks().len() - 1, true);
1714                    }
1715                }
1716
1717                // Steps 5 & 6.
1718                if self.AudioTracks().enabled_index().is_none() {
1719                    self.AudioTracks()
1720                        .set_enabled(self.AudioTracks().len() - 1, true);
1721                }
1722
1723                // Step 7.
1724                let event = TrackEvent::new(
1725                    self.global().as_window(),
1726                    atom!("addtrack"),
1727                    false,
1728                    false,
1729                    &Some(VideoTrackOrAudioTrackOrTextTrack::AudioTrack(audio_track)),
1730                    can_gc,
1731                );
1732
1733                event
1734                    .upcast::<Event>()
1735                    .fire(self.upcast::<EventTarget>(), can_gc);
1736            }
1737        }
1738
1739        // => If the media resource is found to have a video track
1740        if !metadata.video_tracks.is_empty() {
1741            for (i, _track) in metadata.video_tracks.iter().enumerate() {
1742                // Step 1.
1743                let kind = match i {
1744                    0 => DOMString::from("main"),
1745                    _ => DOMString::new(),
1746                };
1747                let window = self.owner_window();
1748                let video_track = VideoTrack::new(
1749                    &window,
1750                    DOMString::new(),
1751                    kind,
1752                    DOMString::new(),
1753                    DOMString::new(),
1754                    Some(&*self.VideoTracks()),
1755                    can_gc,
1756                );
1757
1758                // Steps 2. & 3.
1759                self.VideoTracks().add(&video_track);
1760
1761                // Step 4.
1762                if let Some(track) = self.VideoTracks().item(0) {
1763                    if let Some(servo_url) = self.resource_url.borrow().as_ref() {
1764                        let fragment = MediaFragmentParser::from(servo_url);
1765                        if let Some(id) = fragment.id() {
1766                            if track.id() == DOMString::from(id) {
1767                                self.VideoTracks().set_selected(0, true);
1768                            }
1769                        } else if fragment.tracks().contains(&track.kind().into()) {
1770                            self.VideoTracks().set_selected(0, true);
1771                        }
1772                    }
1773                }
1774
1775                // Steps 5 & 6.
1776                if self.VideoTracks().selected_index().is_none() {
1777                    self.VideoTracks()
1778                        .set_selected(self.VideoTracks().len() - 1, true);
1779                }
1780
1781                // Step 7.
1782                let event = TrackEvent::new(
1783                    self.global().as_window(),
1784                    atom!("addtrack"),
1785                    false,
1786                    false,
1787                    &Some(VideoTrackOrAudioTrackOrTextTrack::VideoTrack(video_track)),
1788                    can_gc,
1789                );
1790
1791                event
1792                    .upcast::<Event>()
1793                    .fire(self.upcast::<EventTarget>(), can_gc);
1794            }
1795        }
1796
1797        // => "Once enough of the media data has been fetched to determine the duration..."
1798        // Step 1.
1799        // servo-media owns the media timeline.
1800
1801        // Step 2.
1802        // XXX(ferjm) Update the timeline offset.
1803
1804        // Step 3.
1805        self.playback_position.set(0.);
1806
1807        // Step 4.
1808        let previous_duration = self.duration.get();
1809        if let Some(duration) = metadata.duration {
1810            self.duration.set(duration.as_secs() as f64);
1811        } else {
1812            self.duration.set(f64::INFINITY);
1813        }
1814        if previous_duration != self.duration.get() {
1815            self.owner_global()
1816                .task_manager()
1817                .media_element_task_source()
1818                .queue_simple_event(self.upcast(), atom!("durationchange"));
1819        }
1820
1821        // Step 5.
1822        self.handle_resize(Some(metadata.width), Some(metadata.height));
1823
1824        // Step 6.
1825        self.change_ready_state(ReadyState::HaveMetadata);
1826
1827        // Step 7.
1828        let mut jumped = false;
1829
1830        // Step 8.
1831        if self.default_playback_start_position.get() > 0. {
1832            self.seek(
1833                self.default_playback_start_position.get(),
1834                /* approximate_for_speed*/ false,
1835            );
1836            jumped = true;
1837        }
1838
1839        // Step 9.
1840        self.default_playback_start_position.set(0.);
1841
1842        // Steps 10 and 11.
1843        if let Some(servo_url) = self.resource_url.borrow().as_ref() {
1844            let fragment = MediaFragmentParser::from(servo_url);
1845            if let Some(start) = fragment.start() {
1846                if start > 0. && start < self.duration.get() {
1847                    self.playback_position.set(start);
1848                    if !jumped {
1849                        self.seek(self.playback_position.get(), false)
1850                    }
1851                }
1852            }
1853        }
1854
1855        // Steps 12 & 13 are already handled by the earlier media track processing.
1856
1857        // We wait until we have metadata to render the controls, so we render them
1858        // with the appropriate size.
1859        if self.Controls() {
1860            self.render_controls(can_gc);
1861        }
1862
1863        let global = self.global();
1864        let window = global.as_window();
1865
1866        // Update the media session metadata title with the obtained metadata.
1867        window.Navigator().MediaSession().update_title(
1868            metadata
1869                .title
1870                .clone()
1871                .unwrap_or(window.get_url().into_string()),
1872        );
1873    }
1874
1875    fn playback_video_frame_updated(&self) {
1876        // Check if the frame was resized
1877        if let Some(frame) = self.video_renderer.lock().unwrap().current_frame {
1878            self.handle_resize(Some(frame.width as u32), Some(frame.height as u32));
1879        }
1880    }
1881
1882    fn playback_need_data(&self) {
1883        // The player needs more data.
1884        // If we already have a valid fetch request, we do nothing.
1885        // Otherwise, if we have no request and the previous request was
1886        // cancelled because we got an EnoughData event, we restart
1887        // fetching where we left off.
1888        if let Some(ref current_fetch_context) = *self.current_fetch_context.borrow() {
1889            if let Some(reason) = current_fetch_context.cancel_reason() {
1890                // XXX(ferjm) Ideally we should just create a fetch request from
1891                // where we left off. But keeping track of the exact next byte that the
1892                // media backend expects is not the easiest task, so I'm simply
1893                // seeking to the current playback position for now, which will create
1894                // a new fetch request for the last rendered frame.
1895                if *reason == CancelReason::Backoff {
1896                    self.seek(self.playback_position.get(), false);
1897                }
1898                return;
1899            }
1900        }
1901
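        // Unlock the data source and flush any buffers queued so far into the
        // player.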
1902        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
1903            if let Err(e) = {
1904                let mut data_source = current_fetch_context.data_source().borrow_mut();
1905                data_source.set_locked(false);
1906                data_source.process_into_player_from_queue(self.player.borrow().as_ref().unwrap())
1907            } {
1908                // If we are pushing too much data and we know that we can
1909                // restart the download later from where we left off, we cancel
1910                // the current request. Otherwise, we continue the request
1911                // assuming that we may drop some frames.
1912                if e == PlayerError::EnoughData {
1913                    current_fetch_context.cancel(CancelReason::Backoff);
1914                }
1915            }
1916        }
1917    }
1918
1919    fn playback_enough_data(&self) {
1920        self.change_ready_state(ReadyState::HaveEnoughData);
1921
1922        // The player has enough data and it is asking us to stop pushing
1923        // bytes, so we cancel the ongoing fetch request iff we are able
1924        // to restart it from where we left off. Otherwise, we continue the
1925        // current fetch request, assuming that some frames will be dropped.
1926        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
1927            if current_fetch_context.is_seekable() {
1928                current_fetch_context.cancel(CancelReason::Backoff);
1929            }
1930        }
1931    }
1932
1933    fn playback_position_changed(&self, position: u64) {
1934        let position = position as f64;
1935        let _ = self
1936            .played
1937            .borrow_mut()
1938            .add(self.playback_position.get(), position);
1939        self.playback_position.set(position);
1940        self.time_marches_on();
1941        let media_position_state =
1942            MediaPositionState::new(self.duration.get(), self.playbackRate.get(), position);
1943        debug!(
1944            "Sending media session event set position state {:?}",
1945            media_position_state
1946        );
1947        self.send_media_session_event(MediaSessionEvent::SetPositionState(media_position_state));
1948    }
1949
1950    fn playback_seek_done(&self) {
1951        // Continuation of
1952        // https://html.spec.whatwg.org/multipage/#dom-media-seek
1953
1954        // Step 13.
1955        let task = MediaElementMicrotask::Seeked {
1956            elem: DomRoot::from_ref(self),
1957            generation_id: self.generation_id.get(),
1958        };
1959        ScriptThread::await_stable_state(Microtask::MediaElement(task));
1960    }
1961
1962    fn playback_state_changed(&self, state: &PlaybackState) {
1963        let mut media_session_playback_state = MediaSessionPlaybackState::None_;
1964        match *state {
1965            PlaybackState::Paused => {
1966                media_session_playback_state = MediaSessionPlaybackState::Paused;
1967                if self.ready_state.get() == ReadyState::HaveMetadata {
1968                    self.change_ready_state(ReadyState::HaveEnoughData);
1969                }
1970            },
1971            PlaybackState::Playing => {
1972                media_session_playback_state = MediaSessionPlaybackState::Playing;
1973            },
1974            PlaybackState::Buffering => {
1975                // Do not send the media session playback state change event
1976                // in this case as a None_ state is expected to clean up the
1977                // session.
1978                return;
1979            },
1980            _ => {},
1981        };
1982        debug!(
1983            "Sending media session event playback state changed to {:?}",
1984            media_session_playback_state
1985        );
1986        self.send_media_session_event(MediaSessionEvent::PlaybackStateChange(
1987            media_session_playback_state,
1988        ));
1989    }
1990
1991    fn handle_player_event(&self, player_id: usize, event: &PlayerEvent, can_gc: CanGc) {
1992        // Ignore asynchronous events from a previous player.
1993        if self
1994            .player
1995            .borrow()
1996            .as_ref()
1997            .is_none_or(|player| player.lock().unwrap().get_id() != player_id)
1998        {
1999            return;
2000        }
2001
2002        match *event {
2003            PlayerEvent::EndOfStream => self.playback_end(),
2004            PlayerEvent::Error(ref error) => self.playback_error(error, can_gc),
2005            PlayerEvent::VideoFrameUpdated => self.playback_video_frame_updated(),
2006            PlayerEvent::MetadataUpdated(ref metadata) => {
2007                self.playback_metadata_updated(metadata, can_gc)
2008            },
2009            PlayerEvent::NeedData => self.playback_need_data(),
2010            PlayerEvent::EnoughData => self.playback_enough_data(),
2011            PlayerEvent::PositionChanged(position) => self.playback_position_changed(position),
2012            PlayerEvent::SeekData(p, ref seek_lock) => {
2013                self.fetch_request(Some(p), Some(seek_lock.clone()))
2014            },
2015            PlayerEvent::SeekDone(_) => self.playback_seek_done(),
2016            PlayerEvent::StateChanged(ref state) => self.playback_state_changed(state),
2017        }
2018    }
2019
2020    // https://html.spec.whatwg.org/multipage/#earliest-possible-position
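    // The earliest possible position is the start of the first played range,
    // or the current playback position if nothing has been played yet.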
2021    fn earliest_possible_position(&self) -> f64 {
2022        self.played
2023            .borrow()
2024            .start(0)
2025            .unwrap_or_else(|_| self.playback_position.get())
2026    }
2027
2028    fn render_controls(&self, can_gc: CanGc) {
2029        let element = self.htmlelement.upcast::<Element>();
2030        if self.ready_state.get() < ReadyState::HaveMetadata || element.is_shadow_host() {
2031            // Bail out if we have no metadata yet or
2032            // if we are already showing the controls.
2033            return;
2034        }
2035        // FIXME(stevennovaryo): Recheck styling of media element to avoid
2036        //                       reparsing styles.
2037        let shadow_root = self
2038            .upcast::<Element>()
2039            .attach_ua_shadow_root(false, can_gc);
2040        let document = self.owner_document();
2041        let script = HTMLScriptElement::new(
2042            local_name!("script"),
2043            None,
2044            &document,
2045            None,
2046            ElementCreator::ScriptCreated,
2047            can_gc,
2048        );
2049        // This is our hacky way to temporarily work around the lack of a privileged
2050        // JS context.
2051        // The media controls UI accesses the document.servoGetMediaControls(id) API
2052        // to get a reference to the media controls ShadowRoot.
2053        // `id` needs to match the internally generated UUID assigned to a media element.
2054        let id = document.register_media_controls(&shadow_root);
2055        let media_controls_script = MEDIA_CONTROL_JS.replace("@@@id@@@", &id);
2056        *self.media_controls_id.borrow_mut() = Some(id);
2057        script
2058            .upcast::<Node>()
2059            .set_text_content_for_element(Some(DOMString::from(media_controls_script)), can_gc);
2060        if let Err(e) = shadow_root
2061            .upcast::<Node>()
2062            .AppendChild(script.upcast::<Node>(), can_gc)
2063        {
2064            warn!("Could not render media controls {:?}", e);
2065            return;
2066        }
2067
2068        let style = HTMLStyleElement::new(
2069            local_name!("style"),
2070            None,
2071            &document,
2072            None,
2073            ElementCreator::ScriptCreated,
2074            can_gc,
2075        );
2076        style
2077            .upcast::<Node>()
2078            .set_text_content_for_element(Some(DOMString::from(MEDIA_CONTROL_CSS)), can_gc);
2079
2080        if let Err(e) = shadow_root
2081            .upcast::<Node>()
2082            .AppendChild(style.upcast::<Node>(), can_gc)
2083        {
2084            warn!("Could not render media controls {:?}", e);
2085        }
2086
2087        self.upcast::<Node>().dirty(NodeDamage::Other);
2088    }
2089
2090    fn remove_controls(&self, can_gc: CanGc) {
2091        if let Some(id) = self.media_controls_id.borrow_mut().take() {
2092            self.owner_document().unregister_media_controls(&id, can_gc);
2093        }
2094    }
2095
2096    /// Gets the video frame at the current playback position.
2097    pub(crate) fn get_current_frame(&self) -> Option<VideoFrame> {
2098        self.video_renderer
2099            .lock()
2100            .unwrap()
2101            .current_frame_holder
2102            .as_ref()
2103            .map(|holder| holder.get_frame())
2104    }
2105
2106    /// Gets the current frame of the video element to present, if any.
2107    /// <https://html.spec.whatwg.org/multipage/#the-video-element:the-video-element-7>
2108    pub(crate) fn get_current_frame_to_present(&self) -> Option<MediaFrame> {
2109        let (current_frame, poster_frame) = {
2110            let renderer = self.video_renderer.lock().unwrap();
2111            (renderer.current_frame, renderer.poster_frame)
2112        };
2113
2114        // If the show poster flag is set (or there is no current video frame to
2115        // present) AND there is a poster frame, present that.
2116        if (self.show_poster.get() || current_frame.is_none()) && poster_frame.is_some() {
2117            return poster_frame;
2118        }
2119
2120        current_frame
2121    }
2122
2123    pub(crate) fn clear_current_frame_data(&self) {
2124        self.handle_resize(None, None);
2125        self.video_renderer.lock().unwrap().current_frame = None;
2126    }
2127
2128    fn handle_resize(&self, width: Option<u32>, height: Option<u32>) {
2129        if let Some(video_elem) = self.downcast::<HTMLVideoElement>() {
2130            video_elem.resize(width, height);
2131            self.upcast::<Node>().dirty(NodeDamage::Other);
2132        }
2133    }
2134
2135    /// By default the audio is rendered through the audio sink automatically
2136    /// selected by the servo-media Player instance. However, in some cases, like
2137    /// the WebAudio MediaElementAudioSourceNode, we need to set a custom audio
2138    /// renderer.
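    ///
    /// Note that swapping the renderer stops any existing player and re-runs
    /// the media element load algorithm, so a new player is created with the
    /// custom renderer.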
2139    pub(crate) fn set_audio_renderer(
2140        &self,
2141        audio_renderer: Arc<Mutex<dyn AudioRenderer>>,
2142        can_gc: CanGc,
2143    ) {
2144        *self.audio_renderer.borrow_mut() = Some(audio_renderer);
2145        if let Some(ref player) = *self.player.borrow() {
2146            if let Err(e) = player.lock().unwrap().stop() {
2147                eprintln!("Could not stop player {:?}", e);
2148            }
2149            self.media_element_load_algorithm(can_gc);
2150        }
2151    }
2152
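    /// Forwards a media session event to the embedder, first making sure that
    /// this element is registered as the media session's media instance.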
2153    fn send_media_session_event(&self, event: MediaSessionEvent) {
2154        let global = self.global();
2155        let media_session = global.as_window().Navigator().MediaSession();
2156
2157        media_session.register_media_instance(self);
2158
2159        media_session.send_event(event);
2160    }
2161
2162    pub(crate) fn set_duration(&self, duration: f64) {
2163        self.duration.set(duration);
2164    }
2165
2166    pub(crate) fn reset(&self) {
2167        if let Some(ref player) = *self.player.borrow() {
2168            if let Err(e) = player.lock().unwrap().stop() {
2169                eprintln!("Could not stop player {:?}", e);
2170            }
2171        }
2172    }
2173
2174    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-resource>
2175    pub(crate) fn origin_is_clean(&self) -> bool {
2176        // Step 5.local (media provider object).
2177        if self.src_object.borrow().is_some() {
2178            // The resource described by the current media resource, if any,
2179            // contains the media data. It is CORS-same-origin.
2180            return true;
2181        }
2182
2183        // Step 5.remote (URL record).
2184        if self.resource_url.borrow().is_some() {
2185            // Update the media data with the contents
2186            // of response's unsafe response obtained in this fashion.
2187            // Response can be CORS-same-origin or CORS-cross-origin;
2188            if let Some(ref current_fetch_context) = *self.current_fetch_context.borrow() {
2189                return current_fetch_context.origin_is_clean();
2190            }
2191        }
2192
2193        true
2194    }
2195}
2196
2197// XXX Placeholder for [https://github.com/servo/servo/issues/22293]
2198#[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq)]
2199enum PlaybackDirection {
2200    Forwards,
2201    #[allow(dead_code)]
2202    Backwards,
2203}
2204
2205// XXX Placeholder implementations for:
2206//
2207// - https://github.com/servo/servo/issues/22293
2208impl HTMLMediaElement {
2209    // https://github.com/servo/servo/issues/22293
2210    fn direction_of_playback(&self) -> PlaybackDirection {
2211        PlaybackDirection::Forwards
2212    }
2213}
2214
2215impl HTMLMediaElementMethods<crate::DomTypeHolder> for HTMLMediaElement {
2216    // https://html.spec.whatwg.org/multipage/#dom-media-networkstate
2217    fn NetworkState(&self) -> u16 {
2218        self.network_state.get() as u16
2219    }
2220
2221    // https://html.spec.whatwg.org/multipage/#dom-media-readystate
2222    fn ReadyState(&self) -> u16 {
2223        self.ready_state.get() as u16
2224    }
2225
2226    // https://html.spec.whatwg.org/multipage/#dom-media-autoplay
2227    make_bool_getter!(Autoplay, "autoplay");
2228    // https://html.spec.whatwg.org/multipage/#dom-media-autoplay
2229    make_bool_setter!(SetAutoplay, "autoplay");
2230
2231    // https://html.spec.whatwg.org/multipage/#attr-media-loop
2232    make_bool_getter!(Loop, "loop");
2233    // https://html.spec.whatwg.org/multipage/#attr-media-loop
2234    make_bool_setter!(SetLoop, "loop");
2235
2236    // https://html.spec.whatwg.org/multipage/#dom-media-defaultmuted
2237    make_bool_getter!(DefaultMuted, "muted");
2238    // https://html.spec.whatwg.org/multipage/#dom-media-defaultmuted
2239    make_bool_setter!(SetDefaultMuted, "muted");
2240
2241    // https://html.spec.whatwg.org/multipage/#dom-media-controls
2242    make_bool_getter!(Controls, "controls");
2243    // https://html.spec.whatwg.org/multipage/#dom-media-controls
2244    make_bool_setter!(SetControls, "controls");
2245
2246    // https://html.spec.whatwg.org/multipage/#dom-media-src
2247    make_url_getter!(Src, "src");
2248
2249    // https://html.spec.whatwg.org/multipage/#dom-media-src
2250    make_url_setter!(SetSrc, "src");
2251
2252    // https://html.spec.whatwg.org/multipage/#dom-media-crossOrigin
2253    fn GetCrossOrigin(&self) -> Option<DOMString> {
2254        reflect_cross_origin_attribute(self.upcast::<Element>())
2255    }
2256    // https://html.spec.whatwg.org/multipage/#dom-media-crossOrigin
2257    fn SetCrossOrigin(&self, value: Option<DOMString>, can_gc: CanGc) {
2258        set_cross_origin_attribute(self.upcast::<Element>(), value, can_gc);
2259    }
2260
2261    // https://html.spec.whatwg.org/multipage/#dom-media-muted
2262    fn Muted(&self) -> bool {
2263        self.muted.get()
2264    }
2265
2266    // https://html.spec.whatwg.org/multipage/#dom-media-muted
2267    fn SetMuted(&self, value: bool) {
2268        if self.muted.get() == value {
2269            return;
2270        }
2271
2272        if let Some(ref player) = *self.player.borrow() {
2273            let _ = player.lock().unwrap().set_mute(value);
2274        }
2275
2276        self.muted.set(value);
2277        self.owner_global()
2278            .task_manager()
2279            .media_element_task_source()
2280            .queue_simple_event(self.upcast(), atom!("volumechange"));
2281        if !self.is_allowed_to_play() {
2282            self.internal_pause_steps();
2283        }
2284    }
2285
2286    // https://html.spec.whatwg.org/multipage/#dom-media-srcobject
2287    fn GetSrcObject(&self) -> Option<MediaStreamOrBlob> {
2288        (*self.src_object.borrow())
2289            .as_ref()
2290            .map(|src_object| match src_object {
2291                SrcObject::Blob(blob) => MediaStreamOrBlob::Blob(DomRoot::from_ref(blob)),
2292                SrcObject::MediaStream(stream) => {
2293                    MediaStreamOrBlob::MediaStream(DomRoot::from_ref(stream))
2294                },
2295            })
2296    }
2297
2298    // https://html.spec.whatwg.org/multipage/#dom-media-srcobject
2299    fn SetSrcObject(&self, value: Option<MediaStreamOrBlob>, can_gc: CanGc) {
2300        *self.src_object.borrow_mut() = value.map(|value| value.into());
2301        self.media_element_load_algorithm(can_gc);
2302    }
2303
2304    // https://html.spec.whatwg.org/multipage/#attr-media-preload
2305    // Missing/Invalid values are user-agent defined.
2306    make_enumerated_getter!(
2307        Preload,
2308        "preload",
2309        "none" | "metadata" | "auto",
2310        missing => "auto",
2311        invalid => "auto"
2312    );
2313
2314    // https://html.spec.whatwg.org/multipage/#attr-media-preload
2315    make_setter!(SetPreload, "preload");
2316
2317    // https://html.spec.whatwg.org/multipage/#dom-media-currentsrc
2318    fn CurrentSrc(&self) -> USVString {
2319        USVString(self.current_src.borrow().clone())
2320    }
2321
2322    // https://html.spec.whatwg.org/multipage/#dom-media-load
2323    fn Load(&self, can_gc: CanGc) {
2324        self.media_element_load_algorithm(can_gc);
2325    }
2326
2327    // https://html.spec.whatwg.org/multipage/#dom-navigator-canplaytype
2328    fn CanPlayType(&self, type_: DOMString) -> CanPlayTypeResult {
2329        match ServoMedia::get().can_play_type(&type_) {
2330            SupportsMediaType::No => CanPlayTypeResult::_empty,
2331            SupportsMediaType::Maybe => CanPlayTypeResult::Maybe,
2332            SupportsMediaType::Probably => CanPlayTypeResult::Probably,
2333        }
2334    }
2335
2336    // https://html.spec.whatwg.org/multipage/#dom-media-error
2337    fn GetError(&self) -> Option<DomRoot<MediaError>> {
2338        self.error.get()
2339    }
2340
2341    // https://html.spec.whatwg.org/multipage/#dom-media-play
2342    fn Play(&self, comp: InRealm, can_gc: CanGc) -> Rc<Promise> {
2343        let promise = Promise::new_in_current_realm(comp, can_gc);
2344        // Step 1.
2345        // FIXME(nox): Reject promise if not allowed to play.
2346
2347        // Step 2.
2348        if self
2349            .error
2350            .get()
2351            .is_some_and(|e| e.Code() == MEDIA_ERR_SRC_NOT_SUPPORTED)
2352        {
2353            promise.reject_error(Error::NotSupported, can_gc);
2354            return promise;
2355        }
2356
2357        // Step 3.
2358        self.push_pending_play_promise(&promise);
2359
2360        // Step 4.
2361        if self.network_state.get() == NetworkState::Empty {
2362            self.invoke_resource_selection_algorithm(can_gc);
2363        }
2364
2365        // Step 5.
2366        if self.Ended() && self.direction_of_playback() == PlaybackDirection::Forwards {
2367            self.seek(
2368                self.earliest_possible_position(),
2369                /* approximate_for_speed */ false,
2370            );
2371        }
2372
2373        let state = self.ready_state.get();
2374
2375        let global = self.owner_global();
2376        let task_manager = global.task_manager();
2377        let task_source = task_manager.media_element_task_source();
2378        if self.Paused() {
2379            // Step 6.1.
2380            self.paused.set(false);
2381
2382            // Step 6.2.
2383            if self.show_poster.get() {
2384                self.show_poster.set(false);
2385                self.time_marches_on();
2386            }
2387
2388            // Step 6.3.
2389            task_source.queue_simple_event(self.upcast(), atom!("play"));
2390
2391            // Step 6.4.
2392            match state {
2393                ReadyState::HaveNothing |
2394                ReadyState::HaveMetadata |
2395                ReadyState::HaveCurrentData => {
2396                    task_source.queue_simple_event(self.upcast(), atom!("waiting"));
2397                },
2398                ReadyState::HaveFutureData | ReadyState::HaveEnoughData => {
2399                    self.notify_about_playing();
2400                },
2401            }
2402        } else if state == ReadyState::HaveFutureData || state == ReadyState::HaveEnoughData {
2403            // Step 7.
2404            self.take_pending_play_promises(Ok(()));
2405            let this = Trusted::new(self);
2406            let generation_id = self.generation_id.get();
2407            task_source.queue(task!(resolve_pending_play_promises: move || {
2408                let this = this.root();
2409                if generation_id != this.generation_id.get() {
2410                    return;
2411                }
2412
2413                this.fulfill_in_flight_play_promises(|| {
2414                    this.play_media();
2415                });
2416            }));
2417        }
2418
2419        // Step 8.
2420        self.autoplaying.set(false);
2421
2422        // Step 9.
2423        promise
2424    }
2425
2426    // https://html.spec.whatwg.org/multipage/#dom-media-pause
2427    fn Pause(&self, can_gc: CanGc) {
2428        // Step 1
2429        if self.network_state.get() == NetworkState::Empty {
2430            self.invoke_resource_selection_algorithm(can_gc);
2431        }
2432
2433        // Step 2
2434        self.internal_pause_steps();
2435    }
2436
2437    // https://html.spec.whatwg.org/multipage/#dom-media-paused
2438    fn Paused(&self) -> bool {
2439        self.paused.get()
2440    }
2441
2442    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
2443    fn GetDefaultPlaybackRate(&self) -> Fallible<Finite<f64>> {
2444        Ok(Finite::wrap(self.defaultPlaybackRate.get()))
2445    }
2446
2447    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
2448    fn SetDefaultPlaybackRate(&self, value: Finite<f64>) -> ErrorResult {
2449        let min_allowed = -64.0;
2450        let max_allowed = 64.0;
2451        if *value < min_allowed || *value > max_allowed {
2452            return Err(Error::NotSupported);
2453        }
2454
2455        if *value != self.defaultPlaybackRate.get() {
2456            self.defaultPlaybackRate.set(*value);
2457            self.queue_ratechange_event();
2458        }
2459
2460        Ok(())
2461    }
2462
2463    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
2464    fn GetPlaybackRate(&self) -> Fallible<Finite<f64>> {
2465        Ok(Finite::wrap(self.playbackRate.get()))
2466    }
2467
2468    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
2469    fn SetPlaybackRate(&self, value: Finite<f64>) -> ErrorResult {
2470        let min_allowed = -64.0;
2471        let max_allowed = 64.0;
2472        if *value < min_allowed || *value > max_allowed {
2473            return Err(Error::NotSupported);
2474        }
2475
2476        if *value != self.playbackRate.get() {
2477            self.playbackRate.set(*value);
2478            self.queue_ratechange_event();
2479            if self.is_potentially_playing() {
2480                if let Some(ref player) = *self.player.borrow() {
2481                    if let Err(e) = player.lock().unwrap().set_rate(*value) {
2482                        warn!("Could not set the playback rate {:?}", e);
2483                    }
2484                }
2485            }
2486        }
2487
2488        Ok(())
2489    }
2490
2491    // https://html.spec.whatwg.org/multipage/#dom-media-duration
2492    fn Duration(&self) -> f64 {
2493        self.duration.get()
2494    }
2495
2496    // https://html.spec.whatwg.org/multipage/#dom-media-currenttime
2497    fn CurrentTime(&self) -> Finite<f64> {
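        // If a default playback start position is pending (i.e. a seek was
        // requested before any media data was available), report it instead of
        // the engine's playback position.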
2498        Finite::wrap(if self.default_playback_start_position.get() != 0. {
2499            self.default_playback_start_position.get()
2500        } else {
2501            self.playback_position.get()
2502        })
2503    }
2504
2505    // https://html.spec.whatwg.org/multipage/#dom-media-currenttime
2506    fn SetCurrentTime(&self, time: Finite<f64>) {
2507        if self.ready_state.get() == ReadyState::HaveNothing {
2508            self.default_playback_start_position.set(*time);
2509        } else {
2510            self.playback_position.set(*time);
2511            self.seek(*time, /* approximate_for_speed */ false);
2512        }
2513    }
2514
2515    // https://html.spec.whatwg.org/multipage/#dom-media-seeking
2516    fn Seeking(&self) -> bool {
2517        self.seeking.get()
2518    }
2519
2520    // https://html.spec.whatwg.org/multipage/#ended-playback
2521    fn Ended(&self) -> bool {
2522        if self.ready_state.get() < ReadyState::HaveMetadata {
2523            return false;
2524        }
2525
2526        let playback_pos = self.playback_position.get();
2527
2528        match self.direction_of_playback() {
2529            PlaybackDirection::Forwards => playback_pos >= self.Duration() && !self.Loop(),
2530            PlaybackDirection::Backwards => playback_pos <= self.earliest_possible_position(),
2531        }
2532    }
2533
2534    // https://html.spec.whatwg.org/multipage/#dom-media-fastseek
2535    fn FastSeek(&self, time: Finite<f64>) {
2536        self.seek(*time, /* approximate_for_speed */ true);
2537    }
2538
2539    // https://html.spec.whatwg.org/multipage/#dom-media-played
2540    fn Played(&self) -> DomRoot<TimeRanges> {
2541        TimeRanges::new(
2542            self.global().as_window(),
2543            self.played.borrow().clone(),
2544            CanGc::note(),
2545        )
2546    }
2547
2548    // https://html.spec.whatwg.org/multipage/#dom-media-seekable
2549    fn Seekable(&self) -> DomRoot<TimeRanges> {
2550        let mut seekable = TimeRangesContainer::default();
2551        if let Some(ref player) = *self.player.borrow() {
2552            if let Ok(ranges) = player.lock().unwrap().seekable() {
2553                for range in ranges {
2554                    let _ = seekable.add(range.start, range.end);
2555                }
2556            }
2557        }
2558        TimeRanges::new(self.global().as_window(), seekable, CanGc::note())
2559    }
2560
2561    // https://html.spec.whatwg.org/multipage/#dom-media-buffered
2562    fn Buffered(&self) -> DomRoot<TimeRanges> {
2563        let mut buffered = TimeRangesContainer::default();
2564        if let Some(ref player) = *self.player.borrow() {
2565            if let Ok(ranges) = player.lock().unwrap().buffered() {
2566                for range in ranges {
2567                    let _ = buffered.add(range.start, range.end);
2568                }
2569            }
2570        }
2571        TimeRanges::new(self.global().as_window(), buffered, CanGc::note())
2572    }
2573
2574    // https://html.spec.whatwg.org/multipage/#dom-media-audiotracks
2575    fn AudioTracks(&self) -> DomRoot<AudioTrackList> {
2576        let window = self.owner_window();
2577        self.audio_tracks_list
2578            .or_init(|| AudioTrackList::new(&window, &[], Some(self), CanGc::note()))
2579    }
2580
2581    // https://html.spec.whatwg.org/multipage/#dom-media-videotracks
2582    fn VideoTracks(&self) -> DomRoot<VideoTrackList> {
2583        let window = self.owner_window();
2584        self.video_tracks_list
2585            .or_init(|| VideoTrackList::new(&window, &[], Some(self), CanGc::note()))
2586    }
2587
2588    // https://html.spec.whatwg.org/multipage/#dom-media-texttracks
2589    fn TextTracks(&self) -> DomRoot<TextTrackList> {
2590        let window = self.owner_window();
2591        self.text_tracks_list
2592            .or_init(|| TextTrackList::new(&window, &[], CanGc::note()))
2593    }
2594
2595    // https://html.spec.whatwg.org/multipage/#dom-media-addtexttrack
2596    fn AddTextTrack(
2597        &self,
2598        kind: TextTrackKind,
2599        label: DOMString,
2600        language: DOMString,
2601    ) -> DomRoot<TextTrack> {
2602        let window = self.owner_window();
2603        // Steps 1 & 2
2604        // FIXME(#22314, dlrobertson) set the ready state to Loaded
2605        let track = TextTrack::new(
2606            &window,
2607            "".into(),
2608            kind,
2609            label,
2610            language,
2611            TextTrackMode::Hidden,
2612            None,
2613            CanGc::note(),
2614        );
2615        // Steps 3 & 4
2616        self.TextTracks().add(&track);
2617        // Step 5
2618        DomRoot::from_ref(&track)
2619    }
2620
2621    // https://html.spec.whatwg.org/multipage/#dom-media-volume
2622    fn GetVolume(&self) -> Fallible<Finite<f64>> {
2623        Ok(Finite::wrap(self.volume.get()))
2624    }
2625
2626    // https://html.spec.whatwg.org/multipage/#dom-media-volume
2627    fn SetVolume(&self, value: Finite<f64>) -> ErrorResult {
2628        let minimum_volume = 0.0;
2629        let maximum_volume = 1.0;
2630        if *value < minimum_volume || *value > maximum_volume {
2631            return Err(Error::IndexSize);
2632        }
2633
2634        if *value != self.volume.get() {
2635            self.volume.set(*value);
2636
2637            self.owner_global()
2638                .task_manager()
2639                .media_element_task_source()
2640                .queue_simple_event(self.upcast(), atom!("volumechange"));
2641            if !self.is_allowed_to_play() {
2642                self.internal_pause_steps();
2643            }
2644        }
2645
2646        Ok(())
2647    }
2648}
2649
2650impl VirtualMethods for HTMLMediaElement {
2651    fn super_type(&self) -> Option<&dyn VirtualMethods> {
2652        Some(self.upcast::<HTMLElement>() as &dyn VirtualMethods)
2653    }
2654
2655    fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation, can_gc: CanGc) {
2656        self.super_type()
2657            .unwrap()
2658            .attribute_mutated(attr, mutation, can_gc);
2659
2660        match *attr.local_name() {
2661            local_name!("muted") => {
2662                self.SetMuted(mutation.new_value(attr).is_some());
2663            },
2664            local_name!("src") => {
2665                if mutation.new_value(attr).is_none() {
2666                    self.clear_current_frame_data();
2667                    return;
2668                }
2669                self.media_element_load_algorithm(CanGc::note());
2670            },
2671            local_name!("controls") => {
2672                if mutation.new_value(attr).is_some() {
2673                    self.render_controls(can_gc);
2674                } else {
2675                    self.remove_controls(can_gc);
2676                }
2677            },
2678            _ => (),
2679        };
2680    }
2681
2682    // https://html.spec.whatwg.org/multipage/#playing-the-media-resource:remove-an-element-from-a-document
2683    fn unbind_from_tree(&self, context: &UnbindContext, can_gc: CanGc) {
2684        self.super_type().unwrap().unbind_from_tree(context, can_gc);
2685
2686        self.remove_controls(can_gc);
2687
2688        if context.tree_connected {
2689            let task = MediaElementMicrotask::PauseIfNotInDocument {
2690                elem: DomRoot::from_ref(self),
2691            };
2692            ScriptThread::await_stable_state(Microtask::MediaElement(task));
2693        }
2694    }
2695}
2696
2697#[derive(JSTraceable, MallocSizeOf)]
2698pub(crate) enum MediaElementMicrotask {
2699    ResourceSelection {
2700        elem: DomRoot<HTMLMediaElement>,
2701        generation_id: u32,
2702        #[no_trace]
2703        base_url: ServoUrl,
2704    },
2705    PauseIfNotInDocument {
2706        elem: DomRoot<HTMLMediaElement>,
2707    },
2708    Seeked {
2709        elem: DomRoot<HTMLMediaElement>,
2710        generation_id: u32,
2711    },
2712}
2713
2714impl MicrotaskRunnable for MediaElementMicrotask {
2715    fn handler(&self, can_gc: CanGc) {
2716        match self {
2717            &MediaElementMicrotask::ResourceSelection {
2718                ref elem,
2719                generation_id,
2720                ref base_url,
2721            } => {
2722                if generation_id == elem.generation_id.get() {
2723                    elem.resource_selection_algorithm_sync(base_url.clone(), can_gc);
2724                }
2725            },
2726            MediaElementMicrotask::PauseIfNotInDocument { elem } => {
2727                if !elem.upcast::<Node>().is_connected() {
2728                    elem.internal_pause_steps();
2729                }
2730            },
2731            &MediaElementMicrotask::Seeked {
2732                ref elem,
2733                generation_id,
2734            } => {
2735                if generation_id == elem.generation_id.get() {
2736                    elem.seek_end();
2737                }
2738            },
2739        }
2740    }
2741
2742    fn enter_realm(&self) -> JSAutoRealm {
2743        match self {
2744            &MediaElementMicrotask::ResourceSelection { ref elem, .. } |
2745            &MediaElementMicrotask::PauseIfNotInDocument { ref elem } |
2746            &MediaElementMicrotask::Seeked { ref elem, .. } => enter_realm(&**elem),
2747        }
2748    }
2749}
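// A minimal sketch (comments only) of how these microtasks are queued elsewhere in
// this file; `self` is the media element and `base_url` is assumed to be the
// document base URL captured when the load started:
//
//     let task = MediaElementMicrotask::ResourceSelection {
//         elem: DomRoot::from_ref(self),
//         generation_id: self.generation_id.get(),
//         base_url,
//     };
//     ScriptThread::await_stable_state(Microtask::MediaElement(task));
//
// When the handler later runs, the stored generation_id is compared against the
// element's current one, so a task queued before the load algorithm re-ran is
// silently dropped.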
2750
2751enum Resource {
2752    Object,
2753    Url(ServoUrl),
2754}
2755
2756#[derive(Debug, MallocSizeOf, PartialEq)]
2757enum DataBuffer {
2758    Payload(Vec<u8>),
2759    EndOfStream,
2760}
2761
2762#[derive(MallocSizeOf)]
2763struct BufferedDataSource {
2764    /// During initial setup and seeking (including clearing the buffer queue
2765    /// and resetting the end-of-stream state), the data source should be locked and
2766    /// any request for processing should be ignored until the media player informs us
2767    /// via the NeedData event that it is ready to accept incoming data.
2768    locked: Cell<bool>,
2769    /// Temporary storage for incoming data.
2770    buffers: VecDeque<DataBuffer>,
2771}
2772
2773impl BufferedDataSource {
2774    fn new() -> BufferedDataSource {
2775        BufferedDataSource {
2776            locked: Cell::new(true),
2777            buffers: VecDeque::default(),
2778        }
2779    }
2780
2781    fn set_locked(&self, locked: bool) {
2782        self.locked.set(locked)
2783    }
2784
2785    fn add_buffer_to_queue(&mut self, buffer: DataBuffer) {
2786        debug_assert_ne!(
2787            self.buffers.back(),
2788            Some(&DataBuffer::EndOfStream),
2789            "The media backend does not expect any further data after end of stream"
2790        );
2791
2792        self.buffers.push_back(buffer);
2793    }
2794
2795    fn process_into_player_from_queue(
2796        &mut self,
2797        player: &Arc<Mutex<dyn Player>>,
2798    ) -> Result<(), PlayerError> {
2799        // Early return while the data source is locked; processing requests are ignored.
2800        if self.locked.get() {
2801            return Ok(());
2802        }
2803
2804        while let Some(buffer) = self.buffers.pop_front() {
2805            match buffer {
2806                DataBuffer::Payload(payload) => {
2807                    if let Err(e) = player.lock().unwrap().push_data(payload) {
2808                        warn!("Could not push input data to player {:?}", e);
2809                        return Err(e);
2810                    }
2811                },
2812                DataBuffer::EndOfStream => {
2813                    if let Err(e) = player.lock().unwrap().end_of_stream() {
2814                        warn!("Could not signal EOS to player {:?}", e);
2815                        return Err(e);
2816                    }
2817                },
2818            }
2819        }
2820
2821        Ok(())
2822    }
2823
2824    fn reset(&mut self) {
2825        self.locked.set(true);
2826        self.buffers.clear();
2827    }
2828}
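// A minimal usage sketch of the data source above (comments only, not compiled
// here); `player` stands in for the element's `Arc<Mutex<dyn Player>>` handle:
//
//     let mut source = BufferedDataSource::new(); // starts locked
//     source.add_buffer_to_queue(DataBuffer::Payload(vec![0u8; 4096]));
//     source.add_buffer_to_queue(DataBuffer::EndOfStream);
//     // While locked (setup or seek in progress), processing is a no-op.
//     source.process_into_player_from_queue(&player)?;
//     // Once the player signals NeedData, the element unlocks and drains the
//     // queue into the player, forwarding the end-of-stream marker last.
//     source.set_locked(false);
//     source.process_into_player_from_queue(&player)?;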
2829
2830/// Indicates the reason why a fetch request was cancelled.
2831#[derive(Debug, MallocSizeOf, PartialEq)]
2832enum CancelReason {
2833    /// We were asked to stop pushing data to the player.
2834    Backoff,
2835    /// An error occurred while fetching the media data.
2836    Error,
2837    /// A new request overrode this one.
2838    Overridden,
2839}
2840
2841#[derive(MallocSizeOf)]
2842pub(crate) struct HTMLMediaElementFetchContext {
2843    /// The fetch request id.
2844    request_id: RequestId,
2845    /// Some if the request has been cancelled.
2846    cancel_reason: Option<CancelReason>,
2847    /// Indicates whether the fetched stream is seekable.
2848    is_seekable: bool,
2849    /// Indicates whether the fetched stream is origin clean.
2850    origin_clean: bool,
2851    /// The buffered data source to be processed by the media backend.
2852    data_source: DomRefCell<BufferedDataSource>,
2853    /// Fetch canceller. Allows cancelling the current fetch request by
2854    /// manually calling its .cancel() method or automatically on Drop.
2855    fetch_canceller: FetchCanceller,
2856}
2857
2858impl HTMLMediaElementFetchContext {
2859    fn new(
2860        request_id: RequestId,
2861        core_resource_thread: CoreResourceThread,
2862    ) -> HTMLMediaElementFetchContext {
2863        HTMLMediaElementFetchContext {
2864            request_id,
2865            cancel_reason: None,
2866            is_seekable: false,
2867            origin_clean: true,
2868            data_source: DomRefCell::new(BufferedDataSource::new()),
2869            fetch_canceller: FetchCanceller::new(request_id, core_resource_thread.clone()),
2870        }
2871    }
2872
2873    fn request_id(&self) -> RequestId {
2874        self.request_id
2875    }
2876
2877    fn is_seekable(&self) -> bool {
2878        self.is_seekable
2879    }
2880
2881    fn set_seekable(&mut self, seekable: bool) {
2882        self.is_seekable = seekable;
2883    }
2884
2885    pub(crate) fn origin_is_clean(&self) -> bool {
2886        self.origin_clean
2887    }
2888
2889    fn set_origin_unclean(&mut self) {
2890        self.origin_clean = false;
2891    }
2892
2893    fn data_source(&self) -> &DomRefCell<BufferedDataSource> {
2894        &self.data_source
2895    }
2896
2897    fn cancel(&mut self, reason: CancelReason) {
2898        if self.cancel_reason.is_some() {
2899            return;
2900        }
2901        self.cancel_reason = Some(reason);
2902        self.data_source.borrow_mut().reset();
2903        self.fetch_canceller.cancel();
2904    }
2905
2906    fn cancel_reason(&self) -> &Option<CancelReason> {
2907        &self.cancel_reason
2908    }
2909}
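// A minimal sketch of the cancellation flow above (comments only); `request_id`
// and `core_resource_thread` are assumed to come from the element's fetch setup:
//
//     let mut context = HTMLMediaElementFetchContext::new(request_id, core_resource_thread);
//     context.set_seekable(true);
//     // The player reported EnoughData, so stop the network request: the data
//     // source is reset and the underlying fetch is cancelled exactly once.
//     context.cancel(CancelReason::Backoff);
//     assert_eq!(context.cancel_reason(), &Some(CancelReason::Backoff));
//     // A later cancellation does not overwrite the original reason.
//     context.cancel(CancelReason::Error);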
2910
2911struct HTMLMediaElementFetchListener {
2912    /// The element that initiated the request.
2913    elem: Trusted<HTMLMediaElement>,
2914    /// The response metadata received to date.
2915    metadata: Option<Metadata>,
2916    /// The generation of the media element when this fetch started.
2917    generation_id: u32,
2918    /// The fetch request id.
2919    request_id: RequestId,
2920    /// Time at which the next progress event should be fired.
2921    next_progress_event: Instant,
2922    /// Timing data for this resource.
2923    resource_timing: ResourceFetchTiming,
2924    /// Url for the resource.
2925    url: ServoUrl,
2926    /// Expected content length of the media asset being fetched or played.
2927    expected_content_length: Option<u64>,
2928    /// Actual number of bytes of the media asset fetched so far.
2929    fetched_content_length: u64,
2930    /// Number of bytes to discard from the network response for the ongoing
2931    /// request when range requests are not supported. Seek requests set it
2932    /// to the required position (in bytes).
2933    content_length_to_discard: u64,
2934}
2935
2936// https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list
2937impl FetchResponseListener for HTMLMediaElementFetchListener {
2938    fn process_request_body(&mut self, _: RequestId) {}
2939
2940    fn process_request_eof(&mut self, _: RequestId) {}
2941
2942    fn process_response(&mut self, _: RequestId, metadata: Result<FetchMetadata, NetworkError>) {
2943        let elem = self.elem.root();
2944
2945        if let Ok(FetchMetadata::Filtered {
2946            filtered: FilteredMetadata::Opaque | FilteredMetadata::OpaqueRedirect(_),
2947            ..
2948        }) = metadata
2949        {
2950            if let Some(ref mut current_fetch_context) = *elem.current_fetch_context.borrow_mut() {
2951                current_fetch_context.set_origin_unclean();
2952            }
2953        }
2954
2955        self.metadata = metadata.ok().map(|m| match m {
2956            FetchMetadata::Unfiltered(m) => m,
2957            FetchMetadata::Filtered { unsafe_, .. } => unsafe_,
2958        });
2959
2960        if let Some(metadata) = self.metadata.as_ref() {
2961            if let Some(headers) = metadata.headers.as_ref() {
2962                // For range requests we get the size of the media asset from the Content-Range
2963                // header. Otherwise, we get it from the Content-Length header.
2964                let content_length =
2965                    if let Some(content_range) = headers.typed_get::<ContentRange>() {
2966                        content_range.bytes_len()
2967                    } else {
2968                        headers
2969                            .typed_get::<ContentLength>()
2970                            .map(|content_length| content_length.0)
2971                    };
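                // For example (illustrative values): a 206 response to a range request
                // may carry `Content-Range: bytes 744-1023/1024`, for which `bytes_len()`
                // yields `Some(1024)`; a plain 200 response would instead expose the same
                // total via `Content-Length: 1024`.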
2972
2973                // We only set the expected input size if it changes.
2974                if content_length != self.expected_content_length {
2975                    if let Some(content_length) = content_length {
2976                        self.expected_content_length = Some(content_length);
2977                    }
2978                }
2979            }
2980        }
2981
2982        // Explicitly initialize the media player input size (0 when the length is unknown, e.g. live sources).
2983        if let Err(e) = elem
2984            .player
2985            .borrow()
2986            .as_ref()
2987            .unwrap()
2988            .lock()
2989            .unwrap()
2990            .set_input_size(self.expected_content_length.unwrap_or_default())
2991        {
2992            warn!("Could not set player input size {:?}", e);
2993        }
2994
2995        let (status_is_ok, is_seekable) = self.metadata.as_ref().map_or((true, false), |s| {
2996            let status = &s.status;
2997            (
2998                status.is_success(),
2999                *status == StatusCode::PARTIAL_CONTENT ||
3000                    *status == StatusCode::RANGE_NOT_SATISFIABLE,
3001            )
3002        });
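        // For example (illustrative values): a 206 Partial Content response maps to
        // (status_is_ok = true, is_seekable = true), a 416 Range Not Satisfiable
        // response to (false, true), and a 404 to (false, false), which takes the
        // failure branch below.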
3003
3004        if is_seekable {
3005            // The server supports range requests.
3006            if let Some(ref mut current_fetch_context) = *elem.current_fetch_context.borrow_mut() {
3007                current_fetch_context.set_seekable(true);
3008            }
3009        }
3010
3011        // => "If the media data cannot be fetched at all..."
3012        if !status_is_ok {
3013            // Ensure that the element doesn't receive any further notifications
3014            // of the aborted fetch.
3015            if let Some(ref mut current_fetch_context) = *elem.current_fetch_context.borrow_mut() {
3016                current_fetch_context.cancel(CancelReason::Error);
3017            }
3018            elem.queue_dedicated_media_source_failure_steps();
3019        }
3020    }
3021
3022    fn process_response_chunk(&mut self, _: RequestId, chunk: Vec<u8>) {
3023        let elem = self.elem.root();
3024
3025        self.fetched_content_length += chunk.len() as u64;
3026
3027        // If an error was received previously, we skip processing the payload.
3028        if let Some(ref mut current_fetch_context) = *elem.current_fetch_context.borrow_mut() {
3029            if current_fetch_context.cancel_reason().is_some() {
3030                return;
3031            }
3032
3033            // Discard some or all of this response chunk if the fetch context
3034            // doesn't support range requests and a seek offset is still pending.
3035            let payload = if !current_fetch_context.is_seekable() &&
3036                self.content_length_to_discard != 0
3037            {
3038                if chunk.len() as u64 > self.content_length_to_discard {
3039                    let shrink_chunk = chunk[self.content_length_to_discard as usize..].to_vec();
3040                    self.content_length_to_discard = 0;
3041                    shrink_chunk
3042                } else {
3043                    // Completely discard this response chunk.
3044                    self.content_length_to_discard -= chunk.len() as u64;
3045                    return;
3046                }
3047            } else {
3048                chunk
3049            };
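            // Worked example (illustrative numbers): after a seek to byte offset 1000 on
            // a non-seekable stream, a first 600-byte chunk is discarded entirely (600 <=
            // 1000, leaving 400 bytes to skip), and a second 600-byte chunk is trimmed to
            // its last 200 bytes (chunk[400..]) before being queued below.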
3050
3051            if let Err(e) = {
3052                let mut data_source = current_fetch_context.data_source().borrow_mut();
3053                data_source.add_buffer_to_queue(DataBuffer::Payload(payload));
3054                data_source.process_into_player_from_queue(elem.player.borrow().as_ref().unwrap())
3055            } {
3056                // If we are pushing too much data and we know that we can
3057                // restart the download later from where we left off, we cancel
3058                // the current request. Otherwise, we continue the request
3059                // assuming that we may drop some frames.
3060                if e == PlayerError::EnoughData {
3061                    current_fetch_context.cancel(CancelReason::Backoff);
3062                }
3063                return;
3064            }
3065        }
3066
3067        // https://html.spec.whatwg.org/multipage/#concept-media-load-resource step 4,
3068        // => "If mode is remote" step 2
3069        if Instant::now() > self.next_progress_event {
3070            elem.owner_global()
3071                .task_manager()
3072                .media_element_task_source()
3073                .queue_simple_event(elem.upcast(), atom!("progress"));
3074            self.next_progress_event = Instant::now() + Duration::from_millis(350);
3075        }
3076    }
3077
3078    // https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list
3079    fn process_response_eof(
3080        &mut self,
3081        _: RequestId,
3082        status: Result<ResourceFetchTiming, NetworkError>,
3083    ) {
3084        trace!("process response eof");
3085
3086        let elem = self.elem.root();
3087
3088        // There are no more chunks of the response body forthcoming, so we can
3089        // go ahead and notify the media backend not to expect any further data.
3090        if let Some(ref mut current_fetch_context) = *elem.current_fetch_context.borrow_mut() {
3091            // On the initial READY -> PAUSED state change, the media player performs a
3092            // seek to the initial position via an event with a seek segment in TIME format,
3093            // while the media stack operates in BYTES format; configuring the segment start
3094            // and stop positions is not possible without the total size of the stream. As a
3095            // fallback, the media player performs the seek in BYTES format and initiates a
3096            // seek request via the "seek-data" callback with the required offset.
3097            if self.expected_content_length.is_none() && self.fetched_content_length != 0 {
3098                if let Err(e) = elem
3099                    .player
3100                    .borrow()
3101                    .as_ref()
3102                    .unwrap()
3103                    .lock()
3104                    .unwrap()
3105                    .set_input_size(self.fetched_content_length)
3106                {
3107                    warn!("Could not set player input size {:?}", e);
3108                }
3109            }
3110
3111            let mut data_source = current_fetch_context.data_source().borrow_mut();
3112
3113            data_source.add_buffer_to_queue(DataBuffer::EndOfStream);
3114            let _ =
3115                data_source.process_into_player_from_queue(elem.player.borrow().as_ref().unwrap());
3116
3117            // If an error was previously received, we skip any further processing.
3118            if let Some(CancelReason::Error) = current_fetch_context.cancel_reason() {
3119                return;
3120            }
3121        }
3122
3123        if status.is_ok() && self.fetched_content_length != 0 {
3124            elem.upcast::<EventTarget>()
3125                .fire_event(atom!("progress"), CanGc::note());
3126
3127            elem.network_state.set(NetworkState::Idle);
3128
3129            elem.upcast::<EventTarget>()
3130                .fire_event(atom!("suspend"), CanGc::note());
3131        }
3132        // => "If the connection is interrupted after some media data has been received..."
3133        else if elem.ready_state.get() != ReadyState::HaveNothing {
3134            // If the media backend has already flagged an error, skip any observable
3135            // network-related errors.
3136            if elem.in_error_state() {
3137                return;
3138            }
3139
3140            // Step 1
3141            if let Some(ref mut current_fetch_context) = *elem.current_fetch_context.borrow_mut() {
3142                current_fetch_context.cancel(CancelReason::Error);
3143            }
3144
3145            // Step 2
3146            elem.error.set(Some(&*MediaError::new(
3147                &elem.owner_window(),
3148                MEDIA_ERR_NETWORK,
3149                CanGc::note(),
3150            )));
3151
3152            // Step 3
3153            elem.network_state.set(NetworkState::Idle);
3154
3155            // Step 4.
3156            elem.delay_load_event(false, CanGc::note());
3157
3158            // Step 5
3159            elem.upcast::<EventTarget>()
3160                .fire_event(atom!("error"), CanGc::note());
3161        } else {
3162            // => "If the media data cannot be fetched at all..."
3163            elem.queue_dedicated_media_source_failure_steps();
3164        }
3165    }
3166
3167    fn resource_timing_mut(&mut self) -> &mut ResourceFetchTiming {
3168        &mut self.resource_timing
3169    }
3170
3171    fn resource_timing(&self) -> &ResourceFetchTiming {
3172        &self.resource_timing
3173    }
3174
3175    fn submit_resource_timing(&mut self) {
3176        network_listener::submit_timing(self, CanGc::note())
3177    }
3178
3179    fn process_csp_violations(&mut self, _request_id: RequestId, violations: Vec<Violation>) {
3180        let global = &self.resource_timing_global();
3181        global.report_csp_violations(violations, None, None);
3182    }
3183}
3184
3185impl ResourceTimingListener for HTMLMediaElementFetchListener {
3186    fn resource_timing_information(&self) -> (InitiatorType, ServoUrl) {
3187        let initiator_type = InitiatorType::LocalName(
3188            self.elem
3189                .root()
3190                .upcast::<Element>()
3191                .local_name()
3192                .to_string(),
3193        );
3194        (initiator_type, self.url.clone())
3195    }
3196
3197    fn resource_timing_global(&self) -> DomRoot<GlobalScope> {
3198        self.elem.root().owner_document().global()
3199    }
3200}
3201
3202impl PreInvoke for HTMLMediaElementFetchListener {
3203    fn should_invoke(&self) -> bool {
3204        let elem = self.elem.root();
3205
3206        if elem.generation_id.get() != self.generation_id || elem.player.borrow().is_none() {
3207            return false;
3208        }
3209
3210        // If a newer fetch request has replaced this one, skip processing this response.
3211        elem.current_fetch_context
3212            .borrow()
3213            .as_ref()
3214            .is_some_and(|context| context.request_id() == self.request_id)
3215    }
3216}
3217
3218impl HTMLMediaElementFetchListener {
3219    fn new(elem: &HTMLMediaElement, request_id: RequestId, url: ServoUrl, offset: u64) -> Self {
3220        Self {
3221            elem: Trusted::new(elem),
3222            metadata: None,
3223            generation_id: elem.generation_id.get(),
3224            request_id,
3225            next_progress_event: Instant::now() + Duration::from_millis(350),
3226            resource_timing: ResourceFetchTiming::new(ResourceTimingType::Resource),
3227            url,
3228            expected_content_length: None,
3229            fetched_content_length: 0,
3230            content_length_to_discard: offset,
3231        }
3232    }
3233}