script/dom/html/htmlmediaelement.rs

/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/. */

use std::cell::{Cell, RefCell};
use std::collections::VecDeque;
use std::rc::Rc;
use std::sync::{Arc, Mutex, Weak};
use std::time::{Duration, Instant};
use std::{f64, mem};

use base::id::WebViewId;
use compositing_traits::{CrossProcessCompositorApi, ImageUpdate, SerializableImageData};
use content_security_policy::sandboxing_directive::SandboxingFlagSet;
use dom_struct::dom_struct;
use embedder_traits::{MediaPositionState, MediaSessionEvent, MediaSessionPlaybackState};
use euclid::default::Size2D;
use headers::{ContentLength, ContentRange, HeaderMapExt};
use html5ever::{LocalName, Prefix, QualName, local_name, ns};
use http::StatusCode;
use http::header::{self, HeaderMap, HeaderValue};
use ipc_channel::ipc::{self, IpcSharedMemory};
use ipc_channel::router::ROUTER;
use js::jsapi::JSAutoRealm;
use layout_api::MediaFrame;
use media::{GLPlayerMsg, GLPlayerMsgForward, WindowGLContext};
use net_traits::request::{Destination, RequestId};
use net_traits::{
    CoreResourceThread, FetchMetadata, FilteredMetadata, NetworkError, ResourceFetchTiming,
};
use pixels::RasterImage;
use script_bindings::codegen::InheritTypes::{
    ElementTypeId, HTMLElementTypeId, HTMLMediaElementTypeId, NodeTypeId,
};
use servo_config::pref;
use servo_media::player::audio::AudioRenderer;
use servo_media::player::video::{VideoFrame, VideoFrameRenderer};
use servo_media::player::{PlaybackState, Player, PlayerError, PlayerEvent, SeekLock, StreamType};
use servo_media::{ClientContextId, ServoMedia, SupportsMediaType};
use servo_url::ServoUrl;
use stylo_atoms::Atom;
use uuid::Uuid;
use webrender_api::{
    ExternalImageData, ExternalImageId, ExternalImageType, ImageBufferKind, ImageDescriptor,
    ImageDescriptorFlags, ImageFormat, ImageKey,
};

use crate::document_loader::{LoadBlocker, LoadType};
use crate::dom::attr::Attr;
use crate::dom::audio::audiotrack::AudioTrack;
use crate::dom::audio::audiotracklist::AudioTrackList;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::HTMLMediaElementBinding::{
    CanPlayTypeResult, HTMLMediaElementConstants, HTMLMediaElementMethods,
};
use crate::dom::bindings::codegen::Bindings::MediaErrorBinding::MediaErrorConstants::*;
use crate::dom::bindings::codegen::Bindings::MediaErrorBinding::MediaErrorMethods;
use crate::dom::bindings::codegen::Bindings::NavigatorBinding::Navigator_Binding::NavigatorMethods;
use crate::dom::bindings::codegen::Bindings::NodeBinding::Node_Binding::NodeMethods;
use crate::dom::bindings::codegen::Bindings::TextTrackBinding::{TextTrackKind, TextTrackMode};
use crate::dom::bindings::codegen::Bindings::URLBinding::URLMethods;
use crate::dom::bindings::codegen::Bindings::WindowBinding::Window_Binding::WindowMethods;
use crate::dom::bindings::codegen::UnionTypes::{
    MediaStreamOrBlob, VideoTrackOrAudioTrackOrTextTrack,
};
use crate::dom::bindings::error::{Error, ErrorResult, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::DomGlobal;
use crate::dom::bindings::root::{Dom, DomRoot, MutNullableDom};
use crate::dom::bindings::str::{DOMString, USVString};
use crate::dom::blob::Blob;
use crate::dom::csp::{GlobalCspReporting, Violation};
use crate::dom::document::Document;
use crate::dom::element::{
    AttributeMutation, AttributeMutationReason, CustomElementCreationMode, Element, ElementCreator,
    cors_setting_for_element, reflect_cross_origin_attribute, set_cross_origin_attribute,
};
use crate::dom::event::Event;
use crate::dom::eventtarget::EventTarget;
use crate::dom::globalscope::GlobalScope;
use crate::dom::html::htmlelement::HTMLElement;
use crate::dom::html::htmlsourceelement::HTMLSourceElement;
use crate::dom::html::htmlvideoelement::HTMLVideoElement;
use crate::dom::mediaerror::MediaError;
use crate::dom::mediafragmentparser::MediaFragmentParser;
use crate::dom::medialist::MediaList;
use crate::dom::mediastream::MediaStream;
use crate::dom::node::{Node, NodeDamage, NodeTraits, UnbindContext};
use crate::dom::performance::performanceresourcetiming::InitiatorType;
use crate::dom::promise::Promise;
use crate::dom::texttrack::TextTrack;
use crate::dom::texttracklist::TextTrackList;
use crate::dom::timeranges::{TimeRanges, TimeRangesContainer};
use crate::dom::trackevent::TrackEvent;
use crate::dom::url::URL;
use crate::dom::videotrack::VideoTrack;
use crate::dom::videotracklist::VideoTrackList;
use crate::dom::virtualmethods::VirtualMethods;
use crate::fetch::{FetchCanceller, create_a_potential_cors_request};
use crate::microtask::{Microtask, MicrotaskRunnable};
use crate::network_listener::{self, FetchResponseListener, ResourceTimingListener};
use crate::realms::{InRealm, enter_realm};
use crate::script_runtime::CanGc;
use crate::script_thread::ScriptThread;
use crate::task_source::SendableTaskSource;

/// A CSS file to style the media controls.
static MEDIA_CONTROL_CSS: &str = include_str!("../../resources/media-controls.css");

/// A JS file to control the media controls.
static MEDIA_CONTROL_JS: &str = include_str!("../../resources/media-controls.js");

/// Whether the current video frame may be replaced: a frame is locked while the
/// GL player holds it for rendering and unlocked otherwise.
#[derive(MallocSizeOf, PartialEq)]
enum FrameStatus {
    Locked,
    Unlocked,
}

/// The most recently decoded video frame together with its lock status. `set`
/// only replaces the frame while it is unlocked, and `get` may only be called
/// while it is locked.
#[derive(MallocSizeOf)]
struct FrameHolder(
    FrameStatus,
    #[ignore_malloc_size_of = "defined in servo-media"] VideoFrame,
);

impl FrameHolder {
    fn new(frame: VideoFrame) -> FrameHolder {
        FrameHolder(FrameStatus::Unlocked, frame)
    }

    fn lock(&mut self) {
        if self.0 == FrameStatus::Unlocked {
            self.0 = FrameStatus::Locked;
        };
    }

    fn unlock(&mut self) {
        if self.0 == FrameStatus::Locked {
            self.0 = FrameStatus::Unlocked;
        };
    }

    fn set(&mut self, new_frame: VideoFrame) {
        if self.0 == FrameStatus::Unlocked {
            self.1 = new_frame
        };
    }

    fn get(&self) -> (u32, Size2D<i32>, usize) {
        if self.0 == FrameStatus::Locked {
            (
                self.1.get_texture_id(),
                Size2D::new(self.1.get_width(), self.1.get_height()),
                0,
            )
        } else {
            unreachable!();
        }
    }

    fn get_frame(&self) -> VideoFrame {
        self.1.clone()
    }
}

/// Renders video frames coming from the player as WebRender images, via the
/// cross-process compositor API.
#[derive(MallocSizeOf)]
pub(crate) struct MediaFrameRenderer {
    webview_id: WebViewId,
    player_id: Option<usize>,
    glplayer_id: Option<u64>,
    compositor_api: CrossProcessCompositorApi,
    #[ignore_malloc_size_of = "Defined in other crates"]
    player_context: WindowGLContext,
    /// The frame currently submitted to the compositor, if any.
    current_frame: Option<MediaFrame>,
    /// The image key of the previously submitted frame, not yet deleted.
    old_frame: Option<ImageKey>,
    /// The image key of the frame before that; it is deleted on the next render.
    very_old_frame: Option<ImageKey>,
    /// The latest decoded frame, shared with the GL player thread through
    /// lock/unlock messages.
    current_frame_holder: Option<FrameHolder>,
    /// <https://html.spec.whatwg.org/multipage/#poster-frame>
    poster_frame: Option<MediaFrame>,
}

impl MediaFrameRenderer {
    fn new(
        webview_id: WebViewId,
        compositor_api: CrossProcessCompositorApi,
        player_context: WindowGLContext,
    ) -> Self {
        Self {
            webview_id,
            player_id: None,
            glplayer_id: None,
            compositor_api,
            player_context,
            current_frame: None,
            old_frame: None,
            very_old_frame: None,
            current_frame_holder: None,
            poster_frame: None,
        }
    }

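    /// Registers this renderer with the GL player thread, if one is available,
    /// and routes its lock/unlock messages back to the script thread so that the
    /// current frame holder can be locked while its texture is in use.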
    fn setup(
        &mut self,
        player_id: usize,
        task_source: SendableTaskSource,
        weak_video_renderer: Weak<Mutex<MediaFrameRenderer>>,
    ) {
        self.player_id = Some(player_id);

        let (glplayer_id, image_receiver) = self
            .player_context
            .glplayer_thread_sender
            .as_ref()
            .map(|sender| {
                let (image_sender, image_receiver) = ipc::channel::<GLPlayerMsgForward>().unwrap();
                sender
                    .send(GLPlayerMsg::RegisterPlayer(image_sender))
                    .unwrap();
                match image_receiver.recv().unwrap() {
                    GLPlayerMsgForward::PlayerId(id) => (Some(id), Some(image_receiver)),
                    _ => unreachable!(),
                }
            })
            .unwrap_or((None, None));

        self.glplayer_id = glplayer_id;

        let Some(image_receiver) = image_receiver else {
            return;
        };

        ROUTER.add_typed_route(
            image_receiver,
            Box::new(move |message| {
                let message = message.unwrap();
                let weak_video_renderer = weak_video_renderer.clone();

                task_source.queue(task!(handle_glplayer_message: move || {
                    trace!("GLPlayer message {:?}", message);

                    let Some(video_renderer) = weak_video_renderer.upgrade() else {
                        return;
                    };

                    match message {
                        GLPlayerMsgForward::Lock(sender) => {
                            if let Some(holder) = video_renderer
                                .lock()
                                .unwrap()
                                .current_frame_holder
                                .as_mut() {
                                    holder.lock();
                                    sender.send(holder.get()).unwrap();
                                };
                        },
                        GLPlayerMsgForward::Unlock() => {
                            if let Some(holder) = video_renderer
                                .lock()
                                .unwrap()
                                .current_frame_holder
                                .as_mut() { holder.unlock() }
                        },
                        _ => (),
                    }
                }));
            }),
        );
    }

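    /// Unregisters the player from the GL player thread and asks the compositor
    /// to delete any image keys this renderer still holds.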
    fn reset(&mut self) {
        self.player_id = None;

        if let Some(glplayer_id) = self.glplayer_id.take() {
            self.player_context
                .send(GLPlayerMsg::UnregisterPlayer(glplayer_id));
        }

        self.current_frame_holder = None;

        let mut updates = smallvec::smallvec![];

        if let Some(current_frame) = self.current_frame.take() {
            updates.push(ImageUpdate::DeleteImage(current_frame.image_key));
        }

        if let Some(old_image_key) = self.old_frame.take() {
            updates.push(ImageUpdate::DeleteImage(old_image_key));
        }

        if let Some(very_old_image_key) = self.very_old_frame.take() {
            updates.push(ImageUpdate::DeleteImage(very_old_image_key));
        }

        if !updates.is_empty() {
            self.compositor_api
                .update_images(self.webview_id.into(), updates);
        }
    }

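    /// Records the decoded poster image, if any, as the element's poster frame.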
    fn set_poster_frame(&mut self, image: Option<Arc<RasterImage>>) {
        self.poster_frame = image.and_then(|image| {
            image.id.map(|image_key| MediaFrame {
                image_key,
                width: image.metadata.width as i32,
                height: image.metadata.height as i32,
            })
        });
    }
}

impl Drop for MediaFrameRenderer {
    fn drop(&mut self) {
        self.reset();
    }
}

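// `render` reuses the current WebRender image key when the incoming frame has the
// same dimensions, allocates a new key otherwise, and retires replaced keys through
// `old_frame` and `very_old_frame` before deleting them on later renders.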
impl VideoFrameRenderer for MediaFrameRenderer {
    fn render(&mut self, frame: VideoFrame) {
        if self.player_id.is_none() || (frame.is_gl_texture() && self.glplayer_id.is_none()) {
            return;
        }

        let mut updates = smallvec::smallvec![];

        if let Some(old_image_key) = mem::replace(&mut self.very_old_frame, self.old_frame.take()) {
            updates.push(ImageUpdate::DeleteImage(old_image_key));
        }

        let descriptor = ImageDescriptor::new(
            frame.get_width(),
            frame.get_height(),
            ImageFormat::BGRA8,
            ImageDescriptorFlags::empty(),
        );

        match &mut self.current_frame {
            Some(current_frame)
                if current_frame.width == frame.get_width() &&
                    current_frame.height == frame.get_height() =>
            {
                if !frame.is_gl_texture() {
                    updates.push(ImageUpdate::UpdateImage(
                        current_frame.image_key,
                        descriptor,
                        SerializableImageData::Raw(IpcSharedMemory::from_bytes(&frame.get_data())),
                        None,
                    ));
                }

                self.current_frame_holder
                    .get_or_insert_with(|| FrameHolder::new(frame.clone()))
                    .set(frame);

                if let Some(old_image_key) = self.old_frame.take() {
                    updates.push(ImageUpdate::DeleteImage(old_image_key));
                }
            },
            Some(current_frame) => {
                self.old_frame = Some(current_frame.image_key);

                let Some(new_image_key) = self
                    .compositor_api
                    .generate_image_key_blocking(self.webview_id)
                else {
                    return;
                };

                /* update current_frame */
                current_frame.image_key = new_image_key;
                current_frame.width = frame.get_width();
                current_frame.height = frame.get_height();

                let image_data = if frame.is_gl_texture() && self.glplayer_id.is_some() {
                    let texture_target = if frame.is_external_oes() {
                        ImageBufferKind::TextureExternal
                    } else {
                        ImageBufferKind::Texture2D
                    };

                    SerializableImageData::External(ExternalImageData {
                        id: ExternalImageId(self.glplayer_id.unwrap()),
                        channel_index: 0,
                        image_type: ExternalImageType::TextureHandle(texture_target),
                        normalized_uvs: false,
                    })
                } else {
                    SerializableImageData::Raw(IpcSharedMemory::from_bytes(&frame.get_data()))
                };

                self.current_frame_holder
                    .get_or_insert_with(|| FrameHolder::new(frame.clone()))
                    .set(frame);

                updates.push(ImageUpdate::AddImage(new_image_key, descriptor, image_data));
            },
            None => {
                let Some(image_key) = self
                    .compositor_api
                    .generate_image_key_blocking(self.webview_id)
                else {
                    return;
                };

                self.current_frame = Some(MediaFrame {
                    image_key,
                    width: frame.get_width(),
                    height: frame.get_height(),
                });

                let image_data = if frame.is_gl_texture() && self.glplayer_id.is_some() {
                    let texture_target = if frame.is_external_oes() {
                        ImageBufferKind::TextureExternal
                    } else {
                        ImageBufferKind::Texture2D
                    };

                    SerializableImageData::External(ExternalImageData {
                        id: ExternalImageId(self.glplayer_id.unwrap()),
                        channel_index: 0,
                        image_type: ExternalImageType::TextureHandle(texture_target),
                        normalized_uvs: false,
                    })
                } else {
                    SerializableImageData::Raw(IpcSharedMemory::from_bytes(&frame.get_data()))
                };

                self.current_frame_holder = Some(FrameHolder::new(frame));

                updates.push(ImageUpdate::AddImage(image_key, descriptor, image_data));
            },
        }
        self.compositor_api
            .update_images(self.webview_id.into(), updates);
    }
}

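/// A media provider object assigned to the element: either a `MediaStream` or a
/// `Blob`.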
#[cfg_attr(crown, crown::unrooted_must_root_lint::must_root)]
#[derive(JSTraceable, MallocSizeOf)]
enum SrcObject {
    MediaStream(Dom<MediaStream>),
    Blob(Dom<Blob>),
}

impl From<MediaStreamOrBlob> for SrcObject {
    #[cfg_attr(crown, allow(crown::unrooted_must_root))]
    fn from(src_object: MediaStreamOrBlob) -> SrcObject {
        match src_object {
            MediaStreamOrBlob::Blob(blob) => SrcObject::Blob(Dom::from_ref(&*blob)),
            MediaStreamOrBlob::MediaStream(stream) => {
                SrcObject::MediaStream(Dom::from_ref(&*stream))
            },
        }
    }
}

/// The kind of source the media element is currently loading from, if any.
#[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf, PartialEq)]
enum LoadState {
    NotLoaded,
    LoadingFromSrcObject,
    LoadingFromSrcAttribute,
    LoadingFromSourceChild,
    WaitingForSource,
}

/// <https://html.spec.whatwg.org/multipage/#loading-the-media-resource:media-element-29>
#[cfg_attr(crown, crown::unrooted_must_root_lint::must_root)]
#[derive(JSTraceable, MallocSizeOf)]
struct SourceChildrenPointer {
    source_before_pointer: Dom<HTMLSourceElement>,
    inclusive: bool,
}

impl SourceChildrenPointer {
    fn new(source_before_pointer: DomRoot<HTMLSourceElement>, inclusive: bool) -> Self {
        Self {
            source_before_pointer: source_before_pointer.as_traced(),
            inclusive,
        }
    }
}

/// Generally the presence of the loop attribute should be considered to mean playback has not
/// "ended", as "ended" and "looping" are mutually exclusive.
/// <https://html.spec.whatwg.org/multipage/#ended-playback>
#[derive(Clone, Copy, Debug, PartialEq)]
enum LoopCondition {
    Included,
    Ignored,
}

#[dom_struct]
pub(crate) struct HTMLMediaElement {
    htmlelement: HTMLElement,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
    network_state: Cell<NetworkState>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
    ready_state: Cell<ReadyState>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
    src_object: DomRefCell<Option<SrcObject>>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-currentsrc>
    current_src: DomRefCell<String>,
    /// Incremented whenever tasks associated with this element are cancelled.
    generation_id: Cell<u32>,
    /// <https://html.spec.whatwg.org/multipage/#fire-loadeddata>
    ///
    /// Reset to false every time the load algorithm is invoked.
    fired_loadeddata_event: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-error>
    error: MutNullableDom<MediaError>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-paused>
    paused: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
    default_playback_rate: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
    playback_rate: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#attr-media-autoplay>
    autoplaying: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#delaying-the-load-event-flag>
    delaying_the_load_event_flag: DomRefCell<Option<LoadBlocker>>,
    /// <https://html.spec.whatwg.org/multipage/#list-of-pending-play-promises>
    #[conditional_malloc_size_of]
    pending_play_promises: DomRefCell<Vec<Rc<Promise>>>,
    /// Play promises which are soon to be fulfilled by a queued task.
    #[allow(clippy::type_complexity)]
    #[conditional_malloc_size_of]
    in_flight_play_promises_queue: DomRefCell<VecDeque<(Box<[Rc<Promise>]>, ErrorResult)>>,
    #[ignore_malloc_size_of = "servo_media"]
    #[no_trace]
    player: DomRefCell<Option<Arc<Mutex<dyn Player>>>>,
    #[conditional_malloc_size_of]
    #[no_trace]
    video_renderer: Arc<Mutex<MediaFrameRenderer>>,
    #[ignore_malloc_size_of = "servo_media"]
    #[no_trace]
    audio_renderer: DomRefCell<Option<Arc<Mutex<dyn AudioRenderer>>>>,
    /// <https://html.spec.whatwg.org/multipage/#show-poster-flag>
    show_poster: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-duration>
    duration: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#current-playback-position>
    current_playback_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#official-playback-position>
    official_playback_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#default-playback-start-position>
    default_playback_start_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-volume>
    volume: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-seeking>
    seeking: Cell<bool>,
    /// The latest seek position (in seconds). It is used to distinguish whether a seek request
    /// was initiated by a script or by the user agent itself rather than by the media engine,
    /// and to abort other running instances of the `seek` algorithm.
    current_seek_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-muted>
    muted: Cell<bool>,
    /// The kind of source the media resource is currently being loaded from, if any.
    load_state: Cell<LoadState>,
    /// The pointer into the list of source element children used by the resource selection
    /// algorithm.
    source_children_pointer: DomRefCell<Option<SourceChildrenPointer>>,
    /// The source element child currently used as the media resource, if any.
    current_source_child: MutNullableDom<HTMLSourceElement>,
    /// URL of the media resource, if any.
    #[no_trace]
    resource_url: DomRefCell<Option<ServoUrl>>,
    /// URL of the media resource, if the resource is set through the src_object attribute and it
    /// is a blob.
    #[no_trace]
    blob_url: DomRefCell<Option<ServoUrl>>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-played>
    played: DomRefCell<TimeRangesContainer>,
    // https://html.spec.whatwg.org/multipage/#dom-media-audiotracks
    audio_tracks_list: MutNullableDom<AudioTrackList>,
    // https://html.spec.whatwg.org/multipage/#dom-media-videotracks
    video_tracks_list: MutNullableDom<VideoTrackList>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-texttracks>
    text_tracks_list: MutNullableDom<TextTrackList>,
    /// Time of the last timeupdate notification.
    #[ignore_malloc_size_of = "Defined in std::time"]
    next_timeupdate_event: Cell<Instant>,
    /// Latest fetch request context.
    current_fetch_context: RefCell<Option<HTMLMediaElementFetchContext>>,
    /// Media controls id.
    /// To work around the lack of a privileged JS context, we secure access to the
    /// "privileged" document.servoGetMediaControls(id) API by keeping a whitelist of
    /// media controls identifiers.
    media_controls_id: DomRefCell<Option<String>>,
}

/// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
#[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq)]
#[repr(u8)]
pub(crate) enum NetworkState {
    Empty = HTMLMediaElementConstants::NETWORK_EMPTY as u8,
    Idle = HTMLMediaElementConstants::NETWORK_IDLE as u8,
    Loading = HTMLMediaElementConstants::NETWORK_LOADING as u8,
    NoSource = HTMLMediaElementConstants::NETWORK_NO_SOURCE as u8,
}

/// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
#[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf, PartialEq, PartialOrd)]
#[repr(u8)]
#[allow(clippy::enum_variant_names)] // Clippy warning silenced here because these names are from the specification.
pub(crate) enum ReadyState {
    HaveNothing = HTMLMediaElementConstants::HAVE_NOTHING as u8,
    HaveMetadata = HTMLMediaElementConstants::HAVE_METADATA as u8,
    HaveCurrentData = HTMLMediaElementConstants::HAVE_CURRENT_DATA as u8,
    HaveFutureData = HTMLMediaElementConstants::HAVE_FUTURE_DATA as u8,
    HaveEnoughData = HTMLMediaElementConstants::HAVE_ENOUGH_DATA as u8,
}

/// <https://html.spec.whatwg.org/multipage/#direction-of-playback>
#[derive(Clone, Copy, PartialEq)]
enum PlaybackDirection {
    Forwards,
    Backwards,
}

impl HTMLMediaElement {
    pub(crate) fn new_inherited(
        tag_name: LocalName,
        prefix: Option<Prefix>,
        document: &Document,
    ) -> Self {
        Self {
            htmlelement: HTMLElement::new_inherited(tag_name, prefix, document),
            network_state: Cell::new(NetworkState::Empty),
            ready_state: Cell::new(ReadyState::HaveNothing),
            src_object: Default::default(),
            current_src: DomRefCell::new("".to_owned()),
            generation_id: Cell::new(0),
            fired_loadeddata_event: Cell::new(false),
            error: Default::default(),
            paused: Cell::new(true),
            default_playback_rate: Cell::new(1.0),
            playback_rate: Cell::new(1.0),
            muted: Cell::new(false),
            load_state: Cell::new(LoadState::NotLoaded),
            source_children_pointer: DomRefCell::new(None),
            current_source_child: Default::default(),
            // FIXME(nox): Why is this initialised to true?
            autoplaying: Cell::new(true),
            delaying_the_load_event_flag: Default::default(),
            pending_play_promises: Default::default(),
            in_flight_play_promises_queue: Default::default(),
            player: Default::default(),
            video_renderer: Arc::new(Mutex::new(MediaFrameRenderer::new(
                document.webview_id(),
                document.window().compositor_api().clone(),
                document.window().get_player_context(),
            ))),
            audio_renderer: Default::default(),
            show_poster: Cell::new(true),
            duration: Cell::new(f64::NAN),
            current_playback_position: Cell::new(0.),
            official_playback_position: Cell::new(0.),
            default_playback_start_position: Cell::new(0.),
            volume: Cell::new(1.0),
            seeking: Cell::new(false),
            current_seek_position: Cell::new(f64::NAN),
            resource_url: DomRefCell::new(None),
            blob_url: DomRefCell::new(None),
            played: DomRefCell::new(TimeRangesContainer::default()),
            audio_tracks_list: Default::default(),
            video_tracks_list: Default::default(),
            text_tracks_list: Default::default(),
            next_timeupdate_event: Cell::new(Instant::now() + Duration::from_millis(250)),
            current_fetch_context: RefCell::new(None),
            media_controls_id: DomRefCell::new(None),
        }
    }

    pub(crate) fn network_state(&self) -> NetworkState {
        self.network_state.get()
    }

    pub(crate) fn get_ready_state(&self) -> ReadyState {
        self.ready_state.get()
    }

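    /// Returns which concrete media element this is (`<audio>` or `<video>`).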
    fn media_type_id(&self) -> HTMLMediaElementTypeId {
        match self.upcast::<Node>().type_id() {
            NodeTypeId::Element(ElementTypeId::HTMLElement(
                HTMLElementTypeId::HTMLMediaElement(media_type_id),
            )) => media_type_id,
            _ => unreachable!(),
        }
    }

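    /// Synchronizes the underlying player with the element's state: if the element
    /// is potentially playing, applies the playback rate and volume and starts
    /// playback; otherwise pauses the player.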
    fn update_media_state(&self) {
        if self.is_potentially_playing() {
            if let Some(ref player) = *self.player.borrow() {
                if let Err(err) = player.lock().unwrap().set_rate(self.playback_rate.get()) {
                    warn!("Could not set the playback rate {:?}", err);
                }
                if let Err(err) = player.lock().unwrap().set_volume(self.volume.get()) {
                    warn!("Could not set the volume {:?}", err);
                }
                if let Err(err) = player.lock().unwrap().play() {
                    warn!("Could not play media {:?}", err);
                }
            }
        } else if let Some(ref player) = *self.player.borrow() {
            if let Err(err) = player.lock().unwrap().pause() {
                error!("Could not pause player {:?}", err);
            }
        }
    }

    /// Marks this element as delaying the load event or not.
    ///
    /// Nothing happens if the element was already delaying the load event and
    /// we pass true to this method again.
    ///
    /// <https://html.spec.whatwg.org/multipage/#delaying-the-load-event-flag>
    pub(crate) fn delay_load_event(&self, delay: bool, can_gc: CanGc) {
        let blocker = &self.delaying_the_load_event_flag;
        if delay && blocker.borrow().is_none() {
            *blocker.borrow_mut() = Some(LoadBlocker::new(&self.owner_document(), LoadType::Media));
        } else if !delay && blocker.borrow().is_some() {
            LoadBlocker::terminate(blocker, can_gc);
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#time-marches-on>
    fn time_marches_on(&self) {
        // Step 6. If the time was reached through the usual monotonic increase of the current
        // playback position during normal playback, and if the user agent has not fired a
        // timeupdate event at the element in the past 15 to 250ms and is not still running event
        // handlers for such an event, then the user agent must queue a media element task given the
        // media element to fire an event named timeupdate at the element.
        if Instant::now() > self.next_timeupdate_event.get() {
            self.queue_media_element_task_to_fire_event(atom!("timeupdate"));
            self.next_timeupdate_event
                .set(Instant::now() + Duration::from_millis(250));
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#internal-play-steps>
    fn internal_play_steps(&self, can_gc: CanGc) {
        // Step 1. If the media element's networkState attribute has the value NETWORK_EMPTY, invoke
        // the media element's resource selection algorithm.
        if self.network_state.get() == NetworkState::Empty {
            self.invoke_resource_selection_algorithm(can_gc);
        }

        // Step 2. If the playback has ended and the direction of playback is forwards, seek to the
        // earliest possible position of the media resource.
        // Generally "ended" and "looping" are exclusive. Here, the loop attribute is ignored to
        // seek back to start in case loop was set after playback ended.
        // <https://github.com/whatwg/html/issues/4487>
        if self.ended_playback(LoopCondition::Ignored) &&
            self.direction_of_playback() == PlaybackDirection::Forwards
        {
            self.seek(
                self.earliest_possible_position(),
                /* approximate_for_speed */ false,
            );
        }

        let state = self.ready_state.get();

        // Step 3. If the media element's paused attribute is true, then:
        if self.Paused() {
            // Step 3.1. Change the value of paused to false.
            self.paused.set(false);

            // Step 3.2. If the show poster flag is true, set the element's show poster flag to
            // false and run the time marches on steps.
            if self.show_poster.get() {
                self.show_poster.set(false);
                self.time_marches_on();
            }

            // Step 3.3. Queue a media element task given the media element to fire an event named
            // play at the element.
            self.queue_media_element_task_to_fire_event(atom!("play"));

            // Step 3.4. If the media element's readyState attribute has the value HAVE_NOTHING,
            // HAVE_METADATA, or HAVE_CURRENT_DATA, queue a media element task given the media
            // element to fire an event named waiting at the element. Otherwise, the media element's
            // readyState attribute has the value HAVE_FUTURE_DATA or HAVE_ENOUGH_DATA: notify about
            // playing for the element.
            match state {
                ReadyState::HaveNothing |
                ReadyState::HaveMetadata |
                ReadyState::HaveCurrentData => {
                    self.queue_media_element_task_to_fire_event(atom!("waiting"));
                },
                ReadyState::HaveFutureData | ReadyState::HaveEnoughData => {
                    self.notify_about_playing();
                },
            }
        }
        // Step 4. Otherwise, if the media element's readyState attribute has the value
        // HAVE_FUTURE_DATA or HAVE_ENOUGH_DATA, take pending play promises and queue a media
        // element task given the media element to resolve pending play promises with the
        // result.
        else if state == ReadyState::HaveFutureData || state == ReadyState::HaveEnoughData {
            self.take_pending_play_promises(Ok(()));

            let this = Trusted::new(self);
            let generation_id = self.generation_id.get();

            self.owner_global()
                .task_manager()
                .media_element_task_source()
                .queue(task!(resolve_pending_play_promises: move || {
                    let this = this.root();
                    if generation_id != this.generation_id.get() {
                        return;
                    }

                    this.fulfill_in_flight_play_promises(|| {});
                }));
        }

        // Step 5. Set the media element's can autoplay flag to false.
        self.autoplaying.set(false);

        self.update_media_state();
    }

    /// <https://html.spec.whatwg.org/multipage/#internal-pause-steps>
    fn internal_pause_steps(&self) {
        // Step 1. Set the media element's can autoplay flag to false.
        self.autoplaying.set(false);

        // Step 2. If the media element's paused attribute is false, run the following steps:
        if !self.Paused() {
            // Step 2.1. Change the value of paused to true.
            self.paused.set(true);

            // Step 2.2. Take pending play promises and let promises be the result.
            self.take_pending_play_promises(Err(Error::Abort(None)));

            // Step 2.3. Queue a media element task given the media element and the following steps:
            let this = Trusted::new(self);
            let generation_id = self.generation_id.get();

            self.owner_global()
                .task_manager()
                .media_element_task_source()
                .queue(task!(internal_pause_steps: move || {
                    let this = this.root();
                    if generation_id != this.generation_id.get() {
                        return;
                    }

                    this.fulfill_in_flight_play_promises(|| {
                        // Step 2.3.1. Fire an event named timeupdate at the element.
                        this.upcast::<EventTarget>().fire_event(atom!("timeupdate"), CanGc::note());

                        // Step 2.3.2. Fire an event named pause at the element.
                        this.upcast::<EventTarget>().fire_event(atom!("pause"), CanGc::note());

                        // Step 2.3.3. Reject pending play promises with promises and an
                        // "AbortError" DOMException.
                        // Done after running this closure in `fulfill_in_flight_play_promises`.
                    });
                }));

            // Step 2.4. Set the official playback position to the current playback position.
            self.official_playback_position
                .set(self.current_playback_position.get());
        }

        self.update_media_state();
    }

    /// <https://html.spec.whatwg.org/multipage/#allowed-to-play>
    fn is_allowed_to_play(&self) -> bool {
        true
    }

    /// <https://html.spec.whatwg.org/multipage/#notify-about-playing>
    fn notify_about_playing(&self) {
        // Step 1. Take pending play promises and let promises be the result.
        self.take_pending_play_promises(Ok(()));

        // Step 2. Queue a media element task given the element and the following steps:
        let this = Trusted::new(self);
        let generation_id = self.generation_id.get();

        self.owner_global()
            .task_manager()
            .media_element_task_source()
            .queue(task!(notify_about_playing: move || {
                let this = this.root();
                if generation_id != this.generation_id.get() {
                    return;
                }

                this.fulfill_in_flight_play_promises(|| {
                    // Step 2.1. Fire an event named playing at the element.
                    this.upcast::<EventTarget>().fire_event(atom!("playing"), CanGc::note());

                    // Step 2.2. Resolve pending play promises with promises.
                    // Done after running this closure in `fulfill_in_flight_play_promises`.
                });
            }));
    }

    /// <https://html.spec.whatwg.org/multipage/#ready-states>
    fn change_ready_state(&self, ready_state: ReadyState) {
        let old_ready_state = self.ready_state.get();
        self.ready_state.set(ready_state);

        if self.network_state.get() == NetworkState::Empty {
            return;
        }

        if old_ready_state == ready_state {
            return;
        }

        // Step 1. Apply the first applicable set of substeps from the following list:
        match (old_ready_state, ready_state) {
            // => "If the previous ready state was HAVE_NOTHING, and the new ready state is
            // HAVE_METADATA"
            (ReadyState::HaveNothing, ReadyState::HaveMetadata) => {
                // Queue a media element task given the media element to fire an event named
                // loadedmetadata at the element.
                self.queue_media_element_task_to_fire_event(atom!("loadedmetadata"));
                // No other steps are applicable in this case.
                return;
            },
            // => "If the previous ready state was HAVE_METADATA and the new ready state is
            // HAVE_CURRENT_DATA or greater"
            (ReadyState::HaveMetadata, new) if new >= ReadyState::HaveCurrentData => {
                // If this is the first time this occurs for this media element since the load()
                // algorithm was last invoked, the user agent must queue a media element task given
                // the media element to fire an event named loadeddata at the element.
                if !self.fired_loadeddata_event.get() {
                    self.fired_loadeddata_event.set(true);

                    let this = Trusted::new(self);
                    let generation_id = self.generation_id.get();

                    self.owner_global()
                        .task_manager()
                        .media_element_task_source()
                        .queue(task!(media_reached_current_data: move || {
                            let this = this.root();
                            if generation_id != this.generation_id.get() {
                                return;
                            }

                            this.upcast::<EventTarget>().fire_event(atom!("loadeddata"), CanGc::note());
                            // Once the readyState attribute reaches HAVE_CURRENT_DATA, after the
                            // loadeddata event has been fired, set the element's
                            // delaying-the-load-event flag to false.
                            this.delay_load_event(false, CanGc::note());
                        }));
                }

                // Steps for the transition from HaveMetadata to HaveCurrentData
                // or HaveFutureData also apply here, as per the next match
                // expression.
            },
            (ReadyState::HaveFutureData, new) if new <= ReadyState::HaveCurrentData => {
                // FIXME(nox): Queue a task to fire timeupdate and waiting
                // events if the conditions called for by the spec are met.

                // No other steps are applicable in this case.
                return;
            },

            _ => (),
        }

        // => "If the previous ready state was HAVE_CURRENT_DATA or less, and the new ready state is
        // HAVE_FUTURE_DATA or more"
        if old_ready_state <= ReadyState::HaveCurrentData &&
            ready_state >= ReadyState::HaveFutureData
        {
            // The user agent must queue a media element task given the media element to fire an
            // event named canplay at the element.
            self.queue_media_element_task_to_fire_event(atom!("canplay"));

            // If the element's paused attribute is false, the user agent must notify about playing
            // for the element.
            if !self.Paused() {
                self.notify_about_playing();
            }
        }

        // => "If the new ready state is HAVE_ENOUGH_DATA"
        if ready_state == ReadyState::HaveEnoughData {
            // The user agent must queue a media element task given the media element to fire an
            // event named canplaythrough at the element.
            self.queue_media_element_task_to_fire_event(atom!("canplaythrough"));

            // If the element is eligible for autoplay, then the user agent may run the following
            // substeps:
            if self.eligible_for_autoplay() {
                // Step 1. Set the paused attribute to false.
                self.paused.set(false);

                // Step 2. If the element's show poster flag is true, set it to false and run the
                // time marches on steps.
                if self.show_poster.get() {
                    self.show_poster.set(false);
                    self.time_marches_on();
                }

                // Step 3. Queue a media element task given the element to fire an event named play
                // at the element.
                self.queue_media_element_task_to_fire_event(atom!("play"));

                // Step 4. Notify about playing for the element.
                self.notify_about_playing();
            }
        }

        self.update_media_state();
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn invoke_resource_selection_algorithm(&self, can_gc: CanGc) {
        // Step 1. Set the element's networkState attribute to the NETWORK_NO_SOURCE value.
        self.network_state.set(NetworkState::NoSource);

        // Step 2. Set the element's show poster flag to true.
        self.show_poster.set(true);

        // Step 3. Set the media element's delaying-the-load-event flag to true (this delays the
        // load event).
        self.delay_load_event(true, can_gc);

        // Step 4. Await a stable state, allowing the task that invoked this algorithm to continue.
        // If the resource selection mode in the synchronous section is
        // "attribute", the URL of the resource to fetch is relative to the
        // media element's node document when the src attribute was last
        // changed, which is why we need to pass the base URL in the task
        // right here.
        let task = MediaElementMicrotask::ResourceSelection {
            elem: DomRoot::from_ref(self),
            generation_id: self.generation_id.get(),
            base_url: self.owner_document().base_url(),
        };

        // FIXME(nox): This will later call the resource_selection_algorithm_sync
        // method below. If microtasks were trait objects, we would be able
        // to put the code directly in this method, without the boilerplate
        // indirections.
        ScriptThread::await_stable_state(Microtask::MediaElement(task));
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn resource_selection_algorithm_sync(&self, base_url: ServoUrl, can_gc: CanGc) {
        // TODO Step 5. If the media element's blocked-on-parser flag is false, then populate the
        // list of pending text tracks.
        // FIXME(ferjm): Implement blocked_on_parser logic
        // https://html.spec.whatwg.org/multipage/#blocked-on-parser
        // FIXME(nox): Maybe populate the list of pending text tracks.

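        // The resource selection mode determined in step 6 below: a media provider
        // object, the src attribute, or a source element child.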
        enum Mode {
            Object,
            Attribute(String),
            Children(DomRoot<HTMLSourceElement>),
        }

        // Step 6.
        let mode = if self.src_object.borrow().is_some() {
            // If the media element has an assigned media provider object, then let mode be object.
            Mode::Object
        } else if let Some(attribute) = self
            .upcast::<Element>()
            .get_attribute(&ns!(), &local_name!("src"))
        {
            // Otherwise, if the media element has no assigned media provider object but has a src
            // attribute, then let mode be attribute.
            Mode::Attribute((**attribute.value()).to_owned())
        } else if let Some(source) = self
            .upcast::<Node>()
            .children()
            .find_map(DomRoot::downcast::<HTMLSourceElement>)
        {
            // Otherwise, if the media element does not have an assigned media provider object and
            // does not have a src attribute, but does have a source element child, then let mode be
            // children and let candidate be the first such source element child in tree order.
            Mode::Children(source)
        } else {
            // Otherwise, the media element has no assigned media provider object and has neither a
            // src attribute nor a source element child:
            self.load_state.set(LoadState::NotLoaded);

            // Step 6.none.1. Set the networkState to NETWORK_EMPTY.
            self.network_state.set(NetworkState::Empty);

            // Step 6.none.2. Set the element's delaying-the-load-event flag to false. This stops
            // delaying the load event.
            self.delay_load_event(false, can_gc);

            // Step 6.none.3. End the synchronous section and return.
            return;
        };

        // Step 7. Set the media element's networkState to NETWORK_LOADING.
        self.network_state.set(NetworkState::Loading);

        // Step 8. Queue a media element task given the media element to fire an event named
        // loadstart at the media element.
        self.queue_media_element_task_to_fire_event(atom!("loadstart"));

        // Step 9. Run the appropriate steps from the following list:
        match mode {
            Mode::Object => {
                // => "If mode is object"
                self.load_from_src_object();
            },
            Mode::Attribute(src) => {
                // => "If mode is attribute"
                self.load_from_src_attribute(base_url, &src);
            },
            Mode::Children(source) => {
                // => "Otherwise (mode is children)"
                self.load_from_source_child(&source);
            },
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn load_from_src_object(&self) {
        self.load_state.set(LoadState::LoadingFromSrcObject);

        // Step 9.object.1. Set the currentSrc attribute to the empty string.
        "".clone_into(&mut self.current_src.borrow_mut());

        // Step 9.object.3. Run the resource fetch algorithm with the assigned media
        // provider object. If that algorithm returns without aborting this one, then the
        // load failed.
        // Note that the resource fetch algorithm itself takes care of the cleanup in case
        // of failure.
        self.resource_fetch_algorithm(Resource::Object);
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn load_from_src_attribute(&self, base_url: ServoUrl, src: &str) {
        self.load_state.set(LoadState::LoadingFromSrcAttribute);

        // Step 9.attribute.1. If the src attribute's value is the empty string, then end
        // the synchronous section, and jump down to the failed with attribute step below.
        if src.is_empty() {
            self.queue_dedicated_media_source_failure_steps();
            return;
        }

        // Step 9.attribute.2. Let urlRecord be the result of encoding-parsing a URL given
        // the src attribute's value, relative to the media element's node document when the
        // src attribute was last changed.
        let Ok(url_record) = base_url.join(src) else {
            self.queue_dedicated_media_source_failure_steps();
            return;
        };

        // Step 9.attribute.3. If urlRecord is not failure, then set the currentSrc
        // attribute to the result of applying the URL serializer to urlRecord.
        *self.current_src.borrow_mut() = url_record.as_str().into();

        // Step 9.attribute.5. If urlRecord is not failure, then run the resource fetch
        // algorithm with urlRecord. If that algorithm returns without aborting this one,
        // then the load failed.
        // Note that the resource fetch algorithm itself takes care of the cleanup in
        // case of failure.
        self.resource_fetch_algorithm(Resource::Url(url_record));
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn load_from_source_child(&self, source: &HTMLSourceElement) {
        self.load_state.set(LoadState::LoadingFromSourceChild);

        // Step 9.children.1. Let pointer be a position defined by two adjacent nodes in the media
        // element's child list, treating the start of the list (before the first child in the list,
        // if any) and end of the list (after the last child in the list, if any) as nodes in their
        // own right. One node is the node before pointer, and the other node is the node after
        // pointer. Initially, let pointer be the position between the candidate node and the next
        // node, if there are any, or the end of the list, if it is the last node.
        *self.source_children_pointer.borrow_mut() =
            Some(SourceChildrenPointer::new(DomRoot::from_ref(source), false));

        let element = source.upcast::<Element>();

        // Step 9.children.2. Process candidate: If candidate does not have a src attribute, or if
        // its src attribute's value is the empty string, then end the synchronous section, and jump
        // down to the failed with elements step below.
        let Some(src) = element
            .get_attribute(&ns!(), &local_name!("src"))
            .filter(|attribute| !attribute.value().is_empty())
        else {
            self.load_from_source_child_failure_steps(source);
            return;
        };

        // Step 9.children.3. If candidate has a media attribute whose value does not match the
        // environment, then end the synchronous section, and jump down to the failed with elements
        // step below.
        if let Some(media) = element.get_attribute(&ns!(), &local_name!("media")) {
            if !MediaList::matches_environment(&element.owner_document(), &media.value()) {
                self.load_from_source_child_failure_steps(source);
                return;
            }
        }

        // Step 9.children.4. Let urlRecord be the result of encoding-parsing a URL given
        // candidate's src attribute's value, relative to candidate's node document when the src
        // attribute was last changed.
        let Ok(url_record) = source.owner_document().base_url().join(&src.value()) else {
            // Step 9.children.5. If urlRecord is failure, then end the synchronous section,
            // and jump down to the failed with elements step below.
            self.load_from_source_child_failure_steps(source);
            return;
        };

        // Step 9.children.6. If candidate has a type attribute whose value, when parsed as a MIME
        // type (including any codecs described by the codecs parameter, for types that define that
        // parameter), represents a type that the user agent knows it cannot render, then end the
        // synchronous section, and jump down to the failed with elements step below.
        if let Some(type_) = element.get_attribute(&ns!(), &local_name!("type")) {
            if ServoMedia::get().can_play_type(&type_.value()) == SupportsMediaType::No {
                self.load_from_source_child_failure_steps(source);
                return;
            }
        }

        // Reset the media player before loading the next source child.
        self.reset_media_player();

        self.current_source_child.set(Some(source));

        // Step 9.children.7. Set the currentSrc attribute to the result of applying the URL
        // serializer to urlRecord.
        *self.current_src.borrow_mut() = url_record.as_str().into();

        // Step 9.children.9. Run the resource fetch algorithm with urlRecord. If that
        // algorithm returns without aborting this one, then the load failed.
        // Note that the resource fetch algorithm itself takes care of the cleanup in
        // case of failure.
        self.resource_fetch_algorithm(Resource::Url(url_record));
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn load_from_source_child_failure_steps(&self, source: &HTMLSourceElement) {
        // Step 9.children.10. Failed with elements: Queue a media element task given the media
        // element to fire an event named error at candidate.
        let trusted_this = Trusted::new(self);
        let trusted_source = Trusted::new(source);
        let generation_id = self.generation_id.get();

        self.owner_global()
            .task_manager()
            .media_element_task_source()
            .queue(task!(queue_error_event: move || {
                let this = trusted_this.root();
                if generation_id != this.generation_id.get() {
                    return;
                }

                let source = trusted_source.root();
                source.upcast::<EventTarget>().fire_event(atom!("error"), CanGc::note());
            }));

        // Step 9.children.11. Await a stable state.
        let task = MediaElementMicrotask::SelectNextSourceChild {
            elem: DomRoot::from_ref(self),
            generation_id: self.generation_id.get(),
        };

        ScriptThread::await_stable_state(Microtask::MediaElement(task));
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn select_next_source_child(&self, can_gc: CanGc) {
        // Step 9.children.12. Forget the media element's media-resource-specific tracks.
        self.AudioTracks(can_gc).clear();
        self.VideoTracks(can_gc).clear();

        // Step 9.children.13. Find next candidate: Let candidate be null.
        let mut source_candidate = None;

        // Step 9.children.14. Search loop: If the node after pointer is the end of the list, then
        // jump to the waiting step below.
        // Step 9.children.15. If the node after pointer is a source element, let candidate be that
        // element.
        // Step 9.children.16. Advance pointer so that the node before pointer is now the node that
        // was after pointer, and the node after pointer is the node after the node that used to be
        // after pointer, if any.
        if let Some(ref source_children_pointer) = *self.source_children_pointer.borrow() {
1288            // Note that sharing an implementation between the opaque iterator types returned by
1289            // `inclusively_following_siblings` and `following_siblings` is not possible due to
1290            // precise capturing.
1291            if source_children_pointer.inclusive {
1292                for next_sibling in source_children_pointer
1293                    .source_before_pointer
1294                    .upcast::<Node>()
1295                    .inclusively_following_siblings()
1296                {
1297                    if let Some(next_source) = DomRoot::downcast::<HTMLSourceElement>(next_sibling)
1298                    {
1299                        source_candidate = Some(next_source);
1300                        break;
1301                    }
1302                }
1303            } else {
1304                for next_sibling in source_children_pointer
1305                    .source_before_pointer
1306                    .upcast::<Node>()
1307                    .following_siblings()
1308                {
1309                    if let Some(next_source) = DomRoot::downcast::<HTMLSourceElement>(next_sibling)
1310                    {
1311                        source_candidate = Some(next_source);
1312                        break;
1313                    }
1314                }
1315            };
1316        }
1317
1318        // Step 9.children.17. If candidate is null, jump back to the search loop step. Otherwise,
1319        // jump back to the process candidate step.
1320        if let Some(source_candidate) = source_candidate {
1321            self.load_from_source_child(&source_candidate);
1322            return;
1323        }
1324
1325        self.load_state.set(LoadState::WaitingForSource);
1326
1327        *self.source_children_pointer.borrow_mut() = None;
1328
1329        // Step 9.children.18. Waiting: Set the element's networkState attribute to the
1330        // NETWORK_NO_SOURCE value.
1331        self.network_state.set(NetworkState::NoSource);
1332
1333        // Step 9.children.19. Set the element's show poster flag to true.
1334        self.show_poster.set(true);
1335
1336        // Step 9.children.20. Queue a media element task given the media element to set the
1337        // element's delaying-the-load-event flag to false. This stops delaying the load event.
1338        let this = Trusted::new(self);
1339        let generation_id = self.generation_id.get();
1340
1341        self.owner_global()
1342            .task_manager()
1343            .media_element_task_source()
1344            .queue(task!(queue_delay_load_event: move || {
1345                let this = this.root();
1346                if generation_id != this.generation_id.get() {
1347                    return;
1348                }
1349
1350                this.delay_load_event(false, CanGc::note());
1351            }));
1352
1353        // Step 9.children.22. Wait until the node after pointer is a node other than the end of the
1354        // list. (This step might wait forever.)
1355    }
1356
1357    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1358    fn resource_selection_algorithm_failure_steps(&self) {
1359        match self.load_state.get() {
1360            LoadState::LoadingFromSrcObject => {
1361                // Step 9.object.4. Failed with media provider: Reaching this step indicates that
1362                // the media resource failed to load. Take pending play promises and queue a media
1363                // element task given the media element to run the dedicated media source failure
1364                // steps with the result.
1365                self.queue_dedicated_media_source_failure_steps();
1366            },
1367            LoadState::LoadingFromSrcAttribute => {
1368                // Step 9.attribute.6. Failed with attribute: Reaching this step indicates that the
1369                // media resource failed to load or that urlRecord is failure. Take pending play
1370                // promises and queue a media element task given the media element to run the
1371                // dedicated media source failure steps with the result.
1372                self.queue_dedicated_media_source_failure_steps();
1373            },
1374            LoadState::LoadingFromSourceChild => {
1375                // Step 9.children.10. Failed with elements: Queue a media element task given the
1376                // media element to fire an event named error at candidate.
1377                if let Some(source) = self.current_source_child.take() {
1378                    self.load_from_source_child_failure_steps(&source);
1379                }
1380            },
1381            _ => {},
1382        }
1383    }
1384
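    /// Fetches the current media resource, optionally starting at the given byte `offset`
    /// (used when seeking). Any ongoing fetch is cancelled first; if a `seek_lock` is
    /// provided, it is unlocked once the new fetch has been initiated or the request fails.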
1385    fn fetch_request(&self, offset: Option<u64>, seek_lock: Option<SeekLock>) {
1386        if self.resource_url.borrow().is_none() && self.blob_url.borrow().is_none() {
1387            error!("Missing request url");
1388            if let Some(seek_lock) = seek_lock {
1389                seek_lock.unlock(/* successful seek */ false);
1390            }
1391            self.resource_selection_algorithm_failure_steps();
1392            return;
1393        }
1394
1395        let document = self.owner_document();
1396        let destination = match self.media_type_id() {
1397            HTMLMediaElementTypeId::HTMLAudioElement => Destination::Audio,
1398            HTMLMediaElementTypeId::HTMLVideoElement => Destination::Video,
1399        };
1400        let mut headers = HeaderMap::new();
1401        // FIXME(eijebong): Use typed headers once we have a constructor for the range header
1402        headers.insert(
1403            header::RANGE,
1404            HeaderValue::from_str(&format!("bytes={}-", offset.unwrap_or(0))).unwrap(),
1405        );
1406        let url = match self.resource_url.borrow().as_ref() {
1407            Some(url) => url.clone(),
1408            None => self.blob_url.borrow().as_ref().unwrap().clone(),
1409        };
1410
1411        let cors_setting = cors_setting_for_element(self.upcast());
1412        let global = self.global();
1413        let request = create_a_potential_cors_request(
1414            Some(document.webview_id()),
1415            url.clone(),
1416            destination,
1417            cors_setting,
1418            None,
1419            global.get_referrer(),
1420            document.insecure_requests_policy(),
1421            document.has_trustworthy_ancestor_or_current_origin(),
1422            global.policy_container(),
1423        )
1424        .headers(headers)
1425        .origin(document.origin().immutable().clone())
1426        .pipeline_id(Some(self.global().pipeline_id()))
1427        .referrer_policy(document.get_referrer_policy());
1428
1429        let mut current_fetch_context = self.current_fetch_context.borrow_mut();
1430        if let Some(ref mut current_fetch_context) = *current_fetch_context {
1431            current_fetch_context.cancel(CancelReason::Abort);
1432        }
1433
1434        *current_fetch_context = Some(HTMLMediaElementFetchContext::new(
1435            request.id,
1436            global.core_resource_thread(),
1437        ));
1438        let listener =
1439            HTMLMediaElementFetchListener::new(self, request.id, url.clone(), offset.unwrap_or(0));
1440
1441        self.owner_document().fetch_background(request, listener);
1442
1443        // Since we cancelled the previous fetch, from now on the media element
1444        // will only receive response data from the new fetch that's been
1445        // initiated. This means the player can resume operation, since all subsequent data
1446        // pushes will originate from the new seek offset.
1447        if let Some(seek_lock) = seek_lock {
1448            seek_lock.unlock(/* successful seek */ true);
1449        }
1450    }
1451
1452    /// <https://html.spec.whatwg.org/multipage/#eligible-for-autoplay>
1453    fn eligible_for_autoplay(&self) -> bool {
1454        // its can autoplay flag is true;
1455        self.autoplaying.get() &&
1456
1457        // its paused attribute is true;
1458        self.Paused() &&
1459
1460        // it has an autoplay attribute specified;
1461        self.Autoplay() &&
1462
1463        // its node document's active sandboxing flag set does not have the sandboxed automatic
1464        // features browsing context flag set; and
1465        {
1466            let document = self.owner_document();
1467
1468            !document.has_active_sandboxing_flag(
1469                SandboxingFlagSet::SANDBOXED_AUTOMATIC_FEATURES_BROWSING_CONTEXT_FLAG,
1470            )
1471        }
1472
1473        // its node document is allowed to use the "autoplay" feature.
1474        // TODO: Feature policy: https://html.spec.whatwg.org/iframe-embed-object.html#allowed-to-use
1475    }
1476
1477    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-resource>
1478    fn resource_fetch_algorithm(&self, resource: Resource) {
1479        if let Err(e) = self.create_media_player(&resource) {
1480            error!("Create media player error {:?}", e);
1481            self.resource_selection_algorithm_failure_steps();
1482            return;
1483        }
1484
1485        // Steps 1-2.
1486        // Not applicable; the `resource` variable already conveys which mode
1487        // is in use.
1488
1489        // Step 3.
1490        // FIXME(nox): Remove all media-resource-specific text tracks.
1491
1492        // Step 5. Run the appropriate steps from the following list:
1493        match resource {
1494            Resource::Url(url) => {
1495                // Step 5.remote.1. Optionally, run the following substeps. This is the expected
1496                // behavior if the user agent intends to not attempt to fetch the resource until the
1497                // user requests it explicitly (e.g. as a way to implement the preload attribute's
1498                // none keyword).
1499                if self.Preload() == "none" && !self.autoplaying.get() {
1500                    // Step 5.remote.1.1. Set the networkState to NETWORK_IDLE.
1501                    self.network_state.set(NetworkState::Idle);
1502
1503                    // Step 5.remote.1.2. Queue a media element task given the media element to fire
1504                    // an event named suspend at the element.
1505                    self.queue_media_element_task_to_fire_event(atom!("suspend"));
1506
1507                    // Step 5.remote.1.3. Queue a media element task given the media element to set
1508                    // the element's delaying-the-load-event flag to false. This stops delaying the
1509                    // load event.
1510                    let this = Trusted::new(self);
1511                    let generation_id = self.generation_id.get();
1512
1513                    self.owner_global()
1514                        .task_manager()
1515                        .media_element_task_source()
1516                        .queue(task!(queue_delay_load_event: move || {
1517                            let this = this.root();
1518                            if generation_id != this.generation_id.get() {
1519                                return;
1520                            }
1521
1522                            this.delay_load_event(false, CanGc::note());
1523                        }));
1524
1525                    // TODO Step 5.remote.1.4. Wait for the task to be run.
1526                    // FIXME(nox): Somehow we should wait for the task from the previous
1527                    // step to be run before continuing.
1528
1529                    // TODO Steps 5.remote.1.5-5.remote.1.7.
1530                    // FIXME(nox): Wait for an implementation-defined event and
1531                    // then continue with the normal set of steps instead of just
1532                    // returning.
1533                    return;
1534                }
1535
1536                *self.resource_url.borrow_mut() = Some(url);
1537
1538                // Steps 5.remote.2-5.remote.8
1539                self.fetch_request(None, None);
1540            },
1541            Resource::Object => {
1542                if let Some(ref src_object) = *self.src_object.borrow() {
1543                    match src_object {
1544                        SrcObject::Blob(blob) => {
1545                            let blob_url = URL::CreateObjectURL(&self.global(), blob);
1546                            *self.blob_url.borrow_mut() =
1547                                Some(ServoUrl::parse(&blob_url.str()).expect("infallible"));
1548                            self.fetch_request(None, None);
1549                        },
1550                        SrcObject::MediaStream(stream) => {
1551                            let tracks = &*stream.get_tracks();
1552                            for (pos, track) in tracks.iter().enumerate() {
1553                                if self
1554                                    .player
1555                                    .borrow()
1556                                    .as_ref()
1557                                    .unwrap()
1558                                    .lock()
1559                                    .unwrap()
1560                                    .set_stream(&track.id(), pos == tracks.len() - 1)
1561                                    .is_err()
1562                                {
1563                                    self.resource_selection_algorithm_failure_steps();
1564                                }
1565                            }
1566                        },
1567                    }
1568                }
1569            },
1570        }
1571    }
1572
1573    /// Queues a task to run the [dedicated media source failure steps][steps].
1574    ///
1575    /// [steps]: https://html.spec.whatwg.org/multipage/#dedicated-media-source-failure-steps
1576    fn queue_dedicated_media_source_failure_steps(&self) {
1577        let this = Trusted::new(self);
1578        let generation_id = self.generation_id.get();
1579        self.take_pending_play_promises(Err(Error::NotSupported(None)));
1580        self.owner_global()
1581            .task_manager()
1582            .media_element_task_source()
1583            .queue(task!(dedicated_media_source_failure_steps: move || {
1584                let this = this.root();
1585                if generation_id != this.generation_id.get() {
1586                    return;
1587                }
1588
1589                this.fulfill_in_flight_play_promises(|| {
1590                    // Step 1. Set the error attribute to the result of creating a MediaError with
1591                    // MEDIA_ERR_SRC_NOT_SUPPORTED.
1592                    this.error.set(Some(&*MediaError::new(
1593                        &this.owner_window(),
1594                        MEDIA_ERR_SRC_NOT_SUPPORTED, CanGc::note())));
1595
1596                    // Step 2. Forget the media element's media-resource-specific tracks.
1597                    this.AudioTracks(CanGc::note()).clear();
1598                    this.VideoTracks(CanGc::note()).clear();
1599
1600                    // Step 3. Set the element's networkState attribute to the NETWORK_NO_SOURCE
1601                    // value.
1602                    this.network_state.set(NetworkState::NoSource);
1603
1604                    // Step 4. Set the element's show poster flag to true.
1605                    this.show_poster.set(true);
1606
1607                    // Step 5. Fire an event named error at the media element.
1608                    this.upcast::<EventTarget>().fire_event(atom!("error"), CanGc::note());
1609
1610                    if let Some(ref player) = *this.player.borrow() {
1611                        if let Err(err) = player.lock().unwrap().stop() {
1612                            error!("Could not stop player {:?}", err);
1613                        }
1614                    }
1615
1616                    // Step 6. Reject pending play promises with promises and a "NotSupportedError"
1617                    // DOMException.
1618                    // Done after running this closure in `fulfill_in_flight_play_promises`.
1619                });
1620
1621                // Step 7. Set the element's delaying-the-load-event flag to false. This stops
1622                // delaying the load event.
1623                this.delay_load_event(false, CanGc::note());
1624            }));
1625    }
1626
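    /// Whether the element's error attribute is currently set, i.e. a media error has
    /// already been reported for this element.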
1627    fn in_error_state(&self) -> bool {
1628        self.error.get().is_some()
1629    }
1630
1631    /// <https://html.spec.whatwg.org/multipage/#potentially-playing>
1632    fn is_potentially_playing(&self) -> bool {
1633        !self.paused.get() &&
1634            !self.ended_playback(LoopCondition::Included) &&
1635            self.error.get().is_none() &&
1636            !self.is_blocked_media_element()
1637    }
1638
1639    /// <https://html.spec.whatwg.org/multipage/#blocked-media-element>
1640    fn is_blocked_media_element(&self) -> bool {
1641        self.ready_state.get() <= ReadyState::HaveCurrentData ||
1642            self.is_paused_for_user_interaction() ||
1643            self.is_paused_for_in_band_content()
1644    }
1645
1646    /// <https://html.spec.whatwg.org/multipage/#paused-for-user-interaction>
1647    fn is_paused_for_user_interaction(&self) -> bool {
1648        // FIXME: we will likely be able to fill this placeholder once (if) we
1649        //        implement the MediaSession API.
1650        false
1651    }
1652
1653    /// <https://html.spec.whatwg.org/multipage/#paused-for-in-band-content>
1654    fn is_paused_for_in_band_content(&self) -> bool {
1655        // FIXME: we will likely be able to fill this placeholder once (if) we
1656        //        implement https://github.com/servo/servo/issues/22314
1657        false
1658    }
1659
1660    /// <https://html.spec.whatwg.org/multipage/#media-element-load-algorithm>
1661    fn media_element_load_algorithm(&self, can_gc: CanGc) {
1662        // Reset the flag that signals whether loadeddata was ever fired for
1663        // this invocation of the load algorithm.
1664        self.fired_loadeddata_event.set(false);
1665
1666        // TODO Step 1. Set this element's is currently stalled to false.
1667
1668        // Step 2. Abort any already-running instance of the resource selection algorithm for this
1669        // element.
1670        self.generation_id.set(self.generation_id.get() + 1);
1671
1672        self.load_state.set(LoadState::NotLoaded);
1673        *self.source_children_pointer.borrow_mut() = None;
1674        self.current_source_child.set(None);
1675
1676        // Step 3. Let pending tasks be a list of all tasks from the media element's media element
1677        // event task source in one of the task queues.
1678
1679        // Step 4. For each task in pending tasks that would resolve pending play promises or reject
1680        // pending play promises, immediately resolve or reject those promises in the order the
1681        // corresponding tasks were queued.
1682        while !self.in_flight_play_promises_queue.borrow().is_empty() {
1683            self.fulfill_in_flight_play_promises(|| ());
1684        }
1685
1686        // Step 5. Remove each task in pending tasks from its task queue.
1687        // Note that each of the media element's pending events and callbacks is scheduled with an
1688        // associated generation id and will eventually be aborted (see Step 2).
1689
1690        let network_state = self.network_state.get();
1691
1692        // Step 6. If the media element's networkState is set to NETWORK_LOADING or NETWORK_IDLE,
1693        // queue a media element task given the media element to fire an event named abort at the
1694        // media element.
1695        if network_state == NetworkState::Loading || network_state == NetworkState::Idle {
1696            self.queue_media_element_task_to_fire_event(atom!("abort"));
1697        }
1698
1699        // Reset the media player for any previously playing media resource (see Step 11).
1700        self.reset_media_player();
1701
1702        // Step 7. If the media element's networkState is not set to NETWORK_EMPTY, then:
1703        if network_state != NetworkState::Empty {
1704            // Step 7.1. Queue a media element task given the media element to fire an event named
1705            // emptied at the media element.
1706            self.queue_media_element_task_to_fire_event(atom!("emptied"));
1707
1708            // Step 7.2. If a fetching process is in progress for the media element, the user agent
1709            // should stop it.
1710            if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
1711                current_fetch_context.cancel(CancelReason::Abort);
1712            }
1713
1714            // TODO Step 7.3. If the media element's assigned media provider object is a MediaSource
1715            // object, then detach it.
1716
1717            // Step 7.4. Forget the media element's media-resource-specific tracks.
1718            self.AudioTracks(can_gc).clear();
1719            self.VideoTracks(can_gc).clear();
1720
1721            // Step 7.5. If readyState is not set to HAVE_NOTHING, then set it to that state.
1722            if self.ready_state.get() != ReadyState::HaveNothing {
1723                self.change_ready_state(ReadyState::HaveNothing);
1724            }
1725
1726            // Step 7.6. If the paused attribute is false, then:
1727            if !self.Paused() {
1728                // Step 7.6.1. Set the paused attribute to true.
1729                self.paused.set(true);
1730
1731                // Step 7.6.2. Take pending play promises and reject pending play promises with the
1732                // result and an "AbortError" DOMException.
1733                self.take_pending_play_promises(Err(Error::Abort(None)));
1734                self.fulfill_in_flight_play_promises(|| ());
1735            }
1736
1737            // Step 7.7. If seeking is true, set it to false.
1738            self.seeking.set(false);
1739
1740            self.current_seek_position.set(f64::NAN);
1741
1742            // Step 7.8. Set the current playback position to 0.
1743            // Set the official playback position to 0.
1744            // If this changed the official playback position, then queue a media element task given
1745            // the media element to fire an event named timeupdate at the media element.
1746            self.current_playback_position.set(0.);
1747            if self.official_playback_position.get() != 0. {
1748                self.queue_media_element_task_to_fire_event(atom!("timeupdate"));
1749            }
1750            self.official_playback_position.set(0.);
1751
1752            // TODO Step 7.9. Set the timeline offset to Not-a-Number (NaN).
1753
1754            // Step 7.10. Update the duration attribute to Not-a-Number (NaN).
1755            self.duration.set(f64::NAN);
1756        }
1757
1758        // Step 8. Set the playbackRate attribute to the value of the defaultPlaybackRate attribute.
1759        self.playback_rate.set(self.default_playback_rate.get());
1760
1761        // Step 9. Set the error attribute to null and the can autoplay flag to true.
1762        self.error.set(None);
1763        self.autoplaying.set(true);
1764
1765        // Step 10. Invoke the media element's resource selection algorithm.
1766        self.invoke_resource_selection_algorithm(can_gc);
1767
1768        // Step 11. Note: Playback of any previously playing media resource for this element stops.
1769    }
1770
1771    /// Queue a media element task given the media element to fire an event at the media element.
1772    /// <https://html.spec.whatwg.org/multipage/#queue-a-media-element-task>
1773    fn queue_media_element_task_to_fire_event(&self, name: Atom) {
1774        let this = Trusted::new(self);
1775        let generation_id = self.generation_id.get();
1776
1777        self.owner_global()
1778            .task_manager()
1779            .media_element_task_source()
1780            .queue(task!(queue_event: move || {
1781                let this = this.root();
1782                if generation_id != this.generation_id.get() {
1783                    return;
1784                }
1785
1786                this.upcast::<EventTarget>().fire_event(name, CanGc::note());
1787            }));
1788    }
1789
1790    /// Appends a promise to the list of pending play promises.
1791    fn push_pending_play_promise(&self, promise: &Rc<Promise>) {
1792        self.pending_play_promises
1793            .borrow_mut()
1794            .push(promise.clone());
1795    }
1796
1797    /// Takes the pending play promises.
1798    ///
1799    /// The result with which these promises will be fulfilled is passed here
1800    /// and this method returns nothing because we actually just move the
1801    /// current list of pending play promises to the
1802    /// `in_flight_play_promises_queue` field.
1803    ///
1804    /// Each call to this method must be followed by a call to
1805    /// `fulfill_in_flight_play_promises`, to actually fulfill the promises
1806    /// which were taken and moved to the in-flight queue.
1807    fn take_pending_play_promises(&self, result: ErrorResult) {
1808        let pending_play_promises = std::mem::take(&mut *self.pending_play_promises.borrow_mut());
1809        self.in_flight_play_promises_queue
1810            .borrow_mut()
1811            .push_back((pending_play_promises.into(), result));
1812    }
1813
1814    /// Fulfills the next in-flight play promises queue after running a closure.
1815    ///
1816    /// See the comment on `take_pending_play_promises` for why this method
1817    /// does not take a list of promises to fulfill. Callers cannot just pop
1818    /// the front list off of `in_flight_play_promises_queue` and later fulfill
1819    /// the promises because that would mean putting
1820    /// `#[cfg_attr(crown, allow(crown::unrooted_must_root))]` on even more functions, potentially
1821    /// hiding actual safety bugs.
1822    #[cfg_attr(crown, allow(crown::unrooted_must_root))]
1823    fn fulfill_in_flight_play_promises<F>(&self, f: F)
1824    where
1825        F: FnOnce(),
1826    {
1827        let (promises, result) = self
1828            .in_flight_play_promises_queue
1829            .borrow_mut()
1830            .pop_front()
1831            .expect("there should be at least one list of in flight play promises");
1832        f();
1833        for promise in &*promises {
1834            match result {
1835                Ok(ref value) => promise.resolve_native(value, CanGc::note()),
1836                Err(ref error) => promise.reject_error(error.clone(), CanGc::note()),
1837            }
1838        }
1839    }
1840
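    /// Runs the media-element-specific part of the source element insertion steps:
    /// either invokes the resource selection algorithm or, if the load algorithm is
    /// waiting for a new source child, resumes it with the inserted source element.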
1841    pub(crate) fn handle_source_child_insertion(&self, source: &HTMLSourceElement, can_gc: CanGc) {
1842        // <https://html.spec.whatwg.org/multipage/#the-source-element:html-element-insertion-steps>
1843        // Step 2. If parent is a media element that has no src attribute and whose networkState has
1844        // the value NETWORK_EMPTY, then invoke that media element's resource selection algorithm.
1845        if self.upcast::<Element>().has_attribute(&local_name!("src")) {
1846            return;
1847        }
1848
1849        if self.network_state.get() == NetworkState::Empty {
1850            self.invoke_resource_selection_algorithm(can_gc);
1851            return;
1852        }
1853
1854        // <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1855        // Step 9.children.22. Wait until the node after pointer is a node other than the end of the
1856        // list. (This step might wait forever.)
1857        if self.load_state.get() != LoadState::WaitingForSource {
1858            return;
1859        }
1860
1861        self.load_state.set(LoadState::LoadingFromSourceChild);
1862
1863        *self.source_children_pointer.borrow_mut() =
1864            Some(SourceChildrenPointer::new(DomRoot::from_ref(source), true));
1865
1866        // Step 9.children.23. Await a stable state.
1867        let task = MediaElementMicrotask::SelectNextSourceChildAfterWait {
1868            elem: DomRoot::from_ref(self),
1869            generation_id: self.generation_id.get(),
1870        };
1871
1872        ScriptThread::await_stable_state(Microtask::MediaElement(task));
1873    }
1874
1875    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1876    fn select_next_source_child_after_wait(&self, can_gc: CanGc) {
1877        // Step 9.children.24. Set the element's delaying-the-load-event flag back to true (this
1878        // delays the load event again, in case it hasn't been fired yet).
1879        self.delay_load_event(true, can_gc);
1880
1881        // Step 9.children.25. Set the networkState back to NETWORK_LOADING.
1882        self.network_state.set(NetworkState::Loading);
1883
1884        // Step 9.children.26. Jump back to the find next candidate step above.
1885        self.select_next_source_child(can_gc);
1886    }
1887
1888    /// <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
1889    /// => "If the media data cannot be fetched at all, due to network errors..."
1890    /// => "If the media data can be fetched but is found by inspection to be in an unsupported
1891    /// format, or can otherwise not be rendered at all"
1892    fn media_data_processing_failure_steps(&self) {
1893        // Step 1. The user agent should cancel the fetching process.
1894        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
1895            current_fetch_context.cancel(CancelReason::Error);
1896        }
1897
1898        // Step 2. Abort this subalgorithm, returning to the resource selection algorithm.
1899        self.resource_selection_algorithm_failure_steps();
1900    }
1901
1902    /// <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
1903    /// => "If the connection is interrupted after some media data has been received..."
1904    /// => "If the media data is corrupted"
1905    fn media_data_processing_fatal_steps(&self, error: u16, can_gc: CanGc) {
1906        *self.source_children_pointer.borrow_mut() = None;
1907        self.current_source_child.set(None);
1908
1909        // Step 1. The user agent should cancel the fetching process.
1910        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
1911            current_fetch_context.cancel(CancelReason::Error);
1912        }
1913
1914        // Step 2. Set the error attribute to the result of creating a MediaError with
1915        // MEDIA_ERR_NETWORK/MEDIA_ERR_DECODE.
1916        self.error
1917            .set(Some(&*MediaError::new(&self.owner_window(), error, can_gc)));
1918
1919        // Step 3. Set the element's networkState attribute to the NETWORK_IDLE value.
1920        self.network_state.set(NetworkState::Idle);
1921
1922        // Step 4. Set the element's delaying-the-load-event flag to false. This stops delaying
1923        // the load event.
1924        self.delay_load_event(false, can_gc);
1925
1926        // Step 5. Fire an event named error at the media element.
1927        self.upcast::<EventTarget>()
1928            .fire_event(atom!("error"), can_gc);
1929
1930        // Step 6. Abort the overall resource selection algorithm.
1931    }
1932
1933    /// <https://html.spec.whatwg.org/multipage/#dom-media-seek>
1934    fn seek(&self, time: f64, _approximate_for_speed: bool) {
1935        // Step 1. Set the media element's show poster flag to false.
1936        self.show_poster.set(false);
1937
1938        // Step 2. If the media element's readyState is HAVE_NOTHING, return.
1939        if self.ready_state.get() == ReadyState::HaveNothing {
1940            return;
1941        }
1942
1943        // Step 3. If the element's seeking IDL attribute is true, then another instance of this
1944        // algorithm is already running. Abort that other instance of the algorithm without waiting
1945        // for the step that it is running to complete.
1946        self.current_seek_position.set(f64::NAN);
1947
1948        // Step 4. Set the seeking IDL attribute to true.
1949        self.seeking.set(true);
1950
1951        // Step 5. If the seek was in response to a DOM method call or setting of an IDL attribute,
1952        // then continue the script. The remainder of these steps must be run in parallel.
1953
1954        // Step 6. If the new playback position is later than the end of the media resource, then
1955        // let it be the end of the media resource instead.
1956        let time = f64::min(time, self.Duration());
1957
1958        // Step 7. If the new playback position is less than the earliest possible position, let it
1959        // be that position instead.
1960        let time = f64::max(time, self.earliest_possible_position());
1961
1962        // Step 8. If the (possibly now changed) new playback position is not in one of the ranges
1963        // given in the seekable attribute, then let it be the position in one of the ranges given
1964        // in the seekable attribute that is the nearest to the new playback position. If there are
1965        // no ranges given in the seekable attribute, then set the seeking IDL attribute to false
1966        // and return.
1967        let seekable = self.seekable();
1968
1969        if seekable.is_empty() {
1970            self.seeking.set(false);
1971            return;
1972        }
1973
1974        let mut nearest_seekable_position = 0.0;
1975        let mut in_seekable_range = false;
1976        let mut nearest_seekable_distance = f64::MAX;
1977        for i in 0..seekable.len() {
1978            let start = seekable.start(i).unwrap().abs();
1979            let end = seekable.end(i).unwrap().abs();
1980            if time >= start && time <= end {
1981                nearest_seekable_position = time;
1982                in_seekable_range = true;
1983                break;
1984            } else if time < start {
1985                let distance = start - time;
1986                if distance < nearest_seekable_distance {
1987                    nearest_seekable_distance = distance;
1988                    nearest_seekable_position = start;
1989                }
1990            } else {
1991                let distance = time - end;
1992                if distance < nearest_seekable_distance {
1993                    nearest_seekable_distance = distance;
1994                    nearest_seekable_position = end;
1995                }
1996            }
1997        }
1998        let time = if in_seekable_range {
1999            time
2000        } else {
2001            nearest_seekable_position
2002        };
2003
2004        // Step 9. If the approximate-for-speed flag is set, adjust the new playback position to a
2005        // value that will allow for playback to resume promptly. If new playback position before
2006        // this step is before current playback position, then the adjusted new playback position
2007        // must also be before the current playback position. Similarly, if the new playback
2008        // position before this step is after current playback position, then the adjusted new
2009        // playback position must also be after the current playback position.
2010        // TODO: Note that servo-media with gstreamer does not support inaccurate seeking for now.
2011
2012        // Step 10. Queue a media element task given the media element to fire an event named
2013        // seeking at the element.
2014        self.queue_media_element_task_to_fire_event(atom!("seeking"));
2015
2016        // Step 11. Set the current playback position to the new playback position.
2017        self.current_playback_position.set(time);
2018
2019        if let Some(ref player) = *self.player.borrow() {
2020            if let Err(error) = player.lock().unwrap().seek(time) {
2021                error!("Could not seek player: {error:?}");
2022            }
2023        }
2024
2025        self.current_seek_position.set(time);
2026
2027        // Step 12. Wait until the user agent has established whether or not the media data for the
2028        // new playback position is available, and, if it is, until it has decoded enough data to
2029        // play back that position.
2030        // The rest of the steps are handled when the media engine signals a ready state change or
2031        // otherwise satisfies seek completion and signals a position change.
2032    }
2033
2034    /// <https://html.spec.whatwg.org/multipage/#dom-media-seek>
2035    fn seek_end(&self) {
2036        // Any time the user agent provides a stable state, the official playback position must be
2037        // set to the current playback position.
2038        self.official_playback_position
2039            .set(self.current_playback_position.get());
2040
2041        // Step 14. Set the seeking IDL attribute to false.
2042        self.seeking.set(false);
2043
2044        self.current_seek_position.set(f64::NAN);
2045
2046        // Step 15. Run the time marches on steps.
2047        self.time_marches_on();
2048
2049        // Step 16. Queue a media element task given the media element to fire an event named
2050        // timeupdate at the element.
2051        self.queue_media_element_task_to_fire_event(atom!("timeupdate"));
2052
2053        // Step 17. Queue a media element task given the media element to fire an event named seeked
2054        // at the element.
2055        self.queue_media_element_task_to_fire_event(atom!("seeked"));
2056    }
2057
2058    /// <https://html.spec.whatwg.org/multipage/#poster-frame>
2059    pub(crate) fn set_poster_frame(&self, image: Option<Arc<RasterImage>>) {
2060        if pref!(media_testing_enabled) && image.is_some() {
2061            self.queue_media_element_task_to_fire_event(atom!("postershown"));
2062        }
2063
2064        self.video_renderer.lock().unwrap().set_poster_frame(image);
2065
2066        self.upcast::<Node>().dirty(NodeDamage::Other);
2067    }
2068
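    /// Creates a servo-media player for the given resource, hooks up the video and audio
    /// renderers, applies the current muted state, and routes player events back to this
    /// element as tasks on the media element task source.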
2069    fn create_media_player(&self, resource: &Resource) -> Result<(), ()> {
2070        let stream_type = match *resource {
2071            Resource::Object => {
2072                if let Some(ref src_object) = *self.src_object.borrow() {
2073                    match src_object {
2074                        SrcObject::MediaStream(_) => StreamType::Stream,
2075                        _ => StreamType::Seekable,
2076                    }
2077                } else {
2078                    return Err(());
2079                }
2080            },
2081            _ => StreamType::Seekable,
2082        };
2083
2084        let window = self.owner_window();
2085        let (action_sender, action_receiver) = ipc::channel::<PlayerEvent>().unwrap();
2086        let video_renderer: Option<Arc<Mutex<dyn VideoFrameRenderer>>> = match self.media_type_id()
2087        {
2088            HTMLMediaElementTypeId::HTMLAudioElement => None,
2089            HTMLMediaElementTypeId::HTMLVideoElement => Some(self.video_renderer.clone()),
2090        };
2091
2092        let audio_renderer = self.audio_renderer.borrow().as_ref().cloned();
2093
2094        let pipeline_id = window.pipeline_id();
2095        let client_context_id =
2096            ClientContextId::build(pipeline_id.namespace_id.0, pipeline_id.index.0.get());
2097        let player = ServoMedia::get().create_player(
2098            &client_context_id,
2099            stream_type,
2100            action_sender,
2101            video_renderer,
2102            audio_renderer,
2103            Box::new(window.get_player_context()),
2104        );
2105        let player_id = {
2106            let player_guard = player.lock().unwrap();
2107
2108            if let Err(e) = player_guard.set_mute(self.muted.get()) {
2109                warn!("Could not set mute state: {:?}", e);
2110            }
2111
2112            player_guard.get_id()
2113        };
2114
2115        *self.player.borrow_mut() = Some(player);
2116
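        // Route player events received on the IPC channel back to this element as tasks
        // on the media element task source.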
2117        let trusted_node = Trusted::new(self);
2118        let task_source = self
2119            .owner_global()
2120            .task_manager()
2121            .media_element_task_source()
2122            .to_sendable();
2123        ROUTER.add_typed_route(
2124            action_receiver,
2125            Box::new(move |message| {
2126                let event = message.unwrap();
2127                trace!("Player event {:?}", event);
2128                let this = trusted_node.clone();
2129                task_source.queue(task!(handle_player_event: move || {
2130                    this.root().handle_player_event(player_id, &event, CanGc::note());
2131                }));
2132            }),
2133        );
2134
2135        let task_source = self
2136            .owner_global()
2137            .task_manager()
2138            .media_element_task_source()
2139            .to_sendable();
2140        let weak_video_renderer = Arc::downgrade(&self.video_renderer);
2141
2142        self.video_renderer
2143            .lock()
2144            .unwrap()
2145            .setup(player_id, task_source, weak_video_renderer);
2146
2147        Ok(())
2148    }
2149
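    /// Stops and drops the current player, if any, resets the video renderer and, for
    /// video elements, clears the natural dimensions.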
2150    fn reset_media_player(&self) {
2151        if self.player.borrow().is_none() {
2152            return;
2153        }
2154
2155        if let Some(ref player) = *self.player.borrow() {
2156            if let Err(err) = player.lock().unwrap().stop() {
2157                error!("Could not stop player {:?}", err);
2158            }
2159        }
2160
2161        *self.player.borrow_mut() = None;
2162        self.video_renderer.lock().unwrap().reset();
2163
2164        if let Some(video_element) = self.downcast::<HTMLVideoElement>() {
2165            video_element.set_natural_dimensions(None, None);
2166        }
2167    }
2168
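    /// Enables or disables the audio track at index `idx` in the underlying player, if any.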
2169    pub(crate) fn set_audio_track(&self, idx: usize, enabled: bool) {
2170        if let Some(ref player) = *self.player.borrow() {
2171            if let Err(err) = player.lock().unwrap().set_audio_track(idx as i32, enabled) {
2172                warn!("Could not set audio track {:#?}", err);
2173            }
2174        }
2175    }
2176
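    /// Enables or disables the video track at index `idx` in the underlying player, if any.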
2177    pub(crate) fn set_video_track(&self, idx: usize, enabled: bool) {
2178        if let Some(ref player) = *self.player.borrow() {
2179            if let Err(err) = player.lock().unwrap().set_video_track(idx as i32, enabled) {
2180                warn!("Could not set video track {:#?}", err);
2181            }
2182        }
2183    }
2184
2185    /// <https://html.spec.whatwg.org/multipage/#direction-of-playback>
2186    fn direction_of_playback(&self) -> PlaybackDirection {
2187        // If the element's playbackRate is positive or zero, then the direction of playback is
2188        // forwards. Otherwise, it is backwards.
2189        if self.playback_rate.get() >= 0. {
2190            PlaybackDirection::Forwards
2191        } else {
2192            PlaybackDirection::Backwards
2193        }
2194    }
2195
2196    /// <https://html.spec.whatwg.org/multipage/#ended-playback>
2197    fn ended_playback(&self, loop_condition: LoopCondition) -> bool {
2198        // A media element is said to have ended playback when:
2199
2200        // The element's readyState attribute is HAVE_METADATA or greater, and
2201        if self.ready_state.get() < ReadyState::HaveMetadata {
2202            return false;
2203        }
2204
2205        let playback_position = self.current_playback_position.get();
2206
2207        match self.direction_of_playback() {
2208            // Either: The current playback position is the end of the media resource, and the
2209            // direction of playback is forwards, and the media element does not have a loop
2210            // attribute specified.
2211            PlaybackDirection::Forwards => {
2212                playback_position >= self.Duration() &&
2213                    (loop_condition == LoopCondition::Ignored || !self.Loop())
2214            },
2215            // Or: The current playback position is the earliest possible position, and the
2216            // direction of playback is backwards.
2217            PlaybackDirection::Backwards => playback_position <= self.earliest_possible_position(),
2218        }
2219    }
2220
2221    /// <https://html.spec.whatwg.org/multipage/#reaches-the-end>
2222    fn end_of_playback_in_forwards_direction(&self) {
2223        // When the current playback position reaches the end of the media resource when the
2224        // direction of playback is forwards, then the user agent must follow these steps:
2225
2226        // Step 1. If the media element has a loop attribute specified, then seek to the earliest
2227        // possible position of the media resource and return.
2228        if self.Loop() {
2229            self.seek(
2230                self.earliest_possible_position(),
2231                /* approximate_for_speed */ false,
2232            );
2233            return;
2234        }
2235
2236        // Step 2. As defined above, the ended IDL attribute starts returning true once the event
2237        // loop returns to step 1.
2238
2239        // Step 3. Queue a media element task given the media element and the following steps:
2240        let this = Trusted::new(self);
2241        let generation_id = self.generation_id.get();
2242
2243        self.owner_global()
2244            .task_manager()
2245            .media_element_task_source()
2246            .queue(task!(reaches_the_end_steps: move || {
2247                let this = this.root();
2248                if generation_id != this.generation_id.get() {
2249                    return;
2250                }
2251
2252                // Step 3.1. Fire an event named timeupdate at the media element.
2253                this.upcast::<EventTarget>().fire_event(atom!("timeupdate"), CanGc::note());
2254
2255                // Step 3.2. If the media element has ended playback, the direction of playback is
2256                // forwards, and paused is false, then:
2257                if this.ended_playback(LoopCondition::Included) &&
2258                    this.direction_of_playback() == PlaybackDirection::Forwards &&
2259                    !this.Paused() {
2260                    // Step 3.2.1. Set the paused attribute to true.
2261                    this.paused.set(true);
2262
2263                    // Step 3.2.2. Fire an event named pause at the media element.
2264                    this.upcast::<EventTarget>().fire_event(atom!("pause"), CanGc::note());
2265
2266                    // Step 3.2.3. Take pending play promises and reject pending play promises with
2267                    // the result and an "AbortError" DOMException.
2268                    this.take_pending_play_promises(Err(Error::Abort(None)));
2269                    this.fulfill_in_flight_play_promises(|| ());
2270                }
2271
2272                // Step 3.3. Fire an event named ended at the media element.
2273                this.upcast::<EventTarget>().fire_event(atom!("ended"), CanGc::note());
2274            }));
2275
2276        // <https://html.spec.whatwg.org/multipage/#dom-media-have_current_data>
2277        self.change_ready_state(ReadyState::HaveCurrentData);
2278    }
2279
2280    /// <https://html.spec.whatwg.org/multipage/#reaches-the-end>
2281    fn end_of_playback_in_backwards_direction(&self) {
2282        // When the current playback position reaches the earliest possible position of the media
2283        // resource when the direction of playback is backwards, then the user agent must only queue
2284        // a media element task given the media element to fire an event named timeupdate at the
2285        // element.
2286        if self.current_playback_position.get() <= self.earliest_possible_position() {
2287            self.queue_media_element_task_to_fire_event(atom!("timeupdate"));
2288        }
2289    }
2290
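    /// Handles the player's end-of-stream notification by running the appropriate
    /// reaches-the-end steps for the current direction of playback, unless a seek is
    /// in progress.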
2291    fn playback_end(&self) {
2292        // Abort the end-of-playback steps below if a seek is in progress.
2293        if self.seeking.get() {
2294            return;
2295        }
2296
2297        match self.direction_of_playback() {
2298            PlaybackDirection::Forwards => self.end_of_playback_in_forwards_direction(),
2299            PlaybackDirection::Backwards => self.end_of_playback_in_backwards_direction(),
2300        }
2301    }
2302
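    /// Handles an error reported by the player by running the relevant media data
    /// processing failure steps, unless an error has already been flagged for this element.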
2303    fn playback_error(&self, error: &str, can_gc: CanGc) {
2304        error!("Player error: {:?}", error);
2305
2306        // If we have already flagged an error condition while processing
2307        // the network response, we should silently skip any observable
2308        // errors originating while decoding the erroneous response.
2309        if self.in_error_state() {
2310            return;
2311        }
2312
2313        // <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
2314        if self.ready_state.get() == ReadyState::HaveNothing {
2315            // => "If the media data can be fetched but is found by inspection to be in an
2316            // unsupported format, or can otherwise not be rendered at all"
2317            self.media_data_processing_failure_steps();
2318        } else {
2319            // => "If the media data is corrupted"
2320            self.media_data_processing_fatal_steps(MEDIA_ERR_DECODE, can_gc);
2321        }
2322    }
2323
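    /// Handles the first metadata notification from the player: creates the
    /// media-resource-specific AudioTrack and VideoTrack objects and sets the current and
    /// official playback positions to the earliest possible position.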
2324    fn playback_metadata_updated(
2325        &self,
2326        metadata: &servo_media::player::metadata::Metadata,
2327        can_gc: CanGc,
2328    ) {
2329        // The following steps should be run once on the initial `metadata` signal from the media
2330        // engine.
2331        if self.ready_state.get() != ReadyState::HaveNothing {
2332            return;
2333        }
2334
2335        // https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list
2336        // => "If the media resource is found to have an audio track"
2337        for (i, _track) in metadata.audio_tracks.iter().enumerate() {
2338            let audio_track_list = self.AudioTracks(can_gc);
2339
2340            // Step 1. Create an AudioTrack object to represent the audio track.
2341            let kind = match i {
2342                0 => DOMString::from("main"),
2343                _ => DOMString::new(),
2344            };
2345
2346            let audio_track = AudioTrack::new(
2347                self.global().as_window(),
2348                DOMString::new(),
2349                kind,
2350                DOMString::new(),
2351                DOMString::new(),
2352                Some(&*audio_track_list),
2353                can_gc,
2354            );
2355
2356            // Step 2. Update the media element's audioTracks attribute's AudioTrackList object
2357            // with the new AudioTrack object.
2358            audio_track_list.add(&audio_track);
2359
2360            // Step 3. Let enable be unknown.
2361            // Step 4. If either the media resource or the URL of the current media resource
2362            // indicate a particular set of audio tracks to enable, or if the user agent has
2363            // information that would facilitate the selection of specific audio tracks to
2364            // improve the user's experience, then: if this audio track is one of the ones to
2365            // enable, then set enable to true, otherwise, set enable to false.
2366            if let Some(servo_url) = self.resource_url.borrow().as_ref() {
2367                let fragment = MediaFragmentParser::from(servo_url);
2368                if let Some(id) = fragment.id() {
2369                    if audio_track.id() == id {
2370                        audio_track_list.set_enabled(audio_track_list.len() - 1, true);
2371                    }
2372                }
2373
2374                if fragment.tracks().contains(&audio_track.kind().into()) {
2375                    audio_track_list.set_enabled(audio_track_list.len() - 1, true);
2376                }
2377            }
2378
2379            // Step 5. If enable is still unknown, then, if the media element does not yet have an
2380            // enabled audio track, then set enable to true, otherwise, set enable to false.
2381            // Step 6. If enable is true, then enable this audio track, otherwise, do not enable
2382            // this audio track.
2383            if audio_track_list.enabled_index().is_none() {
2384                audio_track_list.set_enabled(audio_track_list.len() - 1, true);
2385            }
2386
2387            // Step 7. Fire an event named addtrack at this AudioTrackList object, using TrackEvent,
2388            // with the track attribute initialized to the new AudioTrack object.
2389            let event = TrackEvent::new(
2390                self.global().as_window(),
2391                atom!("addtrack"),
2392                false,
2393                false,
2394                &Some(VideoTrackOrAudioTrackOrTextTrack::AudioTrack(audio_track)),
2395                can_gc,
2396            );
2397
2398            event
2399                .upcast::<Event>()
2400                .fire(audio_track_list.upcast::<EventTarget>(), can_gc);
2401        }
2402
2403        // => "If the media resource is found to have a video track"
2404        for (i, _track) in metadata.video_tracks.iter().enumerate() {
2405            let video_track_list = self.VideoTracks(can_gc);
2406
2407            // Step 1. Create a VideoTrack object to represent the video track.
2408            let kind = match i {
2409                0 => DOMString::from("main"),
2410                _ => DOMString::new(),
2411            };
2412
2413            let video_track = VideoTrack::new(
2414                self.global().as_window(),
2415                DOMString::new(),
2416                kind,
2417                DOMString::new(),
2418                DOMString::new(),
2419                Some(&*video_track_list),
2420                can_gc,
2421            );
2422
2423            // Step 2. Update the media element's videoTracks attribute's VideoTrackList object
2424            // with the new VideoTrack object.
2425            video_track_list.add(&video_track);
2426
2427            // Step 3. Let enable be unknown.
2428            // Step 4. If either the media resource or the URL of the current media resource
2429            // indicate a particular set of video tracks to enable, or if the user agent has
2430            // information that would facilitate the selection of specific video tracks to
2431            // improve the user's experience, then: if this video track is the first such video
2432            // track, then set enable to true, otherwise, set enable to false.
2433            if let Some(track) = video_track_list.item(0) {
2434                if let Some(servo_url) = self.resource_url.borrow().as_ref() {
2435                    let fragment = MediaFragmentParser::from(servo_url);
2436                    if let Some(id) = fragment.id() {
2437                        if track.id() == id {
2438                            video_track_list.set_selected(0, true);
2439                        }
2440                    } else if fragment.tracks().contains(&track.kind().into()) {
2441                        video_track_list.set_selected(0, true);
2442                    }
2443                }
2444            }
2445
2446            // Step 5. If enable is still unknown, then, if the media element does not yet have a
2447            // selected video track, then set enable to true, otherwise, set enable to false.
2448            // Step 6. If enable is true, then select this track and unselect any previously
2449            // selected video tracks, otherwise, do not select this video track. If other tracks are
2450            // unselected, then a change event will be fired.
2451            if video_track_list.selected_index().is_none() {
2452                video_track_list.set_selected(video_track_list.len() - 1, true);
2453            }
2454
2455            // Step 7. Fire an event named addtrack at this VideoTrackList object, using TrackEvent,
2456            // with the track attribute initialized to the new VideoTrack object.
2457            let event = TrackEvent::new(
2458                self.global().as_window(),
2459                atom!("addtrack"),
2460                false,
2461                false,
2462                &Some(VideoTrackOrAudioTrackOrTextTrack::VideoTrack(video_track)),
2463                can_gc,
2464            );
2465
2466            event
2467                .upcast::<Event>()
2468                .fire(video_track_list.upcast::<EventTarget>(), can_gc);
2469        }
2470
2471        // => "Once enough of the media data has been fetched to determine the duration..."
2472
2473        // TODO Step 1. Establish the media timeline for the purposes of the current playback
2474        // position and the earliest possible position, based on the media data.
2475
2476        // TODO Step 2. Update the timeline offset to the date and time that corresponds to the zero
2477        // time in the media timeline established in the previous step, if any. If no explicit time
2478        // and date is given by the media resource, the timeline offset must be set to Not-a-Number
2479        // (NaN).
2480
2481        // Step 3. Set the current playback position and the official playback position to the
2482        // earliest possible position.
2483        let earliest_possible_position = self.earliest_possible_position();
2484        self.current_playback_position
2485            .set(earliest_possible_position);
2486        self.official_playback_position
2487            .set(earliest_possible_position);
2488
2489        // Step 4. Update the duration attribute with the time of the last frame of the resource, if
2490        // known, on the media timeline established above. If it is not known (e.g. a stream that is
2491        // in principle infinite), update the duration attribute to the value positive Infinity.
2492        // Note: The user agent will queue a media element task given the media element to fire an
2493        // event named durationchange at the element at this point.
2494        self.duration.set(
2495            metadata
2496                .duration
2497                .map_or(f64::INFINITY, |duration| duration.as_secs_f64()),
2498        );
2499        self.queue_media_element_task_to_fire_event(atom!("durationchange"));
2500
2501        // Step 5. For video elements, set the videoWidth and videoHeight attributes, and queue a
2502        // media element task given the media element to fire an event named resize at the media
2503        // element.
2504        if let Some(video_element) = self.downcast::<HTMLVideoElement>() {
2505            video_element.set_natural_dimensions(Some(metadata.width), Some(metadata.height));
2506            self.queue_media_element_task_to_fire_event(atom!("resize"));
2507        }
2508
2509        // Step 6. Set the readyState attribute to HAVE_METADATA.
2510        self.change_ready_state(ReadyState::HaveMetadata);
2511
2512        // Step 7. Let jumped be false.
2513        let mut jumped = false;
2514
2515        // Step 8. If the media element's default playback start position is greater than zero, then
2516        // seek to that time, and let jumped be true.
2517        if self.default_playback_start_position.get() > 0. {
2518            self.seek(
2519                self.default_playback_start_position.get(),
2520                /* approximate_for_speed */ false,
2521            );
2522            jumped = true;
2523        }
2524
2525        // Step 9. Set the media element's default playback start position to zero.
2526        self.default_playback_start_position.set(0.);
2527
2528        // Step 10. Let the initial playback position be 0.
2529        // Step 11. If either the media resource or the URL of the current media resource indicate a
2530        // particular start time, then set the initial playback position to that time and, if jumped
2531        // is still false, seek to that time.
2532        if let Some(servo_url) = self.resource_url.borrow().as_ref() {
2533            let fragment = MediaFragmentParser::from(servo_url);
2534            if let Some(initial_playback_position) = fragment.start() {
2535                if initial_playback_position > 0. &&
2536                    initial_playback_position < self.duration.get() &&
2537                    !jumped
2538                {
2539                    self.seek(
2540                        initial_playback_position,
2541                        /* approximate_for_speed */ false,
2542                    )
2543                }
2544            }
2545        }
2546
2547        // Step 12. If there is no enabled audio track, then enable an audio track. This will cause
2548        // a change event to be fired.
2549        // Step 13. If there is no selected video track, then select a video track. This will cause
2550        // a change event to be fired.
2551        // Note that these steps are already handled by the earlier media track processing.
2552
2553        let global = self.global();
2554        let window = global.as_window();
2555
2556        // Update the media session metadata title with the obtained metadata.
2557        window.Navigator().MediaSession().update_title(
2558            metadata
2559                .title
2560                .clone()
2561                .unwrap_or(window.get_url().into_string()),
2562        );
2563    }
2564
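    /// Handles a `DurationChanged` event from the player: updates the `duration`
    /// attribute (using positive Infinity for an unknown length), queues a
    /// `durationchange` event and seeks back if the current playback position now
    /// exceeds the new duration.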
2565    fn playback_duration_changed(&self, duration: Option<Duration>) {
2566        let duration = duration.map_or(f64::INFINITY, |duration| duration.as_secs_f64());
2567
2568        if self.duration.get() == duration {
2569            return;
2570        }
2571
2572        self.duration.set(duration);
2573
2574        // When the length of the media resource changes to a known value (e.g. from being unknown
2575        // to known, or from a previously established length to a new length), the user agent must
2576        // queue a media element task given the media element to fire an event named durationchange
2577        // at the media element.
2578        // <https://html.spec.whatwg.org/multipage/#offsets-into-the-media-resource:media-resource-22>
2579        self.queue_media_element_task_to_fire_event(atom!("durationchange"));
2580
2581        // If the duration is changed such that the current playback position ends up being greater
2582        // than the time of the end of the media resource, then the user agent must also seek to the
2583        // time of the end of the media resource.
2584        if self.current_playback_position.get() > duration {
2585            self.seek(duration, /* approximate_for_speed */ false);
2586        }
2587    }
2588
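    /// Handles a `VideoFrameUpdated` event from the player: updates the video
    /// element's natural dimensions from the current frame and queues a `resize`
    /// event, or marks the node as damaged when the dimensions did not change.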
2589    fn playback_video_frame_updated(&self) {
2590        let Some(video_element) = self.downcast::<HTMLVideoElement>() else {
2591            return;
2592        };
2593
2594        // Whenever the natural width or natural height of the video changes (including, for
2595        // example, because the selected video track was changed), if the element's readyState
2596        // attribute is not HAVE_NOTHING, the user agent must queue a media element task given
2597        // the media element to fire an event named resize at the media element.
2598        // <https://html.spec.whatwg.org/multipage/#concept-video-intrinsic-width>
2599
2600            // The event for the prerolled frame from the media engine could reach us before the
2601            // media element reaches the HAVE_METADATA ready state, so the subsequent steps are skipped.
2602        if self.ready_state.get() == ReadyState::HaveNothing {
2603            return;
2604        }
2605
2606        if let Some(frame) = self.video_renderer.lock().unwrap().current_frame {
2607            if video_element
2608                .set_natural_dimensions(Some(frame.width as u32), Some(frame.height as u32))
2609            {
2610                self.queue_media_element_task_to_fire_event(atom!("resize"));
2611            } else {
2612                // If the natural dimensions have not been changed, the node should be marked as
2613                // damaged to force a repaint with the new frame contents.
2614                self.upcast::<Node>().dirty(NodeDamage::Other);
2615            }
2616        }
2617    }
2618
2619    fn playback_need_data(&self) {
2620        // The media engine signals that the source needs more data. If we already have a valid
2621        // fetch request, we do nothing. Otherwise, if we have no request and the previous request
2622        // was cancelled because we got an EnoughData event, we restart fetching where we left off.
2623        if let Some(ref current_fetch_context) = *self.current_fetch_context.borrow() {
2624            if let Some(reason) = current_fetch_context.cancel_reason() {
2625                // XXX(ferjm) Ideally we should just create a fetch request from
2626                // where we left off. But keeping track of the exact next byte that the
2627                // media backend expects is not the easiest task, so for now we simply
2628                // seek to the current playback position, which creates a new fetch
2629                // request for the last rendered frame.
2630                if *reason == CancelReason::Backoff {
2631                    self.seek(
2632                        self.current_playback_position.get(),
2633                        /* approximate_for_speed */ false,
2634                    );
2635                }
2636                return;
2637            }
2638        }
2639
2640        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
2641            if let Err(e) = {
2642                let mut data_source = current_fetch_context.data_source().borrow_mut();
2643                data_source.set_locked(false);
2644                data_source.process_into_player_from_queue(self.player.borrow().as_ref().unwrap())
2645            } {
2646                // If we are pushing too much data and we know that we can
2647                // restart the download later from where we left off, we cancel
2648                // the current request. Otherwise, we continue the request
2649                // assuming that we may drop some frames.
2650                if e == PlayerError::EnoughData {
2651                    current_fetch_context.cancel(CancelReason::Backoff);
2652                }
2653            }
2654        }
2655    }
2656
2657    fn playback_enough_data(&self) {
2658        // The media engine signals that the source has enough data and asks us to stop pushing bytes
2659        // to avoid excessive buffer queueing, so we cancel the ongoing fetch request if we are able
2660        // to restart it from where we left off. Otherwise, we continue the current fetch request,
2661        // assuming that some frames will be dropped.
2662        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
2663            if current_fetch_context.is_seekable() {
2664                current_fetch_context.cancel(CancelReason::Backoff);
2665            }
2666        }
2667    }
2668
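    /// Handles a `PositionChanged` event from the player: records the played
    /// range, updates the current and official playback positions, runs the
    /// "time marches on" steps and reports the new position to the media session.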
2669    fn playback_position_changed(&self, position: f64) {
2670        // Abort the following steps of the current time update if seeking is in progress.
2671        if self.seeking.get() {
2672            return;
2673        }
2674
2675        let _ = self
2676            .played
2677            .borrow_mut()
2678            .add(self.current_playback_position.get(), position);
2679        self.current_playback_position.set(position);
2680        self.official_playback_position.set(position);
2681        self.time_marches_on();
2682
2683        let media_position_state =
2684            MediaPositionState::new(self.duration.get(), self.playback_rate.get(), position);
2685        debug!(
2686            "Sending media session event set position state {:?}",
2687            media_position_state
2688        );
2689        self.send_media_session_event(MediaSessionEvent::SetPositionState(media_position_state));
2690    }
2691
2692    fn playback_seek_done(&self, position: f64) {
2693        // If the seek was initiated by script or by the user agent itself, continue with the
2694        // following steps; otherwise, abort.
2695        if !self.seeking.get() || position != self.current_seek_position.get() {
2696            return;
2697        }
2698
2699        // <https://html.spec.whatwg.org/multipage/#dom-media-seek>
2700        // Step 13. Await a stable state.
2701        let task = MediaElementMicrotask::Seeked {
2702            elem: DomRoot::from_ref(self),
2703            generation_id: self.generation_id.get(),
2704        };
2705
2706        ScriptThread::await_stable_state(Microtask::MediaElement(task));
2707    }
2708
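    /// Handles a `StateChanged` event from the player: promotes the ready state
    /// from HAVE_METADATA to HAVE_ENOUGH_DATA when playback is paused or playing,
    /// and reports the corresponding playback state change to the media session.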
2709    fn playback_state_changed(&self, state: &PlaybackState) {
2710        let mut media_session_playback_state = MediaSessionPlaybackState::None_;
2711        match *state {
2712            PlaybackState::Paused => {
2713                media_session_playback_state = MediaSessionPlaybackState::Paused;
2714                if self.ready_state.get() == ReadyState::HaveMetadata {
2715                    self.change_ready_state(ReadyState::HaveEnoughData);
2716                }
2717            },
2718            PlaybackState::Playing => {
2719                media_session_playback_state = MediaSessionPlaybackState::Playing;
2720                if self.ready_state.get() == ReadyState::HaveMetadata {
2721                    self.change_ready_state(ReadyState::HaveEnoughData);
2722                }
2723            },
2724            PlaybackState::Buffering => {
2725                // Do not send the media session playback state change event
2726                // in this case as a None_ state is expected to clean up the
2727                // session.
2728                return;
2729            },
2730            _ => {},
2731        };
2732        debug!(
2733            "Sending media session event playback state changed to {:?}",
2734            media_session_playback_state
2735        );
2736        self.send_media_session_event(MediaSessionEvent::PlaybackStateChange(
2737            media_session_playback_state,
2738        ));
2739    }
2740
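    /// Dispatches a `PlayerEvent` received from the servo-media player to the
    /// corresponding playback handler.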
2741    fn handle_player_event(&self, player_id: usize, event: &PlayerEvent, can_gc: CanGc) {
2742        // Ignore the asynchronous event if it comes from a previous player.
2743        if self
2744            .player
2745            .borrow()
2746            .as_ref()
2747            .is_none_or(|player| player.lock().unwrap().get_id() != player_id)
2748        {
2749            return;
2750        }
2751
2752        match *event {
2753            PlayerEvent::EndOfStream => self.playback_end(),
2754            PlayerEvent::Error(ref error) => self.playback_error(error, can_gc),
2755            PlayerEvent::VideoFrameUpdated => self.playback_video_frame_updated(),
2756            PlayerEvent::MetadataUpdated(ref metadata) => {
2757                self.playback_metadata_updated(metadata, can_gc)
2758            },
2759            PlayerEvent::DurationChanged(duration) => self.playback_duration_changed(duration),
2760            PlayerEvent::NeedData => self.playback_need_data(),
2761            PlayerEvent::EnoughData => self.playback_enough_data(),
2762            PlayerEvent::PositionChanged(position) => self.playback_position_changed(position),
2763            PlayerEvent::SeekData(p, ref seek_lock) => {
2764                self.fetch_request(Some(p), Some(seek_lock.clone()))
2765            },
2766            PlayerEvent::SeekDone(position) => self.playback_seek_done(position),
2767            PlayerEvent::StateChanged(ref state) => self.playback_state_changed(state),
2768        }
2769    }
2770
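    /// Returns the time ranges that the player currently reports as seekable.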
2771    fn seekable(&self) -> TimeRangesContainer {
2772        let mut seekable = TimeRangesContainer::default();
2773        if let Some(ref player) = *self.player.borrow() {
2774            if let Ok(ranges) = player.lock().unwrap().seekable() {
2775                for range in ranges {
2776                    let _ = seekable.add(range.start, range.end);
2777                }
2778            }
2779        }
2780        seekable
2781    }
2782
2783    /// <https://html.spec.whatwg.org/multipage/#earliest-possible-position>
2784    fn earliest_possible_position(&self) -> f64 {
2785        self.seekable()
2786            .start(0)
2787            .unwrap_or_else(|_| self.current_playback_position.get())
2788    }
2789
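    /// Renders the built-in media controls by attaching a UA shadow root and
    /// injecting the controls script and style into it.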
2790    fn render_controls(&self, can_gc: CanGc) {
2791        if self.upcast::<Element>().is_shadow_host() {
2792            // Bail out if we are already showing the controls.
2793            return;
2794        }
2795
2796        // FIXME(stevennovaryo): Recheck styling of media element to avoid
2797        //                       reparsing styles.
2798        let shadow_root = self
2799            .upcast::<Element>()
2800            .attach_ua_shadow_root(false, can_gc);
2801        let document = self.owner_document();
2802        let script = Element::create(
2803            QualName::new(None, ns!(html), local_name!("script")),
2804            None,
2805            &document,
2806            ElementCreator::ScriptCreated,
2807            CustomElementCreationMode::Asynchronous,
2808            None,
2809            can_gc,
2810        );
2811        // This is our hacky way to temporarily work around the lack of a privileged
2812        // JS context.
2813        // The media controls UI accesses the document.servoGetMediaControls(id) API
2814        // to get a reference to the media controls ShadowRoot.
2815        // `id` needs to match the internally generated UUID assigned to a media element.
2816        let id = Uuid::new_v4().to_string();
2817        document.register_media_controls(&id, &shadow_root);
2818        let media_controls_script = MEDIA_CONTROL_JS.replace("@@@id@@@", &id);
2819        *self.media_controls_id.borrow_mut() = Some(id);
2820        script
2821            .upcast::<Node>()
2822            .set_text_content_for_element(Some(DOMString::from(media_controls_script)), can_gc);
2823        if let Err(e) = shadow_root
2824            .upcast::<Node>()
2825            .AppendChild(script.upcast::<Node>(), can_gc)
2826        {
2827            warn!("Could not render media controls {:?}", e);
2828            return;
2829        }
2830
2831        let style = Element::create(
2832            QualName::new(None, ns!(html), local_name!("style")),
2833            None,
2834            &document,
2835            ElementCreator::ScriptCreated,
2836            CustomElementCreationMode::Asynchronous,
2837            None,
2838            can_gc,
2839        );
2840
2841        style
2842            .upcast::<Node>()
2843            .set_text_content_for_element(Some(DOMString::from(MEDIA_CONTROL_CSS)), can_gc);
2844
2845        if let Err(e) = shadow_root
2846            .upcast::<Node>()
2847            .AppendChild(style.upcast::<Node>(), can_gc)
2848        {
2849            warn!("Could not render media controls {:?}", e);
2850        }
2851
2852        self.upcast::<Node>().dirty(NodeDamage::Other);
2853    }
2854
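    /// Removes the media controls by unregistering their id from the owner document.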
2855    fn remove_controls(&self) {
2856        if let Some(id) = self.media_controls_id.borrow_mut().take() {
2857            self.owner_document().unregister_media_controls(&id);
2858        }
2859    }
2860
2861    /// Gets the video frame at the current playback position.
2862    pub(crate) fn get_current_frame(&self) -> Option<VideoFrame> {
2863        self.video_renderer
2864            .lock()
2865            .unwrap()
2866            .current_frame_holder
2867            .as_ref()
2868            .map(|holder| holder.get_frame())
2869    }
2870
2871    /// Gets the current frame of the video element to present, if any.
2872    /// <https://html.spec.whatwg.org/multipage/#the-video-element:the-video-element-7>
2873    pub(crate) fn get_current_frame_to_present(&self) -> Option<MediaFrame> {
2874        let (current_frame, poster_frame) = {
2875            let renderer = self.video_renderer.lock().unwrap();
2876            (renderer.current_frame, renderer.poster_frame)
2877        };
2878
2879        // If the show poster flag is set (or there is no current video frame to
2880        // present) AND there is a poster frame, present that.
2881        if (self.show_poster.get() || current_frame.is_none()) && poster_frame.is_some() {
2882            return poster_frame;
2883        }
2884
2885        current_frame
2886    }
2887
2888    /// By default the audio is rendered through the audio sink automatically
2889    /// selected by the servo-media Player instance. However, in some cases, like
2890    /// the WebAudio MediaElementAudioSourceNode, we need to set a custom audio
2891    /// renderer.
2892    pub(crate) fn set_audio_renderer(
2893        &self,
2894        audio_renderer: Arc<Mutex<dyn AudioRenderer>>,
2895        can_gc: CanGc,
2896    ) {
2897        *self.audio_renderer.borrow_mut() = Some(audio_renderer);
2898
2899        let had_player = {
2900            if let Some(ref player) = *self.player.borrow() {
2901                if let Err(err) = player.lock().unwrap().stop() {
2902                    error!("Could not stop player {:?}", err);
2903                }
2904                true
2905            } else {
2906                false
2907            }
2908        };
2909
2910        if had_player {
2911            self.media_element_load_algorithm(can_gc);
2912        }
2913    }
2914
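    /// Registers this element as the media session's current media instance and
    /// sends the given event to the media session.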
2915    fn send_media_session_event(&self, event: MediaSessionEvent) {
2916        let global = self.global();
2917        let media_session = global.as_window().Navigator().MediaSession();
2918
2919        media_session.register_media_instance(self);
2920
2921        media_session.send_event(event);
2922    }
2923
2924    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-resource>
2925    pub(crate) fn origin_is_clean(&self) -> bool {
2926        // Step 5.local (media provider object).
2927        if self.src_object.borrow().is_some() {
2928            // The resource described by the current media resource, if any,
2929            // contains the media data. It is CORS-same-origin.
2930            return true;
2931        }
2932
2933        // Step 5.remote (URL record).
2934        if self.resource_url.borrow().is_some() {
2935            // Update the media data with the contents
2936            // of response's unsafe response obtained in this fashion.
2937            // Response can be CORS-same-origin or CORS-cross-origin;
2938            if let Some(ref current_fetch_context) = *self.current_fetch_context.borrow() {
2939                return current_fetch_context.origin_is_clean();
2940            }
2941        }
2942
2943        true
2944    }
2945}
2946
2947impl HTMLMediaElementMethods<crate::DomTypeHolder> for HTMLMediaElement {
2948    /// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
2949    fn NetworkState(&self) -> u16 {
2950        self.network_state.get() as u16
2951    }
2952
2953    /// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
2954    fn ReadyState(&self) -> u16 {
2955        self.ready_state.get() as u16
2956    }
2957
2958    // https://html.spec.whatwg.org/multipage/#dom-media-autoplay
2959    make_bool_getter!(Autoplay, "autoplay");
2960    // https://html.spec.whatwg.org/multipage/#dom-media-autoplay
2961    make_bool_setter!(SetAutoplay, "autoplay");
2962
2963    // https://html.spec.whatwg.org/multipage/#attr-media-loop
2964    make_bool_getter!(Loop, "loop");
2965    // https://html.spec.whatwg.org/multipage/#attr-media-loop
2966    make_bool_setter!(SetLoop, "loop");
2967
2968    // https://html.spec.whatwg.org/multipage/#dom-media-defaultmuted
2969    make_bool_getter!(DefaultMuted, "muted");
2970    // https://html.spec.whatwg.org/multipage/#dom-media-defaultmuted
2971    make_bool_setter!(SetDefaultMuted, "muted");
2972
2973    // https://html.spec.whatwg.org/multipage/#dom-media-controls
2974    make_bool_getter!(Controls, "controls");
2975    // https://html.spec.whatwg.org/multipage/#dom-media-controls
2976    make_bool_setter!(SetControls, "controls");
2977
2978    // https://html.spec.whatwg.org/multipage/#dom-media-src
2979    make_url_getter!(Src, "src");
2980
2981    // https://html.spec.whatwg.org/multipage/#dom-media-src
2982    make_url_setter!(SetSrc, "src");
2983
2984    /// <https://html.spec.whatwg.org/multipage/#dom-media-crossOrigin>
2985    fn GetCrossOrigin(&self) -> Option<DOMString> {
2986        reflect_cross_origin_attribute(self.upcast::<Element>())
2987    }
2988    /// <https://html.spec.whatwg.org/multipage/#dom-media-crossOrigin>
2989    fn SetCrossOrigin(&self, value: Option<DOMString>, can_gc: CanGc) {
2990        set_cross_origin_attribute(self.upcast::<Element>(), value, can_gc);
2991    }
2992
2993    /// <https://html.spec.whatwg.org/multipage/#dom-media-muted>
2994    fn Muted(&self) -> bool {
2995        self.muted.get()
2996    }
2997
2998    /// <https://html.spec.whatwg.org/multipage/#dom-media-muted>
2999    fn SetMuted(&self, value: bool) {
3000        if self.muted.get() == value {
3001            return;
3002        }
3003
3004        self.muted.set(value);
3005
3006        if let Some(ref player) = *self.player.borrow() {
3007            if let Err(err) = player.lock().unwrap().set_mute(value) {
3008                warn!("Could not set mute state {:?}", err);
3009            }
3010        }
3011
3012        // The user agent must queue a media element task given the media element to fire an event
3013        // named volumechange at the media element.
3014        self.queue_media_element_task_to_fire_event(atom!("volumechange"));
3015
3016        // Then, if the media element is not allowed to play, the user agent must run the internal
3017        // pause steps for the media element.
3018        if !self.is_allowed_to_play() {
3019            self.internal_pause_steps();
3020        }
3021    }
3022
3023    /// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
3024    fn GetSrcObject(&self) -> Option<MediaStreamOrBlob> {
3025        (*self.src_object.borrow())
3026            .as_ref()
3027            .map(|src_object| match src_object {
3028                SrcObject::Blob(blob) => MediaStreamOrBlob::Blob(DomRoot::from_ref(blob)),
3029                SrcObject::MediaStream(stream) => {
3030                    MediaStreamOrBlob::MediaStream(DomRoot::from_ref(stream))
3031                },
3032            })
3033    }
3034
3035    /// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
3036    fn SetSrcObject(&self, value: Option<MediaStreamOrBlob>, can_gc: CanGc) {
3037        *self.src_object.borrow_mut() = value.map(|value| value.into());
3038        self.media_element_load_algorithm(can_gc);
3039    }
3040
3041    // https://html.spec.whatwg.org/multipage/#attr-media-preload
3042    // Missing/Invalid values are user-agent defined.
3043    make_enumerated_getter!(
3044        Preload,
3045        "preload",
3046        "none" | "metadata" | "auto",
3047        missing => "auto",
3048        invalid => "auto"
3049    );
3050
3051    // https://html.spec.whatwg.org/multipage/#attr-media-preload
3052    make_setter!(SetPreload, "preload");
3053
3054    /// <https://html.spec.whatwg.org/multipage/#dom-media-currentsrc>
3055    fn CurrentSrc(&self) -> USVString {
3056        USVString(self.current_src.borrow().clone())
3057    }
3058
3059    /// <https://html.spec.whatwg.org/multipage/#dom-media-load>
3060    fn Load(&self, can_gc: CanGc) {
3061        self.media_element_load_algorithm(can_gc);
3062    }
3063
3064    /// <https://html.spec.whatwg.org/multipage/#dom-navigator-canplaytype>
3065    fn CanPlayType(&self, type_: DOMString) -> CanPlayTypeResult {
3066        match ServoMedia::get().can_play_type(&type_.str()) {
3067            SupportsMediaType::No => CanPlayTypeResult::_empty,
3068            SupportsMediaType::Maybe => CanPlayTypeResult::Maybe,
3069            SupportsMediaType::Probably => CanPlayTypeResult::Probably,
3070        }
3071    }
3072
3073    /// <https://html.spec.whatwg.org/multipage/#dom-media-error>
3074    fn GetError(&self) -> Option<DomRoot<MediaError>> {
3075        self.error.get()
3076    }
3077
3078    /// <https://html.spec.whatwg.org/multipage/#dom-media-play>
3079    fn Play(&self, comp: InRealm, can_gc: CanGc) -> Rc<Promise> {
3080        let promise = Promise::new_in_current_realm(comp, can_gc);
3081
3082        // TODO Step 1. If the media element is not allowed to play, then return a promise rejected
3083        // with a "NotAllowedError" DOMException.
3084
3085        // Step 2. If the media element's error attribute is not null and its code is
3086        // MEDIA_ERR_SRC_NOT_SUPPORTED, then return a promise rejected with a "NotSupportedError"
3087        // DOMException.
3088        if self
3089            .error
3090            .get()
3091            .is_some_and(|e| e.Code() == MEDIA_ERR_SRC_NOT_SUPPORTED)
3092        {
3093            promise.reject_error(Error::NotSupported(None), can_gc);
3094            return promise;
3095        }
3096
3097        // Step 3. Let promise be a new promise and append promise to the list of pending play
3098        // promises.
3099        self.push_pending_play_promise(&promise);
3100
3101        // Step 4. Run the internal play steps for the media element.
3102        self.internal_play_steps(can_gc);
3103
3104        // Step 5. Return promise.
3105        promise
3106    }
3107
3108    /// <https://html.spec.whatwg.org/multipage/#dom-media-pause>
3109    fn Pause(&self, can_gc: CanGc) {
3110        // Step 1. If the media element's networkState attribute has the value NETWORK_EMPTY, invoke
3111        // the media element's resource selection algorithm.
3112        if self.network_state.get() == NetworkState::Empty {
3113            self.invoke_resource_selection_algorithm(can_gc);
3114        }
3115
3116        // Step 2. Run the internal pause steps for the media element.
3117        self.internal_pause_steps();
3118    }
3119
3120    /// <https://html.spec.whatwg.org/multipage/#dom-media-paused>
3121    fn Paused(&self) -> bool {
3122        self.paused.get()
3123    }
3124
3125    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
3126    fn GetDefaultPlaybackRate(&self) -> Fallible<Finite<f64>> {
3127        Ok(Finite::wrap(self.default_playback_rate.get()))
3128    }
3129
3130    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
3131    fn SetDefaultPlaybackRate(&self, value: Finite<f64>) -> ErrorResult {
3132        // If the given value is not supported by the user agent, then throw a "NotSupportedError"
3133        // DOMException.
3134        let min_allowed = -64.0;
3135        let max_allowed = 64.0;
3136        if *value < min_allowed || *value > max_allowed {
3137            return Err(Error::NotSupported(None));
3138        }
3139
3140        if self.default_playback_rate.get() == *value {
3141            return Ok(());
3142        }
3143
3144        self.default_playback_rate.set(*value);
3145
3146        // The user agent must queue a media element task given the media element to fire an event
3147        // named ratechange at the media element.
3148        self.queue_media_element_task_to_fire_event(atom!("ratechange"));
3149
3150        Ok(())
3151    }
3152
3153    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
3154    fn GetPlaybackRate(&self) -> Fallible<Finite<f64>> {
3155        Ok(Finite::wrap(self.playback_rate.get()))
3156    }
3157
3158    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
3159    fn SetPlaybackRate(&self, value: Finite<f64>) -> ErrorResult {
3160        // The attribute is mutable: on setting, the user agent must follow these steps:
3161
3162        // Step 1. If the given value is not supported by the user agent, then throw a
3163        // "NotSupportedError" DOMException.
3164        let min_allowed = -64.0;
3165        let max_allowed = 64.0;
3166        if *value < min_allowed || *value > max_allowed {
3167            return Err(Error::NotSupported(None));
3168        }
3169
3170        if self.playback_rate.get() == *value {
3171            return Ok(());
3172        }
3173
3174        // Step 2. Set playbackRate to the new value, and if the element is potentially playing,
3175        // change the playback speed.
3176        self.playback_rate.set(*value);
3177
3178        if self.is_potentially_playing() {
3179            if let Some(ref player) = *self.player.borrow() {
3180                if let Err(err) = player.lock().unwrap().set_rate(*value) {
3181                    warn!("Could not set the playback rate {:?}", err);
3182                }
3183            }
3184        }
3185
3186        // The user agent must queue a media element task given the media element to fire an event
3187        // named ratechange at the media element.
3188        self.queue_media_element_task_to_fire_event(atom!("ratechange"));
3189
3190        Ok(())
3191    }
3192
3193    /// <https://html.spec.whatwg.org/multipage/#dom-media-duration>
3194    fn Duration(&self) -> f64 {
3195        self.duration.get()
3196    }
3197
3198    /// <https://html.spec.whatwg.org/multipage/#dom-media-currenttime>
3199    fn CurrentTime(&self) -> Finite<f64> {
3200        Finite::wrap(if self.default_playback_start_position.get() != 0. {
3201            self.default_playback_start_position.get()
3202        } else if self.seeking.get() {
3203            // Note that other browsers do something similar (by checking the `seeking` value or
3204            // clamping the `official` position to the earliest possible position, the duration,
3205            // and the seekable ranges).
3206            // <https://github.com/whatwg/html/issues/11773>
3207            self.current_seek_position.get()
3208        } else {
3209            self.official_playback_position.get()
3210        })
3211    }
3212
3213    /// <https://html.spec.whatwg.org/multipage/#dom-media-currenttime>
3214    fn SetCurrentTime(&self, time: Finite<f64>) {
3215        if self.ready_state.get() == ReadyState::HaveNothing {
3216            self.default_playback_start_position.set(*time);
3217        } else {
3218            self.official_playback_position.set(*time);
3219            self.seek(*time, /* approximate_for_speed */ false);
3220        }
3221    }
3222
3223    /// <https://html.spec.whatwg.org/multipage/#dom-media-seeking>
3224    fn Seeking(&self) -> bool {
3225        self.seeking.get()
3226    }
3227
3228    /// <https://html.spec.whatwg.org/multipage/#dom-media-ended>
3229    fn Ended(&self) -> bool {
3230        self.ended_playback(LoopCondition::Included) &&
3231            self.direction_of_playback() == PlaybackDirection::Forwards
3232    }
3233
3234    /// <https://html.spec.whatwg.org/multipage/#dom-media-fastseek>
3235    fn FastSeek(&self, time: Finite<f64>) {
3236        self.seek(*time, /* approximate_for_speed */ true);
3237    }
3238
3239    /// <https://html.spec.whatwg.org/multipage/#dom-media-played>
3240    fn Played(&self, can_gc: CanGc) -> DomRoot<TimeRanges> {
3241        TimeRanges::new(
3242            self.global().as_window(),
3243            self.played.borrow().clone(),
3244            can_gc,
3245        )
3246    }
3247
3248    /// <https://html.spec.whatwg.org/multipage/#dom-media-seekable>
3249    fn Seekable(&self, can_gc: CanGc) -> DomRoot<TimeRanges> {
3250        TimeRanges::new(self.global().as_window(), self.seekable(), can_gc)
3251    }
3252
3253    /// <https://html.spec.whatwg.org/multipage/#dom-media-buffered>
3254    fn Buffered(&self, can_gc: CanGc) -> DomRoot<TimeRanges> {
3255        let mut buffered = TimeRangesContainer::default();
3256        if let Some(ref player) = *self.player.borrow() {
3257            if let Ok(ranges) = player.lock().unwrap().buffered() {
3258                for range in ranges {
3259                    let _ = buffered.add(range.start, range.end);
3260                }
3261            }
3262        }
3263        TimeRanges::new(self.global().as_window(), buffered, can_gc)
3264    }
3265
3266    /// <https://html.spec.whatwg.org/multipage/#dom-media-audiotracks>
3267    fn AudioTracks(&self, can_gc: CanGc) -> DomRoot<AudioTrackList> {
3268        let window = self.owner_window();
3269        self.audio_tracks_list
3270            .or_init(|| AudioTrackList::new(&window, &[], Some(self), can_gc))
3271    }
3272
3273    /// <https://html.spec.whatwg.org/multipage/#dom-media-videotracks>
3274    fn VideoTracks(&self, can_gc: CanGc) -> DomRoot<VideoTrackList> {
3275        let window = self.owner_window();
3276        self.video_tracks_list
3277            .or_init(|| VideoTrackList::new(&window, &[], Some(self), can_gc))
3278    }
3279
3280    /// <https://html.spec.whatwg.org/multipage/#dom-media-texttracks>
3281    fn TextTracks(&self, can_gc: CanGc) -> DomRoot<TextTrackList> {
3282        let window = self.owner_window();
3283        self.text_tracks_list
3284            .or_init(|| TextTrackList::new(&window, &[], can_gc))
3285    }
3286
3287    /// <https://html.spec.whatwg.org/multipage/#dom-media-addtexttrack>
3288    fn AddTextTrack(
3289        &self,
3290        kind: TextTrackKind,
3291        label: DOMString,
3292        language: DOMString,
3293        can_gc: CanGc,
3294    ) -> DomRoot<TextTrack> {
3295        let window = self.owner_window();
3296        // Step 1 & 2
3297        // FIXME(#22314, dlrobertson) set the ready state to Loaded
3298        let track = TextTrack::new(
3299            &window,
3300            "".into(),
3301            kind,
3302            label,
3303            language,
3304            TextTrackMode::Hidden,
3305            None,
3306            can_gc,
3307        );
3308        // Step 3 & 4
3309        self.TextTracks(can_gc).add(&track);
3310        // Step 5
3311        DomRoot::from_ref(&track)
3312    }
3313
3314    /// <https://html.spec.whatwg.org/multipage/#dom-media-volume>
3315    fn GetVolume(&self) -> Fallible<Finite<f64>> {
3316        Ok(Finite::wrap(self.volume.get()))
3317    }
3318
3319    /// <https://html.spec.whatwg.org/multipage/#dom-media-volume>
3320    fn SetVolume(&self, value: Finite<f64>) -> ErrorResult {
3321        // If the new value is outside the range 0.0 to 1.0 inclusive, then, on setting, an
3322        // "IndexSizeError" DOMException must be thrown instead.
3323        let minimum_volume = 0.0;
3324        let maximum_volume = 1.0;
3325        if *value < minimum_volume || *value > maximum_volume {
3326            return Err(Error::IndexSize(None));
3327        }
3328
3329        if self.volume.get() == *value {
3330            return Ok(());
3331        }
3332
3333        self.volume.set(*value);
3334
3335        if let Some(ref player) = *self.player.borrow() {
3336            if let Err(err) = player.lock().unwrap().set_volume(*value) {
3337                warn!("Could not set the volume {:?}", err);
3338            }
3339        }
3340
3341        // The user agent must queue a media element task given the media element to fire an event
3342        // named volumechange at the media element.
3343        self.queue_media_element_task_to_fire_event(atom!("volumechange"));
3344
3345        // Then, if the media element is not allowed to play, the user agent must run the internal
3346        // pause steps for the media element.
3347        if !self.is_allowed_to_play() {
3348            self.internal_pause_steps();
3349        }
3350
3351        Ok(())
3352    }
3353}
3354
3355impl VirtualMethods for HTMLMediaElement {
3356    fn super_type(&self) -> Option<&dyn VirtualMethods> {
3357        Some(self.upcast::<HTMLElement>() as &dyn VirtualMethods)
3358    }
3359
3360    fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation, can_gc: CanGc) {
3361        self.super_type()
3362            .unwrap()
3363            .attribute_mutated(attr, mutation, can_gc);
3364
3365        match *attr.local_name() {
3366            local_name!("muted") => {
3367                // <https://html.spec.whatwg.org/multipage/#dom-media-muted>
3368                // When a media element is created, if the element has a muted content attribute
3369                // specified, then the muted IDL attribute should be set to true.
3370                if let AttributeMutation::Set(
3371                    _,
3372                    AttributeMutationReason::ByCloning | AttributeMutationReason::ByParser,
3373                ) = mutation
3374                {
3375                    self.SetMuted(true);
3376                }
3377            },
3378            local_name!("src") => {
3379                // <https://html.spec.whatwg.org/multipage/#location-of-the-media-resource>
3380                // If a src attribute of a media element is set or changed, the user agent must invoke
3381                // the media element's media element load algorithm (Removing the src attribute does
3382                // not do this, even if there are source elements present).
3383                if !mutation.is_removal() {
3384                    self.media_element_load_algorithm(can_gc);
3385                }
3386            },
3387            local_name!("controls") => {
3388                if mutation.new_value(attr).is_some() {
3389                    self.render_controls(can_gc);
3390                } else {
3391                    self.remove_controls();
3392                }
3393            },
3394            _ => (),
3395        };
3396    }
3397
3398    /// <https://html.spec.whatwg.org/multipage/#playing-the-media-resource:remove-an-element-from-a-document>
3399    fn unbind_from_tree(&self, context: &UnbindContext, can_gc: CanGc) {
3400        self.super_type().unwrap().unbind_from_tree(context, can_gc);
3401
3402        self.remove_controls();
3403
3404        if context.tree_connected {
3405            let task = MediaElementMicrotask::PauseIfNotInDocument {
3406                elem: DomRoot::from_ref(self),
3407            };
3408            ScriptThread::await_stable_state(Microtask::MediaElement(task));
3409        }
3410    }
3411
3412    fn adopting_steps(&self, old_doc: &Document, can_gc: CanGc) {
3413        self.super_type().unwrap().adopting_steps(old_doc, can_gc);
3414
3415        // Note that the media controls id should be adopted between documents so that the
3416        // "privileged" document.servoGetMediaControls(id) API keeps access to the whitelist
3417        // of media controls identifiers.
3418        if let Some(id) = &*self.media_controls_id.borrow() {
3419            let Some(shadow_root) = self.upcast::<Element>().shadow_root() else {
3420                error!("Missing media controls shadow root");
3421                return;
3422            };
3423
3424            old_doc.unregister_media_controls(id);
3425            self.owner_document()
3426                .register_media_controls(id, &shadow_root);
3427        }
3428    }
3429}
3430
3431#[derive(JSTraceable, MallocSizeOf)]
3432pub(crate) enum MediaElementMicrotask {
3433    ResourceSelection {
3434        elem: DomRoot<HTMLMediaElement>,
3435        generation_id: u32,
3436        #[no_trace]
3437        base_url: ServoUrl,
3438    },
3439    PauseIfNotInDocument {
3440        elem: DomRoot<HTMLMediaElement>,
3441    },
3442    Seeked {
3443        elem: DomRoot<HTMLMediaElement>,
3444        generation_id: u32,
3445    },
3446    SelectNextSourceChild {
3447        elem: DomRoot<HTMLMediaElement>,
3448        generation_id: u32,
3449    },
3450    SelectNextSourceChildAfterWait {
3451        elem: DomRoot<HTMLMediaElement>,
3452        generation_id: u32,
3453    },
3454}
3455
3456impl MicrotaskRunnable for MediaElementMicrotask {
3457    fn handler(&self, can_gc: CanGc) {
3458        match self {
3459            &MediaElementMicrotask::ResourceSelection {
3460                ref elem,
3461                generation_id,
3462                ref base_url,
3463            } => {
3464                if generation_id == elem.generation_id.get() {
3465                    elem.resource_selection_algorithm_sync(base_url.clone(), can_gc);
3466                }
3467            },
3468            MediaElementMicrotask::PauseIfNotInDocument { elem } => {
3469                if !elem.upcast::<Node>().is_connected() {
3470                    elem.internal_pause_steps();
3471                }
3472            },
3473            &MediaElementMicrotask::Seeked {
3474                ref elem,
3475                generation_id,
3476            } => {
3477                if generation_id == elem.generation_id.get() {
3478                    elem.seek_end();
3479                }
3480            },
3481            &MediaElementMicrotask::SelectNextSourceChild {
3482                ref elem,
3483                generation_id,
3484            } => {
3485                if generation_id == elem.generation_id.get() {
3486                    elem.select_next_source_child(can_gc);
3487                }
3488            },
3489            &MediaElementMicrotask::SelectNextSourceChildAfterWait {
3490                ref elem,
3491                generation_id,
3492            } => {
3493                if generation_id == elem.generation_id.get() {
3494                    elem.select_next_source_child_after_wait(can_gc);
3495                }
3496            },
3497        }
3498    }
3499
3500    fn enter_realm(&self) -> JSAutoRealm {
3501        match self {
3502            &MediaElementMicrotask::ResourceSelection { ref elem, .. } |
3503            &MediaElementMicrotask::PauseIfNotInDocument { ref elem } |
3504            &MediaElementMicrotask::Seeked { ref elem, .. } |
3505            &MediaElementMicrotask::SelectNextSourceChild { ref elem, .. } |
3506            &MediaElementMicrotask::SelectNextSourceChildAfterWait { ref elem, .. } => {
3507                enter_realm(&**elem)
3508            },
3509        }
3510    }
3511}
3512
3513enum Resource {
3514    Object,
3515    Url(ServoUrl),
3516}
3517
3518#[derive(Debug, MallocSizeOf, PartialEq)]
3519enum DataBuffer {
3520    Payload(Vec<u8>),
3521    EndOfStream,
3522}
3523
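/// Temporary buffering of fetched media data until the player is ready to
/// consume it.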
3524#[derive(MallocSizeOf)]
3525struct BufferedDataSource {
3526    /// During initial setup and seeking (including clearing the buffer queue
3527    /// and resetting the end-of-stream state), the data source should be locked and
3528    /// any request for processing should be ignored until the media player informs us
3529    /// via the NeedData event that it is ready to accept incoming data.
3530    locked: Cell<bool>,
3531    /// Temporary storage for incoming data.
3532    buffers: VecDeque<DataBuffer>,
3533}
3534
3535impl BufferedDataSource {
3536    fn new() -> BufferedDataSource {
3537        BufferedDataSource {
3538            locked: Cell::new(true),
3539            buffers: VecDeque::default(),
3540        }
3541    }
3542
3543    fn set_locked(&self, locked: bool) {
3544        self.locked.set(locked)
3545    }
3546
3547    fn add_buffer_to_queue(&mut self, buffer: DataBuffer) {
3548        debug_assert_ne!(
3549            self.buffers.back(),
3550            Some(&DataBuffer::EndOfStream),
3551            "The media backend does not expect any further data after end of stream"
3552        );
3553
3554        self.buffers.push_back(buffer);
3555    }
3556
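    /// Drains the queued buffers into the player, pushing payload data and
    /// signalling end of stream as needed. Does nothing while the data source
    /// is locked.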
3557    fn process_into_player_from_queue(
3558        &mut self,
3559        player: &Arc<Mutex<dyn Player>>,
3560    ) -> Result<(), PlayerError> {
3561        // Early out if any request for processing should be ignored.
3562        if self.locked.get() {
3563            return Ok(());
3564        }
3565
3566        while let Some(buffer) = self.buffers.pop_front() {
3567            match buffer {
3568                DataBuffer::Payload(payload) => {
3569                    if let Err(e) = player.lock().unwrap().push_data(payload) {
3570                        warn!("Could not push input data to player {:?}", e);
3571                        return Err(e);
3572                    }
3573                },
3574                DataBuffer::EndOfStream => {
3575                    if let Err(e) = player.lock().unwrap().end_of_stream() {
3576                        warn!("Could not signal EOS to player {:?}", e);
3577                        return Err(e);
3578                    }
3579                },
3580            }
3581        }
3582
3583        Ok(())
3584    }
3585
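    /// Locks the data source again and drops any queued buffers (used, for
    /// example, when the ongoing fetch request is cancelled).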
3586    fn reset(&mut self) {
3587        self.locked.set(true);
3588        self.buffers.clear();
3589    }
3590}
3591
3592/// Indicates the reason why a fetch request was cancelled.
3593#[derive(Debug, MallocSizeOf, PartialEq)]
3594enum CancelReason {
3595    /// We were asked to stop pushing data to the player.
3596    Backoff,
3597    /// An error occurred while fetching the media data.
3598    Error,
3599    /// The fetching process was aborted by the user.
3600    Abort,
3601}
3602
3603#[derive(MallocSizeOf)]
3604pub(crate) struct HTMLMediaElementFetchContext {
3605    /// The fetch request id.
3606    request_id: RequestId,
3607    /// Some if the request has been cancelled.
3608    cancel_reason: Option<CancelReason>,
3609    /// Indicates whether the fetched stream is seekable.
3610    is_seekable: bool,
3611    /// Indicates whether the fetched stream is origin clean.
3612    origin_clean: bool,
3613    /// The buffered data source to be processed by the media backend.
3614    data_source: RefCell<BufferedDataSource>,
3615    /// Fetch canceller. Allows cancelling the current fetch request by
3616    /// manually calling its .cancel() method or automatically on Drop.
3617    fetch_canceller: FetchCanceller,
3618}
3619
3620impl HTMLMediaElementFetchContext {
3621    fn new(
3622        request_id: RequestId,
3623        core_resource_thread: CoreResourceThread,
3624    ) -> HTMLMediaElementFetchContext {
3625        HTMLMediaElementFetchContext {
3626            request_id,
3627            cancel_reason: None,
3628            is_seekable: false,
3629            origin_clean: true,
3630            data_source: RefCell::new(BufferedDataSource::new()),
3631            fetch_canceller: FetchCanceller::new(request_id, core_resource_thread.clone()),
3632        }
3633    }
3634
3635    fn request_id(&self) -> RequestId {
3636        self.request_id
3637    }
3638
3639    fn is_seekable(&self) -> bool {
3640        self.is_seekable
3641    }
3642
3643    fn set_seekable(&mut self, seekable: bool) {
3644        self.is_seekable = seekable;
3645    }
3646
3647    fn origin_is_clean(&self) -> bool {
3648        self.origin_clean
3649    }
3650
3651    fn set_origin_clean(&mut self, origin_clean: bool) {
3652        self.origin_clean = origin_clean;
3653    }
3654
3655    fn data_source(&self) -> &RefCell<BufferedDataSource> {
3656        &self.data_source
3657    }
3658
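    /// Cancels the ongoing fetch request with the given reason, resetting the
    /// buffered data source. Later calls are ignored once a reason has been set.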
3659    fn cancel(&mut self, reason: CancelReason) {
3660        if self.cancel_reason.is_some() {
3661            return;
3662        }
3663        self.cancel_reason = Some(reason);
3664        self.data_source.borrow_mut().reset();
3665        self.fetch_canceller.cancel();
3666    }
3667
3668    fn cancel_reason(&self) -> &Option<CancelReason> {
3669        &self.cancel_reason
3670    }
3671}
3672
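/// Listens for responses to the media resource fetch request and pushes the
/// received data to the media player through the element's current fetch context.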
3673struct HTMLMediaElementFetchListener {
3674    /// The element that initiated the request.
3675    element: Trusted<HTMLMediaElement>,
3676    /// The generation of the media element when this fetch started.
3677    generation_id: u32,
3678    /// The fetch request id.
3679    request_id: RequestId,
3680    /// Time after which the next progress event may be fired.
3681    next_progress_event: Instant,
3682    /// Url for the resource.
3683    url: ServoUrl,
3684    /// Expected content length of the media asset being fetched or played.
3685    expected_content_length: Option<u64>,
3686    /// Actual content length of the media asset fetched so far.
3687    fetched_content_length: u64,
3688    /// Amount of data (in bytes) from the network to discard for the ongoing
3689    /// request when range requests are not supported. Seek requests set it to
3690    /// the requested byte position.
3691    content_length_to_discard: u64,
3692}
3693
3694impl FetchResponseListener for HTMLMediaElementFetchListener {
3695    fn process_request_body(&mut self, _: RequestId) {}
3696
3697    fn process_request_eof(&mut self, _: RequestId) {}
3698
3699    fn process_response(&mut self, _: RequestId, metadata: Result<FetchMetadata, NetworkError>) {
3700        let element = self.element.root();
3701
3702        let (metadata, origin_clean) = match metadata {
3703            Ok(fetch_metadata) => match fetch_metadata {
3704                FetchMetadata::Unfiltered(metadata) => (Some(metadata), true),
3705                FetchMetadata::Filtered { filtered, unsafe_ } => (
3706                    Some(unsafe_),
3707                    matches!(
3708                        filtered,
3709                        FilteredMetadata::Basic(_) | FilteredMetadata::Cors(_)
3710                    ),
3711                ),
3712            },
3713            Err(_) => (None, true),
3714        };
3715
3716        let (status_is_success, is_seekable) =
3717            metadata.as_ref().map_or((false, false), |metadata| {
3718                let status = &metadata.status;
3719                (status.is_success(), *status == StatusCode::PARTIAL_CONTENT)
3720            });
3721
3722        // <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
3723        if !status_is_success {
3724            if element.ready_state.get() == ReadyState::HaveNothing {
3725                // => "If the media data cannot be fetched at all, due to network errors..."
3726                element.media_data_processing_failure_steps();
3727            } else {
3728                // => "If the connection is interrupted after some media data has been received..."
3729                element.media_data_processing_fatal_steps(MEDIA_ERR_NETWORK, CanGc::note());
3730            }
3731            return;
3732        }
3733
3734        if let Some(ref mut current_fetch_context) = *element.current_fetch_context.borrow_mut() {
3735            current_fetch_context.set_seekable(is_seekable);
3736            current_fetch_context.set_origin_clean(origin_clean);
3737        }
3738
3739        if let Some(metadata) = metadata.as_ref() {
3740            if let Some(headers) = metadata.headers.as_ref() {
3741                // For range requests we get the size of the media asset from the Content-Range
3742                // header. Otherwise, we get it from the Content-Length header.
                let content_length =
                    if let Some(content_range) = headers.typed_get::<ContentRange>() {
                        content_range.bytes_len()
                    } else {
                        headers
                            .typed_get::<ContentLength>()
                            .map(|content_length| content_length.0)
                    };

                // We only update the expected input size when it changes and a
                // length is actually available.
                if content_length != self.expected_content_length {
                    if let Some(content_length) = content_length {
                        self.expected_content_length = Some(content_length);
                    }
                }
            }
        }

        // Explicitly initialize the media player input size for a live/seekable source.
        if let Some(expected_content_length) = self.expected_content_length {
            if let Err(e) = element
                .player
                .borrow()
                .as_ref()
                .unwrap()
                .lock()
                .unwrap()
                .set_input_size(expected_content_length)
            {
                warn!("Could not set player input size {:?}", e);
            }
        }
    }

    fn process_response_chunk(&mut self, _: RequestId, chunk: Vec<u8>) {
        let element = self.element.root();

        self.fetched_content_length += chunk.len() as u64;

        // If the fetch was previously cancelled to back off (the player already has
        // enough data), skip processing this payload.
        if let Some(ref mut current_fetch_context) = *element.current_fetch_context.borrow_mut() {
            if let Some(CancelReason::Backoff) = current_fetch_context.cancel_reason() {
                return;
            }

            // Discard a chunk of the response body if the fetch context doesn't support range requests.
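            // As an illustration (numbers are hypothetical): after a seek to byte
            // offset 1000 on a non-seekable resource, `content_length_to_discard`
            // starts at 1000. A 600-byte chunk is then dropped entirely (400 bytes
            // left to discard) and only the first 400 bytes of the following chunk
            // are skipped before the rest is pushed to the player.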
            let payload = if !current_fetch_context.is_seekable() &&
                self.content_length_to_discard != 0
            {
                if chunk.len() as u64 > self.content_length_to_discard {
                    let shrink_chunk = chunk[self.content_length_to_discard as usize..].to_vec();
                    self.content_length_to_discard = 0;
                    shrink_chunk
                } else {
                    // Completely discard this response chunk.
                    self.content_length_to_discard -= chunk.len() as u64;
                    return;
                }
            } else {
                chunk
            };

            if let Err(e) = {
                let mut data_source = current_fetch_context.data_source().borrow_mut();
                data_source.add_buffer_to_queue(DataBuffer::Payload(payload));
                data_source
                    .process_into_player_from_queue(element.player.borrow().as_ref().unwrap())
            } {
                // If we are pushing too much data and we know that we can
                // restart the download later from where we left off, we cancel
                // the current request. Otherwise, we continue the request
                // assuming that we may drop some frames.
                if e == PlayerError::EnoughData {
                    current_fetch_context.cancel(CancelReason::Backoff);
                }
                return;
            }
        }

        // <https://html.spec.whatwg.org/multipage/#concept-media-load-resource>
        // While the load is not suspended (see below), every 350ms (±200ms) or for every byte
        // received, whichever is least frequent, queue a media element task given the media element
        // to fire an event named progress at the element.
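        // In other words, if chunks arrive more often than every 350ms, the progress
        // events are throttled to roughly one every 350ms; if chunks arrive less
        // often than that, one fires for essentially every chunk received.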
        if Instant::now() > self.next_progress_event {
            element.queue_media_element_task_to_fire_event(atom!("progress"));
            self.next_progress_event = Instant::now() + Duration::from_millis(350);
        }
    }

    fn process_response_eof(self, _: RequestId, status: Result<ResourceFetchTiming, NetworkError>) {
        let element = self.element.root();

        // <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
        if status.is_ok() && self.fetched_content_length != 0 {
            // => "Once the entire media resource has been fetched..."

            // There are no more chunks of the response body forthcoming, so we can
            // go ahead and notify the media backend not to expect any further data.
            if let Some(ref mut current_fetch_context) = *element.current_fetch_context.borrow_mut()
            {
                // On the initial READY -> PAUSED state change, the media player performs a
                // seek to the initial position via an event carrying a seek segment in TIME
                // format, whereas the media stack operates in BYTES format, and configuring
                // the segment start and stop positions without knowing the total size of the
                // stream is not possible. As a fallback, the media player performs the seek
                // in BYTES format and initiates a seek request via the "seek-data" callback
                // with the required offset.
                if self.expected_content_length.is_none() {
                    if let Err(e) = element
                        .player
                        .borrow()
                        .as_ref()
                        .unwrap()
                        .lock()
                        .unwrap()
                        .set_input_size(self.fetched_content_length)
                    {
                        warn!("Could not set player input size {:?}", e);
                    }
                }

                let mut data_source = current_fetch_context.data_source().borrow_mut();

                data_source.add_buffer_to_queue(DataBuffer::EndOfStream);
                let _ = data_source
                    .process_into_player_from_queue(element.player.borrow().as_ref().unwrap());
            }

            // Step 1. Fire an event named progress at the media element.
            element
                .upcast::<EventTarget>()
                .fire_event(atom!("progress"), CanGc::note());

            // Step 2. Set the networkState to NETWORK_IDLE and fire an event named suspend at the
            // media element.
            element.network_state.set(NetworkState::Idle);

            element
                .upcast::<EventTarget>()
                .fire_event(atom!("suspend"), CanGc::note());
        } else if status.is_err() && element.ready_state.get() != ReadyState::HaveNothing {
            // => "If the connection is interrupted after some media data has been received..."
            element.media_data_processing_fatal_steps(MEDIA_ERR_NETWORK, CanGc::note());
        } else {
            // => "If the media data can be fetched but is found by inspection to be in an
            // unsupported format, or can otherwise not be rendered at all"
            element.media_data_processing_failure_steps();
        }

        if let Ok(response) = status {
            network_listener::submit_timing(&self, &response, CanGc::note());
        }
    }

    fn process_csp_violations(&mut self, _request_id: RequestId, violations: Vec<Violation>) {
        let global = &self.resource_timing_global();
        global.report_csp_violations(violations, None, None);
    }

    fn should_invoke(&self) -> bool {
        let element = self.element.root();

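        // The element's generation id is bumped when a new load is started, so a
        // mismatch means this response belongs to a previous load and should be
        // ignored; likewise if the player has already been dropped.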
        if element.generation_id.get() != self.generation_id || element.player.borrow().is_none() {
            return false;
        }

        let Some(ref current_fetch_context) = *element.current_fetch_context.borrow() else {
            return false;
        };

        // Ignore the response if a new fetch request has been triggered since this
        // listener was created.
        if current_fetch_context.request_id() != self.request_id {
            return false;
        }

        // Ignore the response if the current fetch request was cancelled due to a
        // network or decoding error, or was aborted by the user.
        if let Some(cancel_reason) = current_fetch_context.cancel_reason() {
            if matches!(*cancel_reason, CancelReason::Error | CancelReason::Abort) {
                return false;
            }
        }

        true
    }
}

impl ResourceTimingListener for HTMLMediaElementFetchListener {
    fn resource_timing_information(&self) -> (InitiatorType, ServoUrl) {
        let initiator_type = InitiatorType::LocalName(
            self.element
                .root()
                .upcast::<Element>()
                .local_name()
                .to_string(),
        );
        (initiator_type, self.url.clone())
    }

    fn resource_timing_global(&self) -> DomRoot<GlobalScope> {
        self.element.root().owner_document().global()
    }
}

impl HTMLMediaElementFetchListener {
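    /// Creates a listener for a media resource fetch. `offset` is the byte
    /// position from which data is needed (e.g. after a seek); if the server
    /// does not support range requests, that many bytes of the response are
    /// discarded before data is pushed to the player.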
    fn new(element: &HTMLMediaElement, request_id: RequestId, url: ServoUrl, offset: u64) -> Self {
        Self {
            element: Trusted::new(element),
            generation_id: element.generation_id.get(),
            request_id,
            next_progress_event: Instant::now() + Duration::from_millis(350),
            url,
            expected_content_length: None,
            fetched_content_length: 0,
            content_length_to_discard: offset,
        }
    }
}