script/dom/html/htmlmediaelement.rs

/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/. */

use std::cell::{Cell, RefCell};
use std::collections::VecDeque;
use std::rc::Rc;
use std::sync::{Arc, Mutex, Weak};
use std::time::{Duration, Instant};
use std::{f64, mem};

use base::generic_channel::GenericSharedMemory;
use base::id::WebViewId;
use content_security_policy::sandboxing_directive::SandboxingFlagSet;
use dom_struct::dom_struct;
use embedder_traits::{MediaPositionState, MediaSessionEvent, MediaSessionPlaybackState};
use euclid::default::Size2D;
use headers::{ContentLength, ContentRange, HeaderMapExt};
use html5ever::{LocalName, Prefix, QualName, local_name, ns};
use http::StatusCode;
use http::header::{self, HeaderMap, HeaderValue};
use ipc_channel::ipc::{self};
use ipc_channel::router::ROUTER;
use js::jsapi::JSAutoRealm;
use layout_api::MediaFrame;
use media::{GLPlayerMsg, GLPlayerMsgForward, WindowGLContext};
use net_traits::request::{Destination, RequestId};
use net_traits::{
    CoreResourceThread, FetchMetadata, FilteredMetadata, NetworkError, ResourceFetchTiming,
};
use paint_api::{CrossProcessPaintApi, ImageUpdate, SerializableImageData};
use pixels::RasterImage;
use script_bindings::codegen::InheritTypes::{
    ElementTypeId, HTMLElementTypeId, HTMLMediaElementTypeId, NodeTypeId,
};
use script_bindings::root::assert_in_script;
use script_bindings::weakref::WeakRef;
use servo_config::pref;
use servo_media::player::audio::AudioRenderer;
use servo_media::player::video::{VideoFrame, VideoFrameRenderer};
use servo_media::player::{PlaybackState, Player, PlayerError, PlayerEvent, SeekLock, StreamType};
use servo_media::{ClientContextId, ServoMedia, SupportsMediaType};
use servo_url::ServoUrl;
use stylo_atoms::Atom;
use uuid::Uuid;
use webrender_api::{
    ExternalImageData, ExternalImageId, ExternalImageType, ImageBufferKind, ImageDescriptor,
    ImageDescriptorFlags, ImageFormat, ImageKey,
};

use crate::document_loader::{LoadBlocker, LoadType};
use crate::dom::attr::Attr;
use crate::dom::audio::audiotrack::AudioTrack;
use crate::dom::audio::audiotracklist::AudioTrackList;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::HTMLMediaElementBinding::{
    CanPlayTypeResult, HTMLMediaElementConstants, HTMLMediaElementMethods,
};
use crate::dom::bindings::codegen::Bindings::MediaErrorBinding::MediaErrorConstants::*;
use crate::dom::bindings::codegen::Bindings::MediaErrorBinding::MediaErrorMethods;
use crate::dom::bindings::codegen::Bindings::NavigatorBinding::Navigator_Binding::NavigatorMethods;
use crate::dom::bindings::codegen::Bindings::NodeBinding::Node_Binding::NodeMethods;
use crate::dom::bindings::codegen::Bindings::TextTrackBinding::{TextTrackKind, TextTrackMode};
use crate::dom::bindings::codegen::Bindings::URLBinding::URLMethods;
use crate::dom::bindings::codegen::Bindings::WindowBinding::Window_Binding::WindowMethods;
use crate::dom::bindings::codegen::UnionTypes::{
    MediaStreamOrBlob, VideoTrackOrAudioTrackOrTextTrack,
};
use crate::dom::bindings::error::{Error, ErrorResult, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::DomGlobal;
use crate::dom::bindings::root::{Dom, DomRoot, MutNullableDom};
use crate::dom::bindings::str::{DOMString, USVString};
use crate::dom::blob::Blob;
use crate::dom::csp::{GlobalCspReporting, Violation};
use crate::dom::document::Document;
use crate::dom::element::{
    AttributeMutation, AttributeMutationReason, CustomElementCreationMode, Element, ElementCreator,
    cors_setting_for_element, reflect_cross_origin_attribute, set_cross_origin_attribute,
};
use crate::dom::event::Event;
use crate::dom::eventtarget::EventTarget;
use crate::dom::globalscope::GlobalScope;
use crate::dom::html::htmlelement::HTMLElement;
use crate::dom::html::htmlsourceelement::HTMLSourceElement;
use crate::dom::html::htmlvideoelement::HTMLVideoElement;
use crate::dom::mediaerror::MediaError;
use crate::dom::mediafragmentparser::MediaFragmentParser;
use crate::dom::medialist::MediaList;
use crate::dom::mediastream::MediaStream;
use crate::dom::node::{Node, NodeDamage, NodeTraits, UnbindContext};
use crate::dom::performance::performanceresourcetiming::InitiatorType;
use crate::dom::promise::Promise;
use crate::dom::texttrack::TextTrack;
use crate::dom::texttracklist::TextTrackList;
use crate::dom::timeranges::{TimeRanges, TimeRangesContainer};
use crate::dom::trackevent::TrackEvent;
use crate::dom::url::URL;
use crate::dom::videotrack::VideoTrack;
use crate::dom::videotracklist::VideoTrackList;
use crate::dom::virtualmethods::VirtualMethods;
use crate::fetch::{FetchCanceller, RequestWithGlobalScope, create_a_potential_cors_request};
use crate::microtask::{Microtask, MicrotaskRunnable};
use crate::network_listener::{self, FetchResponseListener, ResourceTimingListener};
use crate::realms::{InRealm, enter_realm};
use crate::script_runtime::CanGc;
use crate::script_thread::ScriptThread;
use crate::task_source::SendableTaskSource;

/// A CSS file to style the media controls.
static MEDIA_CONTROL_CSS: &str = include_str!("../../resources/media-controls.css");

/// A JS file that implements the behavior of the media controls.
static MEDIA_CONTROL_JS: &str = include_str!("../../resources/media-controls.js");

#[derive(MallocSizeOf, PartialEq)]
enum FrameStatus {
    Locked,
    Unlocked,
}

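/// The latest decoded video frame together with a lock status. The holder is
/// locked while the frame's GL texture is being read on the compositor side,
/// so that `set` cannot replace the frame mid-read.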
#[derive(MallocSizeOf)]
struct FrameHolder(
    FrameStatus,
    #[ignore_malloc_size_of = "defined in servo-media"] VideoFrame,
);

impl FrameHolder {
    fn new(frame: VideoFrame) -> FrameHolder {
        FrameHolder(FrameStatus::Unlocked, frame)
    }

    fn lock(&mut self) {
        if self.0 == FrameStatus::Unlocked {
            self.0 = FrameStatus::Locked;
        };
    }

    fn unlock(&mut self) {
        if self.0 == FrameStatus::Locked {
            self.0 = FrameStatus::Unlocked;
        };
    }

    fn set(&mut self, new_frame: VideoFrame) {
        if self.0 == FrameStatus::Unlocked {
            self.1 = new_frame
        };
    }

    fn get(&self) -> (u32, Size2D<i32>, usize) {
        if self.0 == FrameStatus::Locked {
            (
                self.1.get_texture_id(),
                Size2D::new(self.1.get_width(), self.1.get_height()),
                0,
            )
        } else {
            unreachable!();
        }
    }

    fn get_frame(&self) -> VideoFrame {
        self.1.clone()
    }
}

#[derive(MallocSizeOf)]
pub(crate) struct MediaFrameRenderer {
    webview_id: WebViewId,
    player_id: Option<usize>,
    glplayer_id: Option<u64>,
    paint_api: CrossProcessPaintApi,
    #[ignore_malloc_size_of = "Defined in other crates"]
    player_context: WindowGLContext,
    current_frame: Option<MediaFrame>,
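    /// Image keys of previously displayed frames. Their deletion is deferred
    /// by a render cycle so the compositor is not left referencing a key that
    /// was removed while still in use.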
    old_frame: Option<ImageKey>,
    very_old_frame: Option<ImageKey>,
    current_frame_holder: Option<FrameHolder>,
    /// <https://html.spec.whatwg.org/multipage/#poster-frame>
    poster_frame: Option<MediaFrame>,
}

impl MediaFrameRenderer {
    fn new(
        webview_id: WebViewId,
        paint_api: CrossProcessPaintApi,
        player_context: WindowGLContext,
    ) -> Self {
        Self {
            webview_id,
            player_id: None,
            glplayer_id: None,
            paint_api,
            player_context,
            current_frame: None,
            old_frame: None,
            very_old_frame: None,
            current_frame_holder: None,
            poster_frame: None,
        }
    }

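    /// Registers this renderer with the GLPlayer thread, when one is
    /// available, and routes the `Lock`/`Unlock` messages that guard access
    /// to the current GL frame back to the script thread.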
    fn setup(
        &mut self,
        player_id: usize,
        task_source: SendableTaskSource,
        weak_video_renderer: Weak<Mutex<MediaFrameRenderer>>,
    ) {
        self.player_id = Some(player_id);

        let (glplayer_id, image_receiver) = self
            .player_context
            .glplayer_thread_sender
            .as_ref()
            .map(|sender| {
                let (image_sender, image_receiver) = ipc::channel::<GLPlayerMsgForward>().unwrap();
                sender
                    .send(GLPlayerMsg::RegisterPlayer(image_sender))
                    .unwrap();
                match image_receiver.recv().unwrap() {
                    GLPlayerMsgForward::PlayerId(id) => (Some(id), Some(image_receiver)),
                    _ => unreachable!(),
                }
            })
            .unwrap_or((None, None));

        self.glplayer_id = glplayer_id;

        let Some(image_receiver) = image_receiver else {
            return;
        };

        ROUTER.add_typed_route(
            image_receiver,
            Box::new(move |message| {
                let message = message.unwrap();
                let weak_video_renderer = weak_video_renderer.clone();

                task_source.queue(task!(handle_glplayer_message: move || {
                    trace!("GLPlayer message {:?}", message);

                    let Some(video_renderer) = weak_video_renderer.upgrade() else {
                        return;
                    };

                    match message {
                        GLPlayerMsgForward::Lock(sender) => {
                            if let Some(holder) = video_renderer
                                .lock()
                                .unwrap()
                                .current_frame_holder
                                .as_mut() {
                                    holder.lock();
                                    sender.send(holder.get()).unwrap();
                                };
                        },
                        GLPlayerMsgForward::Unlock() => {
                            if let Some(holder) = video_renderer
                                .lock()
                                .unwrap()
                                .current_frame_holder
                                .as_mut() { holder.unlock() }
                        },
                        _ => (),
                    }
                }));
            }),
        );
    }

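    /// Unregisters the player from the GLPlayer thread and schedules the
    /// deletion of any image keys still held by this renderer.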
    fn reset(&mut self) {
        self.player_id = None;

        if let Some(glplayer_id) = self.glplayer_id.take() {
            self.player_context
                .send(GLPlayerMsg::UnregisterPlayer(glplayer_id));
        }

        self.current_frame_holder = None;

        let mut updates = smallvec::smallvec![];

        if let Some(current_frame) = self.current_frame.take() {
            updates.push(ImageUpdate::DeleteImage(current_frame.image_key));
        }

        if let Some(old_image_key) = self.old_frame.take() {
            updates.push(ImageUpdate::DeleteImage(old_image_key));
        }

        if let Some(very_old_image_key) = self.very_old_frame.take() {
            updates.push(ImageUpdate::DeleteImage(very_old_image_key));
        }

        if !updates.is_empty() {
            self.paint_api
                .update_images(self.webview_id.into(), updates);
        }
    }

    fn set_poster_frame(&mut self, image: Option<Arc<RasterImage>>) {
        self.poster_frame = image.and_then(|image| {
            image.id.map(|image_key| MediaFrame {
                image_key,
                width: image.metadata.width as i32,
                height: image.metadata.height as i32,
            })
        });
    }
}

impl Drop for MediaFrameRenderer {
    fn drop(&mut self) {
        self.reset();
    }
}

impl VideoFrameRenderer for MediaFrameRenderer {
    fn render(&mut self, frame: VideoFrame) {
        if self.player_id.is_none() || (frame.is_gl_texture() && self.glplayer_id.is_none()) {
            return;
        }

        let mut updates = smallvec::smallvec![];

        if let Some(old_image_key) = mem::replace(&mut self.very_old_frame, self.old_frame.take()) {
            updates.push(ImageUpdate::DeleteImage(old_image_key));
        }

        let descriptor = ImageDescriptor::new(
            frame.get_width(),
            frame.get_height(),
            ImageFormat::BGRA8,
            ImageDescriptorFlags::empty(),
        );

        match &mut self.current_frame {
            Some(current_frame)
                if current_frame.width == frame.get_width() &&
                    current_frame.height == frame.get_height() =>
            {
                if !frame.is_gl_texture() {
                    updates.push(ImageUpdate::UpdateImage(
                        current_frame.image_key,
                        descriptor,
                        SerializableImageData::Raw(GenericSharedMemory::from_bytes(
                            &frame.get_data(),
                        )),
                        None,
                    ));
                }

                self.current_frame_holder
                    .get_or_insert_with(|| FrameHolder::new(frame.clone()))
                    .set(frame);

                if let Some(old_image_key) = self.old_frame.take() {
                    updates.push(ImageUpdate::DeleteImage(old_image_key));
                }
            },
            Some(current_frame) => {
                self.old_frame = Some(current_frame.image_key);

                let Some(new_image_key) =
                    self.paint_api.generate_image_key_blocking(self.webview_id)
                else {
                    return;
                };

                /* update current_frame */
                current_frame.image_key = new_image_key;
                current_frame.width = frame.get_width();
                current_frame.height = frame.get_height();

                let image_data = if frame.is_gl_texture() && self.glplayer_id.is_some() {
                    let texture_target = if frame.is_external_oes() {
                        ImageBufferKind::TextureExternal
                    } else {
                        ImageBufferKind::Texture2D
                    };

                    SerializableImageData::External(ExternalImageData {
                        id: ExternalImageId(self.glplayer_id.unwrap()),
                        channel_index: 0,
                        image_type: ExternalImageType::TextureHandle(texture_target),
                        normalized_uvs: false,
                    })
                } else {
                    SerializableImageData::Raw(GenericSharedMemory::from_bytes(&frame.get_data()))
                };

                self.current_frame_holder
                    .get_or_insert_with(|| FrameHolder::new(frame.clone()))
                    .set(frame);

                updates.push(ImageUpdate::AddImage(
                    new_image_key,
                    descriptor,
                    image_data,
                    false,
                ));
            },
            None => {
                let Some(image_key) = self.paint_api.generate_image_key_blocking(self.webview_id)
                else {
                    return;
                };

                self.current_frame = Some(MediaFrame {
                    image_key,
                    width: frame.get_width(),
                    height: frame.get_height(),
                });

                let image_data = if frame.is_gl_texture() && self.glplayer_id.is_some() {
                    let texture_target = if frame.is_external_oes() {
                        ImageBufferKind::TextureExternal
                    } else {
                        ImageBufferKind::Texture2D
                    };

                    SerializableImageData::External(ExternalImageData {
                        id: ExternalImageId(self.glplayer_id.unwrap()),
                        channel_index: 0,
                        image_type: ExternalImageType::TextureHandle(texture_target),
                        normalized_uvs: false,
                    })
                } else {
                    SerializableImageData::Raw(GenericSharedMemory::from_bytes(&frame.get_data()))
                };

                self.current_frame_holder = Some(FrameHolder::new(frame));

                updates.push(ImageUpdate::AddImage(
                    image_key, descriptor, image_data, false,
                ));
            },
        }
        self.paint_api
            .update_images(self.webview_id.into(), updates);
    }
}

#[cfg_attr(crown, crown::unrooted_must_root_lint::must_root)]
#[derive(JSTraceable, MallocSizeOf)]
enum SrcObject {
    MediaStream(Dom<MediaStream>),
    Blob(Dom<Blob>),
}

impl From<MediaStreamOrBlob> for SrcObject {
    #[cfg_attr(crown, expect(crown::unrooted_must_root))]
    fn from(src_object: MediaStreamOrBlob) -> SrcObject {
        match src_object {
            MediaStreamOrBlob::Blob(blob) => SrcObject::Blob(Dom::from_ref(&*blob)),
            MediaStreamOrBlob::MediaStream(stream) => {
                SrcObject::MediaStream(Dom::from_ref(&*stream))
            },
        }
    }
}

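/// Which kind of media resource the element is currently loading from, as
/// chosen by the resource selection algorithm.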
#[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf, PartialEq)]
enum LoadState {
    NotLoaded,
    LoadingFromSrcObject,
    LoadingFromSrcAttribute,
    LoadingFromSourceChild,
    WaitingForSource,
}

/// <https://html.spec.whatwg.org/multipage/#loading-the-media-resource:media-element-29>
#[cfg_attr(crown, crown::unrooted_must_root_lint::must_root)]
#[derive(JSTraceable, MallocSizeOf)]
struct SourceChildrenPointer {
    source_before_pointer: Dom<HTMLSourceElement>,
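    /// When true, the node "after the pointer" is `source_before_pointer`
    /// itself rather than its next sibling.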
    inclusive: bool,
}

impl SourceChildrenPointer {
    fn new(source_before_pointer: DomRoot<HTMLSourceElement>, inclusive: bool) -> Self {
        Self {
            source_before_pointer: source_before_pointer.as_traced(),
            inclusive,
        }
    }
}

/// Generally the presence of the loop attribute should be considered to mean playback has not
/// "ended", as "ended" and "looping" are mutually exclusive.
/// <https://html.spec.whatwg.org/multipage/#ended-playback>
#[derive(Clone, Copy, Debug, PartialEq)]
enum LoopCondition {
    Included,
    Ignored,
}

#[dom_struct]
pub(crate) struct HTMLMediaElement {
    htmlelement: HTMLElement,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
    network_state: Cell<NetworkState>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
    ready_state: Cell<ReadyState>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
    src_object: DomRefCell<Option<SrcObject>>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-currentsrc>
    current_src: DomRefCell<String>,
    /// Incremented whenever tasks associated with this element are cancelled.
    generation_id: Cell<u32>,
    /// <https://html.spec.whatwg.org/multipage/#fire-loadeddata>
    ///
    /// Reset to false every time the load algorithm is invoked.
    fired_loadeddata_event: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-error>
    error: MutNullableDom<MediaError>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-paused>
    paused: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
    default_playback_rate: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
    playback_rate: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#attr-media-autoplay>
    autoplaying: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#delaying-the-load-event-flag>
    delaying_the_load_event_flag: DomRefCell<Option<LoadBlocker>>,
    /// <https://html.spec.whatwg.org/multipage/#list-of-pending-play-promises>
    #[conditional_malloc_size_of]
    pending_play_promises: DomRefCell<Vec<Rc<Promise>>>,
    /// Play promises which are soon to be fulfilled by a queued task.
    #[expect(clippy::type_complexity)]
    #[conditional_malloc_size_of]
    in_flight_play_promises_queue: DomRefCell<VecDeque<(Box<[Rc<Promise>]>, ErrorResult)>>,
    #[ignore_malloc_size_of = "servo_media"]
    #[no_trace]
    player: DomRefCell<Option<Arc<Mutex<dyn Player>>>>,
    #[conditional_malloc_size_of]
    #[no_trace]
    video_renderer: Arc<Mutex<MediaFrameRenderer>>,
    #[ignore_malloc_size_of = "servo_media"]
    #[no_trace]
    audio_renderer: DomRefCell<Option<Arc<Mutex<dyn AudioRenderer>>>>,
    #[conditional_malloc_size_of]
    #[no_trace]
    event_handler: RefCell<Option<Arc<Mutex<HTMLMediaElementEventHandler>>>>,
    /// <https://html.spec.whatwg.org/multipage/#show-poster-flag>
    show_poster: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-duration>
    duration: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#current-playback-position>
    current_playback_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#official-playback-position>
    official_playback_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#default-playback-start-position>
    default_playback_start_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-volume>
    volume: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-seeking>
    seeking: Cell<bool>,
    /// The latest seek position (in seconds). Used to distinguish seek requests initiated by a
    /// script or by the user agent itself from those initiated by the media engine, and to abort
    /// other running instances of the `seek` algorithm.
    current_seek_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-muted>
    muted: Cell<bool>,
    /// Loading state from source, if any.
    load_state: Cell<LoadState>,
    source_children_pointer: DomRefCell<Option<SourceChildrenPointer>>,
    current_source_child: MutNullableDom<HTMLSourceElement>,
    /// URL of the media resource, if any.
    #[no_trace]
    resource_url: DomRefCell<Option<ServoUrl>>,
    /// URL of the media resource, if the resource is set through the src_object attribute and it
    /// is a blob.
    #[no_trace]
    blob_url: DomRefCell<Option<ServoUrl>>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-played>
    played: DomRefCell<TimeRangesContainer>,
    // https://html.spec.whatwg.org/multipage/#dom-media-audiotracks
    audio_tracks_list: MutNullableDom<AudioTrackList>,
    // https://html.spec.whatwg.org/multipage/#dom-media-videotracks
    video_tracks_list: MutNullableDom<VideoTrackList>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-texttracks>
    text_tracks_list: MutNullableDom<TextTrackList>,
    /// The earliest time at which the next timeupdate event may be fired.
    #[ignore_malloc_size_of = "Defined in std::time"]
    next_timeupdate_event: Cell<Instant>,
    /// Latest fetch request context.
    current_fetch_context: RefCell<Option<HTMLMediaElementFetchContext>>,
    /// Media controls id.
    /// To work around the lack of a privileged JS context, we secure access to the
    /// "privileged" document.servoGetMediaControls(id) API by keeping a whitelist of
    /// media controls identifiers.
    media_controls_id: DomRefCell<Option<String>>,
}

/// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
#[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq)]
#[repr(u8)]
pub(crate) enum NetworkState {
    Empty = HTMLMediaElementConstants::NETWORK_EMPTY as u8,
    Idle = HTMLMediaElementConstants::NETWORK_IDLE as u8,
    Loading = HTMLMediaElementConstants::NETWORK_LOADING as u8,
    NoSource = HTMLMediaElementConstants::NETWORK_NO_SOURCE as u8,
}

/// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
#[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf, PartialEq, PartialOrd)]
#[repr(u8)]
#[expect(clippy::enum_variant_names)] // Clippy warning silenced here because these names are from the specification.
pub(crate) enum ReadyState {
    HaveNothing = HTMLMediaElementConstants::HAVE_NOTHING as u8,
    HaveMetadata = HTMLMediaElementConstants::HAVE_METADATA as u8,
    HaveCurrentData = HTMLMediaElementConstants::HAVE_CURRENT_DATA as u8,
    HaveFutureData = HTMLMediaElementConstants::HAVE_FUTURE_DATA as u8,
    HaveEnoughData = HTMLMediaElementConstants::HAVE_ENOUGH_DATA as u8,
}

/// <https://html.spec.whatwg.org/multipage/#direction-of-playback>
#[derive(Clone, Copy, PartialEq)]
enum PlaybackDirection {
    Forwards,
    Backwards,
}

impl HTMLMediaElement {
    pub(crate) fn new_inherited(
        tag_name: LocalName,
        prefix: Option<Prefix>,
        document: &Document,
    ) -> Self {
        Self {
            htmlelement: HTMLElement::new_inherited(tag_name, prefix, document),
            network_state: Cell::new(NetworkState::Empty),
            ready_state: Cell::new(ReadyState::HaveNothing),
            src_object: Default::default(),
            current_src: DomRefCell::new("".to_owned()),
            generation_id: Cell::new(0),
            fired_loadeddata_event: Cell::new(false),
            error: Default::default(),
            paused: Cell::new(true),
            default_playback_rate: Cell::new(1.0),
            playback_rate: Cell::new(1.0),
            muted: Cell::new(false),
            load_state: Cell::new(LoadState::NotLoaded),
            source_children_pointer: DomRefCell::new(None),
            current_source_child: Default::default(),
            // FIXME(nox): Why is this initialised to true?
            autoplaying: Cell::new(true),
            delaying_the_load_event_flag: Default::default(),
            pending_play_promises: Default::default(),
            in_flight_play_promises_queue: Default::default(),
            player: Default::default(),
            video_renderer: Arc::new(Mutex::new(MediaFrameRenderer::new(
                document.webview_id(),
                document.window().paint_api().clone(),
                document.window().get_player_context(),
            ))),
            audio_renderer: Default::default(),
            event_handler: Default::default(),
            show_poster: Cell::new(true),
            duration: Cell::new(f64::NAN),
            current_playback_position: Cell::new(0.),
            official_playback_position: Cell::new(0.),
            default_playback_start_position: Cell::new(0.),
            volume: Cell::new(1.0),
            seeking: Cell::new(false),
            current_seek_position: Cell::new(f64::NAN),
            resource_url: DomRefCell::new(None),
            blob_url: DomRefCell::new(None),
            played: DomRefCell::new(TimeRangesContainer::default()),
            audio_tracks_list: Default::default(),
            video_tracks_list: Default::default(),
            text_tracks_list: Default::default(),
            next_timeupdate_event: Cell::new(Instant::now() + Duration::from_millis(250)),
            current_fetch_context: RefCell::new(None),
            media_controls_id: DomRefCell::new(None),
        }
    }

    pub(crate) fn network_state(&self) -> NetworkState {
        self.network_state.get()
    }

    pub(crate) fn get_ready_state(&self) -> ReadyState {
        self.ready_state.get()
    }

    fn media_type_id(&self) -> HTMLMediaElementTypeId {
        match self.upcast::<Node>().type_id() {
            NodeTypeId::Element(ElementTypeId::HTMLElement(
                HTMLElementTypeId::HTMLMediaElement(media_type_id),
            )) => media_type_id,
            _ => unreachable!(),
        }
    }

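    /// Brings the underlying servo-media player in sync with the element's state: starts playback
    /// (applying the current playback rate and volume) when the element should be playing, and
    /// pauses it otherwise.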
    fn update_media_state(&self) {
        let is_playing = self
            .player
            .borrow()
            .as_ref()
            .is_some_and(|player| !player.lock().unwrap().paused());

        if self.is_potentially_playing() && !is_playing {
            if let Some(ref player) = *self.player.borrow() {
                let player = player.lock().unwrap();

                if let Err(error) = player.set_playback_rate(self.playback_rate.get()) {
                    warn!("Could not set the playback rate: {error:?}");
                }
                if let Err(error) = player.set_volume(self.volume.get()) {
                    warn!("Could not set the volume: {error:?}");
                }
                if let Err(error) = player.play() {
                    error!("Could not play media: {error:?}");
                }
            }
        } else if is_playing {
            if let Some(ref player) = *self.player.borrow() {
                if let Err(error) = player.lock().unwrap().pause() {
                    error!("Could not pause player: {error:?}");
                }
            }
        }
    }

    /// Marks this element as delaying the load event or not.
    ///
    /// Nothing happens if the element was already delaying the load event and
    /// true is passed to this method again.
    ///
    /// <https://html.spec.whatwg.org/multipage/#delaying-the-load-event-flag>
    pub(crate) fn delay_load_event(&self, delay: bool, can_gc: CanGc) {
        let blocker = &self.delaying_the_load_event_flag;
        if delay && blocker.borrow().is_none() {
            *blocker.borrow_mut() = Some(LoadBlocker::new(&self.owner_document(), LoadType::Media));
        } else if !delay && blocker.borrow().is_some() {
            LoadBlocker::terminate(blocker, can_gc);
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#time-marches-on>
    fn time_marches_on(&self) {
        // Step 6. If the time was reached through the usual monotonic increase of the current
        // playback position during normal playback, and if the user agent has not fired a
        // timeupdate event at the element in the past 15 to 250ms and is not still running event
        // handlers for such an event, then the user agent must queue a media element task given the
        // media element to fire an event named timeupdate at the element.
        if Instant::now() > self.next_timeupdate_event.get() {
            self.queue_media_element_task_to_fire_event(atom!("timeupdate"));
            self.next_timeupdate_event
                .set(Instant::now() + Duration::from_millis(250));
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#internal-play-steps>
    fn internal_play_steps(&self, can_gc: CanGc) {
        // Step 1. If the media element's networkState attribute has the value NETWORK_EMPTY, invoke
        // the media element's resource selection algorithm.
        if self.network_state.get() == NetworkState::Empty {
            self.invoke_resource_selection_algorithm(can_gc);
        }

        // Step 2. If the playback has ended and the direction of playback is forwards, seek to the
        // earliest possible position of the media resource.
        // Generally "ended" and "looping" are exclusive. Here, the loop attribute is ignored to
        // seek back to start in case loop was set after playback ended.
        // <https://github.com/whatwg/html/issues/4487>
        if self.ended_playback(LoopCondition::Ignored) &&
            self.direction_of_playback() == PlaybackDirection::Forwards
        {
            self.seek(
                self.earliest_possible_position(),
                /* approximate_for_speed */ false,
            );
        }

        let state = self.ready_state.get();

        // Step 3. If the media element's paused attribute is true, then:
        if self.Paused() {
            // Step 3.1. Change the value of paused to false.
            self.paused.set(false);

            // Step 3.2. If the show poster flag is true, set the element's show poster flag to
            // false and run the time marches on steps.
            if self.show_poster.get() {
                self.show_poster.set(false);
                self.time_marches_on();
            }

            // Step 3.3. Queue a media element task given the media element to fire an event named
            // play at the element.
            self.queue_media_element_task_to_fire_event(atom!("play"));

            // Step 3.4. If the media element's readyState attribute has the value HAVE_NOTHING,
            // HAVE_METADATA, or HAVE_CURRENT_DATA, queue a media element task given the media
            // element to fire an event named waiting at the element. Otherwise, the media element's
            // readyState attribute has the value HAVE_FUTURE_DATA or HAVE_ENOUGH_DATA: notify about
            // playing for the element.
            match state {
                ReadyState::HaveNothing |
                ReadyState::HaveMetadata |
                ReadyState::HaveCurrentData => {
                    self.queue_media_element_task_to_fire_event(atom!("waiting"));
                },
                ReadyState::HaveFutureData | ReadyState::HaveEnoughData => {
                    self.notify_about_playing();
                },
            }
        }
        // Step 4. Otherwise, if the media element's readyState attribute has the value
        // HAVE_FUTURE_DATA or HAVE_ENOUGH_DATA, take pending play promises and queue a media
        // element task given the media element to resolve pending play promises with the
        // result.
        else if state == ReadyState::HaveFutureData || state == ReadyState::HaveEnoughData {
            self.take_pending_play_promises(Ok(()));

            let this = Trusted::new(self);
            let generation_id = self.generation_id.get();

            self.owner_global()
                .task_manager()
                .media_element_task_source()
                .queue(task!(resolve_pending_play_promises: move || {
                    let this = this.root();
                    if generation_id != this.generation_id.get() {
                        return;
                    }

                    this.fulfill_in_flight_play_promises(|| {});
                }));
        }

        // Step 5. Set the media element's can autoplay flag to false.
        self.autoplaying.set(false);

        self.update_media_state();
    }

    /// <https://html.spec.whatwg.org/multipage/#internal-pause-steps>
    fn internal_pause_steps(&self) {
        // Step 1. Set the media element's can autoplay flag to false.
        self.autoplaying.set(false);

        // Step 2. If the media element's paused attribute is false, run the following steps:
        if !self.Paused() {
            // Step 2.1. Change the value of paused to true.
            self.paused.set(true);

            // Step 2.2. Take pending play promises and let promises be the result.
            self.take_pending_play_promises(Err(Error::Abort(None)));

            // Step 2.3. Queue a media element task given the media element and the following steps:
            let this = Trusted::new(self);
            let generation_id = self.generation_id.get();

            self.owner_global()
                .task_manager()
                .media_element_task_source()
                .queue(task!(internal_pause_steps: move || {
                    let this = this.root();
                    if generation_id != this.generation_id.get() {
                        return;
                    }

                    this.fulfill_in_flight_play_promises(|| {
                        // Step 2.3.1. Fire an event named timeupdate at the element.
                        this.upcast::<EventTarget>().fire_event(atom!("timeupdate"), CanGc::note());

                        // Step 2.3.2. Fire an event named pause at the element.
                        this.upcast::<EventTarget>().fire_event(atom!("pause"), CanGc::note());

                        // Step 2.3.3. Reject pending play promises with promises and an
                        // "AbortError" DOMException.
                        // Done after running this closure in `fulfill_in_flight_play_promises`.
                    });
                }));

            // Step 2.4. Set the official playback position to the current playback position.
            self.official_playback_position
                .set(self.current_playback_position.get());
        }

        self.update_media_state();
    }

    /// <https://html.spec.whatwg.org/multipage/#allowed-to-play>
    fn is_allowed_to_play(&self) -> bool {
        true
    }

    /// <https://html.spec.whatwg.org/multipage/#notify-about-playing>
    fn notify_about_playing(&self) {
        // Step 1. Take pending play promises and let promises be the result.
        self.take_pending_play_promises(Ok(()));

        // Step 2. Queue a media element task given the element and the following steps:
        let this = Trusted::new(self);
        let generation_id = self.generation_id.get();

        self.owner_global()
            .task_manager()
            .media_element_task_source()
            .queue(task!(notify_about_playing: move || {
                let this = this.root();
                if generation_id != this.generation_id.get() {
                    return;
                }

                this.fulfill_in_flight_play_promises(|| {
                    // Step 2.1. Fire an event named playing at the element.
                    this.upcast::<EventTarget>().fire_event(atom!("playing"), CanGc::note());

                    // Step 2.2. Resolve pending play promises with promises.
                    // Done after running this closure in `fulfill_in_flight_play_promises`.
                });
            }));
    }

    /// <https://html.spec.whatwg.org/multipage/#ready-states>
    fn change_ready_state(&self, ready_state: ReadyState) {
        let old_ready_state = self.ready_state.get();
        self.ready_state.set(ready_state);

        if self.network_state.get() == NetworkState::Empty {
            return;
        }

        if old_ready_state == ready_state {
            return;
        }

        // Step 1. Apply the first applicable set of substeps from the following list:
        match (old_ready_state, ready_state) {
            // => "If the previous ready state was HAVE_NOTHING, and the new ready state is
            // HAVE_METADATA"
            (ReadyState::HaveNothing, ReadyState::HaveMetadata) => {
                // Queue a media element task given the media element to fire an event named
                // loadedmetadata at the element.
                self.queue_media_element_task_to_fire_event(atom!("loadedmetadata"));
                // No other steps are applicable in this case.
                return;
            },
            // => "If the previous ready state was HAVE_METADATA and the new ready state is
            // HAVE_CURRENT_DATA or greater"
            (ReadyState::HaveMetadata, new) if new >= ReadyState::HaveCurrentData => {
                // If this is the first time this occurs for this media element since the load()
                // algorithm was last invoked, the user agent must queue a media element task given
                // the media element to fire an event named loadeddata at the element.
                if !self.fired_loadeddata_event.get() {
                    self.fired_loadeddata_event.set(true);

                    let this = Trusted::new(self);
                    let generation_id = self.generation_id.get();

                    self.owner_global()
                        .task_manager()
                        .media_element_task_source()
                        .queue(task!(media_reached_current_data: move || {
                            let this = this.root();
                            if generation_id != this.generation_id.get() {
                                return;
                            }

                            this.upcast::<EventTarget>().fire_event(atom!("loadeddata"), CanGc::note());
                            // Once the readyState attribute reaches HAVE_CURRENT_DATA, after the
                            // loadeddata event has been fired, set the element's
                            // delaying-the-load-event flag to false.
                            this.delay_load_event(false, CanGc::note());
                        }));
                }

                // Steps for the transition from HaveMetadata to HaveCurrentData
                // or HaveFutureData also apply here, as per the next match
                // expression.
            },
            (ReadyState::HaveFutureData, new) if new <= ReadyState::HaveCurrentData => {
                // FIXME(nox): Queue a task to fire timeupdate and waiting
                // events if the conditions called for by the spec are met.

                // No other steps are applicable in this case.
                return;
            },

            _ => (),
        }

        // => "If the previous ready state was HAVE_CURRENT_DATA or less, and the new ready state is
        // HAVE_FUTURE_DATA or more"
        if old_ready_state <= ReadyState::HaveCurrentData &&
            ready_state >= ReadyState::HaveFutureData
        {
            // The user agent must queue a media element task given the media element to fire an
            // event named canplay at the element.
            self.queue_media_element_task_to_fire_event(atom!("canplay"));

            // If the element's paused attribute is false, the user agent must notify about playing
            // for the element.
            if !self.Paused() {
                self.notify_about_playing();
            }
        }

        // => "If the new ready state is HAVE_ENOUGH_DATA"
        if ready_state == ReadyState::HaveEnoughData {
            // The user agent must queue a media element task given the media element to fire an
            // event named canplaythrough at the element.
            self.queue_media_element_task_to_fire_event(atom!("canplaythrough"));

            // If the element is eligible for autoplay, then the user agent may run the following
            // substeps:
            if self.eligible_for_autoplay() {
                // Step 1. Set the paused attribute to false.
                self.paused.set(false);

                // Step 2. If the element's show poster flag is true, set it to false and run the
                // time marches on steps.
                if self.show_poster.get() {
                    self.show_poster.set(false);
                    self.time_marches_on();
                }

                // Step 3. Queue a media element task given the element to fire an event named play
                // at the element.
                self.queue_media_element_task_to_fire_event(atom!("play"));

                // Step 4. Notify about playing for the element.
                self.notify_about_playing();
            }
        }

        self.update_media_state();
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn invoke_resource_selection_algorithm(&self, can_gc: CanGc) {
        // Step 1. Set the element's networkState attribute to the NETWORK_NO_SOURCE value.
        self.network_state.set(NetworkState::NoSource);

        // Step 2. Set the element's show poster flag to true.
        self.show_poster.set(true);

        // Step 3. Set the media element's delaying-the-load-event flag to true (this delays the
        // load event).
        self.delay_load_event(true, can_gc);

        // Step 4. Await a stable state, allowing the task that invoked this algorithm to continue.
        // If the resource selection mode in the synchronous section is
        // "attribute", the URL of the resource to fetch is relative to the
        // media element's node document when the src attribute was last
        // changed, which is why we need to pass the base URL in the task
        // right here.
        let task = MediaElementMicrotask::ResourceSelection {
            elem: DomRoot::from_ref(self),
            generation_id: self.generation_id.get(),
            base_url: self.owner_document().base_url(),
        };

        // FIXME(nox): This will later call the resource_selection_algorithm_sync
        // method below. If microtasks were trait objects, we would be able to
        // put the code directly in this method, without the boilerplate
        // indirections.
        ScriptThread::await_stable_state(Microtask::MediaElement(task));
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn resource_selection_algorithm_sync(&self, base_url: ServoUrl, can_gc: CanGc) {
        // TODO Step 5. If the media element's blocked-on-parser flag is false, then populate the
        // list of pending text tracks.
        // FIXME(ferjm): Implement blocked_on_parser logic
        // https://html.spec.whatwg.org/multipage/#blocked-on-parser
        // FIXME(nox): Maybe populate the list of pending text tracks.

        enum Mode {
            Object,
            Attribute(String),
            Children(DomRoot<HTMLSourceElement>),
        }

        // Step 6.
        let mode = if self.src_object.borrow().is_some() {
            // If the media element has an assigned media provider object, then let mode be object.
            Mode::Object
        } else if let Some(attribute) = self
            .upcast::<Element>()
            .get_attribute(&ns!(), &local_name!("src"))
        {
            // Otherwise, if the media element has no assigned media provider object but has a src
            // attribute, then let mode be attribute.
            Mode::Attribute((**attribute.value()).to_owned())
        } else if let Some(source) = self
            .upcast::<Node>()
            .children()
            .find_map(DomRoot::downcast::<HTMLSourceElement>)
        {
            // Otherwise, if the media element does not have an assigned media provider object and
            // does not have a src attribute, but does have a source element child, then let mode be
            // children and let candidate be the first such source element child in tree order.
            Mode::Children(source)
        } else {
            // Otherwise, the media element has no assigned media provider object and has neither a
            // src attribute nor a source element child:
            self.load_state.set(LoadState::NotLoaded);

            // Step 6.none.1. Set the networkState to NETWORK_EMPTY.
            self.network_state.set(NetworkState::Empty);

            // Step 6.none.2. Set the element's delaying-the-load-event flag to false. This stops
            // delaying the load event.
            self.delay_load_event(false, can_gc);

            // Step 6.none.3. End the synchronous section and return.
            return;
        };

        // Step 7. Set the media element's networkState to NETWORK_LOADING.
        self.network_state.set(NetworkState::Loading);

        // Step 8. Queue a media element task given the media element to fire an event named
        // loadstart at the media element.
        self.queue_media_element_task_to_fire_event(atom!("loadstart"));

        // Step 9. Run the appropriate steps from the following list:
        match mode {
            Mode::Object => {
                // => "If mode is object"
                self.load_from_src_object();
            },
            Mode::Attribute(src) => {
                // => "If mode is attribute"
                self.load_from_src_attribute(base_url, &src);
            },
            Mode::Children(source) => {
                // => "Otherwise (mode is children)"
                self.load_from_source_child(&source);
            },
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn load_from_src_object(&self) {
        self.load_state.set(LoadState::LoadingFromSrcObject);

        // Step 9.object.1. Set the currentSrc attribute to the empty string.
        "".clone_into(&mut self.current_src.borrow_mut());

        // Step 9.object.3. Run the resource fetch algorithm with the assigned media
        // provider object. If that algorithm returns without aborting this one, then the
        // load failed.
        // Note that the resource fetch algorithm itself takes care of the cleanup
        // in case of failure.
        self.resource_fetch_algorithm(Resource::Object);
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn load_from_src_attribute(&self, base_url: ServoUrl, src: &str) {
        self.load_state.set(LoadState::LoadingFromSrcAttribute);

        // Step 9.attribute.1. If the src attribute's value is the empty string, then end
        // the synchronous section, and jump down to the failed with attribute step below.
        if src.is_empty() {
            self.queue_dedicated_media_source_failure_steps();
            return;
        }

        // Step 9.attribute.2. Let urlRecord be the result of encoding-parsing a URL given
        // the src attribute's value, relative to the media element's node document when the
        // src attribute was last changed.
        let Ok(url_record) = base_url.join(src) else {
            self.queue_dedicated_media_source_failure_steps();
            return;
        };

        // Step 9.attribute.3. If urlRecord is not failure, then set the currentSrc
        // attribute to the result of applying the URL serializer to urlRecord.
        *self.current_src.borrow_mut() = url_record.as_str().into();

        // Step 9.attribute.5. If urlRecord is not failure, then run the resource fetch
        // algorithm with urlRecord. If that algorithm returns without aborting this one,
        // then the load failed.
        // Note that the resource fetch algorithm itself takes care
        // of the cleanup in case of failure.
        self.resource_fetch_algorithm(Resource::Url(url_record));
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn load_from_source_child(&self, source: &HTMLSourceElement) {
        self.load_state.set(LoadState::LoadingFromSourceChild);

        // Step 9.children.1. Let pointer be a position defined by two adjacent nodes in the media
        // element's child list, treating the start of the list (before the first child in the list,
        // if any) and end of the list (after the last child in the list, if any) as nodes in their
        // own right. One node is the node before pointer, and the other node is the node after
        // pointer. Initially, let pointer be the position between the candidate node and the next
        // node, if there are any, or the end of the list, if it is the last node.
        *self.source_children_pointer.borrow_mut() =
            Some(SourceChildrenPointer::new(DomRoot::from_ref(source), false));

        let element = source.upcast::<Element>();

        // Step 9.children.2. Process candidate: If candidate does not have a src attribute, or if
        // its src attribute's value is the empty string, then end the synchronous section, and jump
        // down to the failed with elements step below.
        let Some(src) = element
            .get_attribute(&ns!(), &local_name!("src"))
            .filter(|attribute| !attribute.value().is_empty())
        else {
            self.load_from_source_child_failure_steps(source);
            return;
        };

        // Step 9.children.3. If candidate has a media attribute whose value does not match the
        // environment, then end the synchronous section, and jump down to the failed with elements
        // step below.
        if let Some(media) = element.get_attribute(&ns!(), &local_name!("media")) {
            if !MediaList::matches_environment(&element.owner_document(), &media.value()) {
                self.load_from_source_child_failure_steps(source);
                return;
            }
        }

        // Step 9.children.4. Let urlRecord be the result of encoding-parsing a URL given
        // candidate's src attribute's value, relative to candidate's node document when the src
        // attribute was last changed.
        let Ok(url_record) = source.owner_document().base_url().join(&src.value()) else {
            // Step 9.children.5. If urlRecord is failure, then end the synchronous section,
            // and jump down to the failed with elements step below.
            self.load_from_source_child_failure_steps(source);
            return;
        };

        // Step 9.children.6. If candidate has a type attribute whose value, when parsed as a MIME
        // type (including any codecs described by the codecs parameter, for types that define that
        // parameter), represents a type that the user agent knows it cannot render, then end the
        // synchronous section, and jump down to the failed with elements step below.
        if let Some(type_) = element.get_attribute(&ns!(), &local_name!("type")) {
            if ServoMedia::get().can_play_type(&type_.value()) == SupportsMediaType::No {
                self.load_from_source_child_failure_steps(source);
                return;
            }
        }

        // Reset the media player before loading the next source child.
        self.reset_media_player();

        self.current_source_child.set(Some(source));

        // Step 9.children.7. Set the currentSrc attribute to the result of applying the URL
        // serializer to urlRecord.
        *self.current_src.borrow_mut() = url_record.as_str().into();

        // Step 9.children.9. Run the resource fetch algorithm with urlRecord. If that
        // algorithm returns without aborting this one, then the load failed.
1259        // Note that the resource fetch algorithm itself takes care of the
1260        // cleanup in case of failure.
1261        self.resource_fetch_algorithm(Resource::Url(url_record));
1262    }
1263
1264    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1265    fn load_from_source_child_failure_steps(&self, source: &HTMLSourceElement) {
1266        // Step 9.children.10. Failed with elements: Queue a media element task given the media
1267        // element to fire an event named error at candidate.
1268        let trusted_this = Trusted::new(self);
1269        let trusted_source = Trusted::new(source);
1270        let generation_id = self.generation_id.get();
1271
1272        self.owner_global()
1273            .task_manager()
1274            .media_element_task_source()
1275            .queue(task!(queue_error_event: move || {
1276                let this = trusted_this.root();
1277                if generation_id != this.generation_id.get() {
1278                    return;
1279                }
1280
1281                let source = trusted_source.root();
1282                source.upcast::<EventTarget>().fire_event(atom!("error"), CanGc::note());
1283            }));
1284
1285        // Step 9.children.11. Await a stable state.
1286        let task = MediaElementMicrotask::SelectNextSourceChild {
1287            elem: DomRoot::from_ref(self),
1288            generation_id: self.generation_id.get(),
1289        };
1290
1291        ScriptThread::await_stable_state(Microtask::MediaElement(task));
1292    }
1293
1294    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1295    fn select_next_source_child(&self, can_gc: CanGc) {
1296        // Step 9.children.12. Forget the media element's media-resource-specific tracks.
1297        self.AudioTracks(can_gc).clear();
1298        self.VideoTracks(can_gc).clear();
1299
1300        // Step 9.children.13. Find next candidate: Let candidate be null.
1301        let mut source_candidate = None;
1302
1303        // Step 9.children.14. Search loop: If the node after pointer is the end of the list, then
1304        // jump to the waiting step below.
1305        // Step 9.children.15. If the node after pointer is a source element, let candidate be that
1306        // element.
1307        // Step 9.children.16. Advance pointer so that the node before pointer is now the node that
1308        // was after pointer, and the node after pointer is the node after the node that used to be
1309        // after pointer, if any.
1310        if let Some(ref source_children_pointer) = *self.source_children_pointer.borrow() {
1311            // Note that sharing an implementation between the opaque types returned by
1312            // `inclusively_following_siblings` and `following_siblings` is not possible due to
1313            // precise capturing.
1314            if source_children_pointer.inclusive {
1315                for next_sibling in source_children_pointer
1316                    .source_before_pointer
1317                    .upcast::<Node>()
1318                    .inclusively_following_siblings()
1319                {
1320                    if let Some(next_source) = DomRoot::downcast::<HTMLSourceElement>(next_sibling)
1321                    {
1322                        source_candidate = Some(next_source);
1323                        break;
1324                    }
1325                }
1326            } else {
1327                for next_sibling in source_children_pointer
1328                    .source_before_pointer
1329                    .upcast::<Node>()
1330                    .following_siblings()
1331                {
1332                    if let Some(next_source) = DomRoot::downcast::<HTMLSourceElement>(next_sibling)
1333                    {
1334                        source_candidate = Some(next_source);
1335                        break;
1336                    }
1337                }
1338            };
1339        }
1340
1341        // Step 9.children.17. If candidate is null, jump back to the search loop step. Otherwise,
1342        // jump back to the process candidate step.
1343        if let Some(source_candidate) = source_candidate {
1344            self.load_from_source_child(&source_candidate);
1345            return;
1346        }
1347
1348        self.load_state.set(LoadState::WaitingForSource);
1349
1350        *self.source_children_pointer.borrow_mut() = None;
1351
1352        // Step 9.children.18. Waiting: Set the element's networkState attribute to the
1353        // NETWORK_NO_SOURCE value.
1354        self.network_state.set(NetworkState::NoSource);
1355
1356        // Step 9.children.19. Set the element's show poster flag to true.
1357        self.show_poster.set(true);
1358
1359        // Step 9.children.20. Queue a media element task given the media element to set the
1360        // element's delaying-the-load-event flag to false. This stops delaying the load event.
1361        let this = Trusted::new(self);
1362        let generation_id = self.generation_id.get();
1363
1364        self.owner_global()
1365            .task_manager()
1366            .media_element_task_source()
1367            .queue(task!(queue_delay_load_event: move || {
1368                let this = this.root();
1369                if generation_id != this.generation_id.get() {
1370                    return;
1371                }
1372
1373                this.delay_load_event(false, CanGc::note());
1374            }));
1375
1376        // Step 9.children.22. Wait until the node after pointer is a node other than the end of the
1377        // list. (This step might wait forever.)
1378    }
1379
1380    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1381    fn resource_selection_algorithm_failure_steps(&self) {
1382        match self.load_state.get() {
1383            LoadState::LoadingFromSrcObject => {
1384                // Step 9.object.4. Failed with media provider: Reaching this step indicates that
1385                // the media resource failed to load. Take pending play promises and queue a media
1386                // element task given the media element to run the dedicated media source failure
1387                // steps with the result.
1388                self.queue_dedicated_media_source_failure_steps();
1389            },
1390            LoadState::LoadingFromSrcAttribute => {
1391                // Step 9.attribute.6. Failed with attribute: Reaching this step indicates that the
1392                // media resource failed to load or that urlRecord is failure. Take pending play
1393                // promises and queue a media element task given the media element to run the
1394                // dedicated media source failure steps with the result.
1395                self.queue_dedicated_media_source_failure_steps();
1396            },
1397            LoadState::LoadingFromSourceChild => {
1398                // Step 9.children.10. Failed with elements: Queue a media element task given the
1399                // media element to fire an event named error at candidate.
1400                if let Some(source) = self.current_source_child.take() {
1401                    self.load_from_source_child_failure_steps(&source);
1402                }
1403            },
1404            _ => {},
1405        }
1406    }
1407
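    /// Initiates a fetch of the current media resource (from `resource_url` or `blob_url`)
    /// starting at the given byte offset, cancelling any fetch already in progress. If a
    /// `SeekLock` is provided, it is unlocked once the new fetch has been initiated, or
    /// unlocked with a failed seek if no request URL is available.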
1408    fn fetch_request(&self, offset: Option<u64>, seek_lock: Option<SeekLock>) {
1409        if self.resource_url.borrow().is_none() && self.blob_url.borrow().is_none() {
1410            error!("Missing request url");
1411            if let Some(seek_lock) = seek_lock {
1412                seek_lock.unlock(/* successful seek */ false);
1413            }
1414            self.resource_selection_algorithm_failure_steps();
1415            return;
1416        }
1417
1418        let document = self.owner_document();
1419        let destination = match self.media_type_id() {
1420            HTMLMediaElementTypeId::HTMLAudioElement => Destination::Audio,
1421            HTMLMediaElementTypeId::HTMLVideoElement => Destination::Video,
1422        };
1423        let mut headers = HeaderMap::new();
1424        // FIXME(eijebong): Use typed headers once we have a constructor for the range header
1425        headers.insert(
1426            header::RANGE,
1427            HeaderValue::from_str(&format!("bytes={}-", offset.unwrap_or(0))).unwrap(),
1428        );
1429        let url = match self.resource_url.borrow().as_ref() {
1430            Some(url) => url.clone(),
1431            None => self.blob_url.borrow().as_ref().unwrap().clone(),
1432        };
1433
1434        let cors_setting = cors_setting_for_element(self.upcast());
1435        let global = self.global();
1436        let request = create_a_potential_cors_request(
1437            Some(document.webview_id()),
1438            url.clone(),
1439            destination,
1440            cors_setting,
1441            None,
1442            global.get_referrer(),
1443        )
1444        .with_global_scope(&global)
1445        .headers(headers)
1446        .referrer_policy(document.get_referrer_policy());
1447
1448        let mut current_fetch_context = self.current_fetch_context.borrow_mut();
1449        if let Some(ref mut current_fetch_context) = *current_fetch_context {
1450            current_fetch_context.cancel(CancelReason::Abort);
1451        }
1452
1453        *current_fetch_context = Some(HTMLMediaElementFetchContext::new(
1454            request.id,
1455            global.core_resource_thread(),
1456        ));
1457        let listener =
1458            HTMLMediaElementFetchListener::new(self, request.id, url.clone(), offset.unwrap_or(0));
1459
1460        self.owner_document().fetch_background(request, listener);
1461
1462        // Since we cancelled the previous fetch, from now on the media element
1463        // will only receive response data from the new fetch that's been
1464        // initiated. This means the player can resume operation, since all subsequent data
1465        // pushes will originate from the new seek offset.
1466        if let Some(seek_lock) = seek_lock {
1467            seek_lock.unlock(/* successful seek */ true);
1468        }
1469    }
1470
1471    /// <https://html.spec.whatwg.org/multipage/#eligible-for-autoplay>
1472    fn eligible_for_autoplay(&self) -> bool {
1473        // its can autoplay flag is true;
1474        self.autoplaying.get() &&
1475
1476        // its paused attribute is true;
1477        self.Paused() &&
1478
1479        // it has an autoplay attribute specified;
1480        self.Autoplay() &&
1481
1482        // its node document's active sandboxing flag set does not have the sandboxed automatic
1483        // features browsing context flag set; and
1484        {
1485            let document = self.owner_document();
1486
1487            !document.has_active_sandboxing_flag(
1488                SandboxingFlagSet::SANDBOXED_AUTOMATIC_FEATURES_BROWSING_CONTEXT_FLAG,
1489            )
1490        }
1491
1492        // its node document is allowed to use the "autoplay" feature.
1493        // TODO: Feature policy: https://html.spec.whatwg.org/iframe-embed-object.html#allowed-to-use
1494    }
1495
1496    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-resource>
1497    fn resource_fetch_algorithm(&self, resource: Resource) {
1498        if let Err(e) = self.create_media_player(&resource) {
1499            error!("Create media player error {:?}", e);
1500            self.resource_selection_algorithm_failure_steps();
1501            return;
1502        }
1503
1504        // Steps 1-2.
1505        // Not applicable; the `resource` variable already conveys which mode
1506        // is in use.
1507
1508        // Step 3.
1509        // FIXME(nox): Remove all media-resource-specific text tracks.
1510
1511        // Step 5. Run the appropriate steps from the following list:
1512        match resource {
1513            Resource::Url(url) => {
1514                // Step 5.remote.1. Optionally, run the following substeps. This is the expected
1515                // behavior if the user agent intends to not attempt to fetch the resource until the
1516                // user requests it explicitly (e.g. as a way to implement the preload attribute's
1517                // none keyword).
1518                if self.Preload() == "none" && !self.autoplaying.get() {
1519                    // Step 5.remote.1.1. Set the networkState to NETWORK_IDLE.
1520                    self.network_state.set(NetworkState::Idle);
1521
1522                    // Step 5.remote.1.2. Queue a media element task given the media element to fire
1523                    // an event named suspend at the element.
1524                    self.queue_media_element_task_to_fire_event(atom!("suspend"));
1525
1526                    // Step 5.remote.1.3. Queue a media element task given the media element to set
1527                    // the element's delaying-the-load-event flag to false. This stops delaying the
1528                    // load event.
1529                    let this = Trusted::new(self);
1530                    let generation_id = self.generation_id.get();
1531
1532                    self.owner_global()
1533                        .task_manager()
1534                        .media_element_task_source()
1535                        .queue(task!(queue_delay_load_event: move || {
1536                            let this = this.root();
1537                            if generation_id != this.generation_id.get() {
1538                                return;
1539                            }
1540
1541                            this.delay_load_event(false, CanGc::note());
1542                        }));
1543
1544                    // TODO Step 5.remote.1.4. Wait for the task to be run.
1545                    // FIXME(nox): Somehow we should wait for the task from the previous
1546                    // step to run before continuing.
1547
1548                    // TODO Steps 5.remote.1.5-5.remote.1.7.
1549                    // FIXME(nox): Wait for an implementation-defined event and
1550                    // then continue with the normal set of steps instead of just
1551                    // returning.
1552                    return;
1553                }
1554
1555                *self.resource_url.borrow_mut() = Some(url);
1556
1557                // Steps 5.remote.2-5.remote.8
1558                self.fetch_request(None, None);
1559            },
1560            Resource::Object => {
1561                if let Some(ref src_object) = *self.src_object.borrow() {
1562                    match src_object {
1563                        SrcObject::Blob(blob) => {
1564                            let blob_url = URL::CreateObjectURL(&self.global(), blob);
1565                            *self.blob_url.borrow_mut() =
1566                                Some(ServoUrl::parse(&blob_url.str()).expect("infallible"));
1567                            self.fetch_request(None, None);
1568                        },
1569                        SrcObject::MediaStream(stream) => {
1570                            let tracks = &*stream.get_tracks();
1571                            for (pos, track) in tracks.iter().enumerate() {
1572                                if self
1573                                    .player
1574                                    .borrow()
1575                                    .as_ref()
1576                                    .unwrap()
1577                                    .lock()
1578                                    .unwrap()
1579                                    .set_stream(&track.id(), pos == tracks.len() - 1)
1580                                    .is_err()
1581                                {
1582                                    self.resource_selection_algorithm_failure_steps();
1583                                }
1584                            }
1585                        },
1586                    }
1587                }
1588            },
1589        }
1590    }
1591
1592    /// Queues a task to run the [dedicated media source failure steps][steps].
1593    ///
1594    /// [steps]: https://html.spec.whatwg.org/multipage/#dedicated-media-source-failure-steps
1595    fn queue_dedicated_media_source_failure_steps(&self) {
1596        let this = Trusted::new(self);
1597        let generation_id = self.generation_id.get();
1598        self.take_pending_play_promises(Err(Error::NotSupported(None)));
1599        self.owner_global()
1600            .task_manager()
1601            .media_element_task_source()
1602            .queue(task!(dedicated_media_source_failure_steps: move || {
1603                let this = this.root();
1604                if generation_id != this.generation_id.get() {
1605                    return;
1606                }
1607
1608                this.fulfill_in_flight_play_promises(|| {
1609                    // Step 1. Set the error attribute to the result of creating a MediaError with
1610                    // MEDIA_ERR_SRC_NOT_SUPPORTED.
1611                    this.error.set(Some(&*MediaError::new(
1612                        &this.owner_window(),
1613                        MEDIA_ERR_SRC_NOT_SUPPORTED, CanGc::note())));
1614
1615                    // Step 2. Forget the media element's media-resource-specific tracks.
1616                    this.AudioTracks(CanGc::note()).clear();
1617                    this.VideoTracks(CanGc::note()).clear();
1618
1619                    // Step 3. Set the element's networkState attribute to the NETWORK_NO_SOURCE
1620                    // value.
1621                    this.network_state.set(NetworkState::NoSource);
1622
1623                    // Step 4. Set the element's show poster flag to true.
1624                    this.show_poster.set(true);
1625
1626                    // Step 5. Fire an event named error at the media element.
1627                    this.upcast::<EventTarget>().fire_event(atom!("error"), CanGc::note());
1628
1629                    if let Some(ref player) = *this.player.borrow() {
1630                        if let Err(error) = player.lock().unwrap().stop() {
1631                            error!("Could not stop player: {error:?}");
1632                        }
1633                    }
1634
1635                    // Step 6. Reject pending play promises with promises and a "NotSupportedError"
1636                    // DOMException.
1637                    // Done after running this closure in `fulfill_in_flight_play_promises`.
1638                });
1639
1640                // Step 7. Set the element's delaying-the-load-event flag to false. This stops
1641                // delaying the load event.
1642                this.delay_load_event(false, CanGc::note());
1643            }));
1644    }
1645
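    /// Whether the element's error attribute is currently set.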
1646    fn in_error_state(&self) -> bool {
1647        self.error.get().is_some()
1648    }
1649
1650    /// <https://html.spec.whatwg.org/multipage/#potentially-playing>
1651    fn is_potentially_playing(&self) -> bool {
1652        !self.paused.get() &&
1653            !self.ended_playback(LoopCondition::Included) &&
1654            self.error.get().is_none() &&
1655            !self.is_blocked_media_element()
1656    }
1657
1658    /// <https://html.spec.whatwg.org/multipage/#blocked-media-element>
1659    fn is_blocked_media_element(&self) -> bool {
1660        self.ready_state.get() <= ReadyState::HaveCurrentData ||
1661            self.is_paused_for_user_interaction() ||
1662            self.is_paused_for_in_band_content()
1663    }
1664
1665    /// <https://html.spec.whatwg.org/multipage/#paused-for-user-interaction>
1666    fn is_paused_for_user_interaction(&self) -> bool {
1667        // FIXME: we will likely be able to fill this placeholder once (if) we
1668        //        implement the MediaSession API.
1669        false
1670    }
1671
1672    /// <https://html.spec.whatwg.org/multipage/#paused-for-in-band-content>
1673    fn is_paused_for_in_band_content(&self) -> bool {
1674        // FIXME: we will likely be able to fill this placeholder once (if) we
1675        //        implement https://github.com/servo/servo/issues/22314
1676        false
1677    }
1678
1679    /// <https://html.spec.whatwg.org/multipage/#media-element-load-algorithm>
1680    fn media_element_load_algorithm(&self, can_gc: CanGc) {
1681        // Reset the flag that signals whether loadeddata was ever fired for
1682        // this invocation of the load algorithm.
1683        self.fired_loadeddata_event.set(false);
1684
1685        // TODO Step 1. Set this element's is currently stalled to false.
1686
1687        // Step 2. Abort any already-running instance of the resource selection algorithm for this
1688        // element.
1689        self.generation_id.set(self.generation_id.get() + 1);
1690
1691        self.load_state.set(LoadState::NotLoaded);
1692        *self.source_children_pointer.borrow_mut() = None;
1693        self.current_source_child.set(None);
1694
1695        // Step 3. Let pending tasks be a list of all tasks from the media element's media element
1696        // event task source in one of the task queues.
1697
1698        // Step 4. For each task in pending tasks that would resolve pending play promises or reject
1699        // pending play promises, immediately resolve or reject those promises in the order the
1700        // corresponding tasks were queued.
1701        while !self.in_flight_play_promises_queue.borrow().is_empty() {
1702            self.fulfill_in_flight_play_promises(|| ());
1703        }
1704
1705        // Step 5. Remove each task in pending tasks from its task queue.
1706        // Note that each of the media element's pending events and callbacks is scheduled with
1707        // an associated generation id and will eventually be aborted (see Step 2).
1708
1709        let network_state = self.network_state.get();
1710
1711        // Step 6. If the media element's networkState is set to NETWORK_LOADING or NETWORK_IDLE,
1712        // queue a media element task given the media element to fire an event named abort at the
1713        // media element.
1714        if network_state == NetworkState::Loading || network_state == NetworkState::Idle {
1715            self.queue_media_element_task_to_fire_event(atom!("abort"));
1716        }
1717
1718        // Reset the media player for any previously playing media resource (see Step 11).
1719        self.reset_media_player();
1720
1721        // Step 7. If the media element's networkState is not set to NETWORK_EMPTY, then:
1722        if network_state != NetworkState::Empty {
1723            // Step 7.1. Queue a media element task given the media element to fire an event named
1724            // emptied at the media element.
1725            self.queue_media_element_task_to_fire_event(atom!("emptied"));
1726
1727            // Step 7.2. If a fetching process is in progress for the media element, the user agent
1728            // should stop it.
1729            if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
1730                current_fetch_context.cancel(CancelReason::Abort);
1731            }
1732
1733            // TODO Step 7.3. If the media element's assigned media provider object is a MediaSource
1734            // object, then detach it.
1735
1736            // Step 7.4. Forget the media element's media-resource-specific tracks.
1737            self.AudioTracks(can_gc).clear();
1738            self.VideoTracks(can_gc).clear();
1739
1740            // Step 7.5. If readyState is not set to HAVE_NOTHING, then set it to that state.
1741            if self.ready_state.get() != ReadyState::HaveNothing {
1742                self.change_ready_state(ReadyState::HaveNothing);
1743            }
1744
1745            // Step 7.6. If the paused attribute is false, then:
1746            if !self.Paused() {
1747                // Step 7.6.1. Set the paused attribute to true.
1748                self.paused.set(true);
1749
1750                // Step 7.6.2. Take pending play promises and reject pending play promises with the
1751                // result and an "AbortError" DOMException.
1752                self.take_pending_play_promises(Err(Error::Abort(None)));
1753                self.fulfill_in_flight_play_promises(|| ());
1754            }
1755
1756            // Step 7.7. If seeking is true, set it to false.
1757            self.seeking.set(false);
1758
1759            self.current_seek_position.set(f64::NAN);
1760
1761            // Step 7.8. Set the current playback position to 0.
1762            // Set the official playback position to 0.
1763            // If this changed the official playback position, then queue a media element task given
1764            // the media element to fire an event named timeupdate at the media element.
1765            self.current_playback_position.set(0.);
1766            if self.official_playback_position.get() != 0. {
1767                self.queue_media_element_task_to_fire_event(atom!("timeupdate"));
1768            }
1769            self.official_playback_position.set(0.);
1770
1771            // TODO Step 7.9. Set the timeline offset to Not-a-Number (NaN).
1772
1773            // Step 7.10. Update the duration attribute to Not-a-Number (NaN).
1774            self.duration.set(f64::NAN);
1775        }
1776
1777        // Step 8. Set the playbackRate attribute to the value of the defaultPlaybackRate attribute.
1778        self.playback_rate.set(self.default_playback_rate.get());
1779
1780        // Step 9. Set the error attribute to null and the can autoplay flag to true.
1781        self.error.set(None);
1782        self.autoplaying.set(true);
1783
1784        // Step 10. Invoke the media element's resource selection algorithm.
1785        self.invoke_resource_selection_algorithm(can_gc);
1786
1787        // Step 11. Note: Playback of any previously playing media resource for this element stops.
1788    }
1789
1790    /// Queue a media element task given the media element to fire an event at the media element.
1791    /// <https://html.spec.whatwg.org/multipage/#queue-a-media-element-task>
1792    fn queue_media_element_task_to_fire_event(&self, name: Atom) {
1793        let this = Trusted::new(self);
1794        let generation_id = self.generation_id.get();
1795
1796        self.owner_global()
1797            .task_manager()
1798            .media_element_task_source()
1799            .queue(task!(queue_event: move || {
1800                let this = this.root();
1801                if generation_id != this.generation_id.get() {
1802                    return;
1803                }
1804
1805                this.upcast::<EventTarget>().fire_event(name, CanGc::note());
1806            }));
1807    }
1808
1809    /// Appends a promise to the list of pending play promises.
1810    fn push_pending_play_promise(&self, promise: &Rc<Promise>) {
1811        self.pending_play_promises
1812            .borrow_mut()
1813            .push(promise.clone());
1814    }
1815
1816    /// Takes the pending play promises.
1817    ///
1818    /// The result with which these promises will be fulfilled is passed here
1819    /// and this method returns nothing because we actually just move the
1820    /// current list of pending play promises to the
1821    /// `in_flight_play_promises_queue` field.
1822    ///
1823    /// Each call to this method must be followed by a call to
1824    /// `fulfill_in_flight_play_promises`, to actually fulfill the promises
1825    /// which were taken and moved to the in-flight queue.
1826    fn take_pending_play_promises(&self, result: ErrorResult) {
1827        let pending_play_promises = std::mem::take(&mut *self.pending_play_promises.borrow_mut());
1828        self.in_flight_play_promises_queue
1829            .borrow_mut()
1830            .push_back((pending_play_promises.into(), result));
1831    }
1832
1833    /// Fulfills the next in-flight play promises queue after running a closure.
1834    ///
1835    /// See the comment on `take_pending_play_promises` for why this method
1836    /// does not take a list of promises to fulfill. Callers cannot just pop
1837    /// the front list off of `in_flight_play_promises_queue` and later fulfill
1838    /// the promises because that would mean putting
1839    /// `#[cfg_attr(crown, expect(crown::unrooted_must_root))]` on even more functions, potentially
1840    /// hiding actual safety bugs.
1841    fn fulfill_in_flight_play_promises<F>(&self, f: F)
1842    where
1843        F: FnOnce(),
1844    {
1845        let (promises, result) = self
1846            .in_flight_play_promises_queue
1847            .borrow_mut()
1848            .pop_front()
1849            .expect("there should be at least one list of in flight play promises");
1850        f();
1851        for promise in &*promises {
1852            match result {
1853                Ok(ref value) => promise.resolve_native(value, CanGc::note()),
1854                Err(ref error) => promise.reject_error(error.clone(), CanGc::note()),
1855            }
1856        }
1857    }
1858
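    /// Called when a `<source>` element is inserted as a child of this media element; either
    /// invokes the resource selection algorithm or resumes a load that is waiting for a new
    /// source candidate.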
1859    pub(crate) fn handle_source_child_insertion(&self, source: &HTMLSourceElement, can_gc: CanGc) {
1860        // <https://html.spec.whatwg.org/multipage/#the-source-element:html-element-insertion-steps>
1861        // Step 2. If parent is a media element that has no src attribute and whose networkState has
1862        // the value NETWORK_EMPTY, then invoke that media element's resource selection algorithm.
1863        if self.upcast::<Element>().has_attribute(&local_name!("src")) {
1864            return;
1865        }
1866
1867        if self.network_state.get() == NetworkState::Empty {
1868            self.invoke_resource_selection_algorithm(can_gc);
1869            return;
1870        }
1871
1872        // <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1873        // Step 9.children.22. Wait until the node after pointer is a node other than the end of the
1874        // list. (This step might wait forever.)
1875        if self.load_state.get() != LoadState::WaitingForSource {
1876            return;
1877        }
1878
1879        self.load_state.set(LoadState::LoadingFromSourceChild);
1880
1881        *self.source_children_pointer.borrow_mut() =
1882            Some(SourceChildrenPointer::new(DomRoot::from_ref(source), true));
1883
1884        // Step 9.children.23. Await a stable state.
1885        let task = MediaElementMicrotask::SelectNextSourceChildAfterWait {
1886            elem: DomRoot::from_ref(self),
1887            generation_id: self.generation_id.get(),
1888        };
1889
1890        ScriptThread::await_stable_state(Microtask::MediaElement(task));
1891    }
1892
1893    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1894    fn select_next_source_child_after_wait(&self, can_gc: CanGc) {
1895        // Step 9.children.24. Set the element's delaying-the-load-event flag back to true (this
1896        // delays the load event again, in case it hasn't been fired yet).
1897        self.delay_load_event(true, can_gc);
1898
1899        // Step 9.children.25. Set the networkState back to NETWORK_LOADING.
1900        self.network_state.set(NetworkState::Loading);
1901
1902        // Step 9.children.26. Jump back to the find next candidate step above.
1903        self.select_next_source_child(can_gc);
1904    }
1905
1906    /// <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
1907    /// => "If the media data cannot be fetched at all, due to network errors..."
1908    /// => "If the media data can be fetched but is found by inspection to be in an unsupported
1909    /// format, or can otherwise not be rendered at all"
1910    fn media_data_processing_failure_steps(&self) {
1911        // Step 1. The user agent should cancel the fetching process.
1912        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
1913            current_fetch_context.cancel(CancelReason::Error);
1914        }
1915
1916        // Step 2. Abort this subalgorithm, returning to the resource selection algorithm.
1917        self.resource_selection_algorithm_failure_steps();
1918    }
1919
1920    /// <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
1921    /// => "If the connection is interrupted after some media data has been received..."
1922    /// => "If the media data is corrupted"
1923    fn media_data_processing_fatal_steps(&self, error: u16, can_gc: CanGc) {
1924        *self.source_children_pointer.borrow_mut() = None;
1925        self.current_source_child.set(None);
1926
1927        // Step 1. The user agent should cancel the fetching process.
1928        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
1929            current_fetch_context.cancel(CancelReason::Error);
1930        }
1931
1932        // Step 2. Set the error attribute to the result of creating a MediaError with
1933        // MEDIA_ERR_NETWORK/MEDIA_ERR_DECODE.
1934        self.error
1935            .set(Some(&*MediaError::new(&self.owner_window(), error, can_gc)));
1936
1937        // Step 3. Set the element's networkState attribute to the NETWORK_IDLE value.
1938        self.network_state.set(NetworkState::Idle);
1939
1940        // Step 4. Set the element's delaying-the-load-event flag to false. This stops delaying
1941        // the load event.
1942        self.delay_load_event(false, can_gc);
1943
1944        // Step 5. Fire an event named error at the media element.
1945        self.upcast::<EventTarget>()
1946            .fire_event(atom!("error"), can_gc);
1947
1948        // Step 6. Abort the overall resource selection algorithm.
1949    }
1950
1951    /// <https://html.spec.whatwg.org/multipage/#dom-media-seek>
1952    fn seek(&self, time: f64, _approximate_for_speed: bool) {
1953        // Step 1. Set the media element's show poster flag to false.
1954        self.show_poster.set(false);
1955
1956        // Step 2. If the media element's readyState is HAVE_NOTHING, return.
1957        if self.ready_state.get() == ReadyState::HaveNothing {
1958            return;
1959        }
1960
1961        // Step 3. If the element's seeking IDL attribute is true, then another instance of this
1962        // algorithm is already running. Abort that other instance of the algorithm without waiting
1963        // for the step that it is running to complete.
1964        self.current_seek_position.set(f64::NAN);
1965
1966        // Step 4. Set the seeking IDL attribute to true.
1967        self.seeking.set(true);
1968
1969        // Step 5. If the seek was in response to a DOM method call or setting of an IDL attribute,
1970        // then continue the script. The remainder of these steps must be run in parallel.
1971
1972        // Step 6. If the new playback position is later than the end of the media resource, then
1973        // let it be the end of the media resource instead.
1974        let time = f64::min(time, self.Duration());
1975
1976        // Step 7. If the new playback position is less than the earliest possible position, let it
1977        // be that position instead.
1978        let time = f64::max(time, self.earliest_possible_position());
1979
1980        // Step 8. If the (possibly now changed) new playback position is not in one of the ranges
1981        // given in the seekable attribute, then let it be the position in one of the ranges given
1982        // in the seekable attribute that is the nearest to the new playback position. If there are
1983        // no ranges given in the seekable attribute, then set the seeking IDL attribute to false
1984        // and return.
1985        let seekable = self.seekable();
1986
1987        if seekable.is_empty() {
1988            self.seeking.set(false);
1989            return;
1990        }
1991
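        // Find a seekable range containing the clamped time; otherwise keep track of the range
        // boundary nearest to it.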
1992        let mut nearest_seekable_position = 0.0;
1993        let mut in_seekable_range = false;
1994        let mut nearest_seekable_distance = f64::MAX;
1995        for i in 0..seekable.len() {
1996            let start = seekable.start(i).unwrap().abs();
1997            let end = seekable.end(i).unwrap().abs();
1998            if time >= start && time <= end {
1999                nearest_seekable_position = time;
2000                in_seekable_range = true;
2001                break;
2002            } else if time < start {
2003                let distance = start - time;
2004                if distance < nearest_seekable_distance {
2005                    nearest_seekable_distance = distance;
2006                    nearest_seekable_position = start;
2007                }
2008            } else {
2009                let distance = time - end;
2010                if distance < nearest_seekable_distance {
2011                    nearest_seekable_distance = distance;
2012                    nearest_seekable_position = end;
2013                }
2014            }
2015        }
2016        let time = if in_seekable_range {
2017            time
2018        } else {
2019            nearest_seekable_position
2020        };
2021
2022        // Step 9. If the approximate-for-speed flag is set, adjust the new playback position to a
2023        // value that will allow for playback to resume promptly. If new playback position before
2024        // this step is before current playback position, then the adjusted new playback position
2025        // must also be before the current playback position. Similarly, if the new playback
2026        // position before this step is after current playback position, then the adjusted new
2027        // playback position must also be after the current playback position.
2028        // TODO: Note that servo-media with gstreamer does not support inaccurate seeking for now.
2029
2030        // Step 10. Queue a media element task given the media element to fire an event named
2031        // seeking at the element.
2032        self.queue_media_element_task_to_fire_event(atom!("seeking"));
2033
2034        // Step 11. Set the current playback position to the new playback position.
2035        self.current_playback_position.set(time);
2036
2037        if let Some(ref player) = *self.player.borrow() {
2038            if let Err(error) = player.lock().unwrap().seek(time) {
2039                error!("Could not seek player: {error:?}");
2040            }
2041        }
2042
2043        self.current_seek_position.set(time);
2044
2045        // Step 12. Wait until the user agent has established whether or not the media data for the
2046        // new playback position is available, and, if it is, until it has decoded enough data to
2047        // play back that position.
2048        // The remaining steps are handled when the media engine signals a ready state change or
2049        // when it completes the seek and signals a position change.
2050    }
2051
2052    /// <https://html.spec.whatwg.org/multipage/#dom-media-seek>
2053    fn seek_end(&self) {
2054        // Any time the user agent provides a stable state, the official playback position must be
2055        // set to the current playback position.
2056        self.official_playback_position
2057            .set(self.current_playback_position.get());
2058
2059        // Step 14. Set the seeking IDL attribute to false.
2060        self.seeking.set(false);
2061
2062        self.current_seek_position.set(f64::NAN);
2063
2064        // Step 15. Run the time marches on steps.
2065        self.time_marches_on();
2066
2067        // Step 16. Queue a media element task given the media element to fire an event named
2068        // timeupdate at the element.
2069        self.queue_media_element_task_to_fire_event(atom!("timeupdate"));
2070
2071        // Step 17. Queue a media element task given the media element to fire an event named seeked
2072        // at the element.
2073        self.queue_media_element_task_to_fire_event(atom!("seeked"));
2074    }
2075
2076    /// <https://html.spec.whatwg.org/multipage/#poster-frame>
2077    pub(crate) fn set_poster_frame(&self, image: Option<Arc<RasterImage>>) {
2078        if pref!(media_testing_enabled) && image.is_some() {
2079            self.queue_media_element_task_to_fire_event(atom!("postershown"));
2080        }
2081
2082        self.video_renderer.lock().unwrap().set_poster_frame(image);
2083
2084        self.upcast::<Node>().dirty(NodeDamage::Other);
2085    }
2086
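    /// Returns the identifier of the backing servo-media player, if one exists.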
2087    fn player_id(&self) -> Option<usize> {
2088        self.player
2089            .borrow()
2090            .as_ref()
2091            .map(|player| player.lock().unwrap().get_id())
2092    }
2093
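    /// Creates the servo-media player for the given resource, routes its events onto the media
    /// element task source, and sets up the video renderer. Returns `Err(())` if a media
    /// provider object was expected but is not set.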
2094    fn create_media_player(&self, resource: &Resource) -> Result<(), ()> {
2095        let stream_type = match *resource {
2096            Resource::Object => {
2097                if let Some(ref src_object) = *self.src_object.borrow() {
2098                    match src_object {
2099                        SrcObject::MediaStream(_) => StreamType::Stream,
2100                        _ => StreamType::Seekable,
2101                    }
2102                } else {
2103                    return Err(());
2104                }
2105            },
2106            _ => StreamType::Seekable,
2107        };
2108
2109        let window = self.owner_window();
2110        let (action_sender, action_receiver) = ipc::channel::<PlayerEvent>().unwrap();
2111        let video_renderer: Option<Arc<Mutex<dyn VideoFrameRenderer>>> = match self.media_type_id()
2112        {
2113            HTMLMediaElementTypeId::HTMLAudioElement => None,
2114            HTMLMediaElementTypeId::HTMLVideoElement => Some(self.video_renderer.clone()),
2115        };
2116
2117        let audio_renderer = self.audio_renderer.borrow().as_ref().cloned();
2118
2119        let pipeline_id = window.pipeline_id();
2120        let client_context_id =
2121            ClientContextId::build(pipeline_id.namespace_id.0, pipeline_id.index.0.get());
2122        let player = ServoMedia::get().create_player(
2123            &client_context_id,
2124            stream_type,
2125            action_sender,
2126            video_renderer,
2127            audio_renderer,
2128            Box::new(window.get_player_context()),
2129        );
2130        let player_id = {
2131            let player_guard = player.lock().unwrap();
2132
2133            if let Err(error) = player_guard.set_mute(self.muted.get()) {
2134                warn!("Could not set mute state: {error:?}");
2135            }
2136
2137            player_guard.get_id()
2138        };
2139
2140        *self.player.borrow_mut() = Some(player);
2141
2142        let event_handler = Arc::new(Mutex::new(HTMLMediaElementEventHandler::new(self)));
2143        let weak_event_handler = Arc::downgrade(&event_handler);
2144        *self.event_handler.borrow_mut() = Some(event_handler);
2145
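        // Route player events from the servo-media thread onto the media element task source,
        // going through a weak reference so that a reset element does not keep the handler alive.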
2146        let task_source = self
2147            .owner_global()
2148            .task_manager()
2149            .media_element_task_source()
2150            .to_sendable();
2151        ROUTER.add_typed_route(
2152            action_receiver,
2153            Box::new(move |message| {
2154                let event = message.unwrap();
2155                let weak_event_handler = weak_event_handler.clone();
2156
2157                task_source.queue(task!(handle_player_event: move || {
2158                    trace!("HTMLMediaElement event: {event:?}");
2159
2160                    let Some(event_handler) = weak_event_handler.upgrade() else {
2161                        return;
2162                    };
2163
2164                    event_handler.lock().unwrap().handle_player_event(player_id, event, CanGc::note());
2165                }));
2166            }),
2167        );
2168
2169        let task_source = self
2170            .owner_global()
2171            .task_manager()
2172            .media_element_task_source()
2173            .to_sendable();
2174        let weak_video_renderer = Arc::downgrade(&self.video_renderer);
2175
2176        self.video_renderer
2177            .lock()
2178            .unwrap()
2179            .setup(player_id, task_source, weak_video_renderer);
2180
2181        Ok(())
2182    }
2183
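    /// Stops and drops the current player, if any, and resets the associated video renderer,
    /// event handler and, for video elements, the natural dimensions.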
2184    fn reset_media_player(&self) {
2185        if self.player.borrow().is_none() {
2186            return;
2187        }
2188
2189        if let Some(ref player) = *self.player.borrow() {
2190            if let Err(error) = player.lock().unwrap().stop() {
2191                error!("Could not stop player: {error:?}");
2192            }
2193        }
2194
2195        *self.player.borrow_mut() = None;
2196        self.video_renderer.lock().unwrap().reset();
2197        *self.event_handler.borrow_mut() = None;
2198
2199        if let Some(video_element) = self.downcast::<HTMLVideoElement>() {
2200            video_element.set_natural_dimensions(None, None);
2201        }
2202    }
2203
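    /// Enables or disables the audio track at the given index in the backing player.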
2204    pub(crate) fn set_audio_track(&self, idx: usize, enabled: bool) {
2205        if let Some(ref player) = *self.player.borrow() {
2206            if let Err(error) = player.lock().unwrap().set_audio_track(idx as i32, enabled) {
2207                warn!("Could not set audio track {error:?}");
2208            }
2209        }
2210    }
2211
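    /// Selects or deselects the video track at the given index in the backing player.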
2212    pub(crate) fn set_video_track(&self, idx: usize, enabled: bool) {
2213        if let Some(ref player) = *self.player.borrow() {
2214            if let Err(error) = player.lock().unwrap().set_video_track(idx as i32, enabled) {
2215                warn!("Could not set video track: {error:?}");
2216            }
2217        }
2218    }
2219
2220    /// <https://html.spec.whatwg.org/multipage/#direction-of-playback>
2221    fn direction_of_playback(&self) -> PlaybackDirection {
2222        // If the element's playbackRate is positive or zero, then the direction of playback is
2223        // forwards. Otherwise, it is backwards.
2224        if self.playback_rate.get() >= 0. {
2225            PlaybackDirection::Forwards
2226        } else {
2227            PlaybackDirection::Backwards
2228        }
2229    }
2230
2231    /// <https://html.spec.whatwg.org/multipage/#ended-playback>
2232    fn ended_playback(&self, loop_condition: LoopCondition) -> bool {
2233        // A media element is said to have ended playback when:
2234
2235        // The element's readyState attribute is HAVE_METADATA or greater, and
2236        if self.ready_state.get() < ReadyState::HaveMetadata {
2237            return false;
2238        }
2239
2240        let playback_position = self.current_playback_position.get();
2241
2242        match self.direction_of_playback() {
2243            // Either: The current playback position is the end of the media resource, and the
2244            // direction of playback is forwards, and the media element does not have a loop
2245            // attribute specified.
2246            PlaybackDirection::Forwards => {
2247                playback_position >= self.Duration() &&
2248                    (loop_condition == LoopCondition::Ignored || !self.Loop())
2249            },
2250            // Or: The current playback position is the earliest possible position, and the
2251            // direction of playback is backwards.
2252            PlaybackDirection::Backwards => playback_position <= self.earliest_possible_position(),
2253        }
2254    }
2255
2256    /// <https://html.spec.whatwg.org/multipage/#reaches-the-end>
2257    fn end_of_playback_in_forwards_direction(&self) {
2258        // When the current playback position reaches the end of the media resource when the
2259        // direction of playback is forwards, then the user agent must follow these steps:
2260
2261        // Step 1. If the media element has a loop attribute specified, then seek to the earliest
2262        // possible position of the media resource and return.
2263        if self.Loop() {
2264            self.seek(
2265                self.earliest_possible_position(),
2266                /* approximate_for_speed */ false,
2267            );
2268            return;
2269        }
2270
2271        // Step 2. As defined above, the ended IDL attribute starts returning true once the event
2272        // loop returns to step 1.
2273
2274        // Step 3. Queue a media element task given the media element and the following steps:
2275        let this = Trusted::new(self);
2276        let generation_id = self.generation_id.get();
2277
2278        self.owner_global()
2279            .task_manager()
2280            .media_element_task_source()
2281            .queue(task!(reaches_the_end_steps: move || {
2282                let this = this.root();
2283                if generation_id != this.generation_id.get() {
2284                    return;
2285                }
2286
2287                // Step 3.1. Fire an event named timeupdate at the media element.
2288                this.upcast::<EventTarget>().fire_event(atom!("timeupdate"), CanGc::note());
2289
2290                // Step 3.2. If the media element has ended playback, the direction of playback is
2291                // forwards, and paused is false, then:
2292                if this.ended_playback(LoopCondition::Included) &&
2293                    this.direction_of_playback() == PlaybackDirection::Forwards &&
2294                    !this.Paused() {
2295                    // Step 3.2.1. Set the paused attribute to true.
2296                    this.paused.set(true);
2297
2298                    // Step 3.2.2. Fire an event named pause at the media element.
2299                    this.upcast::<EventTarget>().fire_event(atom!("pause"), CanGc::note());
2300
2301                    // Step 3.2.3. Take pending play promises and reject pending play promises with
2302                    // the result and an "AbortError" DOMException.
2303                    this.take_pending_play_promises(Err(Error::Abort(None)));
2304                    this.fulfill_in_flight_play_promises(|| ());
2305                }
2306
2307                // Step 3.3. Fire an event named ended at the media element.
2308                this.upcast::<EventTarget>().fire_event(atom!("ended"), CanGc::note());
2309            }));
2310
2311        // <https://html.spec.whatwg.org/multipage/#dom-media-have_current_data>
2312        self.change_ready_state(ReadyState::HaveCurrentData);
2313    }
2314
2315    /// <https://html.spec.whatwg.org/multipage/#reaches-the-end>
2316    fn end_of_playback_in_backwards_direction(&self) {
2317        // When the current playback position reaches the earliest possible position of the media
2318        // resource when the direction of playback is backwards, then the user agent must only queue
2319        // a media element task given the media element to fire an event named timeupdate at the
2320        // element.
2321        if self.current_playback_position.get() <= self.earliest_possible_position() {
2322            self.queue_media_element_task_to_fire_event(atom!("timeupdate"));
2323        }
2324    }
2325
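    /// Handles the end of playback as signalled by the player, dispatching to the forwards or
    /// backwards variant of the "reaches the end" steps unless a seek is in progress.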
2326    fn playback_end(&self) {
2327        // Abort the end-of-playback steps below if a seek is in progress.
2328        if self.seeking.get() {
2329            return;
2330        }
2331
2332        match self.direction_of_playback() {
2333            PlaybackDirection::Forwards => self.end_of_playback_in_forwards_direction(),
2334            PlaybackDirection::Backwards => self.end_of_playback_in_backwards_direction(),
2335        }
2336    }
2337
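    /// Handles a player error by running the appropriate media data processing failure steps,
    /// unless an error has already been flagged.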
2338    fn playback_error(&self, error: &str, can_gc: CanGc) {
2339        error!("Player error: {:?}", error);
2340
2341        // If we have already flagged an error condition while processing
2342        // the network response, we should silently skip any observable
2343        // errors originating while decoding the erroneous response.
2344        if self.in_error_state() {
2345            return;
2346        }
2347
2348        // <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
2349        if self.ready_state.get() == ReadyState::HaveNothing {
2350            // => "If the media data can be fetched but is found by inspection to be in an
2351            // unsupported format, or can otherwise not be rendered at all"
2352            self.media_data_processing_failure_steps();
2353        } else {
2354            // => "If the media data is corrupted"
2355            self.media_data_processing_fatal_steps(MEDIA_ERR_DECODE, can_gc);
2356        }
2357    }
2358
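    /// Handles the initial metadata signal from the player, populating the element's audio and
    /// video track lists as described in the media data processing steps list.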
2359    fn playback_metadata_updated(
2360        &self,
2361        metadata: &servo_media::player::metadata::Metadata,
2362        can_gc: CanGc,
2363    ) {
2364        // The following steps should be run once on the initial `metadata` signal from the media
2365        // engine.
2366        if self.ready_state.get() != ReadyState::HaveNothing {
2367            return;
2368        }
2369
2370        // https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list
2371        // => "If the media resource is found to have an audio track"
2372        for (i, _track) in metadata.audio_tracks.iter().enumerate() {
2373            let audio_track_list = self.AudioTracks(can_gc);
2374
2375            // Step 1. Create an AudioTrack object to represent the audio track.
2376            let kind = match i {
2377                0 => DOMString::from("main"),
2378                _ => DOMString::new(),
2379            };
2380
2381            let audio_track = AudioTrack::new(
2382                self.global().as_window(),
2383                DOMString::new(),
2384                kind,
2385                DOMString::new(),
2386                DOMString::new(),
2387                Some(&*audio_track_list),
2388                can_gc,
2389            );
2390
2391            // Step 2. Update the media element's audioTracks attribute's AudioTrackList object
2392            // with the new AudioTrack object.
2393            audio_track_list.add(&audio_track);
2394
2395            // Step 3. Let enable be unknown.
2396            // Step 4. If either the media resource or the URL of the current media resource
2397            // indicate a particular set of audio tracks to enable, or if the user agent has
2398            // information that would facilitate the selection of specific audio tracks to
2399            // improve the user's experience, then: if this audio track is one of the ones to
2400            // enable, then set enable to true, otherwise, set enable to false.
2401            if let Some(servo_url) = self.resource_url.borrow().as_ref() {
2402                let fragment = MediaFragmentParser::from(servo_url);
2403                if let Some(id) = fragment.id() {
2404                    if audio_track.id() == id {
2405                        audio_track_list.set_enabled(audio_track_list.len() - 1, true);
2406                    }
2407                }
2408
2409                if fragment.tracks().contains(&audio_track.kind().into()) {
2410                    audio_track_list.set_enabled(audio_track_list.len() - 1, true);
2411                }
2412            }
2413
2414            // Step 5. If enable is still unknown, then, if the media element does not yet have an
2415            // enabled audio track, then set enable to true, otherwise, set enable to false.
2416            // Step 6. If enable is true, then enable this audio track, otherwise, do not enable
2417            // this audio track.
2418            if audio_track_list.enabled_index().is_none() {
2419                audio_track_list.set_enabled(audio_track_list.len() - 1, true);
2420            }
2421
2422            // Step 7. Fire an event named addtrack at this AudioTrackList object, using TrackEvent,
2423            // with the track attribute initialized to the new AudioTrack object.
2424            let event = TrackEvent::new(
2425                self.global().as_window(),
2426                atom!("addtrack"),
2427                false,
2428                false,
2429                &Some(VideoTrackOrAudioTrackOrTextTrack::AudioTrack(audio_track)),
2430                can_gc,
2431            );
2432
2433            event
2434                .upcast::<Event>()
2435                .fire(audio_track_list.upcast::<EventTarget>(), can_gc);
2436        }
2437
2438        // => "If the media resource is found to have a video track"
2439        for (i, _track) in metadata.video_tracks.iter().enumerate() {
2440            let video_track_list = self.VideoTracks(can_gc);
2441
2442            // Step 1. Create a VideoTrack object to represent the video track.
2443            let kind = match i {
2444                0 => DOMString::from("main"),
2445                _ => DOMString::new(),
2446            };
2447
2448            let video_track = VideoTrack::new(
2449                self.global().as_window(),
2450                DOMString::new(),
2451                kind,
2452                DOMString::new(),
2453                DOMString::new(),
2454                Some(&*video_track_list),
2455                can_gc,
2456            );
2457
2458            // Step 2. Update the media element's videoTracks attribute's VideoTrackList object
2459            // with the new VideoTrack object.
2460            video_track_list.add(&video_track);
2461
2462            // Step 3. Let enable be unknown.
2463            // Step 4. If either the media resource or the URL of the current media resource
2464            // indicate a particular set of video tracks to enable, or if the user agent has
2465            // information that would facilitate the selection of specific video tracks to
2466            // improve the user's experience, then: if this video track is the first such video
2467            // track, then set enable to true, otherwise, set enable to false.
2468            if let Some(track) = video_track_list.item(0) {
2469                if let Some(servo_url) = self.resource_url.borrow().as_ref() {
2470                    let fragment = MediaFragmentParser::from(servo_url);
2471                    if let Some(id) = fragment.id() {
2472                        if track.id() == id {
2473                            video_track_list.set_selected(0, true);
2474                        }
2475                    } else if fragment.tracks().contains(&track.kind().into()) {
2476                        video_track_list.set_selected(0, true);
2477                    }
2478                }
2479            }
2480
2481            // Step 5. If enable is still unknown, then, if the media element does not yet have a
2482            // selected video track, then set enable to true, otherwise, set enable to false.
2483            // Step 6. If enable is true, then select this track and unselect any previously
2484            // selected video tracks, otherwise, do not select this video track. If other tracks are
2485            // unselected, then a change event will be fired.
2486            if video_track_list.selected_index().is_none() {
2487                video_track_list.set_selected(video_track_list.len() - 1, true);
2488            }
2489
2490            // Step 7. Fire an event named addtrack at this VideoTrackList object, using TrackEvent,
2491            // with the track attribute initialized to the new VideoTrack object.
2492            let event = TrackEvent::new(
2493                self.global().as_window(),
2494                atom!("addtrack"),
2495                false,
2496                false,
2497                &Some(VideoTrackOrAudioTrackOrTextTrack::VideoTrack(video_track)),
2498                can_gc,
2499            );
2500
2501            event
2502                .upcast::<Event>()
2503                .fire(video_track_list.upcast::<EventTarget>(), can_gc);
2504        }
2505
2506        // => "Once enough of the media data has been fetched to determine the duration..."
2507
2508        // TODO Step 1. Establish the media timeline for the purposes of the current playback
2509        // position and the earliest possible position, based on the media data.
2510
2511        // TODO Step 2. Update the timeline offset to the date and time that corresponds to the zero
2512        // time in the media timeline established in the previous step, if any. If no explicit time
2513        // and date is given by the media resource, the timeline offset must be set to Not-a-Number
2514        // (NaN).
2515
2516        // Step 3. Set the current playback position and the official playback position to the
2517        // earliest possible position.
2518        let earliest_possible_position = self.earliest_possible_position();
2519        self.current_playback_position
2520            .set(earliest_possible_position);
2521        self.official_playback_position
2522            .set(earliest_possible_position);
2523
2524        // Step 4. Update the duration attribute with the time of the last frame of the resource, if
2525        // known, on the media timeline established above. If it is not known (e.g. a stream that is
2526        // in principle infinite), update the duration attribute to the value positive Infinity.
2527        // Note: The user agent will queue a media element task given the media element to fire an
2528        // event named durationchange at the element at this point.
2529        self.duration.set(
2530            metadata
2531                .duration
2532                .map_or(f64::INFINITY, |duration| duration.as_secs_f64()),
2533        );
2534        self.queue_media_element_task_to_fire_event(atom!("durationchange"));
2535
2536        // Step 5. For video elements, set the videoWidth and videoHeight attributes, and queue a
2537        // media element task given the media element to fire an event named resize at the media
2538        // element.
2539        if let Some(video_element) = self.downcast::<HTMLVideoElement>() {
2540            video_element.set_natural_dimensions(Some(metadata.width), Some(metadata.height));
2541            self.queue_media_element_task_to_fire_event(atom!("resize"));
2542        }
2543
2544        // Step 6. Set the readyState attribute to HAVE_METADATA.
2545        self.change_ready_state(ReadyState::HaveMetadata);
2546
2547        // Step 7. Let jumped be false.
2548        let mut jumped = false;
2549
2550        // Step 8. If the media element's default playback start position is greater than zero, then
2551        // seek to that time, and let jumped be true.
2552        if self.default_playback_start_position.get() > 0. {
2553            self.seek(
2554                self.default_playback_start_position.get(),
2555                /* approximate_for_speed */ false,
2556            );
2557            jumped = true;
2558        }
2559
2560        // Step 9. Set the media element's default playback start position to zero.
2561        self.default_playback_start_position.set(0.);
2562
2563        // Step 10. Let the initial playback position be 0.
2564        // Step 11. If either the media resource or the URL of the current media resource indicate a
2565        // particular start time, then set the initial playback position to that time and, if jumped
2566        // is still false, seek to that time.
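        // (For example, a resource URL with the Media Fragments temporal dimension `#t=10`
        // indicates an initial playback position of 10 seconds.)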
2567        if let Some(servo_url) = self.resource_url.borrow().as_ref() {
2568            let fragment = MediaFragmentParser::from(servo_url);
2569            if let Some(initial_playback_position) = fragment.start() {
2570                if initial_playback_position > 0. &&
2571                    initial_playback_position < self.duration.get() &&
2572                    !jumped
2573                {
2574                    self.seek(
2575                        initial_playback_position,
2576                        /* approximate_for_speed */ false,
2577                    )
2578                }
2579            }
2580        }
2581
2582        // Step 12. If there is no enabled audio track, then enable an audio track. This will cause
2583        // a change event to be fired.
2584        // Step 13. If there is no selected video track, then select a video track. This will cause
2585        // a change event to be fired.
2586        // Note that these steps are already handled by the earlier media track processing.
2587
2588        let global = self.global();
2589        let window = global.as_window();
2590
2591        // Update the media session metadata title with the obtained metadata.
2592        window.Navigator().MediaSession().update_title(
2593            metadata
2594                .title
2595                .clone()
2596                .unwrap_or(window.get_url().into_string()),
2597        );
2598    }
2599
2600    fn playback_duration_changed(&self, duration: Option<Duration>) {
2601        let duration = duration.map_or(f64::INFINITY, |duration| duration.as_secs_f64());
2602
2603        if self.duration.get() == duration {
2604            return;
2605        }
2606
2607        self.duration.set(duration);
2608
2609        // When the length of the media resource changes to a known value (e.g. from being unknown
2610        // to known, or from a previously established length to a new length), the user agent must
2611        // queue a media element task given the media element to fire an event named durationchange
2612        // at the media element.
2613        // <https://html.spec.whatwg.org/multipage/#offsets-into-the-media-resource:media-resource-22>
2614        self.queue_media_element_task_to_fire_event(atom!("durationchange"));
2615
2616        // If the duration is changed such that the current playback position ends up being greater
2617        // than the time of the end of the media resource, then the user agent must also seek to the
2618        // time of the end of the media resource.
2619        if self.current_playback_position.get() > duration {
2620            self.seek(duration, /* approximate_for_speed */ false);
2621        }
2622    }
2623
2624    fn playback_video_frame_updated(&self) {
2625        let Some(video_element) = self.downcast::<HTMLVideoElement>() else {
2626            return;
2627        };
2628
2629        // Whenever the natural width or natural height of the video changes (including, for
2630        // example, because the selected video track was changed), if the element's readyState
2631        // attribute is not HAVE_NOTHING, the user agent must queue a media element task given
2632        // the media element to fire an event named resize at the media element.
2633        // <https://html.spec.whatwg.org/multipage/#concept-video-intrinsic-width>
2634
2635        // The event for the prerolled frame from the media engine could reach us before the media
2636        // element reaches the HAVE_METADATA ready state, in which case the subsequent steps are skipped.
2637        if self.ready_state.get() == ReadyState::HaveNothing {
2638            return;
2639        }
2640
2641        if let Some(frame) = self.video_renderer.lock().unwrap().current_frame {
2642            if video_element
2643                .set_natural_dimensions(Some(frame.width as u32), Some(frame.height as u32))
2644            {
2645                self.queue_media_element_task_to_fire_event(atom!("resize"));
2646            } else {
2647                // If the natural dimensions have not been changed, the node should be marked as
2648                // damaged to force a repaint with the new frame contents.
2649                self.upcast::<Node>().dirty(NodeDamage::Other);
2650            }
2651        }
2652    }
2653
2654    fn playback_need_data(&self) {
2655        // The media engine signals that the source needs more data. If we already have a valid
2656        // fetch request, we do nothing. Otherwise, if we have no request and the previous request
2657        // was cancelled because we got an EnoughData event, we restart fetching from where we left off.
2658        if let Some(ref current_fetch_context) = *self.current_fetch_context.borrow() {
2659            if let Some(reason) = current_fetch_context.cancel_reason() {
2660                // XXX(ferjm) Ideally we should just create a fetch request from
2661                // where we left off. But keeping track of the exact next byte that the
2662                // media backend expects is not the easiest task, so I'm simply
2663                // seeking to the current playback position for now, which will create
2664                // a new fetch request for the last rendered frame.
2665                if *reason == CancelReason::Backoff {
2666                    self.seek(
2667                        self.current_playback_position.get(),
2668                        /* approximate_for_speed */ false,
2669                    );
2670                }
2671                return;
2672            }
2673        }
2674
2675        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
2676            if let Err(e) = {
2677                let mut data_source = current_fetch_context.data_source().borrow_mut();
2678                data_source.set_locked(false);
2679                data_source.process_into_player_from_queue(self.player.borrow().as_ref().unwrap())
2680            } {
2681                // If we are pushing too much data and we know that we can
2682                // restart the download later from where we left off, we cancel
2683                // the current request. Otherwise, we continue the request
2684                // assuming that we may drop some frames.
2685                if e == PlayerError::EnoughData {
2686                    current_fetch_context.cancel(CancelReason::Backoff);
2687                }
2688            }
2689        }
2690    }
2691
2692    fn playback_enough_data(&self) {
2693        // The media engine signals that the source has enough data and asks us to stop pushing bytes
2694        // to avoid excessive buffer queueing, so we cancel the ongoing fetch request if we are able
2695        // to restart it from where we left off. Otherwise, we continue the current fetch request,
2696        // assuming that some frames will be dropped.
2697        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
2698            if current_fetch_context.is_seekable() {
2699                current_fetch_context.cancel(CancelReason::Backoff);
2700            }
2701        }
2702    }
2703
2704    fn playback_position_changed(&self, position: f64) {
2705        // Abort the following steps of the current time update if seeking is in progress.
2706        if self.seeking.get() {
2707            return;
2708        }
2709
2710        let _ = self
2711            .played
2712            .borrow_mut()
2713            .add(self.current_playback_position.get(), position);
2714        self.current_playback_position.set(position);
2715        self.official_playback_position.set(position);
2716        self.time_marches_on();
2717
2718        let media_position_state =
2719            MediaPositionState::new(self.duration.get(), self.playback_rate.get(), position);
2720        debug!(
2721            "Sending media session event set position state {:?}",
2722            media_position_state
2723        );
2724        self.send_media_session_event(MediaSessionEvent::SetPositionState(media_position_state));
2725    }
2726
2727    fn playback_seek_done(&self, position: f64) {
2728        // If the seek was initiated by script or by the user agent itself, continue with the
2729        // following steps; otherwise, abort.
2730        if !self.seeking.get() || position != self.current_seek_position.get() {
2731            return;
2732        }
2733
2734        // <https://html.spec.whatwg.org/multipage/#dom-media-seek>
2735        // Step 13. Await a stable state.
2736        let task = MediaElementMicrotask::Seeked {
2737            elem: DomRoot::from_ref(self),
2738            generation_id: self.generation_id.get(),
2739        };
2740
2741        ScriptThread::await_stable_state(Microtask::MediaElement(task));
2742    }
2743
2744    fn playback_state_changed(&self, state: &PlaybackState) {
2745        let mut media_session_playback_state = MediaSessionPlaybackState::None_;
2746        match *state {
2747            PlaybackState::Paused => {
2748                media_session_playback_state = MediaSessionPlaybackState::Paused;
2749                if self.ready_state.get() == ReadyState::HaveMetadata {
2750                    self.change_ready_state(ReadyState::HaveEnoughData);
2751                }
2752            },
2753            PlaybackState::Playing => {
2754                media_session_playback_state = MediaSessionPlaybackState::Playing;
2755                if self.ready_state.get() == ReadyState::HaveMetadata {
2756                    self.change_ready_state(ReadyState::HaveEnoughData);
2757                }
2758            },
2759            PlaybackState::Buffering => {
2760                // Do not send the media session playback state change event
2761                // in this case as a None_ state is expected to clean up the
2762                // session.
2763                return;
2764            },
2765            _ => {},
2766        };
2767        debug!(
2768            "Sending media session event playback state changed to {:?}",
2769            media_session_playback_state
2770        );
2771        self.send_media_session_event(MediaSessionEvent::PlaybackStateChange(
2772            media_session_playback_state,
2773        ));
2774    }
2775
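    /// Returns the time ranges that the underlying player reports as seekable.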
2776    fn seekable(&self) -> TimeRangesContainer {
2777        let mut seekable = TimeRangesContainer::default();
2778        if let Some(ref player) = *self.player.borrow() {
2779            let ranges = player.lock().unwrap().seekable();
2780            for range in ranges {
2781                let _ = seekable.add(range.start, range.end);
2782            }
2783        }
2784        seekable
2785    }
2786
2787    /// <https://html.spec.whatwg.org/multipage/#earliest-possible-position>
2788    fn earliest_possible_position(&self) -> f64 {
2789        self.seekable()
2790            .start(0)
2791            .unwrap_or_else(|_| self.current_playback_position.get())
2792    }
2793
2794    fn render_controls(&self, can_gc: CanGc) {
2795        if self.upcast::<Element>().is_shadow_host() {
2796            // Bail out if we are already showing the controls.
2797            return;
2798        }
2799
2800        // FIXME(stevennovaryo): Recheck styling of media element to avoid
2801        //                       reparsing styles.
2802        let shadow_root = self
2803            .upcast::<Element>()
2804            .attach_ua_shadow_root(false, can_gc);
2805        let document = self.owner_document();
2806        let script = Element::create(
2807            QualName::new(None, ns!(html), local_name!("script")),
2808            None,
2809            &document,
2810            ElementCreator::ScriptCreated,
2811            CustomElementCreationMode::Asynchronous,
2812            None,
2813            can_gc,
2814        );
2815        // This is our hacky way to temporarily work around the lack of a privileged
2816        // JS context.
2817        // The media controls UI accesses the document.servoGetMediaControls(id) API
2818        // to get a reference to the media controls ShadowRoot.
2819        // `id` needs to match the internally generated UUID assigned to a media element.
2820        let id = Uuid::new_v4().to_string();
2821        document.register_media_controls(&id, &shadow_root);
2822        let media_controls_script = MEDIA_CONTROL_JS.replace("@@@id@@@", &id);
2823        *self.media_controls_id.borrow_mut() = Some(id);
2824        script
2825            .upcast::<Node>()
2826            .set_text_content_for_element(Some(DOMString::from(media_controls_script)), can_gc);
2827        if let Err(e) = shadow_root
2828            .upcast::<Node>()
2829            .AppendChild(script.upcast::<Node>(), can_gc)
2830        {
2831            warn!("Could not render media controls {:?}", e);
2832            return;
2833        }
2834
2835        let style = Element::create(
2836            QualName::new(None, ns!(html), local_name!("style")),
2837            None,
2838            &document,
2839            ElementCreator::ScriptCreated,
2840            CustomElementCreationMode::Asynchronous,
2841            None,
2842            can_gc,
2843        );
2844
2845        style
2846            .upcast::<Node>()
2847            .set_text_content_for_element(Some(DOMString::from(MEDIA_CONTROL_CSS)), can_gc);
2848
2849        if let Err(e) = shadow_root
2850            .upcast::<Node>()
2851            .AppendChild(style.upcast::<Node>(), can_gc)
2852        {
2853            warn!("Could not render media controls {:?}", e);
2854        }
2855
2856        self.upcast::<Node>().dirty(NodeDamage::Other);
2857    }
2858
2859    fn remove_controls(&self) {
2860        if let Some(id) = self.media_controls_id.borrow_mut().take() {
2861            self.owner_document().unregister_media_controls(&id);
2862        }
2863    }
2864
2865    /// Gets the video frame at the current playback position.
2866    pub(crate) fn get_current_frame(&self) -> Option<VideoFrame> {
2867        self.video_renderer
2868            .lock()
2869            .unwrap()
2870            .current_frame_holder
2871            .as_ref()
2872            .map(|holder| holder.get_frame())
2873    }
2874
2875    /// Gets the current frame of the video element to present, if any.
2876    /// <https://html.spec.whatwg.org/multipage/#the-video-element:the-video-element-7>
2877    pub(crate) fn get_current_frame_to_present(&self) -> Option<MediaFrame> {
2878        let (current_frame, poster_frame) = {
2879            let renderer = self.video_renderer.lock().unwrap();
2880            (renderer.current_frame, renderer.poster_frame)
2881        };
2882
2883        // If the show poster flag is set (or there is no current video frame to
2884        // present) AND there is a poster frame, present that.
2885        if (self.show_poster.get() || current_frame.is_none()) && poster_frame.is_some() {
2886            return poster_frame;
2887        }
2888
2889        current_frame
2890    }
2891
2892    /// By default the audio is rendered through the audio sink automatically
2893    /// selected by the servo-media Player instance. However, in some cases, like
2894    /// the WebAudio MediaElementAudioSourceNode, we need to set a custom audio
2895    /// renderer.
2896    pub(crate) fn set_audio_renderer(
2897        &self,
2898        audio_renderer: Option<Arc<Mutex<dyn AudioRenderer>>>,
2899        can_gc: CanGc,
2900    ) {
2901        *self.audio_renderer.borrow_mut() = audio_renderer;
2902
2903        let had_player = {
2904            if let Some(ref player) = *self.player.borrow() {
2905                if let Err(error) = player.lock().unwrap().stop() {
2906                    error!("Could not stop player: {error:?}");
2907                }
2908                true
2909            } else {
2910                false
2911            }
2912        };
2913
2914        if had_player {
2915            self.media_element_load_algorithm(can_gc);
2916        }
2917    }
2918
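    /// Registers this element with the media session and forwards the given event to it.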
2919    fn send_media_session_event(&self, event: MediaSessionEvent) {
2920        let global = self.global();
2921        let media_session = global.as_window().Navigator().MediaSession();
2922
2923        media_session.register_media_instance(self);
2924
2925        media_session.send_event(event);
2926    }
2927
2928    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-resource>
2929    pub(crate) fn origin_is_clean(&self) -> bool {
2930        // Step 5.local (media provider object).
2931        if self.src_object.borrow().is_some() {
2932            // The resource described by the current media resource, if any,
2933            // contains the media data. It is CORS-same-origin.
2934            return true;
2935        }
2936
2937        // Step 5.remote (URL record).
2938        if self.resource_url.borrow().is_some() {
2939            // Update the media data with the contents
2940            // of response's unsafe response obtained in this fashion.
2941            // Response can be CORS-same-origin or CORS-cross-origin.
2942            if let Some(ref current_fetch_context) = *self.current_fetch_context.borrow() {
2943                return current_fetch_context.origin_is_clean();
2944            }
2945        }
2946
2947        true
2948    }
2949}
2950
2951impl HTMLMediaElementMethods<crate::DomTypeHolder> for HTMLMediaElement {
2952    /// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
2953    fn NetworkState(&self) -> u16 {
2954        self.network_state.get() as u16
2955    }
2956
2957    /// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
2958    fn ReadyState(&self) -> u16 {
2959        self.ready_state.get() as u16
2960    }
2961
2962    // https://html.spec.whatwg.org/multipage/#dom-media-autoplay
2963    make_bool_getter!(Autoplay, "autoplay");
2964    // https://html.spec.whatwg.org/multipage/#dom-media-autoplay
2965    make_bool_setter!(SetAutoplay, "autoplay");
2966
2967    // https://html.spec.whatwg.org/multipage/#attr-media-loop
2968    make_bool_getter!(Loop, "loop");
2969    // https://html.spec.whatwg.org/multipage/#attr-media-loop
2970    make_bool_setter!(SetLoop, "loop");
2971
2972    // https://html.spec.whatwg.org/multipage/#dom-media-defaultmuted
2973    make_bool_getter!(DefaultMuted, "muted");
2974    // https://html.spec.whatwg.org/multipage/#dom-media-defaultmuted
2975    make_bool_setter!(SetDefaultMuted, "muted");
2976
2977    // https://html.spec.whatwg.org/multipage/#dom-media-controls
2978    make_bool_getter!(Controls, "controls");
2979    // https://html.spec.whatwg.org/multipage/#dom-media-controls
2980    make_bool_setter!(SetControls, "controls");
2981
2982    // https://html.spec.whatwg.org/multipage/#dom-media-src
2983    make_url_getter!(Src, "src");
2984
2985    // https://html.spec.whatwg.org/multipage/#dom-media-src
2986    make_url_setter!(SetSrc, "src");
2987
2988    /// <https://html.spec.whatwg.org/multipage/#dom-media-crossOrigin>
2989    fn GetCrossOrigin(&self) -> Option<DOMString> {
2990        reflect_cross_origin_attribute(self.upcast::<Element>())
2991    }
2992    /// <https://html.spec.whatwg.org/multipage/#dom-media-crossOrigin>
2993    fn SetCrossOrigin(&self, value: Option<DOMString>, can_gc: CanGc) {
2994        set_cross_origin_attribute(self.upcast::<Element>(), value, can_gc);
2995    }
2996
2997    /// <https://html.spec.whatwg.org/multipage/#dom-media-muted>
2998    fn Muted(&self) -> bool {
2999        self.muted.get()
3000    }
3001
3002    /// <https://html.spec.whatwg.org/multipage/#dom-media-muted>
3003    fn SetMuted(&self, value: bool) {
3004        if self.muted.get() == value {
3005            return;
3006        }
3007
3008        self.muted.set(value);
3009
3010        if let Some(ref player) = *self.player.borrow() {
3011            if let Err(error) = player.lock().unwrap().set_mute(value) {
3012                warn!("Could not set mute state: {error:?}");
3013            }
3014        }
3015
3016        // The user agent must queue a media element task given the media element to fire an event
3017        // named volumechange at the media element.
3018        self.queue_media_element_task_to_fire_event(atom!("volumechange"));
3019
3020        // Then, if the media element is not allowed to play, the user agent must run the internal
3021        // pause steps for the media element.
3022        if !self.is_allowed_to_play() {
3023            self.internal_pause_steps();
3024        }
3025    }
3026
3027    /// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
3028    fn GetSrcObject(&self) -> Option<MediaStreamOrBlob> {
3029        (*self.src_object.borrow())
3030            .as_ref()
3031            .map(|src_object| match src_object {
3032                SrcObject::Blob(blob) => MediaStreamOrBlob::Blob(DomRoot::from_ref(blob)),
3033                SrcObject::MediaStream(stream) => {
3034                    MediaStreamOrBlob::MediaStream(DomRoot::from_ref(stream))
3035                },
3036            })
3037    }
3038
3039    /// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
3040    fn SetSrcObject(&self, value: Option<MediaStreamOrBlob>, can_gc: CanGc) {
3041        *self.src_object.borrow_mut() = value.map(|value| value.into());
3042        self.media_element_load_algorithm(can_gc);
3043    }
3044
3045    // https://html.spec.whatwg.org/multipage/#attr-media-preload
3046    // Missing/Invalid values are user-agent defined.
3047    make_enumerated_getter!(
3048        Preload,
3049        "preload",
3050        "none" | "metadata" | "auto",
3051        missing => "auto",
3052        invalid => "auto"
3053    );
3054
3055    // https://html.spec.whatwg.org/multipage/#attr-media-preload
3056    make_setter!(SetPreload, "preload");
3057
3058    /// <https://html.spec.whatwg.org/multipage/#dom-media-currentsrc>
3059    fn CurrentSrc(&self) -> USVString {
3060        USVString(self.current_src.borrow().clone())
3061    }
3062
3063    /// <https://html.spec.whatwg.org/multipage/#dom-media-load>
3064    fn Load(&self, can_gc: CanGc) {
3065        self.media_element_load_algorithm(can_gc);
3066    }
3067
3068    /// <https://html.spec.whatwg.org/multipage/#dom-navigator-canplaytype>
3069    fn CanPlayType(&self, type_: DOMString) -> CanPlayTypeResult {
3070        match ServoMedia::get().can_play_type(&type_.str()) {
3071            SupportsMediaType::No => CanPlayTypeResult::_empty,
3072            SupportsMediaType::Maybe => CanPlayTypeResult::Maybe,
3073            SupportsMediaType::Probably => CanPlayTypeResult::Probably,
3074        }
3075    }
3076
3077    /// <https://html.spec.whatwg.org/multipage/#dom-media-error>
3078    fn GetError(&self) -> Option<DomRoot<MediaError>> {
3079        self.error.get()
3080    }
3081
3082    /// <https://html.spec.whatwg.org/multipage/#dom-media-play>
3083    fn Play(&self, comp: InRealm, can_gc: CanGc) -> Rc<Promise> {
3084        let promise = Promise::new_in_current_realm(comp, can_gc);
3085
3086        // TODO Step 1. If the media element is not allowed to play, then return a promise rejected
3087        // with a "NotAllowedError" DOMException.
3088
3089        // Step 2. If the media element's error attribute is not null and its code is
3090        // MEDIA_ERR_SRC_NOT_SUPPORTED, then return a promise rejected with a "NotSupportedError"
3091        // DOMException.
3092        if self
3093            .error
3094            .get()
3095            .is_some_and(|e| e.Code() == MEDIA_ERR_SRC_NOT_SUPPORTED)
3096        {
3097            promise.reject_error(Error::NotSupported(None), can_gc);
3098            return promise;
3099        }
3100
3101        // Step 3. Let promise be a new promise and append promise to the list of pending play
3102        // promises.
3103        self.push_pending_play_promise(&promise);
3104
3105        // Step 4. Run the internal play steps for the media element.
3106        self.internal_play_steps(can_gc);
3107
3108        // Step 5. Return promise.
3109        promise
3110    }
3111
3112    /// <https://html.spec.whatwg.org/multipage/#dom-media-pause>
3113    fn Pause(&self, can_gc: CanGc) {
3114        // Step 1. If the media element's networkState attribute has the value NETWORK_EMPTY, invoke
3115        // the media element's resource selection algorithm.
3116        if self.network_state.get() == NetworkState::Empty {
3117            self.invoke_resource_selection_algorithm(can_gc);
3118        }
3119
3120        // Step 2. Run the internal pause steps for the media element.
3121        self.internal_pause_steps();
3122    }
3123
3124    /// <https://html.spec.whatwg.org/multipage/#dom-media-paused>
3125    fn Paused(&self) -> bool {
3126        self.paused.get()
3127    }
3128
3129    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
3130    fn GetDefaultPlaybackRate(&self) -> Fallible<Finite<f64>> {
3131        Ok(Finite::wrap(self.default_playback_rate.get()))
3132    }
3133
3134    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
3135    fn SetDefaultPlaybackRate(&self, value: Finite<f64>) -> ErrorResult {
3136        // If the given value is not supported by the user agent, then throw a "NotSupportedError"
3137        // DOMException.
3138        let min_allowed = -64.0;
3139        let max_allowed = 64.0;
3140        if *value < min_allowed || *value > max_allowed {
3141            return Err(Error::NotSupported(None));
3142        }
3143
3144        if self.default_playback_rate.get() == *value {
3145            return Ok(());
3146        }
3147
3148        self.default_playback_rate.set(*value);
3149
3150        // The user agent must queue a media element task given the media element to fire an event
3151        // named ratechange at the media element.
3152        self.queue_media_element_task_to_fire_event(atom!("ratechange"));
3153
3154        Ok(())
3155    }
3156
3157    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
3158    fn GetPlaybackRate(&self) -> Fallible<Finite<f64>> {
3159        Ok(Finite::wrap(self.playback_rate.get()))
3160    }
3161
3162    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
3163    fn SetPlaybackRate(&self, value: Finite<f64>) -> ErrorResult {
3164        // The attribute is mutable: on setting, the user agent must follow these steps:
3165
3166        // Step 1. If the given value is not supported by the user agent, then throw a
3167        // "NotSupportedError" DOMException.
3168        let min_allowed = -64.0;
3169        let max_allowed = 64.0;
3170        if *value < min_allowed || *value > max_allowed {
3171            return Err(Error::NotSupported(None));
3172        }
3173
3174        if self.playback_rate.get() == *value {
3175            return Ok(());
3176        }
3177
3178        // Step 2. Set playbackRate to the new value, and if the element is potentially playing,
3179        // change the playback speed.
3180        self.playback_rate.set(*value);
3181
3182        if self.is_potentially_playing() {
3183            if let Some(ref player) = *self.player.borrow() {
3184                if let Err(error) = player.lock().unwrap().set_playback_rate(*value) {
3185                    warn!("Could not set the playback rate: {error:?}");
3186                }
3187            }
3188        }
3189
3190        // The user agent must queue a media element task given the media element to fire an event
3191        // named ratechange at the media element.
3192        self.queue_media_element_task_to_fire_event(atom!("ratechange"));
3193
3194        Ok(())
3195    }
3196
3197    /// <https://html.spec.whatwg.org/multipage/#dom-media-duration>
3198    fn Duration(&self) -> f64 {
3199        self.duration.get()
3200    }
3201
3202    /// <https://html.spec.whatwg.org/multipage/#dom-media-currenttime>
3203    fn CurrentTime(&self) -> Finite<f64> {
3204        Finite::wrap(if self.default_playback_start_position.get() != 0. {
3205            self.default_playback_start_position.get()
3206        } else if self.seeking.get() {
3207            // Note that other browsers do something similar (by checking the `seeking` value or
3208            // clamping the `official` position to the earliest possible position, the duration,
3209            // and the seekable ranges).
3210            // <https://github.com/whatwg/html/issues/11773>
3211            self.current_seek_position.get()
3212        } else {
3213            self.official_playback_position.get()
3214        })
3215    }
3216
3217    /// <https://html.spec.whatwg.org/multipage/#dom-media-currenttime>
3218    fn SetCurrentTime(&self, time: Finite<f64>) {
3219        if self.ready_state.get() == ReadyState::HaveNothing {
3220            self.default_playback_start_position.set(*time);
3221        } else {
3222            self.official_playback_position.set(*time);
3223            self.seek(*time, /* approximate_for_speed */ false);
3224        }
3225    }
3226
3227    /// <https://html.spec.whatwg.org/multipage/#dom-media-seeking>
3228    fn Seeking(&self) -> bool {
3229        self.seeking.get()
3230    }
3231
3232    /// <https://html.spec.whatwg.org/multipage/#dom-media-ended>
3233    fn Ended(&self) -> bool {
3234        self.ended_playback(LoopCondition::Included) &&
3235            self.direction_of_playback() == PlaybackDirection::Forwards
3236    }
3237
3238    /// <https://html.spec.whatwg.org/multipage/#dom-media-fastseek>
3239    fn FastSeek(&self, time: Finite<f64>) {
3240        self.seek(*time, /* approximate_for_speed */ true);
3241    }
3242
3243    /// <https://html.spec.whatwg.org/multipage/#dom-media-played>
3244    fn Played(&self, can_gc: CanGc) -> DomRoot<TimeRanges> {
3245        TimeRanges::new(
3246            self.global().as_window(),
3247            self.played.borrow().clone(),
3248            can_gc,
3249        )
3250    }
3251
3252    /// <https://html.spec.whatwg.org/multipage/#dom-media-seekable>
3253    fn Seekable(&self, can_gc: CanGc) -> DomRoot<TimeRanges> {
3254        TimeRanges::new(self.global().as_window(), self.seekable(), can_gc)
3255    }
3256
3257    /// <https://html.spec.whatwg.org/multipage/#dom-media-buffered>
3258    fn Buffered(&self, can_gc: CanGc) -> DomRoot<TimeRanges> {
3259        let mut buffered = TimeRangesContainer::default();
3260        if let Some(ref player) = *self.player.borrow() {
3261            let ranges = player.lock().unwrap().buffered();
3262            for range in ranges {
3263                let _ = buffered.add(range.start, range.end);
3264            }
3265        }
3266        TimeRanges::new(self.global().as_window(), buffered, can_gc)
3267    }
3268
3269    /// <https://html.spec.whatwg.org/multipage/#dom-media-audiotracks>
3270    fn AudioTracks(&self, can_gc: CanGc) -> DomRoot<AudioTrackList> {
3271        let window = self.owner_window();
3272        self.audio_tracks_list
3273            .or_init(|| AudioTrackList::new(&window, &[], Some(self), can_gc))
3274    }
3275
3276    /// <https://html.spec.whatwg.org/multipage/#dom-media-videotracks>
3277    fn VideoTracks(&self, can_gc: CanGc) -> DomRoot<VideoTrackList> {
3278        let window = self.owner_window();
3279        self.video_tracks_list
3280            .or_init(|| VideoTrackList::new(&window, &[], Some(self), can_gc))
3281    }
3282
3283    /// <https://html.spec.whatwg.org/multipage/#dom-media-texttracks>
3284    fn TextTracks(&self, can_gc: CanGc) -> DomRoot<TextTrackList> {
3285        let window = self.owner_window();
3286        self.text_tracks_list
3287            .or_init(|| TextTrackList::new(&window, &[], can_gc))
3288    }
3289
3290    /// <https://html.spec.whatwg.org/multipage/#dom-media-addtexttrack>
3291    fn AddTextTrack(
3292        &self,
3293        kind: TextTrackKind,
3294        label: DOMString,
3295        language: DOMString,
3296        can_gc: CanGc,
3297    ) -> DomRoot<TextTrack> {
3298        let window = self.owner_window();
3299        // Step 1 & 2
3300        // FIXME(#22314, dlrobertson) set the ready state to Loaded
3301        let track = TextTrack::new(
3302            &window,
3303            "".into(),
3304            kind,
3305            label,
3306            language,
3307            TextTrackMode::Hidden,
3308            None,
3309            can_gc,
3310        );
3311        // Step 3 & 4
3312        self.TextTracks(can_gc).add(&track);
3313        // Step 5
3314        DomRoot::from_ref(&track)
3315    }
3316
3317    /// <https://html.spec.whatwg.org/multipage/#dom-media-volume>
3318    fn GetVolume(&self) -> Fallible<Finite<f64>> {
3319        Ok(Finite::wrap(self.volume.get()))
3320    }
3321
3322    /// <https://html.spec.whatwg.org/multipage/#dom-media-volume>
3323    fn SetVolume(&self, value: Finite<f64>) -> ErrorResult {
3324        // If the new value is outside the range 0.0 to 1.0 inclusive, then, on setting, an
3325        // "IndexSizeError" DOMException must be thrown instead.
3326        let minimum_volume = 0.0;
3327        let maximum_volume = 1.0;
3328        if *value < minimum_volume || *value > maximum_volume {
3329            return Err(Error::IndexSize(None));
3330        }
3331
3332        if self.volume.get() == *value {
3333            return Ok(());
3334        }
3335
3336        self.volume.set(*value);
3337
3338        if let Some(ref player) = *self.player.borrow() {
3339            if let Err(error) = player.lock().unwrap().set_volume(*value) {
3340                warn!("Could not set the volume: {error:?}");
3341            }
3342        }
3343
3344        // The user agent must queue a media element task given the media element to fire an event
3345        // named volumechange at the media element.
3346        self.queue_media_element_task_to_fire_event(atom!("volumechange"));
3347
3348        // Then, if the media element is not allowed to play, the user agent must run the internal
3349        // pause steps for the media element.
3350        if !self.is_allowed_to_play() {
3351            self.internal_pause_steps();
3352        }
3353
3354        Ok(())
3355    }
3356}
3357
3358impl VirtualMethods for HTMLMediaElement {
3359    fn super_type(&self) -> Option<&dyn VirtualMethods> {
3360        Some(self.upcast::<HTMLElement>() as &dyn VirtualMethods)
3361    }
3362
3363    fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation, can_gc: CanGc) {
3364        self.super_type()
3365            .unwrap()
3366            .attribute_mutated(attr, mutation, can_gc);
3367
3368        match *attr.local_name() {
3369            local_name!("muted") => {
3370                // <https://html.spec.whatwg.org/multipage/#dom-media-muted>
3371                // When a media element is created, if the element has a muted content attribute
3372                // specified, then the muted IDL attribute should be set to true.
3373                if let AttributeMutation::Set(
3374                    _,
3375                    AttributeMutationReason::ByCloning | AttributeMutationReason::ByParser,
3376                ) = mutation
3377                {
3378                    self.SetMuted(true);
3379                }
3380            },
3381            local_name!("src") => {
3382                // <https://html.spec.whatwg.org/multipage/#location-of-the-media-resource>
3383                // If a src attribute of a media element is set or changed, the user agent must invoke
3384                // the media element's media element load algorithm (Removing the src attribute does
3385                // not do this, even if there are source elements present).
3386                if !mutation.is_removal() {
3387                    self.media_element_load_algorithm(can_gc);
3388                }
3389            },
3390            local_name!("controls") => {
3391                if mutation.new_value(attr).is_some() {
3392                    self.render_controls(can_gc);
3393                } else {
3394                    self.remove_controls();
3395                }
3396            },
3397            _ => (),
3398        };
3399    }
3400
3401    /// <https://html.spec.whatwg.org/multipage/#playing-the-media-resource:remove-an-element-from-a-document>
3402    fn unbind_from_tree(&self, context: &UnbindContext, can_gc: CanGc) {
3403        self.super_type().unwrap().unbind_from_tree(context, can_gc);
3404
3405        self.remove_controls();
3406
3407        if context.tree_connected {
3408            let task = MediaElementMicrotask::PauseIfNotInDocument {
3409                elem: DomRoot::from_ref(self),
3410            };
3411            ScriptThread::await_stable_state(Microtask::MediaElement(task));
3412        }
3413    }
3414
3415    fn adopting_steps(&self, old_doc: &Document, can_gc: CanGc) {
3416        self.super_type().unwrap().adopting_steps(old_doc, can_gc);
3417
3418        // Note that the media controls id should be adopted between documents so that the
3419        // "privileged" document.servoGetMediaControls(id) API keeps access to the whitelist
3420        // of media controls identifiers.
3421        if let Some(id) = &*self.media_controls_id.borrow() {
3422            let Some(shadow_root) = self.upcast::<Element>().shadow_root() else {
3423                error!("Missing media controls shadow root");
3424                return;
3425            };
3426
3427            old_doc.unregister_media_controls(id);
3428            self.owner_document()
3429                .register_media_controls(id, &shadow_root);
3430        }
3431    }
3432}
3433
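/// Microtasks used by the media element to run parts of its algorithms once a stable state
/// has been reached. Variants carrying a `generation_id` are ignored if the element's
/// generation has changed since the task was queued.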
3434#[derive(JSTraceable, MallocSizeOf)]
3435pub(crate) enum MediaElementMicrotask {
3436    ResourceSelection {
3437        elem: DomRoot<HTMLMediaElement>,
3438        generation_id: u32,
3439        #[no_trace]
3440        base_url: ServoUrl,
3441    },
3442    PauseIfNotInDocument {
3443        elem: DomRoot<HTMLMediaElement>,
3444    },
3445    Seeked {
3446        elem: DomRoot<HTMLMediaElement>,
3447        generation_id: u32,
3448    },
3449    SelectNextSourceChild {
3450        elem: DomRoot<HTMLMediaElement>,
3451        generation_id: u32,
3452    },
3453    SelectNextSourceChildAfterWait {
3454        elem: DomRoot<HTMLMediaElement>,
3455        generation_id: u32,
3456    },
3457}
3458
3459impl MicrotaskRunnable for MediaElementMicrotask {
3460    fn handler(&self, can_gc: CanGc) {
3461        match self {
3462            &MediaElementMicrotask::ResourceSelection {
3463                ref elem,
3464                generation_id,
3465                ref base_url,
3466            } => {
3467                if generation_id == elem.generation_id.get() {
3468                    elem.resource_selection_algorithm_sync(base_url.clone(), can_gc);
3469                }
3470            },
3471            MediaElementMicrotask::PauseIfNotInDocument { elem } => {
3472                if !elem.upcast::<Node>().is_connected() {
3473                    elem.internal_pause_steps();
3474                }
3475            },
3476            &MediaElementMicrotask::Seeked {
3477                ref elem,
3478                generation_id,
3479            } => {
3480                if generation_id == elem.generation_id.get() {
3481                    elem.seek_end();
3482                }
3483            },
3484            &MediaElementMicrotask::SelectNextSourceChild {
3485                ref elem,
3486                generation_id,
3487            } => {
3488                if generation_id == elem.generation_id.get() {
3489                    elem.select_next_source_child(can_gc);
3490                }
3491            },
3492            &MediaElementMicrotask::SelectNextSourceChildAfterWait {
3493                ref elem,
3494                generation_id,
3495            } => {
3496                if generation_id == elem.generation_id.get() {
3497                    elem.select_next_source_child_after_wait(can_gc);
3498                }
3499            },
3500        }
3501    }
3502
3503    fn enter_realm(&self) -> JSAutoRealm {
3504        match self {
3505            &MediaElementMicrotask::ResourceSelection { ref elem, .. } |
3506            &MediaElementMicrotask::PauseIfNotInDocument { ref elem } |
3507            &MediaElementMicrotask::Seeked { ref elem, .. } |
3508            &MediaElementMicrotask::SelectNextSourceChild { ref elem, .. } |
3509            &MediaElementMicrotask::SelectNextSourceChildAfterWait { ref elem, .. } => {
3510                enter_realm(&**elem)
3511            },
3512        }
3513    }
3514}
3515
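/// The media resource selected by the resource selection algorithm: either a media provider
/// object (assigned via `srcObject`) or a URL record.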
3516enum Resource {
3517    Object,
3518    Url(ServoUrl),
3519}
3520
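/// A unit of data queued for the media backend: either a chunk of the fetched response body
/// or an end-of-stream marker.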
3521#[derive(Debug, MallocSizeOf, PartialEq)]
3522enum DataBuffer {
3523    Payload(Vec<u8>),
3524    EndOfStream,
3525}
3526
3527#[derive(MallocSizeOf)]
3528struct BufferedDataSource {
3529    /// During initial setup and seeking (including clearing the buffer queue
3530    /// and resetting the end-of-stream state), the data source should be locked and
3531    /// any request for processing should be ignored until the media player informs us
3532    /// via the NeedData event that it is ready to accept incoming data.
3533    locked: Cell<bool>,
3534    /// Temporary storage for incoming data.
3535    buffers: VecDeque<DataBuffer>,
3536}
3537
3538impl BufferedDataSource {
3539    fn new() -> BufferedDataSource {
3540        BufferedDataSource {
3541            locked: Cell::new(true),
3542            buffers: VecDeque::default(),
3543        }
3544    }
3545
3546    fn set_locked(&self, locked: bool) {
3547        self.locked.set(locked)
3548    }
3549
3550    fn add_buffer_to_queue(&mut self, buffer: DataBuffer) {
3551        debug_assert_ne!(
3552            self.buffers.back(),
3553            Some(&DataBuffer::EndOfStream),
3554            "The media backend not expects any further data after end of stream"
3555        );
3556
3557        self.buffers.push_back(buffer);
3558    }
3559
3560    fn process_into_player_from_queue(
3561        &mut self,
3562        player: &Arc<Mutex<dyn Player>>,
3563    ) -> Result<(), PlayerError> {
3564        // Early return if the data source is locked and processing requests should be ignored.
3565        if self.locked.get() {
3566            return Ok(());
3567        }
3568
3569        while let Some(buffer) = self.buffers.pop_front() {
3570            match buffer {
3571                DataBuffer::Payload(payload) => {
3572                    if let Err(error) = player.lock().unwrap().push_data(payload) {
3573                        warn!("Could not push input data to player: {error:?}");
3574                        return Err(error);
3575                    }
3576                },
3577                DataBuffer::EndOfStream => {
3578                    if let Err(error) = player.lock().unwrap().end_of_stream() {
3579                        warn!("Could not signal EOS to player: {error:?}");
3580                        return Err(error);
3581                    }
3582                },
3583            }
3584        }
3585
3586        Ok(())
3587    }
3588
3589    fn reset(&mut self) {
3590        self.locked.set(true);
3591        self.buffers.clear();
3592    }
3593}
3594
3595/// Indicates the reason why a fetch request was cancelled.
3596#[derive(Debug, MallocSizeOf, PartialEq)]
3597enum CancelReason {
3598    /// We were asked to stop pushing data to the player.
3599    Backoff,
3600    /// An error occurred while fetching the media data.
3601    Error,
3602    /// The fetching process was aborted by the user.
3603    Abort,
3604}
3605
3606#[derive(MallocSizeOf)]
3607pub(crate) struct HTMLMediaElementFetchContext {
3608    /// The fetch request id.
3609    request_id: RequestId,
3610    /// Some if the request has been cancelled.
3611    cancel_reason: Option<CancelReason>,
3612    /// Indicates whether the fetched stream is seekable.
3613    is_seekable: bool,
3614    /// Indicates whether the fetched stream is origin clean.
3615    origin_clean: bool,
3616    /// The buffered data source to be processed by the media backend.
3617    data_source: RefCell<BufferedDataSource>,
3618    /// Fetch canceller. Allows cancelling the current fetch request by
3619    /// manually calling its .cancel() method or automatically on Drop.
3620    fetch_canceller: FetchCanceller,
3621}
3622
3623impl HTMLMediaElementFetchContext {
3624    fn new(
3625        request_id: RequestId,
3626        core_resource_thread: CoreResourceThread,
3627    ) -> HTMLMediaElementFetchContext {
3628        HTMLMediaElementFetchContext {
3629            request_id,
3630            cancel_reason: None,
3631            is_seekable: false,
3632            origin_clean: true,
3633            data_source: RefCell::new(BufferedDataSource::new()),
3634            fetch_canceller: FetchCanceller::new(request_id, false, core_resource_thread.clone()),
3635        }
3636    }
3637
3638    fn request_id(&self) -> RequestId {
3639        self.request_id
3640    }
3641
3642    fn is_seekable(&self) -> bool {
3643        self.is_seekable
3644    }
3645
3646    fn set_seekable(&mut self, seekable: bool) {
3647        self.is_seekable = seekable;
3648    }
3649
3650    fn origin_is_clean(&self) -> bool {
3651        self.origin_clean
3652    }
3653
3654    fn set_origin_clean(&mut self, origin_clean: bool) {
3655        self.origin_clean = origin_clean;
3656    }
3657
3658    fn data_source(&self) -> &RefCell<BufferedDataSource> {
3659        &self.data_source
3660    }
3661
3662    fn cancel(&mut self, reason: CancelReason) {
3663        if self.cancel_reason.is_some() {
3664            return;
3665        }
3666        self.cancel_reason = Some(reason);
3667        self.data_source.borrow_mut().reset();
3668        self.fetch_canceller.abort();
3669    }
3670
3671    fn cancel_reason(&self) -> &Option<CancelReason> {
3672        &self.cancel_reason
3673    }
3674}
3675
3676struct HTMLMediaElementFetchListener {
3677    /// The element that initiated the request.
3678    element: Trusted<HTMLMediaElement>,
3679    /// The generation of the media element when this fetch started.
3680    generation_id: u32,
3681    /// The fetch request id.
3682    request_id: RequestId,
3683    /// Earliest time at which the next progress event may be fired.
3684    next_progress_event: Instant,
3685    /// Url for the resource.
3686    url: ServoUrl,
3687    /// Expected content length of the media asset being fetched or played.
3688    expected_content_length: Option<u64>,
3689    /// Actual content length of the media asset fetched so far.
3690    fetched_content_length: u64,
3691    /// Content length to discard from the network for the ongoing
3692    /// request if range requests are not supported. Seek requests set it
3693    /// to the required position (in bytes).
3694    content_length_to_discard: u64,
3695}
3696
3697impl FetchResponseListener for HTMLMediaElementFetchListener {
3698    fn process_request_body(&mut self, _: RequestId) {}
3699
3700    fn process_request_eof(&mut self, _: RequestId) {}
3701
3702    fn process_response(&mut self, _: RequestId, metadata: Result<FetchMetadata, NetworkError>) {
3703        let element = self.element.root();
3704
3705        let (metadata, origin_clean) = match metadata {
3706            Ok(fetch_metadata) => match fetch_metadata {
3707                FetchMetadata::Unfiltered(metadata) => (Some(metadata), true),
3708                FetchMetadata::Filtered { filtered, unsafe_ } => (
3709                    Some(unsafe_),
3710                    matches!(
3711                        filtered,
3712                        FilteredMetadata::Basic(_) | FilteredMetadata::Cors(_)
3713                    ),
3714                ),
3715            },
3716            Err(_) => (None, true),
3717        };
3718
3719        let (status_is_success, is_seekable) =
3720            metadata.as_ref().map_or((false, false), |metadata| {
3721                let status = &metadata.status;
3722                (status.is_success(), *status == StatusCode::PARTIAL_CONTENT)
3723            });
3724
3725        // <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
3726        if !status_is_success {
3727            if element.ready_state.get() == ReadyState::HaveNothing {
3728                // => "If the media data cannot be fetched at all, due to network errors..."
3729                element.media_data_processing_failure_steps();
3730            } else {
3731                // => "If the connection is interrupted after some media data has been received..."
3732                element.media_data_processing_fatal_steps(MEDIA_ERR_NETWORK, CanGc::note());
3733            }
3734            return;
3735        }
3736
3737        if let Some(ref mut current_fetch_context) = *element.current_fetch_context.borrow_mut() {
3738            current_fetch_context.set_seekable(is_seekable);
3739            current_fetch_context.set_origin_clean(origin_clean);
3740        }
3741
3742        if let Some(metadata) = metadata.as_ref() {
3743            if let Some(headers) = metadata.headers.as_ref() {
3744                // For range requests we get the size of the media asset from the Content-Range
3745                // header. Otherwise, we get it from the Content-Length header.
3746                let content_length =
3747                    if let Some(content_range) = headers.typed_get::<ContentRange>() {
3748                        content_range.bytes_len()
3749                    } else {
3750                        headers
3751                            .typed_get::<ContentLength>()
3752                            .map(|content_length| content_length.0)
3753                    };
3754
3755                // We only set the expected input size if it changes.
3756                if content_length != self.expected_content_length {
3757                    if let Some(content_length) = content_length {
3758                        self.expected_content_length = Some(content_length);
3759                    }
3760                }
3761            }
3762        }
3763
3764        // Explicit media player initialization with live/seekable source.
        if let Some(expected_content_length) = self.expected_content_length {
            if let Err(e) = element
                .player
                .borrow()
                .as_ref()
                .unwrap()
                .lock()
                .unwrap()
                .set_input_size(expected_content_length)
            {
                warn!("Could not set player input size {:?}", e);
            }
        }
    }

    fn process_response_chunk(&mut self, _: RequestId, chunk: Vec<u8>) {
        let element = self.element.root();

        self.fetched_content_length += chunk.len() as u64;

        // If the fetch was already cancelled to back off (the player reported it has enough
        // data), skip processing the payload.
        if let Some(ref mut current_fetch_context) = *element.current_fetch_context.borrow_mut() {
            if let Some(CancelReason::Backoff) = current_fetch_context.cancel_reason() {
                return;
            }

            // If the fetch context doesn't support range requests, the response starts from the
            // beginning of the resource, so discard bytes until the requested seek offset is reached.
            let payload = if !current_fetch_context.is_seekable() &&
                self.content_length_to_discard != 0
            {
                if chunk.len() as u64 > self.content_length_to_discard {
                    let shrink_chunk = chunk[self.content_length_to_discard as usize..].to_vec();
                    self.content_length_to_discard = 0;
                    shrink_chunk
                } else {
                    // Completely discard this response chunk.
                    self.content_length_to_discard -= chunk.len() as u64;
                    return;
                }
            } else {
                chunk
            };

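            // Queue this payload and try to push the queued buffers into the player.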
            if let Err(e) = {
                let mut data_source = current_fetch_context.data_source().borrow_mut();
                data_source.add_buffer_to_queue(DataBuffer::Payload(payload));
                data_source
                    .process_into_player_from_queue(element.player.borrow().as_ref().unwrap())
            } {
                // If we are pushing too much data and we know that we can
                // restart the download later from where we left off, we cancel
                // the current request. Otherwise, we continue the request
                // assuming that we may drop some frames.
                if e == PlayerError::EnoughData {
                    current_fetch_context.cancel(CancelReason::Backoff);
                }
                return;
            }
        }

        // <https://html.spec.whatwg.org/multipage/#concept-media-load-resource>
        // While the load is not suspended (see below), every 350ms (±200ms) or for every byte
        // received, whichever is least frequent, queue a media element task given the media element
        // to fire an event named progress at the element.
        if Instant::now() > self.next_progress_event {
            element.queue_media_element_task_to_fire_event(atom!("progress"));
            self.next_progress_event = Instant::now() + Duration::from_millis(350);
        }
    }

    fn process_response_eof(
        self,
        _: RequestId,
        status: Result<(), NetworkError>,
        timing: ResourceFetchTiming,
    ) {
        let element = self.element.root();

        // <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
        if status.is_ok() && self.fetched_content_length != 0 {
            // => "Once the entire media resource has been fetched..."

            // There are no more chunks of the response body forthcoming, so we can
            // go ahead and notify the media backend not to expect any further data.
            if let Some(ref mut current_fetch_context) = *element.current_fetch_context.borrow_mut()
            {
                // On the initial READY -> PAUSED state change the media player performs a
                // seek to the initial position using a seek segment in TIME format, while
                // the media stack operates in BYTES format and cannot configure the segment
                // start and stop positions without knowing the total size of the stream.
                // As a fallback, the media player performs the seek in BYTES format and
                // initiates a seek request via the "seek-data" callback with the required offset.
                if self.expected_content_length.is_none() {
                    if let Err(e) = element
                        .player
                        .borrow()
                        .as_ref()
                        .unwrap()
                        .lock()
                        .unwrap()
                        .set_input_size(self.fetched_content_length)
                    {
                        warn!("Could not set player input size {:?}", e);
                    }
                }

                let mut data_source = current_fetch_context.data_source().borrow_mut();

                data_source.add_buffer_to_queue(DataBuffer::EndOfStream);
                let _ = data_source
                    .process_into_player_from_queue(element.player.borrow().as_ref().unwrap());
            }

            // Step 1. Fire an event named progress at the media element.
            element
                .upcast::<EventTarget>()
                .fire_event(atom!("progress"), CanGc::note());

            // Step 2. Set the networkState to NETWORK_IDLE and fire an event named suspend at the
            // media element.
            element.network_state.set(NetworkState::Idle);

            element
                .upcast::<EventTarget>()
                .fire_event(atom!("suspend"), CanGc::note());
        } else if status.is_err() && element.ready_state.get() != ReadyState::HaveNothing {
            // => "If the connection is interrupted after some media data has been received..."
            element.media_data_processing_fatal_steps(MEDIA_ERR_NETWORK, CanGc::note());
        } else {
            // => "If the media data can be fetched but is found by inspection to be in an
            // unsupported format, or can otherwise not be rendered at all"
            element.media_data_processing_failure_steps();
        }

        network_listener::submit_timing(&self, &status, &timing, CanGc::note());
    }

    fn process_csp_violations(&mut self, _request_id: RequestId, violations: Vec<Violation>) {
        let global = &self.resource_timing_global();
        global.report_csp_violations(violations, None, None);
    }

    fn should_invoke(&self) -> bool {
        let element = self.element.root();

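        // Ignore responses if the element's generation has changed since this listener was
        // created or if its media player has been dropped.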
        if element.generation_id.get() != self.generation_id || element.player.borrow().is_none() {
            return false;
        }

        let Some(ref current_fetch_context) = *element.current_fetch_context.borrow() else {
            return false;
        };

        // Ignore responses belonging to a previous fetch request once a new one has been triggered.
        if current_fetch_context.request_id() != self.request_id {
            return false;
        }

        // Ignore responses if the current fetch request was cancelled due to a network or
        // decoding error, or was aborted by the user.
        if let Some(cancel_reason) = current_fetch_context.cancel_reason() {
            if matches!(*cancel_reason, CancelReason::Error | CancelReason::Abort) {
                return false;
            }
        }

        true
    }
}

impl ResourceTimingListener for HTMLMediaElementFetchListener {
    fn resource_timing_information(&self) -> (InitiatorType, ServoUrl) {
        let initiator_type = InitiatorType::LocalName(
            self.element
                .root()
                .upcast::<Element>()
                .local_name()
                .to_string(),
        );
        (initiator_type, self.url.clone())
    }

    fn resource_timing_global(&self) -> DomRoot<GlobalScope> {
        self.element.root().owner_document().global()
    }
}

impl HTMLMediaElementFetchListener {
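    /// Creates a listener for a media resource fetch starting at `offset` bytes. The offset is
    /// remembered as `content_length_to_discard` so that, if the server ignores the range
    /// request, the response bytes before the requested position can be skipped.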
    fn new(element: &HTMLMediaElement, request_id: RequestId, url: ServoUrl, offset: u64) -> Self {
        Self {
            element: Trusted::new(element),
            generation_id: element.generation_id.get(),
            request_id,
            next_progress_event: Instant::now() + Duration::from_millis(350),
            url,
            expected_content_length: None,
            fetched_content_length: 0,
            content_length_to_discard: offset,
        }
    }
}

/// The [`HTMLMediaElementEventHandler`] is responsible for handling media player events for
/// the [`HTMLMediaElement`] and exists to decouple ownership of the [`HTMLMediaElement`] from the
/// IPC router callback.
#[derive(JSTraceable, MallocSizeOf)]
struct HTMLMediaElementEventHandler {
    element: WeakRef<HTMLMediaElement>,
}

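// SAFETY: although the handler is sent to the IPC router thread, the weak reference it holds is
// only used and dropped on the script thread (see the `Drop` implementation below).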
#[expect(unsafe_code)]
unsafe impl Send for HTMLMediaElementEventHandler {}

impl HTMLMediaElementEventHandler {
    fn new(element: &HTMLMediaElement) -> Self {
        Self {
            element: WeakRef::new(element),
        }
    }

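    /// Routes a single event from the media backend to the corresponding playback handler on the
    /// element. Events are dropped if the element has already been collected or if they come from
    /// a player other than the element's current one.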
    fn handle_player_event(&self, player_id: usize, event: PlayerEvent, can_gc: CanGc) {
        let Some(element) = self.element.root() else {
            return;
        };

        // Abort event processing if the associated media player is outdated.
        if element.player_id().is_none_or(|id| id != player_id) {
            return;
        }

        match event {
            PlayerEvent::DurationChanged(duration) => element.playback_duration_changed(duration),
            PlayerEvent::EndOfStream => element.playback_end(),
            PlayerEvent::EnoughData => element.playback_enough_data(),
            PlayerEvent::Error(ref error) => element.playback_error(error, can_gc),
            PlayerEvent::MetadataUpdated(ref metadata) => {
                element.playback_metadata_updated(metadata, can_gc)
            },
            PlayerEvent::NeedData => element.playback_need_data(),
            PlayerEvent::PositionChanged(position) => element.playback_position_changed(position),
            PlayerEvent::SeekData(offset, seek_lock) => {
                element.fetch_request(Some(offset), Some(seek_lock))
            },
            PlayerEvent::SeekDone(position) => element.playback_seek_done(position),
            PlayerEvent::StateChanged(ref state) => element.playback_state_changed(state),
            PlayerEvent::VideoFrameUpdated => element.playback_video_frame_updated(),
        }
    }
}

impl Drop for HTMLMediaElementEventHandler {
    fn drop(&mut self) {
        // The weak reference to the media element is not thread-safe and MUST be dropped on the
        // script thread. This is guaranteed because the event handler is owned by the IPC router
        // callback (which queues tasks to the media element task source) and by the media element
        // itself.
        assert_in_script();
    }
}