// script/dom/html/htmlmediaelement.rs

1/* This Source Code Form is subject to the terms of the Mozilla Public
2 * License, v. 2.0. If a copy of the MPL was not distributed with this
3 * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
4
5use std::cell::{Cell, RefCell};
6use std::collections::VecDeque;
7use std::rc::Rc;
8use std::sync::{Arc, Mutex, Weak};
9use std::time::{Duration, Instant};
10use std::{f64, mem};
11
12use base::generic_channel::GenericSharedMemory;
13use base::id::WebViewId;
14use content_security_policy::sandboxing_directive::SandboxingFlagSet;
15use dom_struct::dom_struct;
16use embedder_traits::{MediaPositionState, MediaSessionEvent, MediaSessionPlaybackState};
17use euclid::default::Size2D;
18use headers::{ContentLength, ContentRange, HeaderMapExt};
19use html5ever::{LocalName, Prefix, QualName, local_name, ns};
20use http::StatusCode;
21use http::header::{self, HeaderMap, HeaderValue};
22use ipc_channel::ipc::{self};
23use ipc_channel::router::ROUTER;
24use js::context::JSContext;
25use js::realm::{AutoRealm, CurrentRealm};
26use layout_api::MediaFrame;
27use media::{GLPlayerMsg, GLPlayerMsgForward, WindowGLContext};
28use net_traits::request::{Destination, RequestId};
29use net_traits::{
30    CoreResourceThread, FetchMetadata, FilteredMetadata, NetworkError, ResourceFetchTiming,
31};
32use paint_api::{CrossProcessPaintApi, ImageUpdate, SerializableImageData};
33use pixels::RasterImage;
34use script_bindings::codegen::InheritTypes::{
35    ElementTypeId, HTMLElementTypeId, HTMLMediaElementTypeId, NodeTypeId,
36};
37use script_bindings::root::assert_in_script;
38use script_bindings::script_runtime::temp_cx;
39use script_bindings::weakref::WeakRef;
40use servo_config::pref;
41use servo_media::player::audio::AudioRenderer;
42use servo_media::player::video::{VideoFrame, VideoFrameRenderer};
43use servo_media::player::{PlaybackState, Player, PlayerError, PlayerEvent, SeekLock, StreamType};
44use servo_media::{ClientContextId, ServoMedia, SupportsMediaType};
45use servo_url::ServoUrl;
46use stylo_atoms::Atom;
47use uuid::Uuid;
48use webrender_api::{
49    ExternalImageData, ExternalImageId, ExternalImageType, ImageBufferKind, ImageDescriptor,
50    ImageDescriptorFlags, ImageFormat, ImageKey,
51};
52
53use crate::document_loader::{LoadBlocker, LoadType};
54use crate::dom::attr::Attr;
55use crate::dom::audio::audiotrack::AudioTrack;
56use crate::dom::audio::audiotracklist::AudioTrackList;
57use crate::dom::bindings::cell::DomRefCell;
58use crate::dom::bindings::codegen::Bindings::HTMLMediaElementBinding::{
59    CanPlayTypeResult, HTMLMediaElementConstants, HTMLMediaElementMethods,
60};
61use crate::dom::bindings::codegen::Bindings::MediaErrorBinding::MediaErrorConstants::*;
62use crate::dom::bindings::codegen::Bindings::MediaErrorBinding::MediaErrorMethods;
63use crate::dom::bindings::codegen::Bindings::NavigatorBinding::Navigator_Binding::NavigatorMethods;
64use crate::dom::bindings::codegen::Bindings::NodeBinding::Node_Binding::NodeMethods;
65use crate::dom::bindings::codegen::Bindings::TextTrackBinding::{TextTrackKind, TextTrackMode};
66use crate::dom::bindings::codegen::Bindings::URLBinding::URLMethods;
67use crate::dom::bindings::codegen::Bindings::WindowBinding::Window_Binding::WindowMethods;
68use crate::dom::bindings::codegen::UnionTypes::{
69    MediaStreamOrBlob, VideoTrackOrAudioTrackOrTextTrack,
70};
71use crate::dom::bindings::error::{Error, ErrorResult, Fallible};
72use crate::dom::bindings::inheritance::Castable;
73use crate::dom::bindings::num::Finite;
74use crate::dom::bindings::refcounted::Trusted;
75use crate::dom::bindings::reflector::DomGlobal;
76use crate::dom::bindings::root::{Dom, DomRoot, MutNullableDom};
77use crate::dom::bindings::str::{DOMString, USVString};
78use crate::dom::blob::Blob;
79use crate::dom::csp::{GlobalCspReporting, Violation};
80use crate::dom::document::Document;
81use crate::dom::element::{
82    AttributeMutation, AttributeMutationReason, CustomElementCreationMode, Element, ElementCreator,
83    cors_setting_for_element, reflect_cross_origin_attribute, set_cross_origin_attribute,
84};
85use crate::dom::event::Event;
86use crate::dom::eventtarget::EventTarget;
87use crate::dom::globalscope::GlobalScope;
88use crate::dom::html::htmlelement::HTMLElement;
89use crate::dom::html::htmlsourceelement::HTMLSourceElement;
90use crate::dom::html::htmlvideoelement::HTMLVideoElement;
91use crate::dom::mediaerror::MediaError;
92use crate::dom::mediafragmentparser::MediaFragmentParser;
93use crate::dom::medialist::MediaList;
94use crate::dom::mediastream::MediaStream;
95use crate::dom::node::{Node, NodeDamage, NodeTraits, UnbindContext};
96use crate::dom::performance::performanceresourcetiming::InitiatorType;
97use crate::dom::promise::Promise;
98use crate::dom::texttrack::TextTrack;
99use crate::dom::texttracklist::TextTrackList;
100use crate::dom::timeranges::{TimeRanges, TimeRangesContainer};
101use crate::dom::trackevent::TrackEvent;
102use crate::dom::url::URL;
103use crate::dom::videotrack::VideoTrack;
104use crate::dom::videotracklist::VideoTrackList;
105use crate::dom::virtualmethods::VirtualMethods;
106use crate::fetch::{FetchCanceller, RequestWithGlobalScope, create_a_potential_cors_request};
107use crate::microtask::{Microtask, MicrotaskRunnable};
108use crate::network_listener::{self, FetchResponseListener, ResourceTimingListener};
109use crate::realms::enter_auto_realm;
110use crate::script_runtime::CanGc;
111use crate::script_thread::ScriptThread;
112use crate::task_source::SendableTaskSource;
113
/// A CSS file to style the media controls, embedded at compile time.
static MEDIA_CONTROL_CSS: &str = include_str!("../../resources/media-controls.css");

/// A JS file to control the media controls, embedded at compile time.
static MEDIA_CONTROL_JS: &str = include_str!("../../resources/media-controls.js");
119
/// Whether the current video frame is locked for external use (e.g. by the
/// GL player thread); `FrameHolder::get` may only be called while `Locked`.
#[derive(MallocSizeOf, PartialEq)]
enum FrameStatus {
    Locked,
    Unlocked,
}
125
/// A [`VideoFrame`] paired with its lock status. While `Locked`, the frame is
/// in use externally and `set` will refuse to replace it.
#[derive(MallocSizeOf)]
struct FrameHolder(
    FrameStatus,
    #[ignore_malloc_size_of = "defined in servo-media"] VideoFrame,
);
131
132impl FrameHolder {
133    fn new(frame: VideoFrame) -> FrameHolder {
134        FrameHolder(FrameStatus::Unlocked, frame)
135    }
136
137    fn lock(&mut self) {
138        if self.0 == FrameStatus::Unlocked {
139            self.0 = FrameStatus::Locked;
140        };
141    }
142
143    fn unlock(&mut self) {
144        if self.0 == FrameStatus::Locked {
145            self.0 = FrameStatus::Unlocked;
146        };
147    }
148
149    fn set(&mut self, new_frame: VideoFrame) {
150        if self.0 == FrameStatus::Unlocked {
151            self.1 = new_frame
152        };
153    }
154
155    fn get(&self) -> (u32, Size2D<i32>, usize) {
156        if self.0 == FrameStatus::Locked {
157            (
158                self.1.get_texture_id(),
159                Size2D::new(self.1.get_width(), self.1.get_height()),
160                0,
161            )
162        } else {
163            unreachable!();
164        }
165    }
166
167    fn get_frame(&self) -> VideoFrame {
168        self.1.clone()
169    }
170}
171
/// Forwards decoded video frames to the compositor as WebRender images,
/// either as raw pixel data or as an external GL texture handle.
#[derive(MallocSizeOf)]
pub(crate) struct MediaFrameRenderer {
    /// The webview that owns the images registered with the paint API.
    webview_id: WebViewId,
    /// Identifier of the associated media player, set by `setup`.
    player_id: Option<usize>,
    /// Identifier assigned by the GL player thread, when one is available.
    glplayer_id: Option<u64>,
    /// Channel used to add/update/delete compositor images.
    paint_api: CrossProcessPaintApi,
    #[ignore_malloc_size_of = "Defined in other crates"]
    player_context: WindowGLContext,
    /// Image key and dimensions of the most recently submitted frame.
    current_frame: Option<MediaFrame>,
    /// Image keys retired one and two frames ago; they are kept around briefly
    /// before deletion, presumably so the compositor never samples a freed
    /// image — see the aging logic in `render`.
    old_frame: Option<ImageKey>,
    very_old_frame: Option<ImageKey>,
    /// The latest decoded frame, with its lock status.
    current_frame_holder: Option<FrameHolder>,
    /// <https://html.spec.whatwg.org/multipage/#poster-frame>
    poster_frame: Option<MediaFrame>,
}
187
impl MediaFrameRenderer {
    /// Creates a renderer with no player attached and no frames registered.
    fn new(
        webview_id: WebViewId,
        paint_api: CrossProcessPaintApi,
        player_context: WindowGLContext,
    ) -> Self {
        Self {
            webview_id,
            player_id: None,
            glplayer_id: None,
            paint_api,
            player_context,
            current_frame: None,
            old_frame: None,
            very_old_frame: None,
            current_frame_holder: None,
            poster_frame: None,
        }
    }

    /// Associates this renderer with `player_id` and, when a GL player thread
    /// is available, registers with it and routes its messages to
    /// `task_source`, acting on the renderer through `weak_video_renderer` so
    /// the renderer can still be dropped independently.
    fn setup(
        &mut self,
        player_id: usize,
        task_source: SendableTaskSource,
        weak_video_renderer: Weak<Mutex<MediaFrameRenderer>>,
    ) {
        self.player_id = Some(player_id);

        // Register with the GL player thread, if any, obtaining the id it
        // assigned us and the channel it will send further messages on.
        let (glplayer_id, image_receiver) = self
            .player_context
            .glplayer_thread_sender
            .as_ref()
            .map(|sender| {
                let (image_sender, image_receiver) = ipc::channel::<GLPlayerMsgForward>().unwrap();
                sender
                    .send(GLPlayerMsg::RegisterPlayer(image_sender))
                    .unwrap();
                // The first message back is always the assigned player id.
                match image_receiver.recv().unwrap() {
                    GLPlayerMsgForward::PlayerId(id) => (Some(id), Some(image_receiver)),
                    _ => unreachable!(),
                }
            })
            .unwrap_or((None, None));

        self.glplayer_id = glplayer_id;

        // No GL player thread: there are no messages to route.
        let Some(image_receiver) = image_receiver else {
            return;
        };

        // Forward GL player messages to script as queued tasks.
        ROUTER.add_typed_route(
            image_receiver,
            Box::new(move |message| {
                let message = message.unwrap();
                let weak_video_renderer = weak_video_renderer.clone();

                task_source.queue(task!(handle_glplayer_message: move || {
                    trace!("GLPlayer message {:?}", message);

                    // The renderer may have been dropped in the meantime.
                    let Some(video_renderer) = weak_video_renderer.upgrade() else {
                        return;
                    };

                    match message {
                        // Lock the current frame and reply with its texture
                        // id, size, and offset (see `FrameHolder::get`).
                        GLPlayerMsgForward::Lock(sender) => {
                            if let Some(holder) = video_renderer
                                .lock()
                                .unwrap()
                                .current_frame_holder
                                .as_mut() {
                                    holder.lock();
                                    sender.send(holder.get()).unwrap();
                                };
                        },
                        // Release the lock so new frames can replace it.
                        GLPlayerMsgForward::Unlock() => {
                            if let Some(holder) = video_renderer
                                .lock()
                                .unwrap()
                                .current_frame_holder
                                .as_mut() { holder.unlock() }
                        },
                        _ => (),
                    }
                }));
            }),
        );
    }

    /// Detaches from the player: unregisters from the GL player thread, drops
    /// the held frame, and deletes every image key previously registered with
    /// the compositor.
    fn reset(&mut self) {
        self.player_id = None;

        if let Some(glplayer_id) = self.glplayer_id.take() {
            self.player_context
                .send(GLPlayerMsg::UnregisterPlayer(glplayer_id));
        }

        self.current_frame_holder = None;

        let mut updates = smallvec::smallvec![];

        if let Some(current_frame) = self.current_frame.take() {
            updates.push(ImageUpdate::DeleteImage(current_frame.image_key));
        }

        if let Some(old_image_key) = self.old_frame.take() {
            updates.push(ImageUpdate::DeleteImage(old_image_key));
        }

        if let Some(very_old_image_key) = self.very_old_frame.take() {
            updates.push(ImageUpdate::DeleteImage(very_old_image_key));
        }

        if !updates.is_empty() {
            self.paint_api
                .update_images(self.webview_id.into(), updates);
        }
    }

    /// Records the poster image as the frame to display before playback, or
    /// clears it when `image` is `None` or has no image key.
    /// <https://html.spec.whatwg.org/multipage/#poster-frame>
    fn set_poster_frame(&mut self, image: Option<Arc<RasterImage>>) {
        self.poster_frame = image.and_then(|image| {
            image.id.map(|image_key| MediaFrame {
                image_key,
                width: image.metadata.width as i32,
                height: image.metadata.height as i32,
            })
        });
    }
}
316
impl Drop for MediaFrameRenderer {
    /// Releases the GL player registration and all compositor images when the
    /// renderer goes away (see `reset`).
    fn drop(&mut self) {
        self.reset();
    }
}
322
323impl VideoFrameRenderer for MediaFrameRenderer {
324    fn render(&mut self, frame: VideoFrame) {
325        if self.player_id.is_none() || (frame.is_gl_texture() && self.glplayer_id.is_none()) {
326            return;
327        }
328
329        let mut updates = smallvec::smallvec![];
330
331        if let Some(old_image_key) = mem::replace(&mut self.very_old_frame, self.old_frame.take()) {
332            updates.push(ImageUpdate::DeleteImage(old_image_key));
333        }
334
335        let descriptor = ImageDescriptor::new(
336            frame.get_width(),
337            frame.get_height(),
338            ImageFormat::BGRA8,
339            ImageDescriptorFlags::empty(),
340        );
341
342        match &mut self.current_frame {
343            Some(current_frame)
344                if current_frame.width == frame.get_width() &&
345                    current_frame.height == frame.get_height() =>
346            {
347                if !frame.is_gl_texture() {
348                    updates.push(ImageUpdate::UpdateImage(
349                        current_frame.image_key,
350                        descriptor,
351                        SerializableImageData::Raw(GenericSharedMemory::from_bytes(
352                            &frame.get_data(),
353                        )),
354                        None,
355                    ));
356                }
357
358                self.current_frame_holder
359                    .get_or_insert_with(|| FrameHolder::new(frame.clone()))
360                    .set(frame);
361
362                if let Some(old_image_key) = self.old_frame.take() {
363                    updates.push(ImageUpdate::DeleteImage(old_image_key));
364                }
365            },
366            Some(current_frame) => {
367                self.old_frame = Some(current_frame.image_key);
368
369                let Some(new_image_key) =
370                    self.paint_api.generate_image_key_blocking(self.webview_id)
371                else {
372                    return;
373                };
374
375                /* update current_frame */
376                current_frame.image_key = new_image_key;
377                current_frame.width = frame.get_width();
378                current_frame.height = frame.get_height();
379
380                let image_data = if frame.is_gl_texture() && self.glplayer_id.is_some() {
381                    let texture_target = if frame.is_external_oes() {
382                        ImageBufferKind::TextureExternal
383                    } else {
384                        ImageBufferKind::Texture2D
385                    };
386
387                    SerializableImageData::External(ExternalImageData {
388                        id: ExternalImageId(self.glplayer_id.unwrap()),
389                        channel_index: 0,
390                        image_type: ExternalImageType::TextureHandle(texture_target),
391                        normalized_uvs: false,
392                    })
393                } else {
394                    SerializableImageData::Raw(GenericSharedMemory::from_bytes(&frame.get_data()))
395                };
396
397                self.current_frame_holder
398                    .get_or_insert_with(|| FrameHolder::new(frame.clone()))
399                    .set(frame);
400
401                updates.push(ImageUpdate::AddImage(
402                    new_image_key,
403                    descriptor,
404                    image_data,
405                    false,
406                ));
407            },
408            None => {
409                let Some(image_key) = self.paint_api.generate_image_key_blocking(self.webview_id)
410                else {
411                    return;
412                };
413
414                self.current_frame = Some(MediaFrame {
415                    image_key,
416                    width: frame.get_width(),
417                    height: frame.get_height(),
418                });
419
420                let image_data = if frame.is_gl_texture() && self.glplayer_id.is_some() {
421                    let texture_target = if frame.is_external_oes() {
422                        ImageBufferKind::TextureExternal
423                    } else {
424                        ImageBufferKind::Texture2D
425                    };
426
427                    SerializableImageData::External(ExternalImageData {
428                        id: ExternalImageId(self.glplayer_id.unwrap()),
429                        channel_index: 0,
430                        image_type: ExternalImageType::TextureHandle(texture_target),
431                        normalized_uvs: false,
432                    })
433                } else {
434                    SerializableImageData::Raw(GenericSharedMemory::from_bytes(&frame.get_data()))
435                };
436
437                self.current_frame_holder = Some(FrameHolder::new(frame));
438
439                updates.push(ImageUpdate::AddImage(
440                    image_key, descriptor, image_data, false,
441                ));
442            },
443        }
444        self.paint_api
445            .update_images(self.webview_id.into(), updates);
446    }
447}
448
/// The media provider object assigned via the `srcObject` IDL attribute:
/// either a `MediaStream` or a `Blob`.
/// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
#[cfg_attr(crown, crown::unrooted_must_root_lint::must_root)]
#[derive(JSTraceable, MallocSizeOf)]
enum SrcObject {
    MediaStream(Dom<MediaStream>),
    Blob(Dom<Blob>),
}
455
456impl From<MediaStreamOrBlob> for SrcObject {
457    #[cfg_attr(crown, expect(crown::unrooted_must_root))]
458    fn from(src_object: MediaStreamOrBlob) -> SrcObject {
459        match src_object {
460            MediaStreamOrBlob::Blob(blob) => SrcObject::Blob(Dom::from_ref(&*blob)),
461            MediaStreamOrBlob::MediaStream(stream) => {
462                SrcObject::MediaStream(Dom::from_ref(&*stream))
463            },
464        }
465    }
466}
467
/// Which kind of media provider or resource the load algorithm is currently
/// using, if any.
#[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf, PartialEq)]
enum LoadState {
    /// No load has been started.
    NotLoaded,
    /// Loading from the `srcObject` IDL attribute (a media provider object).
    LoadingFromSrcObject,
    /// Loading from the `src` content attribute.
    LoadingFromSrcAttribute,
    /// Loading from a `<source>` element child.
    LoadingFromSourceChild,
    /// Waiting for a usable `<source>` child to become available.
    WaitingForSource,
}
476
/// <https://html.spec.whatwg.org/multipage/#loading-the-media-resource:media-element-29>
#[cfg_attr(crown, crown::unrooted_must_root_lint::must_root)]
#[derive(JSTraceable, MallocSizeOf)]
struct SourceChildrenPointer {
    /// The `<source>` element the pointer sits immediately before.
    source_before_pointer: Dom<HTMLSourceElement>,
    /// Presumably whether `source_before_pointer` itself is still a candidate
    /// for resource selection — TODO confirm against the pointer's users.
    inclusive: bool,
}
484
485impl SourceChildrenPointer {
486    fn new(source_before_pointer: DomRoot<HTMLSourceElement>, inclusive: bool) -> Self {
487        Self {
488            source_before_pointer: source_before_pointer.as_traced(),
489            inclusive,
490        }
491    }
492}
493
/// Generally the presence of the loop attribute should be considered to mean playback has not
/// "ended", as "ended" and "looping" are mutually exclusive.
/// <https://html.spec.whatwg.org/multipage/#ended-playback>
#[derive(Clone, Copy, Debug, PartialEq)]
enum LoopCondition {
    /// Honor the `loop` attribute when deciding whether playback has ended.
    Included,
    /// Disregard the `loop` attribute, so a seek back to the start still
    /// happens when `loop` was set after playback already ended.
    Ignored,
}
502
#[dom_struct]
pub(crate) struct HTMLMediaElement {
    htmlelement: HTMLElement,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
    network_state: Cell<NetworkState>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
    ready_state: Cell<ReadyState>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
    src_object: DomRefCell<Option<SrcObject>>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-currentsrc>
    current_src: DomRefCell<String>,
    /// Incremented whenever tasks associated with this element are cancelled.
    generation_id: Cell<u32>,
    /// <https://html.spec.whatwg.org/multipage/#fire-loadeddata>
    ///
    /// Reset to false every time the load algorithm is invoked.
    fired_loadeddata_event: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-error>
    error: MutNullableDom<MediaError>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-paused>
    paused: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
    default_playback_rate: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
    playback_rate: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#attr-media-autoplay>
    autoplaying: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#delaying-the-load-event-flag>
    delaying_the_load_event_flag: DomRefCell<Option<LoadBlocker>>,
    /// <https://html.spec.whatwg.org/multipage/#list-of-pending-play-promises>
    #[conditional_malloc_size_of]
    pending_play_promises: DomRefCell<Vec<Rc<Promise>>>,
    /// Play promises which are soon to be fulfilled by a queued task.
    #[expect(clippy::type_complexity)]
    #[conditional_malloc_size_of]
    in_flight_play_promises_queue: DomRefCell<VecDeque<(Box<[Rc<Promise>]>, ErrorResult)>>,
    /// The `servo_media` player backing this element, if one has been created.
    #[ignore_malloc_size_of = "servo_media"]
    #[no_trace]
    player: DomRefCell<Option<Arc<Mutex<dyn Player>>>>,
    /// Forwards decoded video frames to the compositor; shared with the player.
    #[conditional_malloc_size_of]
    #[no_trace]
    video_renderer: Arc<Mutex<MediaFrameRenderer>>,
    /// Optional sink for decoded audio, if one has been attached.
    #[ignore_malloc_size_of = "servo_media"]
    #[no_trace]
    audio_renderer: DomRefCell<Option<Arc<Mutex<dyn AudioRenderer>>>>,
    /// Handler for events from the media player; see
    /// `HTMLMediaElementEventHandler`.
    #[conditional_malloc_size_of]
    #[no_trace]
    event_handler: RefCell<Option<Arc<Mutex<HTMLMediaElementEventHandler>>>>,
    /// <https://html.spec.whatwg.org/multipage/#show-poster-flag>
    show_poster: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-duration>
    duration: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#current-playback-position>
    current_playback_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#official-playback-position>
    official_playback_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#default-playback-start-position>
    default_playback_start_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-volume>
    volume: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-seeking>
    seeking: Cell<bool>,
    /// The latest seek position (in seconds) is used to distinguish whether the seek request was
    /// initiated by a script or by the user agent itself, rather than by the media engine and to
    /// abort other running instance of the `seek` algorithm.
    current_seek_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-muted>
    muted: Cell<bool>,
    /// Loading state from source, if any.
    load_state: Cell<LoadState>,
    /// Position within the element's `<source>` children reached by the
    /// resource selection algorithm; see `SourceChildrenPointer`.
    source_children_pointer: DomRefCell<Option<SourceChildrenPointer>>,
    /// The `<source>` child currently being loaded from, if any.
    current_source_child: MutNullableDom<HTMLSourceElement>,
    /// URL of the media resource, if any.
    #[no_trace]
    resource_url: DomRefCell<Option<ServoUrl>>,
    /// URL of the media resource, if the resource is set through the src_object attribute and it
    /// is a blob.
    #[no_trace]
    blob_url: DomRefCell<Option<ServoUrl>>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-played>
    played: DomRefCell<TimeRangesContainer>,
    // https://html.spec.whatwg.org/multipage/#dom-media-audiotracks
    audio_tracks_list: MutNullableDom<AudioTrackList>,
    // https://html.spec.whatwg.org/multipage/#dom-media-videotracks
    video_tracks_list: MutNullableDom<VideoTrackList>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-texttracks>
    text_tracks_list: MutNullableDom<TextTrackList>,
    /// Time of last timeupdate notification.
    #[ignore_malloc_size_of = "Defined in std::time"]
    next_timeupdate_event: Cell<Instant>,
    /// Latest fetch request context.
    current_fetch_context: RefCell<Option<HTMLMediaElementFetchContext>>,
    /// Media controls id.
    /// In order to work around the lack of a privileged JS context, we secure
    /// access to the "privileged" document.servoGetMediaControls(id) API by
    /// keeping a whitelist of media controls identifiers.
    media_controls_id: DomRefCell<Option<String>>,
}
601
/// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
#[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq)]
#[repr(u8)]
pub(crate) enum NetworkState {
    /// The element has not yet been initialized; no resource selected.
    Empty = HTMLMediaElementConstants::NETWORK_EMPTY as u8,
    /// A resource is selected, but the element is not using the network.
    Idle = HTMLMediaElementConstants::NETWORK_IDLE as u8,
    /// The user agent is actively downloading data.
    Loading = HTMLMediaElementConstants::NETWORK_LOADING as u8,
    /// Resource selection found no usable resource.
    NoSource = HTMLMediaElementConstants::NETWORK_NO_SOURCE as u8,
}
611
/// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
#[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf, PartialEq, PartialOrd)]
#[repr(u8)]
#[expect(clippy::enum_variant_names)] // Clippy warning silenced here because these names are from the specification.
pub(crate) enum ReadyState {
    /// No information about the media resource is available yet.
    HaveNothing = HTMLMediaElementConstants::HAVE_NOTHING as u8,
    /// Duration and dimensions are known, but no playable data yet.
    HaveMetadata = HTMLMediaElementConstants::HAVE_METADATA as u8,
    /// Data for the current playback position, but not enough to advance.
    HaveCurrentData = HTMLMediaElementConstants::HAVE_CURRENT_DATA as u8,
    /// Data for the current position and at least a little beyond it.
    HaveFutureData = HTMLMediaElementConstants::HAVE_FUTURE_DATA as u8,
    /// Enough data that playback can likely proceed without stalling.
    HaveEnoughData = HTMLMediaElementConstants::HAVE_ENOUGH_DATA as u8,
}
623
/// <https://html.spec.whatwg.org/multipage/#direction-of-playback>
#[derive(Clone, Copy, PartialEq)]
enum PlaybackDirection {
    /// The current playback position increases over time.
    Forwards,
    /// The current playback position decreases over time.
    Backwards,
}
630
631impl HTMLMediaElement {
    /// Creates the shared state for a new media element (`<audio>` or
    /// `<video>`) owned by `document`, with fields in their initial state:
    /// `NETWORK_EMPTY`/`HAVE_NOTHING`, paused, unit volume and playback
    /// rates, unknown (NaN) duration, and the show-poster flag set.
    pub(crate) fn new_inherited(
        tag_name: LocalName,
        prefix: Option<Prefix>,
        document: &Document,
    ) -> Self {
        Self {
            htmlelement: HTMLElement::new_inherited(tag_name, prefix, document),
            network_state: Cell::new(NetworkState::Empty),
            ready_state: Cell::new(ReadyState::HaveNothing),
            src_object: Default::default(),
            current_src: DomRefCell::new("".to_owned()),
            generation_id: Cell::new(0),
            fired_loadeddata_event: Cell::new(false),
            error: Default::default(),
            paused: Cell::new(true),
            default_playback_rate: Cell::new(1.0),
            playback_rate: Cell::new(1.0),
            muted: Cell::new(false),
            load_state: Cell::new(LoadState::NotLoaded),
            source_children_pointer: DomRefCell::new(None),
            current_source_child: Default::default(),
            // FIXME(nox): Why is this initialised to true?
            autoplaying: Cell::new(true),
            delaying_the_load_event_flag: Default::default(),
            pending_play_promises: Default::default(),
            in_flight_play_promises_queue: Default::default(),
            player: Default::default(),
            // The frame renderer is shared (behind a mutex) with the player.
            video_renderer: Arc::new(Mutex::new(MediaFrameRenderer::new(
                document.webview_id(),
                document.window().paint_api().clone(),
                document.window().get_player_context(),
            ))),
            audio_renderer: Default::default(),
            event_handler: Default::default(),
            show_poster: Cell::new(true),
            duration: Cell::new(f64::NAN),
            current_playback_position: Cell::new(0.),
            official_playback_position: Cell::new(0.),
            default_playback_start_position: Cell::new(0.),
            volume: Cell::new(1.0),
            seeking: Cell::new(false),
            current_seek_position: Cell::new(f64::NAN),
            resource_url: DomRefCell::new(None),
            blob_url: DomRefCell::new(None),
            played: DomRefCell::new(TimeRangesContainer::default()),
            audio_tracks_list: Default::default(),
            video_tracks_list: Default::default(),
            text_tracks_list: Default::default(),
            // First timeupdate is throttled until 250ms after creation.
            next_timeupdate_event: Cell::new(Instant::now() + Duration::from_millis(250)),
            current_fetch_context: RefCell::new(None),
            media_controls_id: DomRefCell::new(None),
        }
    }
685
    /// Returns the element's current network state.
    /// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
    pub(crate) fn network_state(&self) -> NetworkState {
        self.network_state.get()
    }
689
    /// Returns the element's current ready state.
    /// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
    pub(crate) fn get_ready_state(&self) -> ReadyState {
        self.ready_state.get()
    }
693
    /// Returns which kind of media element this is (audio or video), derived
    /// from the node's type id.
    ///
    /// Panics if the node is not an HTML media element, which cannot happen
    /// for a correctly constructed `HTMLMediaElement`.
    fn media_type_id(&self) -> HTMLMediaElementTypeId {
        match self.upcast::<Node>().type_id() {
            NodeTypeId::Element(ElementTypeId::HTMLElement(
                HTMLElementTypeId::HTMLMediaElement(media_type_id),
            )) => media_type_id,
            _ => unreachable!(),
        }
    }
702
    /// Synchronizes the underlying player with the element's state: starts
    /// playback (after applying the current playback rate and volume) when
    /// the element is potentially playing but the player is paused, and
    /// pauses the player otherwise if it reports it is playing.
    fn update_media_state(&self) {
        // Whether the backing player (if any) reports it is not paused.
        let is_playing = self
            .player
            .borrow()
            .as_ref()
            .is_some_and(|player| !player.lock().unwrap().paused());

        if self.is_potentially_playing() && !is_playing {
            if let Some(ref player) = *self.player.borrow() {
                let player = player.lock().unwrap();

                // Apply rate and volume before starting playback; failures
                // are logged but do not prevent the play attempt.
                if let Err(error) = player.set_playback_rate(self.playback_rate.get()) {
                    warn!("Could not set the playback rate: {error:?}");
                }
                if let Err(error) = player.set_volume(self.volume.get()) {
                    warn!("Could not set the volume: {error:?}");
                }
                if let Err(error) = player.play() {
                    error!("Could not play media: {error:?}");
                }
            }
        } else if is_playing {
            // NOTE(review): this branch also runs when the element is
            // potentially playing AND the player is already playing, pausing
            // a playing player; confirm with callers that this is intended
            // rather than `!self.is_potentially_playing() && is_playing`.
            if let Some(ref player) = *self.player.borrow() {
                if let Err(error) = player.lock().unwrap().pause() {
                    error!("Could not pause player: {error:?}");
                }
            }
        }
    }
732
    /// Marks that element as delaying the load event or not.
    ///
    /// Nothing happens if the element was already delaying the load event and
    /// we pass true to that method again.
    ///
    /// <https://html.spec.whatwg.org/multipage/#delaying-the-load-event-flag>
    pub(crate) fn delay_load_event(&self, delay: bool, cx: &mut js::context::JSContext) {
        let blocker = &self.delaying_the_load_event_flag;
        if delay && blocker.borrow().is_none() {
            // Start blocking the document's load event on this media load.
            *blocker.borrow_mut() = Some(LoadBlocker::new(&self.owner_document(), LoadType::Media));
        } else if !delay && blocker.borrow().is_some() {
            // Stop blocking, potentially letting the load event fire.
            LoadBlocker::terminate(blocker, cx);
        }
    }
747
748    /// <https://html.spec.whatwg.org/multipage/#time-marches-on>
749    fn time_marches_on(&self) {
750        // Step 6. If the time was reached through the usual monotonic increase of the current
751        // playback position during normal playback, and if the user agent has not fired a
752        // timeupdate event at the element in the past 15 to 250ms and is not still running event
753        // handlers for such an event, then the user agent must queue a media element task given the
754        // media element to fire an event named timeupdate at the element.
755        if Instant::now() > self.next_timeupdate_event.get() {
756            self.queue_media_element_task_to_fire_event(atom!("timeupdate"));
757            self.next_timeupdate_event
758                .set(Instant::now() + Duration::from_millis(250));
759        }
760    }
761
    /// <https://html.spec.whatwg.org/multipage/#internal-play-steps>
    fn internal_play_steps(&self, cx: &mut js::context::JSContext) {
        // Step 1. If the media element's networkState attribute has the value NETWORK_EMPTY, invoke
        // the media element's resource selection algorithm.
        if self.network_state.get() == NetworkState::Empty {
            self.invoke_resource_selection_algorithm(cx);
        }

        // Step 2. If the playback has ended and the direction of playback is forwards, seek to the
        // earliest possible position of the media resource.
        // Generally "ended" and "looping" are exclusive. Here, the loop attribute is ignored to
        // seek back to start in case loop was set after playback ended.
        // <https://github.com/whatwg/html/issues/4487>
        if self.ended_playback(LoopCondition::Ignored) &&
            self.direction_of_playback() == PlaybackDirection::Forwards
        {
            self.seek(
                self.earliest_possible_position(),
                /* approximate_for_speed */ false,
            );
        }

        // Snapshot of the ready state, consulted by both steps 3.4 and 4 below.
        let state = self.ready_state.get();

        // Step 3. If the media element's paused attribute is true, then:
        if self.Paused() {
            // Step 3.1. Change the value of paused to false.
            self.paused.set(false);

            // Step 3.2. If the show poster flag is true, set the element's show poster flag to
            // false and run the time marches on steps.
            if self.show_poster.get() {
                self.show_poster.set(false);
                self.time_marches_on();
            }

            // Step 3.3. Queue a media element task given the media element to fire an event named
            // play at the element.
            self.queue_media_element_task_to_fire_event(atom!("play"));

            // Step 3.4. If the media element's readyState attribute has the value HAVE_NOTHING,
            // HAVE_METADATA, or HAVE_CURRENT_DATA, queue a media element task given the media
            // element to fire an event named waiting at the element. Otherwise, the media element's
            // readyState attribute has the value HAVE_FUTURE_DATA or HAVE_ENOUGH_DATA: notify about
            // playing for the element.
            match state {
                ReadyState::HaveNothing |
                ReadyState::HaveMetadata |
                ReadyState::HaveCurrentData => {
                    self.queue_media_element_task_to_fire_event(atom!("waiting"));
                },
                ReadyState::HaveFutureData | ReadyState::HaveEnoughData => {
                    self.notify_about_playing();
                },
            }
        }
        // Step 4. Otherwise, if the media element's readyState attribute has the value
        // HAVE_FUTURE_DATA or HAVE_ENOUGH_DATA, take pending play promises and queue a media
        // element task given the media element to resolve pending play promises with the
        // result.
        else if state == ReadyState::HaveFutureData || state == ReadyState::HaveEnoughData {
            self.take_pending_play_promises(Ok(()));

            let this = Trusted::new(self);
            let generation_id = self.generation_id.get();

            self.owner_global()
                .task_manager()
                .media_element_task_source()
                .queue(task!(resolve_pending_play_promises: move || {
                    let this = this.root();
                    // Stale task: the element's generation changed since this was queued.
                    if generation_id != this.generation_id.get() {
                        return;
                    }

                    this.fulfill_in_flight_play_promises(|| {});
                }));
        }

        // Step 5. Set the media element's can autoplay flag to false.
        self.autoplaying.set(false);

        self.update_media_state();
    }
846
    /// Pauses playback and rejects any pending play promises with an
    /// "AbortError" DOMException.
    ///
    /// <https://html.spec.whatwg.org/multipage/#internal-pause-steps>
    fn internal_pause_steps(&self) {
        // Step 1. Set the media element's can autoplay flag to false.
        self.autoplaying.set(false);

        // Step 2. If the media element's paused attribute is false, run the following steps:
        if !self.Paused() {
            // Step 2.1. Change the value of paused to true.
            self.paused.set(true);

            // Step 2.2. Take pending play promises and let promises be the result.
            self.take_pending_play_promises(Err(Error::Abort(None)));

            // Step 2.3. Queue a media element task given the media element and the following steps:
            let this = Trusted::new(self);
            let generation_id = self.generation_id.get();

            self.owner_global()
                .task_manager()
                .media_element_task_source()
                .queue(task!(internal_pause_steps: move || {
                    let this = this.root();
                    // Stale task: the element's generation changed since this was queued.
                    if generation_id != this.generation_id.get() {
                        return;
                    }

                    this.fulfill_in_flight_play_promises(|| {
                        // Step 2.3.1. Fire an event named timeupdate at the element.
                        this.upcast::<EventTarget>().fire_event(atom!("timeupdate"), CanGc::note());

                        // Step 2.3.2. Fire an event named pause at the element.
                        this.upcast::<EventTarget>().fire_event(atom!("pause"), CanGc::note());

                        // Step 2.3.3. Reject pending play promises with promises and an
                        // "AbortError" DOMException.
                        // Done after running this closure in `fulfill_in_flight_play_promises`.
                    });
                }));

            // Step 2.4. Set the official playback position to the current playback position.
            self.official_playback_position
                .set(self.current_playback_position.get());
        }

        self.update_media_state();
    }
893
    /// <https://html.spec.whatwg.org/multipage/#allowed-to-play>
    ///
    /// Always returns true: no autoplay-blocking policy is implemented here,
    /// so playback is never denied by this check.
    fn is_allowed_to_play(&self) -> bool {
        true
    }
898
899    /// <https://html.spec.whatwg.org/multipage/#notify-about-playing>
900    fn notify_about_playing(&self) {
901        // Step 1. Take pending play promises and let promises be the result.
902        self.take_pending_play_promises(Ok(()));
903
904        // Step 2. Queue a media element task given the element and the following steps:
905        let this = Trusted::new(self);
906        let generation_id = self.generation_id.get();
907
908        self.owner_global()
909            .task_manager()
910            .media_element_task_source()
911            .queue(task!(notify_about_playing: move || {
912                let this = this.root();
913                if generation_id != this.generation_id.get() {
914                    return;
915                }
916
917                this.fulfill_in_flight_play_promises(|| {
918                    // Step 2.1. Fire an event named playing at the element.
919                    this.upcast::<EventTarget>().fire_event(atom!("playing"), CanGc::note());
920
921                    // Step 2.2. Resolve pending play promises with promises.
922                    // Done after running this closure in `fulfill_in_flight_play_promises`.
923                });
924            }));
925    }
926
    /// <https://html.spec.whatwg.org/multipage/#ready-states>
    fn change_ready_state(&self, ready_state: ReadyState) {
        let old_ready_state = self.ready_state.get();
        self.ready_state.set(ready_state);

        // Transitions are recorded but otherwise ignored while no resource has
        // been selected (networkState is NETWORK_EMPTY).
        if self.network_state.get() == NetworkState::Empty {
            return;
        }

        if old_ready_state == ready_state {
            return;
        }

        // Step 1. Apply the first applicable set of substeps from the following list:
        match (old_ready_state, ready_state) {
            // => "If the previous ready state was HAVE_NOTHING, and the new ready state is
            // HAVE_METADATA"
            (ReadyState::HaveNothing, ReadyState::HaveMetadata) => {
                // Queue a media element task given the media element to fire an event named
                // loadedmetadata at the element.
                self.queue_media_element_task_to_fire_event(atom!("loadedmetadata"));
                // No other steps are applicable in this case.
                return;
            },
            // => "If the previous ready state was HAVE_METADATA and the new ready state is
            // HAVE_CURRENT_DATA or greater"
            (ReadyState::HaveMetadata, new) if new >= ReadyState::HaveCurrentData => {
                // If this is the first time this occurs for this media element since the load()
                // algorithm was last invoked, the user agent must queue a media element task given
                // the media element to fire an event named loadeddata at the element.
                if !self.fired_loadeddata_event.get() {
                    self.fired_loadeddata_event.set(true);

                    let this = Trusted::new(self);
                    let generation_id = self.generation_id.get();

                    self.owner_global()
                        .task_manager()
                        .media_element_task_source()
                        .queue(task!(media_reached_current_data: move |cx| {
                            let this = this.root();
                            // Stale task: the element's generation changed since this was queued.
                            if generation_id != this.generation_id.get() {
                                return;
                            }

                            this.upcast::<EventTarget>().fire_event(atom!("loadeddata"), CanGc::from_cx(cx));
                            // Once the readyState attribute reaches HAVE_CURRENT_DATA, after the
                            // loadeddata event has been fired, set the element's
                            // delaying-the-load-event flag to false.
                            this.delay_load_event(false, cx);
                        }));
                }

                // Steps for the transition from HaveMetadata to HaveCurrentData
                // or HaveFutureData also apply here, as per the next match
                // expression.
            },
            // NOTE(review): the spec's case is "previous ready state was HAVE_FUTURE_DATA or
            // more, and the new ready state is HAVE_CURRENT_DATA or less", but this arm only
            // matches a previous state of exactly HAVE_FUTURE_DATA; a drop from
            // HAVE_ENOUGH_DATA falls through to the catch-all arm instead — confirm intended.
            (ReadyState::HaveFutureData, new) if new <= ReadyState::HaveCurrentData => {
                // FIXME(nox): Queue a task to fire timeupdate and waiting
                // events if the conditions call from the spec are met.

                // No other steps are applicable in this case.
                return;
            },

            _ => (),
        }

        // => "If the previous ready state was HAVE_CURRENT_DATA or less, and the new ready state is
        // HAVE_FUTURE_DATA or more"
        if old_ready_state <= ReadyState::HaveCurrentData &&
            ready_state >= ReadyState::HaveFutureData
        {
            // The user agent must queue a media element task given the media element to fire an
            // event named canplay at the element.
            self.queue_media_element_task_to_fire_event(atom!("canplay"));

            // If the element's paused attribute is false, the user agent must notify about playing
            // for the element.
            if !self.Paused() {
                self.notify_about_playing();
            }
        }

        // => "If the new ready state is HAVE_ENOUGH_DATA"
        if ready_state == ReadyState::HaveEnoughData {
            // The user agent must queue a media element task given the media element to fire an
            // event named canplaythrough at the element.
            self.queue_media_element_task_to_fire_event(atom!("canplaythrough"));

            // If the element is eligible for autoplay, then the user agent may run the following
            // substeps:
            if self.eligible_for_autoplay() {
                // Step 1. Set the paused attribute to false.
                self.paused.set(false);

                // Step 2. If the element's show poster flag is true, set it to false and run the
                // time marches on steps.
                if self.show_poster.get() {
                    self.show_poster.set(false);
                    self.time_marches_on();
                }

                // Step 3. Queue a media element task given the element to fire an event named play
                // at the element.
                self.queue_media_element_task_to_fire_event(atom!("play"));

                // Step 4. Notify about playing for the element.
                self.notify_about_playing();
            }
        }

        self.update_media_state();
    }
1041
1042    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1043    fn invoke_resource_selection_algorithm(&self, cx: &mut js::context::JSContext) {
1044        // Step 1. Set the element's networkState attribute to the NETWORK_NO_SOURCE value.
1045        self.network_state.set(NetworkState::NoSource);
1046
1047        // Step 2. Set the element's show poster flag to true.
1048        self.show_poster.set(true);
1049
1050        // Step 3. Set the media element's delaying-the-load-event flag to true (this delays the
1051        // load event).
1052        self.delay_load_event(true, cx);
1053
1054        // Step 4. Await a stable state, allowing the task that invoked this algorithm to continue.
1055        // If the resource selection mode in the synchronous section is
1056        // "attribute", the URL of the resource to fetch is relative to the
1057        // media element's node document when the src attribute was last
1058        // changed, which is why we need to pass the base URL in the task
1059        // right here.
1060        let task = MediaElementMicrotask::ResourceSelection {
1061            elem: DomRoot::from_ref(self),
1062            generation_id: self.generation_id.get(),
1063            base_url: self.owner_document().base_url(),
1064        };
1065
1066        // FIXME(nox): This will later call the resource_selection_algorithm_sync
1067        // method from below, if microtasks were trait objects, we would be able
1068        // to put the code directly in this method, without the boilerplate
1069        // indirections.
1070        ScriptThread::await_stable_state(Microtask::MediaElement(task));
1071    }
1072
    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn resource_selection_algorithm_sync(
        &self,
        base_url: ServoUrl,
        cx: &mut js::context::JSContext,
    ) {
        // TODO Step 5. If the media element's blocked-on-parser flag is false, then populate the
        // list of pending text tracks.
        // FIXME(ferjm): Implement blocked_on_parser logic
        // https://html.spec.whatwg.org/multipage/#blocked-on-parser
        // FIXME(nox): Maybe populate the list of pending text tracks.

        // The three resource selection modes of step 6: an assigned media
        // provider object, a src attribute, or a source element child.
        enum Mode {
            Object,
            Attribute(String),
            Children(DomRoot<HTMLSourceElement>),
        }

        // Step 6.
        let mode = if self.src_object.borrow().is_some() {
            // If the media element has an assigned media provider object, then let mode be object.
            Mode::Object
        } else if let Some(attribute) = self.upcast::<Element>().get_attribute(&local_name!("src"))
        {
            // Otherwise, if the media element has no assigned media provider object but has a src
            // attribute, then let mode be attribute.
            Mode::Attribute((**attribute.value()).to_owned())
        } else if let Some(source) = self
            .upcast::<Node>()
            .children()
            .find_map(DomRoot::downcast::<HTMLSourceElement>)
        {
            // Otherwise, if the media element does not have an assigned media provider object and
            // does not have a src attribute, but does have a source element child, then let mode be
            // children and let candidate be the first such source element child in tree order.
            Mode::Children(source)
        } else {
            // Otherwise, the media element has no assigned media provider object and has neither a
            // src attribute nor a source element child:
            self.load_state.set(LoadState::NotLoaded);

            // Step 6.none.1. Set the networkState to NETWORK_EMPTY.
            self.network_state.set(NetworkState::Empty);

            // Step 6.none.2. Set the element's delaying-the-load-event flag to false. This stops
            // delaying the load event.
            self.delay_load_event(false, cx);

            // Step 6.none.3. End the synchronous section and return.
            return;
        };

        // Step 7. Set the media element's networkState to NETWORK_LOADING.
        self.network_state.set(NetworkState::Loading);

        // Step 8. Queue a media element task given the media element to fire an event named
        // loadstart at the media element.
        self.queue_media_element_task_to_fire_event(atom!("loadstart"));

        // Step 9. Run the appropriate steps from the following list:
        match mode {
            Mode::Object => {
                // => "If mode is object"
                self.load_from_src_object();
            },
            Mode::Attribute(src) => {
                // => "If mode is attribute"
                self.load_from_src_attribute(base_url, &src);
            },
            Mode::Children(source) => {
                // => "Otherwise (mode is children)"
                self.load_from_source_child(&source);
            },
        }
    }
1148
1149    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1150    fn load_from_src_object(&self) {
1151        self.load_state.set(LoadState::LoadingFromSrcObject);
1152
1153        // Step 9.object.1. Set the currentSrc attribute to the empty string.
1154        "".clone_into(&mut self.current_src.borrow_mut());
1155
1156        // Step 9.object.3. Run the resource fetch algorithm with the assigned media
1157        // provider object. If that algorithm returns without aborting this one, then the
1158        // load failed.
1159        // Note that the resource fetch algorithm itself takes care of the cleanup in case
1160        // of failure itself.
1161        self.resource_fetch_algorithm(Resource::Object);
1162    }
1163
1164    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1165    fn load_from_src_attribute(&self, base_url: ServoUrl, src: &str) {
1166        self.load_state.set(LoadState::LoadingFromSrcAttribute);
1167
1168        // Step 9.attribute.1. If the src attribute's value is the empty string, then end
1169        // the synchronous section, and jump down to the failed with attribute step below.
1170        if src.is_empty() {
1171            self.queue_dedicated_media_source_failure_steps();
1172            return;
1173        }
1174
1175        // Step 9.attribute.2. Let urlRecord be the result of encoding-parsing a URL given
1176        // the src attribute's value, relative to the media element's node document when the
1177        // src attribute was last changed.
1178        let Ok(url_record) = base_url.join(src) else {
1179            self.queue_dedicated_media_source_failure_steps();
1180            return;
1181        };
1182
1183        // Step 9.attribute.3. If urlRecord is not failure, then set the currentSrc
1184        // attribute to the result of applying the URL serializer to urlRecord.
1185        *self.current_src.borrow_mut() = url_record.as_str().into();
1186
1187        // Step 9.attribute.5. If urlRecord is not failure, then run the resource fetch
1188        // algorithm with urlRecord. If that algorithm returns without aborting this one,
1189        // then the load failed.
1190        // Note that the resource fetch algorithm itself takes care
1191        // of the cleanup in case of failure itself.
1192        self.resource_fetch_algorithm(Resource::Url(url_record));
1193    }
1194
    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn load_from_source_child(&self, source: &HTMLSourceElement) {
        self.load_state.set(LoadState::LoadingFromSourceChild);

        // Step 9.children.1. Let pointer be a position defined by two adjacent nodes in the media
        // element's child list, treating the start of the list (before the first child in the list,
        // if any) and end of the list (after the last child in the list, if any) as nodes in their
        // own right. One node is the node before pointer, and the other node is the node after
        // pointer. Initially, let pointer be the position between the candidate node and the next
        // node, if there are any, or the end of the list, if it is the last node.
        *self.source_children_pointer.borrow_mut() =
            Some(SourceChildrenPointer::new(DomRoot::from_ref(source), false));

        let element = source.upcast::<Element>();

        // Step 9.children.2. Process candidate: If candidate does not have a src attribute, or if
        // its src attribute's value is the empty string, then end the synchronous section, and jump
        // down to the failed with elements step below.
        let Some(src) = element
            .get_attribute(&local_name!("src"))
            .filter(|attribute| !attribute.value().is_empty())
        else {
            self.load_from_source_child_failure_steps(source);
            return;
        };

        // Step 9.children.3. If candidate has a media attribute whose value does not match the
        // environment, then end the synchronous section, and jump down to the failed with elements
        // step below.
        if let Some(media) = element.get_attribute(&local_name!("media")) {
            if !MediaList::matches_environment(&element.owner_document(), &media.value()) {
                self.load_from_source_child_failure_steps(source);
                return;
            }
        }

        // Step 9.children.4. Let urlRecord be the result of encoding-parsing a URL given
        // candidate's src attribute's value, relative to candidate's node document when the src
        // attribute was last changed.
        let Ok(url_record) = source.owner_document().base_url().join(&src.value()) else {
            // Step 9.children.5. If urlRecord is failure, then end the synchronous section,
            // and jump down to the failed with elements step below.
            self.load_from_source_child_failure_steps(source);
            return;
        };

        // Step 9.children.6. If candidate has a type attribute whose value, when parsed as a MIME
        // type (including any codecs described by the codecs parameter, for types that define that
        // parameter), represents a type that the user agent knows it cannot render, then end the
        // synchronous section, and jump down to the failed with elements step below.
        if let Some(type_) = element.get_attribute(&local_name!("type")) {
            if ServoMedia::get().can_play_type(&type_.value()) == SupportsMediaType::No {
                self.load_from_source_child_failure_steps(source);
                return;
            }
        }

        // Reset the media player before loading the next source child.
        self.reset_media_player();

        // Remember the candidate so the resource selection failure steps can later
        // fire an `error` event at it if this load fails.
        self.current_source_child.set(Some(source));

        // Step 9.children.7. Set the currentSrc attribute to the result of applying the URL
        // serializer to urlRecord.
        *self.current_src.borrow_mut() = url_record.as_str().into();

        // Step 9.children.9. Run the resource fetch algorithm with urlRecord. If that
        // algorithm returns without aborting this one, then the load failed.
        // Note that the resource fetch algorithm itself takes care
        // of the cleanup in case of failure itself.
        self.resource_fetch_algorithm(Resource::Url(url_record));
    }
1267
    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn load_from_source_child_failure_steps(&self, source: &HTMLSourceElement) {
        // Step 9.children.10. Failed with elements: Queue a media element task given the media
        // element to fire an event named error at candidate.
        let trusted_this = Trusted::new(self);
        let trusted_source = Trusted::new(source);
        let generation_id = self.generation_id.get();

        self.owner_global()
            .task_manager()
            .media_element_task_source()
            .queue(task!(queue_error_event: move |cx| {
                let this = trusted_this.root();
                // Stale task: the element's generation changed since this was queued.
                if generation_id != this.generation_id.get() {
                    return;
                }

                let source = trusted_source.root();
                source.upcast::<EventTarget>().fire_event(atom!("error"), CanGc::from_cx(cx));
            }));

        // Step 9.children.11. Await a stable state. The microtask continues with
        // `select_next_source_child` to find the next candidate.
        let task = MediaElementMicrotask::SelectNextSourceChild {
            elem: DomRoot::from_ref(self),
            generation_id: self.generation_id.get(),
        };

        ScriptThread::await_stable_state(Microtask::MediaElement(task));
    }
1297
    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn select_next_source_child(&self, can_gc: CanGc) {
        // Step 9.children.12. Forget the media element's media-resource-specific tracks.
        self.AudioTracks(can_gc).clear();
        self.VideoTracks(can_gc).clear();

        // Step 9.children.13. Find next candidate: Let candidate be null.
        let mut source_candidate = None;

        // Step 9.children.14. Search loop: If the node after pointer is the end of the list, then
        // jump to the waiting step below.
        // Step 9.children.15. If the node after pointer is a source element, let candidate be that
        // element.
        // Step 9.children.16. Advance pointer so that the node before pointer is now the node that
        // was after pointer, and the node after pointer is the node after the node that used to be
        // after pointer, if any.
        if let Some(ref source_children_pointer) = *self.source_children_pointer.borrow() {
            // Note that a shared implementation between the opaque types of
            // `inclusively_following_siblings` and `following_siblings` is not possible due to
            // precise capturing.
            if source_children_pointer.inclusive {
                for next_sibling in source_children_pointer
                    .source_before_pointer
                    .upcast::<Node>()
                    .inclusively_following_siblings()
                {
                    if let Some(next_source) = DomRoot::downcast::<HTMLSourceElement>(next_sibling)
                    {
                        source_candidate = Some(next_source);
                        break;
                    }
                }
            } else {
                for next_sibling in source_children_pointer
                    .source_before_pointer
                    .upcast::<Node>()
                    .following_siblings()
                {
                    if let Some(next_source) = DomRoot::downcast::<HTMLSourceElement>(next_sibling)
                    {
                        source_candidate = Some(next_source);
                        break;
                    }
                }
            };
        }

        // Step 9.children.17. If candidate is null, jump back to the search loop step. Otherwise,
        // jump back to the process candidate step.
        if let Some(source_candidate) = source_candidate {
            self.load_from_source_child(&source_candidate);
            return;
        }

        self.load_state.set(LoadState::WaitingForSource);

        // No candidate was found: drop the pointer. It is re-created by
        // `load_from_source_child` when a new candidate is processed.
        *self.source_children_pointer.borrow_mut() = None;

        // Step 9.children.18. Waiting: Set the element's networkState attribute to the
        // NETWORK_NO_SOURCE value.
        self.network_state.set(NetworkState::NoSource);

        // Step 9.children.19. Set the element's show poster flag to true.
        self.show_poster.set(true);

        // Step 9.children.20. Queue a media element task given the media element to set the
        // element's delaying-the-load-event flag to false. This stops delaying the load event.
        let this = Trusted::new(self);
        let generation_id = self.generation_id.get();

        self.owner_global()
            .task_manager()
            .media_element_task_source()
            .queue(task!(queue_delay_load_event: move |cx| {
                let this = this.root();
                // Stale task: the element's generation changed since this was queued.
                if generation_id != this.generation_id.get() {
                    return;
                }

                this.delay_load_event(false, cx);
            }));

        // Step 9.children.22. Wait until the node after pointer is a node other than the end of the
        // list. (This step might wait forever.)
    }
1383
1384    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1385    fn resource_selection_algorithm_failure_steps(&self) {
1386        match self.load_state.get() {
1387            LoadState::LoadingFromSrcObject => {
1388                // Step 9.object.4. Failed with media provider: Reaching this step indicates that
1389                // the media resource failed to load. Take pending play promises and queue a media
1390                // element task given the media element to run the dedicated media source failure
1391                // steps with the result.
1392                self.queue_dedicated_media_source_failure_steps();
1393            },
1394            LoadState::LoadingFromSrcAttribute => {
1395                // Step 9.attribute.6. Failed with attribute: Reaching this step indicates that the
1396                // media resource failed to load or that urlRecord is failure. Take pending play
1397                // promises and queue a media element task given the media element to run the
1398                // dedicated media source failure steps with the result.
1399                self.queue_dedicated_media_source_failure_steps();
1400            },
1401            LoadState::LoadingFromSourceChild => {
1402                // Step 9.children.10. Failed with elements: Queue a media element task given the
1403                // media element to fire an event named error at candidate.
1404                if let Some(source) = self.current_source_child.take() {
1405                    self.load_from_source_child_failure_steps(&source);
1406                }
1407            },
1408            _ => {},
1409        }
1410    }
1411
    /// Starts a fetch of the current media resource from `offset` (defaulting to byte 0),
    /// cancelling any fetch that is already in progress.
    ///
    /// When `seek_lock` is provided, the fetch is being (re)started to service a seek; the
    /// lock is released with the seek's success status once the outcome is known, which
    /// allows the player to resume pulling data.
    fn fetch_request(&self, offset: Option<u64>, seek_lock: Option<SeekLock>) {
        // Nothing to fetch: neither a network resource URL nor a blob URL has been recorded.
        if self.resource_url.borrow().is_none() && self.blob_url.borrow().is_none() {
            error!("Missing request url");
            if let Some(seek_lock) = seek_lock {
                seek_lock.unlock(/* successful seek */ false);
            }
            self.resource_selection_algorithm_failure_steps();
            return;
        }

        let document = self.owner_document();
        // Audio and video elements advertise different fetch destinations.
        let destination = match self.media_type_id() {
            HTMLMediaElementTypeId::HTMLAudioElement => Destination::Audio,
            HTMLMediaElementTypeId::HTMLVideoElement => Destination::Video,
        };
        let mut headers = HeaderMap::new();
        // An open-ended byte range starting at `offset` serves both the initial load
        // (offset 0) and seeks into the middle of the resource.
        // FIXME(eijebong): Use typed headers once we have a constructor for the range header
        headers.insert(
            header::RANGE,
            HeaderValue::from_str(&format!("bytes={}-", offset.unwrap_or(0))).unwrap(),
        );
        // Prefer the network resource URL; otherwise fall back to the blob URL. One of the
        // two is guaranteed to be present by the check at the top of this function.
        let url = match self.resource_url.borrow().as_ref() {
            Some(url) => url.clone(),
            None => self.blob_url.borrow().as_ref().unwrap().clone(),
        };

        let cors_setting = cors_setting_for_element(self.upcast());
        let global = self.global();
        let request = create_a_potential_cors_request(
            Some(document.webview_id()),
            url.clone(),
            destination,
            cors_setting,
            None,
            global.get_referrer(),
        )
        .with_global_scope(&global)
        .headers(headers)
        .referrer_policy(document.get_referrer_policy());

        // Abort any fetch that is still running before registering the new one, so the
        // element only ever observes data from a single in-flight fetch.
        let mut current_fetch_context = self.current_fetch_context.borrow_mut();
        if let Some(ref mut current_fetch_context) = *current_fetch_context {
            current_fetch_context.cancel(CancelReason::Abort);
        }

        *current_fetch_context = Some(HTMLMediaElementFetchContext::new(
            request.id,
            global.core_resource_thread(),
        ));
        let listener =
            HTMLMediaElementFetchListener::new(self, request.id, url.clone(), offset.unwrap_or(0));

        self.owner_document().fetch_background(request, listener);

        // Since we cancelled the previous fetch, from now on the media element
        // will only receive response data from the new fetch that's been
        // initiated. This means the player can resume operation, since all subsequent data
        // pushes will originate from the new seek offset.
        if let Some(seek_lock) = seek_lock {
            seek_lock.unlock(/* successful seek */ true);
        }
    }
1474
1475    /// <https://html.spec.whatwg.org/multipage/#eligible-for-autoplay>
1476    fn eligible_for_autoplay(&self) -> bool {
1477        // its can autoplay flag is true;
1478        self.autoplaying.get() &&
1479
1480        // its paused attribute is true;
1481        self.Paused() &&
1482
1483        // it has an autoplay attribute specified;
1484        self.Autoplay() &&
1485
1486        // its node document's active sandboxing flag set does not have the sandboxed automatic
1487        // features browsing context flag set; and
1488        {
1489            let document = self.owner_document();
1490
1491            !document.has_active_sandboxing_flag(
1492                SandboxingFlagSet::SANDBOXED_AUTOMATIC_FEATURES_BROWSING_CONTEXT_FLAG,
1493            )
1494        }
1495
1496        // its node document is allowed to use the "autoplay" feature.
1497        // TODO: Feature policy: https://html.spec.whatwg.org/iframe-embed-object.html#allowed-to-use
1498    }
1499
    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-resource>
    ///
    /// Fetches the selected `resource`, which is either a remote/blob URL or the element's
    /// assigned media provider object (`srcObject`). A media player is (re)created first;
    /// if that fails, the resource selection failure steps run instead.
    fn resource_fetch_algorithm(&self, resource: Resource) {
        if let Err(e) = self.create_media_player(&resource) {
            error!("Create media player error {:?}", e);
            self.resource_selection_algorithm_failure_steps();
            return;
        }

        // Steps 1-2.
        // Unapplicable, the `resource` variable already conveys which mode
        // is in use.

        // Step 3.
        // FIXME(nox): Remove all media-resource-specific text tracks.

        // Step 5. Run the appropriate steps from the following list:
        match resource {
            Resource::Url(url) => {
                // Step 5.remote.1. Optionally, run the following substeps. This is the expected
                // behavior if the user agent intends to not attempt to fetch the resource until the
                // user requests it explicitly (e.g. as a way to implement the preload attribute's
                // none keyword).
                if self.Preload() == "none" && !self.autoplaying.get() {
                    // Step 5.remote.1.1. Set the networkState to NETWORK_IDLE.
                    self.network_state.set(NetworkState::Idle);

                    // Step 5.remote.1.2. Queue a media element task given the media element to fire
                    // an event named suspend at the element.
                    self.queue_media_element_task_to_fire_event(atom!("suspend"));

                    // Step 5.remote.1.3. Queue a media element task given the media element to set
                    // the element's delaying-the-load-event flag to false. This stops delaying the
                    // load event.
                    let this = Trusted::new(self);
                    // The captured generation id lets the task detect (and drop) itself if the
                    // load algorithm is re-invoked before the task runs.
                    let generation_id = self.generation_id.get();

                    self.owner_global()
                        .task_manager()
                        .media_element_task_source()
                        .queue(task!(queue_delay_load_event: move |cx| {
                            let this = this.root();
                            if generation_id != this.generation_id.get() {
                                return;
                            }

                            this.delay_load_event(false, cx);
                        }));

                    // TODO Steps 5.remote.1.4. Wait for the task to be run.
                    // FIXME(nox): Somehow we should wait for the task from previous
                    // step to be ran before continuing.

                    // TODO Steps 5.remote.1.5-5.remote.1.7.
                    // FIXME(nox): Wait for an implementation-defined event and
                    // then continue with the normal set of steps instead of just
                    // returning.
                    return;
                }

                // Remember the URL so that `fetch_request` (and later seeks) can use it.
                *self.resource_url.borrow_mut() = Some(url);

                // Steps 5.remote.2-5.remote.8
                self.fetch_request(None, None);
            },
            Resource::Object => {
                if let Some(ref src_object) = *self.src_object.borrow() {
                    match src_object {
                        SrcObject::Blob(blob) => {
                            // Blobs are fetched through a generated object URL, reusing the
                            // same fetch path as remote resources.
                            let blob_url = URL::CreateObjectURL(&self.global(), blob);
                            *self.blob_url.borrow_mut() =
                                Some(ServoUrl::parse(&blob_url.str()).expect("infallible"));
                            self.fetch_request(None, None);
                        },
                        SrcObject::MediaStream(stream) => {
                            // Hand each track to the player; the boolean flags the last track
                            // so the player knows when the stream set is complete.
                            let tracks = &*stream.get_tracks();
                            for (pos, track) in tracks.iter().enumerate() {
                                if self
                                    .player
                                    .borrow()
                                    .as_ref()
                                    .unwrap()
                                    .lock()
                                    .unwrap()
                                    .set_stream(&track.id(), pos == tracks.len() - 1)
                                    .is_err()
                                {
                                    self.resource_selection_algorithm_failure_steps();
                                }
                            }
                        },
                    }
                }
            },
        }
    }
1595
    /// Queues a task to run the [dedicated media source failure steps][steps].
    ///
    /// Pending play promises are taken (moved to the in-flight queue) synchronously here,
    /// then rejected with `NotSupportedError` when the queued task fulfills them.
    ///
    /// [steps]: https://html.spec.whatwg.org/multipage/#dedicated-media-source-failure-steps
    fn queue_dedicated_media_source_failure_steps(&self) {
        let this = Trusted::new(self);
        // The captured generation id lets the task detect (and drop) itself if the load
        // algorithm is re-invoked before the task runs.
        let generation_id = self.generation_id.get();
        self.take_pending_play_promises(Err(Error::NotSupported(None)));
        self.owner_global()
            .task_manager()
            .media_element_task_source()
            .queue(task!(dedicated_media_source_failure_steps: move |cx| {
                let this = this.root();
                if generation_id != this.generation_id.get() {
                    return;
                }

                this.fulfill_in_flight_play_promises(|| {
                    // Step 1. Set the error attribute to the result of creating a MediaError with
                    // MEDIA_ERR_SRC_NOT_SUPPORTED.
                    this.error.set(Some(&*MediaError::new(
                        &this.owner_window(),
                        MEDIA_ERR_SRC_NOT_SUPPORTED, CanGc::from_cx(cx))));

                    // Step 2. Forget the media element's media-resource-specific tracks.
                    this.AudioTracks(CanGc::from_cx(cx)).clear();
                    this.VideoTracks(CanGc::from_cx(cx)).clear();

                    // Step 3. Set the element's networkState attribute to the NETWORK_NO_SOURCE
                    // value.
                    this.network_state.set(NetworkState::NoSource);

                    // Step 4. Set the element's show poster flag to true.
                    this.show_poster.set(true);

                    // Step 5. Fire an event named error at the media element.
                    this.upcast::<EventTarget>().fire_event(atom!("error"), CanGc::from_cx(cx));

                    // Stop any playback of the failed resource (best effort; a stop failure is
                    // only logged).
                    if let Some(ref player) = *this.player.borrow() {
                        if let Err(error) = player.lock().unwrap().stop() {
                            error!("Could not stop player: {error:?}");
                        }
                    }

                    // Step 6. Reject pending play promises with promises and a "NotSupportedError"
                    // DOMException.
                    // Done after running this closure in `fulfill_in_flight_play_promises`.
                });

                // Step 7. Set the element's delaying-the-load-event flag to false. This stops
                // delaying the load event.
                this.delay_load_event(false, cx);
            }));
    }
1649
    /// Whether the element currently has a `MediaError` set, i.e. a previous load or
    /// playback attempt failed and has not been reset by a new invocation of the load
    /// algorithm (which clears `error`).
    fn in_error_state(&self) -> bool {
        self.error.get().is_some()
    }
1653
1654    /// <https://html.spec.whatwg.org/multipage/#potentially-playing>
1655    fn is_potentially_playing(&self) -> bool {
1656        !self.paused.get() &&
1657            !self.ended_playback(LoopCondition::Included) &&
1658            self.error.get().is_none() &&
1659            !self.is_blocked_media_element()
1660    }
1661
1662    /// <https://html.spec.whatwg.org/multipage/#blocked-media-element>
1663    fn is_blocked_media_element(&self) -> bool {
1664        self.ready_state.get() <= ReadyState::HaveCurrentData ||
1665            self.is_paused_for_user_interaction() ||
1666            self.is_paused_for_in_band_content()
1667    }
1668
    /// <https://html.spec.whatwg.org/multipage/#paused-for-user-interaction>
    ///
    /// Always `false` for now: Servo has no mechanism that pauses playback for user
    /// interaction yet.
    fn is_paused_for_user_interaction(&self) -> bool {
        // FIXME: we will likely be able to fill this placeholder once (if) we
        //        implement the MediaSession API.
        false
    }
1675
    /// <https://html.spec.whatwg.org/multipage/#paused-for-in-band-content>
    ///
    /// Always `false` for now: in-band content (e.g. interactive tracks) is not
    /// implemented.
    fn is_paused_for_in_band_content(&self) -> bool {
        // FIXME: we will likely be able to fill this placeholder once (if) we
        //        implement https://github.com/servo/servo/issues/22314
        false
    }
1682
    /// <https://html.spec.whatwg.org/multipage/#media-element-load-algorithm>
    ///
    /// Aborts any in-progress resource selection/loading, resets the element's playback
    /// state, and then re-invokes the resource selection algorithm.
    fn media_element_load_algorithm(&self, cx: &mut js::context::JSContext) {
        // Reset the flag that signals whether loadeddata was ever fired for
        // this invocation of the load algorithm.
        self.fired_loadeddata_event.set(false);

        // TODO Step 1. Set this element's is currently stalled to false.

        // Step 2. Abort any already-running instance of the resource selection algorithm for this
        // element.
        // Bumping the generation id invalidates every queued task and callback that
        // captured the previous id (they early-return when the ids no longer match).
        self.generation_id.set(self.generation_id.get() + 1);

        // Reset the source-selection bookkeeping for a fresh run of the algorithm.
        self.load_state.set(LoadState::NotLoaded);
        *self.source_children_pointer.borrow_mut() = None;
        self.current_source_child.set(None);

        // Step 3. Let pending tasks be a list of all tasks from the media element's media element
        // event task source in one of the task queues.

        // Step 4. For each task in pending tasks that would resolve pending play promises or reject
        // pending play promises, immediately resolve or reject those promises in the order the
        // corresponding tasks were queued.
        while !self.in_flight_play_promises_queue.borrow().is_empty() {
            self.fulfill_in_flight_play_promises(|| ());
        }

        // Step 5. Remove each task in pending tasks from its task queue.
        // Note that each media element's pending event and callback is scheduled with associated
        // generation id and will be aborted eventually (from Step 2).

        let network_state = self.network_state.get();

        // Step 6. If the media element's networkState is set to NETWORK_LOADING or NETWORK_IDLE,
        // queue a media element task given the media element to fire an event named abort at the
        // media element.
        if network_state == NetworkState::Loading || network_state == NetworkState::Idle {
            self.queue_media_element_task_to_fire_event(atom!("abort"));
        }

        // Reset the media player for any previously playing media resource (see Step 11).
        self.reset_media_player();

        // Step 7. If the media element's networkState is not set to NETWORK_EMPTY, then:
        if network_state != NetworkState::Empty {
            // Step 7.1. Queue a media element task given the media element to fire an event named
            // emptied at the media element.
            self.queue_media_element_task_to_fire_event(atom!("emptied"));

            // Step 7.2. If a fetching process is in progress for the media element, the user agent
            // should stop it.
            if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
                current_fetch_context.cancel(CancelReason::Abort);
            }

            // TODO Step 7.3. If the media element's assigned media provider object is a MediaSource
            // object, then detach it.

            // Step 7.4. Forget the media element's media-resource-specific tracks.
            self.AudioTracks(CanGc::from_cx(cx)).clear();
            self.VideoTracks(CanGc::from_cx(cx)).clear();

            // Step 7.5. If readyState is not set to HAVE_NOTHING, then set it to that state.
            if self.ready_state.get() != ReadyState::HaveNothing {
                self.change_ready_state(ReadyState::HaveNothing);
            }

            // Step 7.6. If the paused attribute is false, then:
            if !self.Paused() {
                // Step 7.6.1. Set the paused attribute to true.
                self.paused.set(true);

                // Step 7.6.2. Take pending play promises and reject pending play promises with the
                // result and an "AbortError" DOMException.
                self.take_pending_play_promises(Err(Error::Abort(None)));
                self.fulfill_in_flight_play_promises(|| ());
            }

            // Step 7.7. If seeking is true, set it to false.
            self.seeking.set(false);

            // NaN marks "no seek in progress" for the current seek position.
            self.current_seek_position.set(f64::NAN);

            // Step 7.8. Set the current playback position to 0.
            // Set the official playback position to 0.
            // If this changed the official playback position, then queue a media element task given
            // the media element to fire an event named timeupdate at the media element.
            self.current_playback_position.set(0.);
            if self.official_playback_position.get() != 0. {
                self.queue_media_element_task_to_fire_event(atom!("timeupdate"));
            }
            self.official_playback_position.set(0.);

            // TODO Step 7.9. Set the timeline offset to Not-a-Number (NaN).

            // Step 7.10. Update the duration attribute to Not-a-Number (NaN).
            self.duration.set(f64::NAN);
        }

        // Step 8. Set the playbackRate attribute to the value of the defaultPlaybackRate attribute.
        self.playback_rate.set(self.default_playback_rate.get());

        // Step 9. Set the error attribute to null and the can autoplay flag to true.
        self.error.set(None);
        self.autoplaying.set(true);

        // Step 10. Invoke the media element's resource selection algorithm.
        self.invoke_resource_selection_algorithm(cx);

        // Step 11. Note: Playback of any previously playing media resource for this element stops.
    }
1793
1794    /// Queue a media element task given the media element to fire an event at the media element.
1795    /// <https://html.spec.whatwg.org/multipage/#queue-a-media-element-task>
1796    fn queue_media_element_task_to_fire_event(&self, name: Atom) {
1797        let this = Trusted::new(self);
1798        let generation_id = self.generation_id.get();
1799
1800        self.owner_global()
1801            .task_manager()
1802            .media_element_task_source()
1803            .queue(task!(queue_event: move |cx| {
1804                let this = this.root();
1805                if generation_id != this.generation_id.get() {
1806                    return;
1807                }
1808
1809                this.upcast::<EventTarget>().fire_event(name, CanGc::from_cx(cx));
1810            }));
1811    }
1812
1813    /// Appends a promise to the list of pending play promises.
1814    fn push_pending_play_promise(&self, promise: &Rc<Promise>) {
1815        self.pending_play_promises
1816            .borrow_mut()
1817            .push(promise.clone());
1818    }
1819
1820    /// Takes the pending play promises.
1821    ///
1822    /// The result with which these promises will be fulfilled is passed here
1823    /// and this method returns nothing because we actually just move the
1824    /// current list of pending play promises to the
1825    /// `in_flight_play_promises_queue` field.
1826    ///
1827    /// Each call to this method must be followed by a call to
1828    /// `fulfill_in_flight_play_promises`, to actually fulfill the promises
1829    /// which were taken and moved to the in-flight queue.
1830    fn take_pending_play_promises(&self, result: ErrorResult) {
1831        let pending_play_promises = std::mem::take(&mut *self.pending_play_promises.borrow_mut());
1832        self.in_flight_play_promises_queue
1833            .borrow_mut()
1834            .push_back((pending_play_promises.into(), result));
1835    }
1836
1837    /// Fulfills the next in-flight play promises queue after running a closure.
1838    ///
1839    /// See the comment on `take_pending_play_promises` for why this method
1840    /// does not take a list of promises to fulfill. Callers cannot just pop
1841    /// the front list off of `in_flight_play_promises_queue` and later fulfill
1842    /// the promises because that would mean putting
1843    /// `#[cfg_attr(crown, expect(crown::unrooted_must_root))]` on even more functions, potentially
1844    /// hiding actual safety bugs.
1845    fn fulfill_in_flight_play_promises<F>(&self, f: F)
1846    where
1847        F: FnOnce(),
1848    {
1849        let (promises, result) = self
1850            .in_flight_play_promises_queue
1851            .borrow_mut()
1852            .pop_front()
1853            .expect("there should be at least one list of in flight play promises");
1854        f();
1855        for promise in &*promises {
1856            match result {
1857                Ok(ref value) => promise.resolve_native(value, CanGc::note()),
1858                Err(ref error) => promise.reject_error(error.clone(), CanGc::note()),
1859            }
1860        }
1861    }
1862
1863    pub(crate) fn handle_source_child_insertion(
1864        &self,
1865        source: &HTMLSourceElement,
1866        cx: &mut js::context::JSContext,
1867    ) {
1868        // <https://html.spec.whatwg.org/multipage/#the-source-element:html-element-insertion-steps>
1869        // Step 2. If parent is a media element that has no src attribute and whose networkState has
1870        // the value NETWORK_EMPTY, then invoke that media element's resource selection algorithm.
1871        if self.upcast::<Element>().has_attribute(&local_name!("src")) {
1872            return;
1873        }
1874
1875        if self.network_state.get() == NetworkState::Empty {
1876            self.invoke_resource_selection_algorithm(cx);
1877            return;
1878        }
1879
1880        // <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1881        // Step 9.children.22. Wait until the node after pointer is a node other than the end of the
1882        // list. (This step might wait forever.)
1883        if self.load_state.get() != LoadState::WaitingForSource {
1884            return;
1885        }
1886
1887        self.load_state.set(LoadState::LoadingFromSourceChild);
1888
1889        *self.source_children_pointer.borrow_mut() =
1890            Some(SourceChildrenPointer::new(DomRoot::from_ref(source), true));
1891
1892        // Step 9.children.23. Await a stable state.
1893        let task = MediaElementMicrotask::SelectNextSourceChildAfterWait {
1894            elem: DomRoot::from_ref(self),
1895            generation_id: self.generation_id.get(),
1896        };
1897
1898        ScriptThread::await_stable_state(Microtask::MediaElement(task));
1899    }
1900
    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    ///
    /// Continuation of the resource selection algorithm, run once a stable state has been
    /// reached after a new `<source>` child was inserted (see
    /// `handle_source_child_insertion`).
    fn select_next_source_child_after_wait(&self, cx: &mut js::context::JSContext) {
        // Step 9.children.24. Set the element's delaying-the-load-event flag back to true (this
        // delays the load event again, in case it hasn't been fired yet).
        self.delay_load_event(true, cx);

        // Step 9.children.25. Set the networkState back to NETWORK_LOADING.
        self.network_state.set(NetworkState::Loading);

        // Step 9.children.26. Jump back to the find next candidate step above.
        self.select_next_source_child(CanGc::from_cx(cx));
    }
1913
1914    /// <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
1915    /// => "If the media data cannot be fetched at all, due to network errors..."
1916    /// => "If the media data can be fetched but is found by inspection to be in an unsupported
1917    /// format, or can otherwise not be rendered at all"
1918    fn media_data_processing_failure_steps(&self) {
1919        // Step 1. The user agent should cancel the fetching process.
1920        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
1921            current_fetch_context.cancel(CancelReason::Error);
1922        }
1923
1924        // Step 2. Abort this subalgorithm, returning to the resource selection algorithm.
1925        self.resource_selection_algorithm_failure_steps();
1926    }
1927
    /// <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
    /// => "If the connection is interrupted after some media data has been received..."
    /// => "If the media data is corrupted"
    ///
    /// `error` is the `MediaError` code to report (MEDIA_ERR_NETWORK or MEDIA_ERR_DECODE,
    /// per the two spec branches above).
    fn media_data_processing_fatal_steps(&self, error: u16, cx: &mut js::context::JSContext) {
        // A fatal error ends resource selection entirely, so drop the source-children
        // bookkeeping (see Step 6 below).
        *self.source_children_pointer.borrow_mut() = None;
        self.current_source_child.set(None);

        // Step 1. The user agent should cancel the fetching process.
        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
            current_fetch_context.cancel(CancelReason::Error);
        }

        // Step 2. Set the error attribute to the result of creating a MediaError with
        // MEDIA_ERR_NETWORK/MEDIA_ERR_DECODE.
        self.error.set(Some(&*MediaError::new(
            &self.owner_window(),
            error,
            CanGc::from_cx(cx),
        )));

        // Step 3. Set the element's networkState attribute to the NETWORK_IDLE value.
        self.network_state.set(NetworkState::Idle);

        // Step 4. Set the element's delaying-the-load-event flag to false. This stops delaying
        // the load event.
        self.delay_load_event(false, cx);

        // Step 5. Fire an event named error at the media element.
        self.upcast::<EventTarget>()
            .fire_event(atom!("error"), CanGc::from_cx(cx));

        // Step 6. Abort the overall resource selection algorithm.
    }
1961
    /// <https://html.spec.whatwg.org/multipage/#dom-media-seek>
    ///
    /// Starts a seek to `time` (in seconds). `_approximate_for_speed` is currently unused:
    /// the gstreamer-backed servo-media player does not support inaccurate seeking (see the
    /// note at Step 9 below).
    fn seek(&self, time: f64, _approximate_for_speed: bool) {
        // Step 1. Set the media element's show poster flag to false.
        self.show_poster.set(false);

        // Step 2. If the media element's readyState is HAVE_NOTHING, return.
        if self.ready_state.get() == ReadyState::HaveNothing {
            return;
        }

        // Step 3. If the element's seeking IDL attribute is true, then another instance of this
        // algorithm is already running. Abort that other instance of the algorithm without waiting
        // for the step that it is running to complete.
        // Resetting the pending seek position to NaN effectively abandons the other instance.
        self.current_seek_position.set(f64::NAN);

        // Step 4. Set the seeking IDL attribute to true.
        self.seeking.set(true);

        // Step 5. If the seek was in response to a DOM method call or setting of an IDL attribute,
        // then continue the script. The remainder of these steps must be run in parallel.

        // Step 6. If the new playback position is later than the end of the media resource, then
        // let it be the end of the media resource instead.
        let time = f64::min(time, self.Duration());

        // Step 7. If the new playback position is less than the earliest possible position, let it
        // be that position instead.
        let time = f64::max(time, self.earliest_possible_position());

        // Step 8. If the (possibly now changed) new playback position is not in one of the ranges
        // given in the seekable attribute, then let it be the position in one of the ranges given
        // in the seekable attribute that is the nearest to the new playback position. If there are
        // no ranges given in the seekable attribute, then set the seeking IDL attribute to false
        // and return.
        let seekable = self.seekable();

        if seekable.is_empty() {
            self.seeking.set(false);
            return;
        }

        // Scan every seekable range: if `time` falls inside one, keep it as-is; otherwise
        // track the closest range boundary seen so far and clamp to it afterwards.
        // NOTE(review): the .abs() calls presumably guard against negative range values
        // coming from the backend — confirm against TimeRanges' producer.
        let mut nearest_seekable_position = 0.0;
        let mut in_seekable_range = false;
        let mut nearest_seekable_distance = f64::MAX;
        for i in 0..seekable.len() {
            let start = seekable.start(i).unwrap().abs();
            let end = seekable.end(i).unwrap().abs();
            if time >= start && time <= end {
                nearest_seekable_position = time;
                in_seekable_range = true;
                break;
            } else if time < start {
                let distance = start - time;
                if distance < nearest_seekable_distance {
                    nearest_seekable_distance = distance;
                    nearest_seekable_position = start;
                }
            } else {
                let distance = time - end;
                if distance < nearest_seekable_distance {
                    nearest_seekable_distance = distance;
                    nearest_seekable_position = end;
                }
            }
        }
        let time = if in_seekable_range {
            time
        } else {
            nearest_seekable_position
        };

        // Step 9. If the approximate-for-speed flag is set, adjust the new playback position to a
        // value that will allow for playback to resume promptly. If new playback position before
        // this step is before current playback position, then the adjusted new playback position
        // must also be before the current playback position. Similarly, if the new playback
        // position before this step is after current playback position, then the adjusted new
        // playback position must also be after the current playback position.
        // TODO: Note that servo-media with gstreamer does not support inaccurate seeking for now.

        // Step 10. Queue a media element task given the media element to fire an event named
        // seeking at the element.
        self.queue_media_element_task_to_fire_event(atom!("seeking"));

        // Step 11. Set the current playback position to the new playback position.
        self.current_playback_position.set(time);

        // Forward the seek to the backend player (best effort; failures are only logged).
        if let Some(ref player) = *self.player.borrow() {
            if let Err(error) = player.lock().unwrap().seek(time) {
                error!("Could not seek player: {error:?}");
            }
        }

        // Remember the in-progress seek target; `seek_end` resets this to NaN on completion.
        self.current_seek_position.set(time);

        // Step 12. Wait until the user agent has established whether or not the media data for the
        // new playback position is available, and, if it is, until it has decoded enough data to
        // play back that position.
        // The rest of the steps are handled when the media engine signals a ready state change or
        // otherwise satisfies seek completion and signals a position change.
    }
2062
    /// <https://html.spec.whatwg.org/multipage/#dom-media-seek>
    ///
    /// Completes a seek operation (steps 13 and onwards of the seek algorithm)
    /// once the media engine has reported that the seek finished. Runs while
    /// the user agent provides a stable state.
    fn seek_end(&self) {
        // Any time the user agent provides a stable state, the official playback position must be
        // set to the current playback position.
        self.official_playback_position
            .set(self.current_playback_position.get());

        // Step 14. Set the seeking IDL attribute to false.
        self.seeking.set(false);

        // NaN marks that no seek is in flight: `playback_seek_done` compares player-reported
        // positions against this value and bails out on a mismatch.
        self.current_seek_position.set(f64::NAN);

        // Step 15. Run the time marches on steps.
        self.time_marches_on();

        // Step 16. Queue a media element task given the media element to fire an event named
        // timeupdate at the element.
        self.queue_media_element_task_to_fire_event(atom!("timeupdate"));

        // Step 17. Queue a media element task given the media element to fire an event named seeked
        // at the element.
        self.queue_media_element_task_to_fire_event(atom!("seeked"));
    }
2086
2087    /// <https://html.spec.whatwg.org/multipage/#poster-frame>
2088    pub(crate) fn set_poster_frame(&self, image: Option<Arc<RasterImage>>) {
2089        if pref!(media_testing_enabled) && image.is_some() {
2090            self.queue_media_element_task_to_fire_event(atom!("postershown"));
2091        }
2092
2093        self.video_renderer.lock().unwrap().set_poster_frame(image);
2094
2095        self.upcast::<Node>().dirty(NodeDamage::Other);
2096    }
2097
2098    fn player_id(&self) -> Option<usize> {
2099        self.player
2100            .borrow()
2101            .as_ref()
2102            .map(|player| player.lock().unwrap().get_id())
2103    }
2104
    /// Creates a servo-media player for the given resource and wires it up to
    /// this element: player events are routed back to the script thread and
    /// the element's video renderer is associated with the new player.
    ///
    /// Returns `Err(())` when the resource is a media provider object but no
    /// `srcObject` is currently set.
    fn create_media_player(&self, resource: &Resource) -> Result<(), ()> {
        // Media streams are not seekable; any other resource is treated as seekable.
        let stream_type = match *resource {
            Resource::Object => {
                if let Some(ref src_object) = *self.src_object.borrow() {
                    match src_object {
                        SrcObject::MediaStream(_) => StreamType::Stream,
                        _ => StreamType::Seekable,
                    }
                } else {
                    return Err(());
                }
            },
            _ => StreamType::Seekable,
        };

        let window = self.owner_window();
        // Channel over which the player sends `PlayerEvent`s back to us.
        let (action_sender, action_receiver) = ipc::channel::<PlayerEvent>().unwrap();
        // Only video elements render frames; audio elements get no video renderer.
        let video_renderer: Option<Arc<Mutex<dyn VideoFrameRenderer>>> = match self.media_type_id()
        {
            HTMLMediaElementTypeId::HTMLAudioElement => None,
            HTMLMediaElementTypeId::HTMLVideoElement => Some(self.video_renderer.clone()),
        };

        let audio_renderer = self.audio_renderer.borrow().as_ref().cloned();

        // The client context identifies this pipeline to the media backend.
        let pipeline_id = window.pipeline_id();
        let client_context_id =
            ClientContextId::build(pipeline_id.namespace_id.0, pipeline_id.index.0.get());
        let player = ServoMedia::get().create_player(
            &client_context_id,
            stream_type,
            action_sender,
            video_renderer,
            audio_renderer,
            Box::new(window.get_player_context()),
        );
        // Propagate the element's current muted state to the new player and grab
        // the player id, holding the player lock only once.
        let player_id = {
            let player_guard = player.lock().unwrap();

            if let Err(error) = player_guard.set_mute(self.muted.get()) {
                warn!("Could not set mute state: {error:?}");
            }

            player_guard.get_id()
        };

        *self.player.borrow_mut() = Some(player);

        // The IPC route below holds the event handler only weakly, so dropping
        // it (e.g. in reset_media_player) stops event delivery.
        let event_handler = Arc::new(Mutex::new(HTMLMediaElementEventHandler::new(self)));
        let weak_event_handler = Arc::downgrade(&event_handler);
        *self.event_handler.borrow_mut() = Some(event_handler);

        let task_source = self
            .owner_global()
            .task_manager()
            .media_element_task_source()
            .to_sendable();
        // Forward player events from the IPC router to the media element task
        // source so they are handled on the script thread.
        ROUTER.add_typed_route(
            action_receiver,
            Box::new(move |message| {
                let event = message.unwrap();
                let weak_event_handler = weak_event_handler.clone();

                task_source.queue(task!(handle_player_event: move |cx| {
                    trace!("HTMLMediaElement event: {event:?}");

                    // The handler may have been dropped in the meantime; ignore late events.
                    let Some(event_handler) = weak_event_handler.upgrade() else {
                        return;
                    };

                    event_handler.lock().unwrap().handle_player_event(player_id, event, cx);
                }));
            }),
        );

        let task_source = self
            .owner_global()
            .task_manager()
            .media_element_task_source()
            .to_sendable();
        let weak_video_renderer = Arc::downgrade(&self.video_renderer);

        self.video_renderer
            .lock()
            .unwrap()
            .setup(player_id, task_source, weak_video_renderer);

        Ok(())
    }
2194
2195    fn reset_media_player(&self) {
2196        if self.player.borrow().is_none() {
2197            return;
2198        }
2199
2200        if let Some(ref player) = *self.player.borrow() {
2201            if let Err(error) = player.lock().unwrap().stop() {
2202                error!("Could not stop player: {error:?}");
2203            }
2204        }
2205
2206        *self.player.borrow_mut() = None;
2207        self.video_renderer.lock().unwrap().reset();
2208        *self.event_handler.borrow_mut() = None;
2209
2210        if let Some(video_element) = self.downcast::<HTMLVideoElement>() {
2211            video_element.set_natural_dimensions(None, None);
2212        }
2213    }
2214
2215    pub(crate) fn set_audio_track(&self, idx: usize, enabled: bool) {
2216        if let Some(ref player) = *self.player.borrow() {
2217            if let Err(error) = player.lock().unwrap().set_audio_track(idx as i32, enabled) {
2218                warn!("Could not set audio track {error:?}");
2219            }
2220        }
2221    }
2222
2223    pub(crate) fn set_video_track(&self, idx: usize, enabled: bool) {
2224        if let Some(ref player) = *self.player.borrow() {
2225            if let Err(error) = player.lock().unwrap().set_video_track(idx as i32, enabled) {
2226                warn!("Could not set video track: {error:?}");
2227            }
2228        }
2229    }
2230
2231    /// <https://html.spec.whatwg.org/multipage/#direction-of-playback>
2232    fn direction_of_playback(&self) -> PlaybackDirection {
2233        // If the element's playbackRate is positive or zero, then the direction of playback is
2234        // forwards. Otherwise, it is backwards.
2235        if self.playback_rate.get() >= 0. {
2236            PlaybackDirection::Forwards
2237        } else {
2238            PlaybackDirection::Backwards
2239        }
2240    }
2241
2242    /// <https://html.spec.whatwg.org/multipage/#ended-playback>
2243    fn ended_playback(&self, loop_condition: LoopCondition) -> bool {
2244        // A media element is said to have ended playback when:
2245
2246        // The element's readyState attribute is HAVE_METADATA or greater, and
2247        if self.ready_state.get() < ReadyState::HaveMetadata {
2248            return false;
2249        }
2250
2251        let playback_position = self.current_playback_position.get();
2252
2253        match self.direction_of_playback() {
2254            // Either: The current playback position is the end of the media resource, and the
2255            // direction of playback is forwards, and the media element does not have a loop
2256            // attribute specified.
2257            PlaybackDirection::Forwards => {
2258                playback_position >= self.Duration() &&
2259                    (loop_condition == LoopCondition::Ignored || !self.Loop())
2260            },
2261            // Or: The current playback position is the earliest possible position, and the
2262            // direction of playback is backwards.
2263            PlaybackDirection::Backwards => playback_position <= self.earliest_possible_position(),
2264        }
2265    }
2266
    /// <https://html.spec.whatwg.org/multipage/#reaches-the-end>
    fn end_of_playback_in_forwards_direction(&self) {
        // When the current playback position reaches the end of the media resource when the
        // direction of playback is forwards, then the user agent must follow these steps:

        // Step 1. If the media element has a loop attribute specified, then seek to the earliest
        // possible position of the media resource and return.
        if self.Loop() {
            self.seek(
                self.earliest_possible_position(),
                /* approximate_for_speed */ false,
            );
            return;
        }

        // Step 2. As defined above, the ended IDL attribute starts returning true once the event
        // loop returns to step 1.

        // Step 3. Queue a media element task given the media element and the following steps:
        // The generation id lets the queued task bail out if the element's generation has changed
        // by the time the task runs.
        let this = Trusted::new(self);
        let generation_id = self.generation_id.get();

        self.owner_global()
            .task_manager()
            .media_element_task_source()
            .queue(task!(reaches_the_end_steps: move || {
                let this = this.root();
                if generation_id != this.generation_id.get() {
                    return;
                }

                // Step 3.1. Fire an event named timeupdate at the media element.
                this.upcast::<EventTarget>().fire_event(atom!("timeupdate"), CanGc::note());

                // Step 3.2. If the media element has ended playback, the direction of playback is
                // forwards, and paused is false, then:
                if this.ended_playback(LoopCondition::Included) &&
                    this.direction_of_playback() == PlaybackDirection::Forwards &&
                    !this.Paused() {
                    // Step 3.2.1. Set the paused attribute to true.
                    this.paused.set(true);

                    // Step 3.2.2. Fire an event named pause at the media element.
                    this.upcast::<EventTarget>().fire_event(atom!("pause"), CanGc::note());

                    // Step 3.2.3. Take pending play promises and reject pending play promises with
                    // the result and an "AbortError" DOMException.
                    this.take_pending_play_promises(Err(Error::Abort(None)));
                    this.fulfill_in_flight_play_promises(|| ());
                }

                // Step 3.3. Fire an event named ended at the media element.
                this.upcast::<EventTarget>().fire_event(atom!("ended"), CanGc::note());
            }));

        // <https://html.spec.whatwg.org/multipage/#dom-media-have_current_data>
        self.change_ready_state(ReadyState::HaveCurrentData);
    }
2325
2326    /// <https://html.spec.whatwg.org/multipage/#reaches-the-end>
2327    fn end_of_playback_in_backwards_direction(&self) {
2328        // When the current playback position reaches the earliest possible position of the media
2329        // resource when the direction of playback is backwards, then the user agent must only queue
2330        // a media element task given the media element to fire an event named timeupdate at the
2331        // element.
2332        if self.current_playback_position.get() <= self.earliest_possible_position() {
2333            self.queue_media_element_task_to_fire_event(atom!("timeupdate"));
2334        }
2335    }
2336
2337    fn playback_end(&self) {
2338        // Abort the following steps of the end of playback if seeking is in progress.
2339        if self.seeking.get() {
2340            return;
2341        }
2342
2343        match self.direction_of_playback() {
2344            PlaybackDirection::Forwards => self.end_of_playback_in_forwards_direction(),
2345            PlaybackDirection::Backwards => self.end_of_playback_in_backwards_direction(),
2346        }
2347    }
2348
2349    fn playback_error(&self, error: &str, cx: &mut js::context::JSContext) {
2350        error!("Player error: {:?}", error);
2351
2352        // If we have already flagged an error condition while processing
2353        // the network response, we should silently skip any observable
2354        // errors originating while decoding the erroneous response.
2355        if self.in_error_state() {
2356            return;
2357        }
2358
2359        // <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
2360        if self.ready_state.get() == ReadyState::HaveNothing {
2361            // => "If the media data can be fetched but is found by inspection to be in an
2362            // unsupported format, or can otherwise not be rendered at all"
2363            self.media_data_processing_failure_steps();
2364        } else {
2365            // => "If the media data is corrupted"
2366            self.media_data_processing_fatal_steps(MEDIA_ERR_DECODE, cx);
2367        }
2368    }
2369
    /// Runs the media data processing steps that apply once the media engine
    /// reports the initial metadata for the resource: creates audio/video
    /// track objects, establishes the duration and (for video) the natural
    /// dimensions, moves the ready state to HAVE_METADATA, and performs any
    /// initial seek indicated by the default playback start position or the
    /// media fragment of the resource URL.
    ///
    /// <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
    fn playback_metadata_updated(
        &self,
        metadata: &servo_media::player::metadata::Metadata,
        can_gc: CanGc,
    ) {
        // The following steps should be run once on the initial `metadata` signal from the media
        // engine.
        if self.ready_state.get() != ReadyState::HaveNothing {
            return;
        }

        // https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list
        // => "If the media resource is found to have an audio track"
        for (i, _track) in metadata.audio_tracks.iter().enumerate() {
            let audio_track_list = self.AudioTracks(can_gc);

            // Step 1. Create an AudioTrack object to represent the audio track.
            // Only the first track gets the "main" kind.
            let kind = match i {
                0 => DOMString::from("main"),
                _ => DOMString::new(),
            };

            let audio_track = AudioTrack::new(
                self.global().as_window(),
                DOMString::new(),
                kind,
                DOMString::new(),
                DOMString::new(),
                Some(&*audio_track_list),
                can_gc,
            );

            // Steps 2. Update the media element's audioTracks attribute's AudioTrackList object
            // with the new AudioTrack object.
            audio_track_list.add(&audio_track);

            // Step 3. Let enable be unknown.
            // Step 4. If either the media resource or the URL of the current media resource
            // indicate a particular set of audio tracks to enable, or if the user agent has
            // information that would facilitate the selection of specific audio tracks to
            // improve the user's experience, then: if this audio track is one of the ones to
            // enable, then set enable to true, otherwise, set enable to false.
            if let Some(servo_url) = self.resource_url.borrow().as_ref() {
                let fragment = MediaFragmentParser::from(servo_url);
                if let Some(id) = fragment.id() {
                    if audio_track.id() == id {
                        audio_track_list.set_enabled(audio_track_list.len() - 1, true);
                    }
                }

                if fragment.tracks().contains(&audio_track.kind().into()) {
                    audio_track_list.set_enabled(audio_track_list.len() - 1, true);
                }
            }

            // Step 5. If enable is still unknown, then, if the media element does not yet have an
            // enabled audio track, then set enable to true, otherwise, set enable to false.
            // Step 6. If enable is true, then enable this audio track, otherwise, do not enable
            // this audio track.
            if audio_track_list.enabled_index().is_none() {
                audio_track_list.set_enabled(audio_track_list.len() - 1, true);
            }

            // Step 7. Fire an event named addtrack at this AudioTrackList object, using TrackEvent,
            // with the track attribute initialized to the new AudioTrack object.
            let event = TrackEvent::new(
                self.global().as_window(),
                atom!("addtrack"),
                false,
                false,
                &Some(VideoTrackOrAudioTrackOrTextTrack::AudioTrack(audio_track)),
                can_gc,
            );

            event
                .upcast::<Event>()
                .fire(audio_track_list.upcast::<EventTarget>(), can_gc);
        }

        // => "If the media resource is found to have a video track"
        for (i, _track) in metadata.video_tracks.iter().enumerate() {
            let video_track_list = self.VideoTracks(can_gc);

            // Step 1. Create a VideoTrack object to represent the video track.
            // Only the first track gets the "main" kind.
            let kind = match i {
                0 => DOMString::from("main"),
                _ => DOMString::new(),
            };

            let video_track = VideoTrack::new(
                self.global().as_window(),
                DOMString::new(),
                kind,
                DOMString::new(),
                DOMString::new(),
                Some(&*video_track_list),
                can_gc,
            );

            // Steps 2. Update the media element's videoTracks attribute's VideoTrackList object
            // with the new VideoTrack object.
            video_track_list.add(&video_track);

            // Step 3. Let enable be unknown.
            // Step 4. If either the media resource or the URL of the current media resource
            // indicate a particular set of video tracks to enable, or if the user agent has
            // information that would facilitate the selection of specific video tracks to
            // improve the user's experience, then: if this video track is the first such video
            // track, then set enable to true, otherwise, set enable to false.
            if let Some(track) = video_track_list.item(0) {
                if let Some(servo_url) = self.resource_url.borrow().as_ref() {
                    let fragment = MediaFragmentParser::from(servo_url);
                    if let Some(id) = fragment.id() {
                        if track.id() == id {
                            video_track_list.set_selected(0, true);
                        }
                    } else if fragment.tracks().contains(&track.kind().into()) {
                        video_track_list.set_selected(0, true);
                    }
                }
            }

            // Step 5. If enable is still unknown, then, if the media element does not yet have a
            // selected video track, then set enable to true, otherwise, set enable to false.
            // Step 6. If enable is true, then select this track and unselect any previously
            // selected video tracks, otherwise, do not select this video track. If other tracks are
            // unselected, then a change event will be fired.
            if video_track_list.selected_index().is_none() {
                video_track_list.set_selected(video_track_list.len() - 1, true);
            }

            // Step 7. Fire an event named addtrack at this VideoTrackList object, using TrackEvent,
            // with the track attribute initialized to the new VideoTrack object.
            let event = TrackEvent::new(
                self.global().as_window(),
                atom!("addtrack"),
                false,
                false,
                &Some(VideoTrackOrAudioTrackOrTextTrack::VideoTrack(video_track)),
                can_gc,
            );

            event
                .upcast::<Event>()
                .fire(video_track_list.upcast::<EventTarget>(), can_gc);
        }

        // => "Once enough of the media data has been fetched to determine the duration..."

        // TODO Step 1. Establish the media timeline for the purposes of the current playback
        // position and the earliest possible position, based on the media data.

        // TODO Step 2. Update the timeline offset to the date and time that corresponds to the zero
        // time in the media timeline established in the previous step, if any. If no explicit time
        // and date is given by the media resource, the timeline offset must be set to Not-a-Number
        // (NaN).

        // Step 3. Set the current playback position and the official playback position to the
        // earliest possible position.
        let earliest_possible_position = self.earliest_possible_position();
        self.current_playback_position
            .set(earliest_possible_position);
        self.official_playback_position
            .set(earliest_possible_position);

        // Step 4. Update the duration attribute with the time of the last frame of the resource, if
        // known, on the media timeline established above. If it is not known (e.g. a stream that is
        // in principle infinite), update the duration attribute to the value positive Infinity.
        // Note: The user agent will queue a media element task given the media element to fire an
        // event named durationchange at the element at this point.
        self.duration.set(
            metadata
                .duration
                .map_or(f64::INFINITY, |duration| duration.as_secs_f64()),
        );
        self.queue_media_element_task_to_fire_event(atom!("durationchange"));

        // Step 5. For video elements, set the videoWidth and videoHeight attributes, and queue a
        // media element task given the media element to fire an event named resize at the media
        // element.
        if let Some(video_element) = self.downcast::<HTMLVideoElement>() {
            video_element.set_natural_dimensions(Some(metadata.width), Some(metadata.height));
            self.queue_media_element_task_to_fire_event(atom!("resize"));
        }

        // Step 6. Set the readyState attribute to HAVE_METADATA.
        self.change_ready_state(ReadyState::HaveMetadata);

        // Step 7. Let jumped be false.
        let mut jumped = false;

        // Step 8. If the media element's default playback start position is greater than zero, then
        // seek to that time, and let jumped be true.
        if self.default_playback_start_position.get() > 0. {
            self.seek(
                self.default_playback_start_position.get(),
                /* approximate_for_speed */ false,
            );
            jumped = true;
        }

        // Step 9. Set the media element's default playback start position to zero.
        self.default_playback_start_position.set(0.);

        // Step 10. Let the initial playback position be 0.
        // Step 11. If either the media resource or the URL of the current media resource indicate a
        // particular start time, then set the initial playback position to that time and, if jumped
        // is still false, seek to that time.
        if let Some(servo_url) = self.resource_url.borrow().as_ref() {
            let fragment = MediaFragmentParser::from(servo_url);
            if let Some(initial_playback_position) = fragment.start() {
                if initial_playback_position > 0. &&
                    initial_playback_position < self.duration.get() &&
                    !jumped
                {
                    self.seek(
                        initial_playback_position,
                        /* approximate_for_speed */ false,
                    )
                }
            }
        }

        // Step 12. If there is no enabled audio track, then enable an audio track. This will cause
        // a change event to be fired.
        // Step 13. If there is no selected video track, then select a video track. This will cause
        // a change event to be fired.
        // Note that these steps are already handled by the earlier media track processing.

        let global = self.global();
        let window = global.as_window();

        // Update the media session metadata title with the obtained metadata.
        window.Navigator().MediaSession().update_title(
            metadata
                .title
                .clone()
                .unwrap_or(window.get_url().into_string()),
        );
    }
2610
2611    fn playback_duration_changed(&self, duration: Option<Duration>) {
2612        let duration = duration.map_or(f64::INFINITY, |duration| duration.as_secs_f64());
2613
2614        if self.duration.get() == duration {
2615            return;
2616        }
2617
2618        self.duration.set(duration);
2619
2620        // When the length of the media resource changes to a known value (e.g. from being unknown
2621        // to known, or from a previously established length to a new length), the user agent must
2622        // queue a media element task given the media element to fire an event named durationchange
2623        // at the media element.
2624        // <https://html.spec.whatwg.org/multipage/#offsets-into-the-media-resource:media-resource-22>
2625        self.queue_media_element_task_to_fire_event(atom!("durationchange"));
2626
2627        // If the duration is changed such that the current playback position ends up being greater
2628        // than the time of the end of the media resource, then the user agent must also seek to the
2629        // time of the end of the media resource.
2630        if self.current_playback_position.get() > duration {
2631            self.seek(duration, /* approximate_for_speed */ false);
2632        }
2633    }
2634
    /// Reacts to a new video frame becoming available from the media player:
    /// updates the video element's natural dimensions (firing `resize` when
    /// they change) or requests a repaint with the new frame contents.
    fn playback_video_frame_updated(&self) {
        let Some(video_element) = self.downcast::<HTMLVideoElement>() else {
            return;
        };

        // Whenever the natural width or natural height of the video changes (including, for
        // example, because the selected video track was changed), if the element's readyState
        // attribute is not HAVE_NOTHING, the user agent must queue a media element task given
        // the media element to fire an event named resize at the media element.
        // <https://html.spec.whatwg.org/multipage/#concept-video-intrinsic-width>

        // The event for the prerolled frame from the media engine could reach us before the media
        // element reaches the HAVE_METADATA ready state, so the subsequent steps are cancelled.
        if self.ready_state.get() == ReadyState::HaveNothing {
            return;
        }

        if let Some(frame) = self.video_renderer.lock().unwrap().current_frame {
            // set_natural_dimensions reports whether the dimensions actually changed.
            if video_element
                .set_natural_dimensions(Some(frame.width as u32), Some(frame.height as u32))
            {
                self.queue_media_element_task_to_fire_event(atom!("resize"));
            } else {
                // If the natural dimensions have not been changed, the node should be marked as
                // damaged to force a repaint with the new frame contents.
                self.upcast::<Node>().dirty(NodeDamage::Other);
            }
        }
    }
2664
    /// Handles the player's "need data" signal: the media engine wants more
    /// bytes. If a fetch request is in flight we leave it alone; if the
    /// previous request was cancelled due to an EnoughData backoff, fetching
    /// is restarted near the current playback position.
    fn playback_need_data(&self) {
        // The media engine signals that the source needs more data. If we already have a valid
        // fetch request, we do nothing. Otherwise, if we have no request and the previous request
        // was cancelled because we got an EnoughData event, we restart fetching where we left.
        if let Some(ref current_fetch_context) = *self.current_fetch_context.borrow() {
            if let Some(reason) = current_fetch_context.cancel_reason() {
                // XXX(ferjm) Ideally we should just create a fetch request from
                // where we left. But keeping track of the exact next byte that the
                // media backend expects is not the easiest task, so I'm simply
                // seeking to the current playback position for now which will create
                // a new fetch request for the last rendered frame.
                if *reason == CancelReason::Backoff {
                    self.seek(
                        self.current_playback_position.get(),
                        /* approximate_for_speed */ false,
                    );
                }
                return;
            }
        }

        // Unlock the data source and drain its queued bytes into the player.
        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
            if let Err(e) = {
                let mut data_source = current_fetch_context.data_source().borrow_mut();
                data_source.set_locked(false);
                data_source.process_into_player_from_queue(self.player.borrow().as_ref().unwrap())
            } {
                // If we are pushing too much data and we know that we can
                // restart the download later from where we left, we cancel
                // the current request. Otherwise, we continue the request
                // assuming that we may drop some frames.
                if e == PlayerError::EnoughData {
                    current_fetch_context.cancel(CancelReason::Backoff);
                }
            }
        }
    }
2702
2703    fn playback_enough_data(&self) {
2704        // The media engine signals that the source has enough data and asks us to stop pushing bytes
2705        // to avoid excessive buffer queueing, so we cancel the ongoing fetch request if we are able
2706        // to restart it from where we left. Otherwise, we continue the current fetch request,
2707        // assuming that some frames will be dropped.
2708        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
2709            if current_fetch_context.is_seekable() {
2710                current_fetch_context.cancel(CancelReason::Backoff);
2711            }
2712        }
2713    }
2714
    fn playback_position_changed(&self, position: f64) {
        // Abort the following steps of the current time update if seeking is in progress.
        if self.seeking.get() {
            return;
        }

        // Record the interval just played so `played` accumulates every range
        // of the media timeline watched so far; errors from overlapping ranges
        // are intentionally ignored.
        let _ = self
            .played
            .borrow_mut()
            .add(self.current_playback_position.get(), position);
        self.current_playback_position.set(position);
        self.official_playback_position.set(position);
        self.time_marches_on();

        // Keep the embedder's media session informed of the new position so any
        // platform playback UI stays in sync.
        let media_position_state =
            MediaPositionState::new(self.duration.get(), self.playback_rate.get(), position);
        debug!(
            "Sending media session event set position state {:?}",
            media_position_state
        );
        self.send_media_session_event(MediaSessionEvent::SetPositionState(media_position_state));
    }
2737
2738    fn playback_seek_done(&self, position: f64) {
2739        // If the seek was initiated by script or by the user agent itself continue with the
2740        // following steps, otherwise abort.
2741        if !self.seeking.get() || position != self.current_seek_position.get() {
2742            return;
2743        }
2744
2745        // <https://html.spec.whatwg.org/multipage/#dom-media-seek>
2746        // Step 13. Await a stable state.
2747        let task = MediaElementMicrotask::Seeked {
2748            elem: DomRoot::from_ref(self),
2749            generation_id: self.generation_id.get(),
2750        };
2751
2752        ScriptThread::await_stable_state(Microtask::MediaElement(task));
2753    }
2754
2755    fn playback_state_changed(&self, state: &PlaybackState) {
2756        let mut media_session_playback_state = MediaSessionPlaybackState::None_;
2757        match *state {
2758            PlaybackState::Paused => {
2759                media_session_playback_state = MediaSessionPlaybackState::Paused;
2760                if self.ready_state.get() == ReadyState::HaveMetadata {
2761                    self.change_ready_state(ReadyState::HaveEnoughData);
2762                }
2763            },
2764            PlaybackState::Playing => {
2765                media_session_playback_state = MediaSessionPlaybackState::Playing;
2766                if self.ready_state.get() == ReadyState::HaveMetadata {
2767                    self.change_ready_state(ReadyState::HaveEnoughData);
2768                }
2769            },
2770            PlaybackState::Buffering => {
2771                // Do not send the media session playback state change event
2772                // in this case as a None_ state is expected to clean up the
2773                // session.
2774                return;
2775            },
2776            _ => {},
2777        };
2778        debug!(
2779            "Sending media session event playback state changed to {:?}",
2780            media_session_playback_state
2781        );
2782        self.send_media_session_event(MediaSessionEvent::PlaybackStateChange(
2783            media_session_playback_state,
2784        ));
2785    }
2786
2787    fn seekable(&self) -> TimeRangesContainer {
2788        let mut seekable = TimeRangesContainer::default();
2789        if let Some(ref player) = *self.player.borrow() {
2790            let ranges = player.lock().unwrap().seekable();
2791            for range in ranges {
2792                let _ = seekable.add(range.start, range.end);
2793            }
2794        }
2795        seekable
2796    }
2797
2798    /// <https://html.spec.whatwg.org/multipage/#earliest-possible-position>
2799    fn earliest_possible_position(&self) -> f64 {
2800        self.seekable()
2801            .start(0)
2802            .unwrap_or_else(|_| self.current_playback_position.get())
2803    }
2804
    /// Attaches a UA shadow root hosting the media controls UI (a script and a
    /// stylesheet) to this element, unless controls are already rendered.
    fn render_controls(&self, can_gc: CanGc) {
        if self.upcast::<Element>().is_shadow_host() {
            // Bail out if we are already showing the controls.
            return;
        }

        // FIXME(stevennovaryo): Recheck styling of media element to avoid
        //                       reparsing styles.
        let shadow_root = self
            .upcast::<Element>()
            .attach_ua_shadow_root(false, can_gc);
        let document = self.owner_document();
        let script = Element::create(
            QualName::new(None, ns!(html), local_name!("script")),
            None,
            &document,
            ElementCreator::ScriptCreated,
            CustomElementCreationMode::Asynchronous,
            None,
            can_gc,
        );
        // This is our hacky way to temporarily workaround the lack of a privileged
        // JS context.
        // The media controls UI accesses the document.servoGetMediaControls(id) API
        // to get an instance to the media controls ShadowRoot.
        // `id` needs to match the internally generated UUID assigned to a media element.
        let id = Uuid::new_v4().to_string();
        document.register_media_controls(&id, &shadow_root);
        let media_controls_script = MEDIA_CONTROL_JS.replace("@@@id@@@", &id);
        // Remember the id so remove_controls()/adopting_steps() can
        // unregister/re-register it later.
        *self.media_controls_id.borrow_mut() = Some(id);
        script
            .upcast::<Node>()
            .set_text_content_for_element(Some(DOMString::from(media_controls_script)), can_gc);
        if let Err(e) = shadow_root
            .upcast::<Node>()
            .AppendChild(script.upcast::<Node>(), can_gc)
        {
            warn!("Could not render media controls {:?}", e);
            return;
        }

        // Inject the controls stylesheet alongside the script.
        let style = Element::create(
            QualName::new(None, ns!(html), local_name!("style")),
            None,
            &document,
            ElementCreator::ScriptCreated,
            CustomElementCreationMode::Asynchronous,
            None,
            can_gc,
        );

        style
            .upcast::<Node>()
            .set_text_content_for_element(Some(DOMString::from(MEDIA_CONTROL_CSS)), can_gc);

        if let Err(e) = shadow_root
            .upcast::<Node>()
            .AppendChild(style.upcast::<Node>(), can_gc)
        {
            warn!("Could not render media controls {:?}", e);
        }

        // Mark the node dirty so layout picks up the new shadow tree.
        self.upcast::<Node>().dirty(NodeDamage::Other);
    }
2869
2870    fn remove_controls(&self) {
2871        if let Some(id) = self.media_controls_id.borrow_mut().take() {
2872            self.owner_document().unregister_media_controls(&id);
2873        }
2874    }
2875
2876    /// Gets the video frame at the current playback position.
2877    pub(crate) fn get_current_frame(&self) -> Option<VideoFrame> {
2878        self.video_renderer
2879            .lock()
2880            .unwrap()
2881            .current_frame_holder
2882            .as_ref()
2883            .map(|holder| holder.get_frame())
2884    }
2885
2886    /// Gets the current frame of the video element to present, if any.
2887    /// <https://html.spec.whatwg.org/multipage/#the-video-element:the-video-element-7>
2888    pub(crate) fn get_current_frame_to_present(&self) -> Option<MediaFrame> {
2889        let (current_frame, poster_frame) = {
2890            let renderer = self.video_renderer.lock().unwrap();
2891            (renderer.current_frame, renderer.poster_frame)
2892        };
2893
2894        // If the show poster flag is set (or there is no current video frame to
2895        // present) AND there is a poster frame, present that.
2896        if (self.show_poster.get() || current_frame.is_none()) && poster_frame.is_some() {
2897            return poster_frame;
2898        }
2899
2900        current_frame
2901    }
2902
2903    /// By default the audio is rendered through the audio sink automatically
2904    /// selected by the servo-media Player instance. However, in some cases, like
2905    /// the WebAudio MediaElementAudioSourceNode, we need to set a custom audio
2906    /// renderer.
2907    pub(crate) fn set_audio_renderer(
2908        &self,
2909        audio_renderer: Option<Arc<Mutex<dyn AudioRenderer>>>,
2910        cx: &mut js::context::JSContext,
2911    ) {
2912        *self.audio_renderer.borrow_mut() = audio_renderer;
2913
2914        let had_player = {
2915            if let Some(ref player) = *self.player.borrow() {
2916                if let Err(error) = player.lock().unwrap().stop() {
2917                    error!("Could not stop player: {error:?}");
2918                }
2919                true
2920            } else {
2921                false
2922            }
2923        };
2924
2925        if had_player {
2926            self.media_element_load_algorithm(cx);
2927        }
2928    }
2929
2930    fn send_media_session_event(&self, event: MediaSessionEvent) {
2931        let global = self.global();
2932        let media_session = global.as_window().Navigator().MediaSession();
2933
2934        media_session.register_media_instance(self);
2935
2936        media_session.send_event(event);
2937    }
2938
2939    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-resource>
2940    pub(crate) fn origin_is_clean(&self) -> bool {
2941        // Step 5.local (media provider object).
2942        if self.src_object.borrow().is_some() {
2943            // The resource described by the current media resource, if any,
2944            // contains the media data. It is CORS-same-origin.
2945            return true;
2946        }
2947
2948        // Step 5.remote (URL record).
2949        if self.resource_url.borrow().is_some() {
2950            // Update the media data with the contents
2951            // of response's unsafe response obtained in this fashion.
2952            // Response can be CORS-same-origin or CORS-cross-origin;
2953            if let Some(ref current_fetch_context) = *self.current_fetch_context.borrow() {
2954                return current_fetch_context.origin_is_clean();
2955            }
2956        }
2957
2958        true
2959    }
2960}
2961
impl HTMLMediaElementMethods<crate::DomTypeHolder> for HTMLMediaElement {
    /// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
    fn NetworkState(&self) -> u16 {
        // The enum discriminant matches the DOM NETWORK_* constant values.
        self.network_state.get() as u16
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
    fn ReadyState(&self) -> u16 {
        // The enum discriminant matches the DOM HAVE_* constant values.
        self.ready_state.get() as u16
    }

    // https://html.spec.whatwg.org/multipage/#dom-media-autoplay
    make_bool_getter!(Autoplay, "autoplay");
    // https://html.spec.whatwg.org/multipage/#dom-media-autoplay
    make_bool_setter!(SetAutoplay, "autoplay");

    // https://html.spec.whatwg.org/multipage/#attr-media-loop
    make_bool_getter!(Loop, "loop");
    // https://html.spec.whatwg.org/multipage/#attr-media-loop
    make_bool_setter!(SetLoop, "loop");

    // https://html.spec.whatwg.org/multipage/#dom-media-defaultmuted
    make_bool_getter!(DefaultMuted, "muted");
    // https://html.spec.whatwg.org/multipage/#dom-media-defaultmuted
    make_bool_setter!(SetDefaultMuted, "muted");

    // https://html.spec.whatwg.org/multipage/#dom-media-controls
    make_bool_getter!(Controls, "controls");
    // https://html.spec.whatwg.org/multipage/#dom-media-controls
    make_bool_setter!(SetControls, "controls");

    // https://html.spec.whatwg.org/multipage/#dom-media-src
    make_url_getter!(Src, "src");

    // https://html.spec.whatwg.org/multipage/#dom-media-src
    make_url_setter!(SetSrc, "src");

    /// <https://html.spec.whatwg.org/multipage/#dom-media-crossOrigin>
    fn GetCrossOrigin(&self) -> Option<DOMString> {
        reflect_cross_origin_attribute(self.upcast::<Element>())
    }
    /// <https://html.spec.whatwg.org/multipage/#dom-media-crossOrigin>
    fn SetCrossOrigin(&self, cx: &mut JSContext, value: Option<DOMString>) {
        set_cross_origin_attribute(cx, self.upcast::<Element>(), value);
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-muted>
    fn Muted(&self) -> bool {
        self.muted.get()
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-muted>
    fn SetMuted(&self, value: bool) {
        // No state change, no event: setting to the current value is a no-op.
        if self.muted.get() == value {
            return;
        }

        self.muted.set(value);

        // Forward the new mute state to the backend player, if one exists.
        if let Some(ref player) = *self.player.borrow() {
            if let Err(error) = player.lock().unwrap().set_mute(value) {
                warn!("Could not set mute state: {error:?}");
            }
        }

        // The user agent must queue a media element task given the media element to fire an event
        // named volumechange at the media element.
        self.queue_media_element_task_to_fire_event(atom!("volumechange"));

        // Then, if the media element is not allowed to play, the user agent must run the internal
        // pause steps for the media element.
        if !self.is_allowed_to_play() {
            self.internal_pause_steps();
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
    fn GetSrcObject(&self) -> Option<MediaStreamOrBlob> {
        // Convert the internal SrcObject representation back to the WebIDL union.
        (*self.src_object.borrow())
            .as_ref()
            .map(|src_object| match src_object {
                SrcObject::Blob(blob) => MediaStreamOrBlob::Blob(DomRoot::from_ref(blob)),
                SrcObject::MediaStream(stream) => {
                    MediaStreamOrBlob::MediaStream(DomRoot::from_ref(stream))
                },
            })
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
    fn SetSrcObject(&self, cx: &mut js::context::JSContext, value: Option<MediaStreamOrBlob>) {
        *self.src_object.borrow_mut() = value.map(|value| value.into());
        self.media_element_load_algorithm(cx);
    }

    // https://html.spec.whatwg.org/multipage/#attr-media-preload
    // Missing/Invalid values are user-agent defined.
    make_enumerated_getter!(
        Preload,
        "preload",
        "none" | "metadata" | "auto",
        missing => "auto",
        invalid => "auto"
    );

    // https://html.spec.whatwg.org/multipage/#attr-media-preload
    make_setter!(SetPreload, "preload");

    /// <https://html.spec.whatwg.org/multipage/#dom-media-currentsrc>
    fn CurrentSrc(&self) -> USVString {
        USVString(self.current_src.borrow().clone())
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-load>
    fn Load(&self, cx: &mut js::context::JSContext) {
        self.media_element_load_algorithm(cx);
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-navigator-canplaytype>
    fn CanPlayType(&self, type_: DOMString) -> CanPlayTypeResult {
        // Delegate the MIME type check to the servo-media backend.
        match ServoMedia::get().can_play_type(&type_.str()) {
            SupportsMediaType::No => CanPlayTypeResult::_empty,
            SupportsMediaType::Maybe => CanPlayTypeResult::Maybe,
            SupportsMediaType::Probably => CanPlayTypeResult::Probably,
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-error>
    fn GetError(&self) -> Option<DomRoot<MediaError>> {
        self.error.get()
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-play>
    fn Play(&self, cx: &mut CurrentRealm) -> Rc<Promise> {
        let promise = Promise::new_in_realm(cx);

        // TODO Step 1. If the media element is not allowed to play, then return a promise rejected
        // with a "NotAllowedError" DOMException.

        // Step 2. If the media element's error attribute is not null and its code is
        // MEDIA_ERR_SRC_NOT_SUPPORTED, then return a promise rejected with a "NotSupportedError"
        // DOMException.
        if self
            .error
            .get()
            .is_some_and(|e| e.Code() == MEDIA_ERR_SRC_NOT_SUPPORTED)
        {
            promise.reject_error(Error::NotSupported(None), CanGc::from_cx(cx));
            return promise;
        }

        // Step 3. Let promise be a new promise and append promise to the list of pending play
        // promises.
        self.push_pending_play_promise(&promise);

        // Step 4. Run the internal play steps for the media element.
        self.internal_play_steps(cx);

        // Step 5. Return promise.
        promise
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-pause>
    fn Pause(&self, cx: &mut js::context::JSContext) {
        // Step 1. If the media element's networkState attribute has the value NETWORK_EMPTY, invoke
        // the media element's resource selection algorithm.
        if self.network_state.get() == NetworkState::Empty {
            self.invoke_resource_selection_algorithm(cx);
        }

        // Step 2. Run the internal pause steps for the media element.
        self.internal_pause_steps();
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-paused>
    fn Paused(&self) -> bool {
        self.paused.get()
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
    fn GetDefaultPlaybackRate(&self) -> Fallible<Finite<f64>> {
        Ok(Finite::wrap(self.default_playback_rate.get()))
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
    fn SetDefaultPlaybackRate(&self, value: Finite<f64>) -> ErrorResult {
        // If the given value is not supported by the user agent, then throw a "NotSupportedError"
        // DOMException.
        // Rates outside [-64.0, 64.0] are rejected as unsupported.
        let min_allowed = -64.0;
        let max_allowed = 64.0;
        if *value < min_allowed || *value > max_allowed {
            return Err(Error::NotSupported(None));
        }

        // Setting to the current value is a no-op (no ratechange event).
        if self.default_playback_rate.get() == *value {
            return Ok(());
        }

        self.default_playback_rate.set(*value);

        // The user agent must queue a media element task given the media element to fire an event
        // named ratechange at the media element.
        self.queue_media_element_task_to_fire_event(atom!("ratechange"));

        Ok(())
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
    fn GetPlaybackRate(&self) -> Fallible<Finite<f64>> {
        Ok(Finite::wrap(self.playback_rate.get()))
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
    fn SetPlaybackRate(&self, value: Finite<f64>) -> ErrorResult {
        // The attribute is mutable: on setting, the user agent must follow these steps:

        // Step 1. If the given value is not supported by the user agent, then throw a
        // "NotSupportedError" DOMException.
        // Rates outside [-64.0, 64.0] are rejected, matching SetDefaultPlaybackRate.
        let min_allowed = -64.0;
        let max_allowed = 64.0;
        if *value < min_allowed || *value > max_allowed {
            return Err(Error::NotSupported(None));
        }

        // Setting to the current value is a no-op (no ratechange event).
        if self.playback_rate.get() == *value {
            return Ok(());
        }

        // Step 2. Set playbackRate to the new value, and if the element is potentially playing,
        // change the playback speed.
        self.playback_rate.set(*value);

        if self.is_potentially_playing() {
            if let Some(ref player) = *self.player.borrow() {
                if let Err(error) = player.lock().unwrap().set_playback_rate(*value) {
                    warn!("Could not set the playback rate: {error:?}");
                }
            }
        }

        // The user agent must queue a media element task given the media element to fire an event
        // named ratechange at the media element.
        self.queue_media_element_task_to_fire_event(atom!("ratechange"));

        Ok(())
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-duration>
    fn Duration(&self) -> f64 {
        self.duration.get()
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-currenttime>
    fn CurrentTime(&self) -> Finite<f64> {
        Finite::wrap(if self.default_playback_start_position.get() != 0. {
            self.default_playback_start_position.get()
        } else if self.seeking.get() {
            // Note that other browsers do something similar (by checking the `seeking` value or
            // clamping the `official` position to the earliest possible position, the duration,
            // and the seekable ranges).
            // <https://github.com/whatwg/html/issues/11773>
            self.current_seek_position.get()
        } else {
            self.official_playback_position.get()
        })
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-currenttime>
    fn SetCurrentTime(&self, time: Finite<f64>) {
        // Before any media data is available, remember the requested position
        // as the default playback start position instead of seeking.
        if self.ready_state.get() == ReadyState::HaveNothing {
            self.default_playback_start_position.set(*time);
        } else {
            self.official_playback_position.set(*time);
            self.seek(*time, /* approximate_for_speed */ false);
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-seeking>
    fn Seeking(&self) -> bool {
        self.seeking.get()
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-ended>
    fn Ended(&self) -> bool {
        self.ended_playback(LoopCondition::Included) &&
            self.direction_of_playback() == PlaybackDirection::Forwards
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-fastseek>
    fn FastSeek(&self, time: Finite<f64>) {
        self.seek(*time, /* approximate_for_speed */ true);
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-played>
    fn Played(&self, can_gc: CanGc) -> DomRoot<TimeRanges> {
        TimeRanges::new(
            self.global().as_window(),
            self.played.borrow().clone(),
            can_gc,
        )
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-seekable>
    fn Seekable(&self, can_gc: CanGc) -> DomRoot<TimeRanges> {
        TimeRanges::new(self.global().as_window(), self.seekable(), can_gc)
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-buffered>
    fn Buffered(&self, can_gc: CanGc) -> DomRoot<TimeRanges> {
        // Collect the buffered ranges reported by the backend player, if any.
        let mut buffered = TimeRangesContainer::default();
        if let Some(ref player) = *self.player.borrow() {
            let ranges = player.lock().unwrap().buffered();
            for range in ranges {
                let _ = buffered.add(range.start, range.end);
            }
        }
        TimeRanges::new(self.global().as_window(), buffered, can_gc)
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-audiotracks>
    fn AudioTracks(&self, can_gc: CanGc) -> DomRoot<AudioTrackList> {
        let window = self.owner_window();
        self.audio_tracks_list
            .or_init(|| AudioTrackList::new(&window, &[], Some(self), can_gc))
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-videotracks>
    fn VideoTracks(&self, can_gc: CanGc) -> DomRoot<VideoTrackList> {
        let window = self.owner_window();
        self.video_tracks_list
            .or_init(|| VideoTrackList::new(&window, &[], Some(self), can_gc))
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-texttracks>
    fn TextTracks(&self, can_gc: CanGc) -> DomRoot<TextTrackList> {
        let window = self.owner_window();
        self.text_tracks_list
            .or_init(|| TextTrackList::new(&window, &[], can_gc))
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-addtexttrack>
    fn AddTextTrack(
        &self,
        kind: TextTrackKind,
        label: DOMString,
        language: DOMString,
        can_gc: CanGc,
    ) -> DomRoot<TextTrack> {
        let window = self.owner_window();
        // Step 1 & 2
        // FIXME(#22314, dlrobertson) set the ready state to Loaded
        let track = TextTrack::new(
            &window,
            "".into(),
            kind,
            label,
            language,
            TextTrackMode::Hidden,
            None,
            can_gc,
        );
        // Step 3 & 4
        self.TextTracks(can_gc).add(&track);
        // Step 5
        DomRoot::from_ref(&track)
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-volume>
    fn GetVolume(&self) -> Fallible<Finite<f64>> {
        Ok(Finite::wrap(self.volume.get()))
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-volume>
    fn SetVolume(&self, value: Finite<f64>) -> ErrorResult {
        // If the new value is outside the range 0.0 to 1.0 inclusive, then, on setting, an
        // "IndexSizeError" DOMException must be thrown instead.
        let minimum_volume = 0.0;
        let maximum_volume = 1.0;
        if *value < minimum_volume || *value > maximum_volume {
            return Err(Error::IndexSize(None));
        }

        // Setting to the current value is a no-op (no volumechange event).
        if self.volume.get() == *value {
            return Ok(());
        }

        self.volume.set(*value);

        // Forward the new volume to the backend player, if one exists.
        if let Some(ref player) = *self.player.borrow() {
            if let Err(error) = player.lock().unwrap().set_volume(*value) {
                warn!("Could not set the volume: {error:?}");
            }
        }

        // The user agent must queue a media element task given the media element to fire an event
        // named volumechange at the media element.
        self.queue_media_element_task_to_fire_event(atom!("volumechange"));

        // Then, if the media element is not allowed to play, the user agent must run the internal
        // pause steps for the media element.
        if !self.is_allowed_to_play() {
            self.internal_pause_steps();
        }

        Ok(())
    }
}
3368
impl VirtualMethods for HTMLMediaElement {
    fn super_type(&self) -> Option<&dyn VirtualMethods> {
        Some(self.upcast::<HTMLElement>() as &dyn VirtualMethods)
    }

    #[expect(unsafe_code)]
    fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation, _can_gc: CanGc) {
        // TODO: https://github.com/servo/servo/issues/42812
        let mut cx = unsafe { temp_cx() };
        let cx = &mut cx;
        self.super_type()
            .unwrap()
            .attribute_mutated(attr, mutation, CanGc::from_cx(cx));

        match *attr.local_name() {
            local_name!("muted") => {
                // <https://html.spec.whatwg.org/multipage/#dom-media-muted>
                // When a media element is created, if the element has a muted content attribute
                // specified, then the muted IDL attribute should be set to true.
                if let AttributeMutation::Set(
                    _,
                    AttributeMutationReason::ByCloning | AttributeMutationReason::ByParser,
                ) = mutation
                {
                    self.SetMuted(true);
                }
            },
            local_name!("src") => {
                // <https://html.spec.whatwg.org/multipage/#location-of-the-media-resource>
                // If a src attribute of a media element is set or changed, the user agent must invoke
                // the media element's media element load algorithm (Removing the src attribute does
                // not do this, even if there are source elements present).
                if !mutation.is_removal() {
                    self.media_element_load_algorithm(cx);
                }
            },
            local_name!("controls") => {
                // Render or tear down the UA media controls as the attribute toggles.
                if mutation.new_value(attr).is_some() {
                    self.render_controls(CanGc::from_cx(cx));
                } else {
                    self.remove_controls();
                }
            },
            _ => (),
        };
    }

    /// <https://html.spec.whatwg.org/multipage/#playing-the-media-resource:remove-an-element-from-a-document>
    fn unbind_from_tree(&self, context: &UnbindContext, can_gc: CanGc) {
        self.super_type().unwrap().unbind_from_tree(context, can_gc);

        self.remove_controls();

        // Defer the pause to a stable state; the queued microtask re-checks
        // connectedness (see MediaElementMicrotask::PauseIfNotInDocument), so
        // re-inserting the element before it runs avoids the pause.
        if context.tree_connected {
            let task = MediaElementMicrotask::PauseIfNotInDocument {
                elem: DomRoot::from_ref(self),
            };
            ScriptThread::await_stable_state(Microtask::MediaElement(task));
        }
    }

    fn adopting_steps(&self, old_doc: &Document, can_gc: CanGc) {
        self.super_type().unwrap().adopting_steps(old_doc, can_gc);

        // Note that media control id should be adopting between documents so "privileged"
        // document.servoGetMediaControls(id) API is keeping access to the whitelist of media
        // controls identifiers.
        if let Some(id) = &*self.media_controls_id.borrow() {
            let Some(shadow_root) = self.upcast::<Element>().shadow_root() else {
                error!("Missing media controls shadow root");
                return;
            };

            old_doc.unregister_media_controls(id);
            self.owner_document()
                .register_media_controls(id, &shadow_root);
        }
    }
}
3448
/// Deferred media element algorithm steps that run as microtasks, typically
/// queued via `ScriptThread::await_stable_state`. Variants carrying a
/// `generation_id` are skipped at run time if the element's generation has
/// changed since queueing (i.e. the load was aborted or restarted).
#[derive(JSTraceable, MallocSizeOf)]
pub(crate) enum MediaElementMicrotask {
    /// Continue the resource selection algorithm synchronously.
    ResourceSelection {
        elem: DomRoot<HTMLMediaElement>,
        generation_id: u32,
        #[no_trace]
        base_url: ServoUrl,
    },
    /// Run the internal pause steps if the element is no longer connected.
    PauseIfNotInDocument {
        elem: DomRoot<HTMLMediaElement>,
    },
    /// Finish a seek operation (step 13 of the seek algorithm onwards).
    Seeked {
        elem: DomRoot<HTMLMediaElement>,
        generation_id: u32,
    },
    /// Try the next `<source>` child during resource selection.
    SelectNextSourceChild {
        elem: DomRoot<HTMLMediaElement>,
        generation_id: u32,
    },
    /// Resume `<source>` selection after waiting for new children.
    SelectNextSourceChildAfterWait {
        elem: DomRoot<HTMLMediaElement>,
        generation_id: u32,
    },
}
3473
3474impl MicrotaskRunnable for MediaElementMicrotask {
3475    fn handler(&self, cx: &mut js::context::JSContext) {
3476        match self {
3477            &MediaElementMicrotask::ResourceSelection {
3478                ref elem,
3479                generation_id,
3480                ref base_url,
3481            } => {
3482                if generation_id == elem.generation_id.get() {
3483                    elem.resource_selection_algorithm_sync(base_url.clone(), cx);
3484                }
3485            },
3486            MediaElementMicrotask::PauseIfNotInDocument { elem } => {
3487                if !elem.upcast::<Node>().is_connected() {
3488                    elem.internal_pause_steps();
3489                }
3490            },
3491            &MediaElementMicrotask::Seeked {
3492                ref elem,
3493                generation_id,
3494            } => {
3495                if generation_id == elem.generation_id.get() {
3496                    elem.seek_end();
3497                }
3498            },
3499            &MediaElementMicrotask::SelectNextSourceChild {
3500                ref elem,
3501                generation_id,
3502            } => {
3503                if generation_id == elem.generation_id.get() {
3504                    elem.select_next_source_child(CanGc::from_cx(cx));
3505                }
3506            },
3507            &MediaElementMicrotask::SelectNextSourceChildAfterWait {
3508                ref elem,
3509                generation_id,
3510            } => {
3511                if generation_id == elem.generation_id.get() {
3512                    elem.select_next_source_child_after_wait(cx);
3513                }
3514            },
3515        }
3516    }
3517
3518    fn enter_realm<'cx>(&self, cx: &'cx mut js::context::JSContext) -> AutoRealm<'cx> {
3519        match self {
3520            &MediaElementMicrotask::ResourceSelection { ref elem, .. } |
3521            &MediaElementMicrotask::PauseIfNotInDocument { ref elem } |
3522            &MediaElementMicrotask::Seeked { ref elem, .. } |
3523            &MediaElementMicrotask::SelectNextSourceChild { ref elem, .. } |
3524            &MediaElementMicrotask::SelectNextSourceChildAfterWait { ref elem, .. } => {
3525                enter_auto_realm(cx, &**elem)
3526            },
3527        }
3528    }
3529}
3530
/// The kind of resource chosen by the resource selection algorithm.
enum Resource {
    /// A media provider object assigned to the element rather than a URL.
    /// NOTE(review): handling of the object case lives elsewhere in this file;
    /// confirm semantics against the resource selection algorithm.
    Object,
    /// A media resource addressed by URL, fetched over the network.
    Url(ServoUrl),
}
3535
/// A unit of data queued for delivery to the media backend.
#[derive(Debug, MallocSizeOf, PartialEq)]
enum DataBuffer {
    /// A chunk of raw media bytes received from the network.
    Payload(Vec<u8>),
    /// Marker signalling that no further data will follow.
    EndOfStream,
}
3541
/// A queue of incoming media data, buffered until the media player is ready
/// to accept it (signalled via the NeedData player event).
#[derive(MallocSizeOf)]
struct BufferedDataSource {
    /// During initial setup and seeking (including clearing the buffer queue
    /// and resetting the end-of-stream state), the data source should be locked and
    /// any request for processing should be ignored until the media player informs us
    /// via the NeedData event that it is ready to accept incoming data.
    locked: Cell<bool>,
    /// Temporary storage for incoming data.
    buffers: VecDeque<DataBuffer>,
}
3552
3553impl BufferedDataSource {
3554    fn new() -> BufferedDataSource {
3555        BufferedDataSource {
3556            locked: Cell::new(true),
3557            buffers: VecDeque::default(),
3558        }
3559    }
3560
3561    fn set_locked(&self, locked: bool) {
3562        self.locked.set(locked)
3563    }
3564
3565    fn add_buffer_to_queue(&mut self, buffer: DataBuffer) {
3566        debug_assert_ne!(
3567            self.buffers.back(),
3568            Some(&DataBuffer::EndOfStream),
3569            "The media backend not expects any further data after end of stream"
3570        );
3571
3572        self.buffers.push_back(buffer);
3573    }
3574
3575    fn process_into_player_from_queue(
3576        &mut self,
3577        player: &Arc<Mutex<dyn Player>>,
3578    ) -> Result<(), PlayerError> {
3579        // Early out if any request for processing should be ignored.
3580        if self.locked.get() {
3581            return Ok(());
3582        }
3583
3584        while let Some(buffer) = self.buffers.pop_front() {
3585            match buffer {
3586                DataBuffer::Payload(payload) => {
3587                    if let Err(error) = player.lock().unwrap().push_data(payload) {
3588                        warn!("Could not push input data to player: {error:?}");
3589                        return Err(error);
3590                    }
3591                },
3592                DataBuffer::EndOfStream => {
3593                    if let Err(error) = player.lock().unwrap().end_of_stream() {
3594                        warn!("Could not signal EOS to player: {error:?}");
3595                        return Err(error);
3596                    }
3597                },
3598            }
3599        }
3600
3601        Ok(())
3602    }
3603
3604    fn reset(&mut self) {
3605        self.locked.set(true);
3606        self.buffers.clear();
3607    }
3608}
3609
/// Indicates the reason why a fetch request was cancelled.
#[derive(Debug, MallocSizeOf, PartialEq)]
enum CancelReason {
    /// We were asked to stop pushing data to the player.
    Backoff,
    /// An error occurred while fetching the media data.
    Error,
    /// The fetching process is aborted by the user.
    Abort,
}
3620
/// Per-fetch state for an ongoing media resource request.
#[derive(MallocSizeOf)]
pub(crate) struct HTMLMediaElementFetchContext {
    /// The fetch request id.
    request_id: RequestId,
    /// Some if the request has been cancelled.
    cancel_reason: Option<CancelReason>,
    /// Indicates whether the fetched stream is seekable.
    is_seekable: bool,
    /// Indicates whether the fetched stream is origin clean.
    origin_clean: bool,
    /// The buffered data source to be processed by the media backend.
    data_source: RefCell<BufferedDataSource>,
    /// Fetch canceller. Allows cancelling the current fetch request by
    /// manually calling its .cancel() method or automatically on Drop.
    fetch_canceller: FetchCanceller,
}
3637
3638impl HTMLMediaElementFetchContext {
3639    fn new(
3640        request_id: RequestId,
3641        core_resource_thread: CoreResourceThread,
3642    ) -> HTMLMediaElementFetchContext {
3643        HTMLMediaElementFetchContext {
3644            request_id,
3645            cancel_reason: None,
3646            is_seekable: false,
3647            origin_clean: true,
3648            data_source: RefCell::new(BufferedDataSource::new()),
3649            fetch_canceller: FetchCanceller::new(request_id, false, core_resource_thread.clone()),
3650        }
3651    }
3652
3653    fn request_id(&self) -> RequestId {
3654        self.request_id
3655    }
3656
3657    fn is_seekable(&self) -> bool {
3658        self.is_seekable
3659    }
3660
3661    fn set_seekable(&mut self, seekable: bool) {
3662        self.is_seekable = seekable;
3663    }
3664
3665    fn origin_is_clean(&self) -> bool {
3666        self.origin_clean
3667    }
3668
3669    fn set_origin_clean(&mut self, origin_clean: bool) {
3670        self.origin_clean = origin_clean;
3671    }
3672
3673    fn data_source(&self) -> &RefCell<BufferedDataSource> {
3674        &self.data_source
3675    }
3676
3677    fn cancel(&mut self, reason: CancelReason) {
3678        if self.cancel_reason.is_some() {
3679            return;
3680        }
3681        self.cancel_reason = Some(reason);
3682        self.data_source.borrow_mut().reset();
3683        self.fetch_canceller.abort();
3684    }
3685
3686    fn cancel_reason(&self) -> &Option<CancelReason> {
3687        &self.cancel_reason
3688    }
3689}
3690
/// Listener for the network events of a media resource fetch.
struct HTMLMediaElementFetchListener {
    /// The element that initiated the request.
    element: Trusted<HTMLMediaElement>,
    /// The generation of the media element when this fetch started.
    generation_id: u32,
    /// The fetch request id.
    request_id: RequestId,
    /// Earliest time at which the next `progress` event may be fired; pushed
    /// 350ms into the future each time one fires.
    next_progress_event: Instant,
    /// Url for the resource.
    url: ServoUrl,
    /// Expected content length of the media asset being fetched or played.
    expected_content_length: Option<u64>,
    /// Number of bytes of the media asset fetched so far.
    fetched_content_length: u64,
    /// Discarded content length from the network for the ongoing
    /// request if range requests are not supported. Seek requests set it
    /// to the required position (in bytes).
    content_length_to_discard: u64,
}
3711
impl FetchResponseListener for HTMLMediaElementFetchListener {
    // Media fetches have no request body, so these are no-ops.
    fn process_request_body(&mut self, _: RequestId) {}

    fn process_request_eof(&mut self, _: RequestId) {}

    /// Handles the response head: records seekability and origin-cleanness on
    /// the current fetch context, runs the media data processing steps on a
    /// non-success status, and forwards the expected content length to the
    /// media player.
    #[expect(unsafe_code)]
    fn process_response(&mut self, _: RequestId, metadata: Result<FetchMetadata, NetworkError>) {
        // TODO: https://github.com/servo/servo/issues/42840
        let mut cx = unsafe { temp_cx() };
        let cx = &mut cx;
        let element = self.element.root();

        // The response is origin-clean when unfiltered or filtered as
        // basic/CORS; other filtered (opaque) responses taint the origin.
        let (metadata, origin_clean) = match metadata {
            Ok(fetch_metadata) => match fetch_metadata {
                FetchMetadata::Unfiltered(metadata) => (Some(metadata), true),
                FetchMetadata::Filtered { filtered, unsafe_ } => (
                    Some(unsafe_),
                    matches!(
                        filtered,
                        FilteredMetadata::Basic(_) | FilteredMetadata::Cors(_)
                    ),
                ),
            },
            Err(_) => (None, true),
        };

        // 206 Partial Content means the server honored our range request, so
        // the stream is treated as seekable.
        let (status_is_success, is_seekable) =
            metadata.as_ref().map_or((false, false), |metadata| {
                let status = &metadata.status;
                (status.is_success(), *status == StatusCode::PARTIAL_CONTENT)
            });

        // <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
        if !status_is_success {
            if element.ready_state.get() == ReadyState::HaveNothing {
                // => "If the media data cannot be fetched at all, due to network errors..."
                element.media_data_processing_failure_steps();
            } else {
                // => "If the connection is interrupted after some media data has been received..."
                element.media_data_processing_fatal_steps(MEDIA_ERR_NETWORK, cx);
            }
            return;
        }

        if let Some(ref mut current_fetch_context) = *element.current_fetch_context.borrow_mut() {
            current_fetch_context.set_seekable(is_seekable);
            current_fetch_context.set_origin_clean(origin_clean);
        }

        if let Some(metadata) = metadata.as_ref() {
            if let Some(headers) = metadata.headers.as_ref() {
                // For range requests we get the size of the media asset from the Content-Range
                // header. Otherwise, we get it from the Content-Length header.
                let content_length =
                    if let Some(content_range) = headers.typed_get::<ContentRange>() {
                        content_range.bytes_len()
                    } else {
                        headers
                            .typed_get::<ContentLength>()
                            .map(|content_length| content_length.0)
                    };

                // We only set the expected input size if it changes.
                if content_length != self.expected_content_length {
                    if let Some(content_length) = content_length {
                        self.expected_content_length = Some(content_length);
                    }
                }
            }
        }

        // Explicit media player initialization with live/seekable source.
        if let Some(expected_content_length) = self.expected_content_length {
            if let Err(e) = element
                .player
                .borrow()
                .as_ref()
                .unwrap()
                .lock()
                .unwrap()
                .set_input_size(expected_content_length)
            {
                warn!("Could not set player input size {:?}", e);
            }
        }
    }

    /// Queues a chunk of the response body for the media backend, discarding
    /// leading bytes when a seek offset must be simulated without server-side
    /// range support, and fires throttled `progress` events.
    fn process_response_chunk(&mut self, _: RequestId, chunk: Vec<u8>) {
        let element = self.element.root();

        self.fetched_content_length += chunk.len() as u64;

        // If an error was received previously, we skip processing the payload.
        if let Some(ref mut current_fetch_context) = *element.current_fetch_context.borrow_mut() {
            if let Some(CancelReason::Backoff) = current_fetch_context.cancel_reason() {
                return;
            }

            // Discard chunk of the response body if fetch context doesn't support range requests.
            let payload = if !current_fetch_context.is_seekable() &&
                self.content_length_to_discard != 0
            {
                if chunk.len() as u64 > self.content_length_to_discard {
                    // Keep only the bytes past the discard boundary.
                    let shrink_chunk = chunk[self.content_length_to_discard as usize..].to_vec();
                    self.content_length_to_discard = 0;
                    shrink_chunk
                } else {
                    // Completely discard this response chunk.
                    self.content_length_to_discard -= chunk.len() as u64;
                    return;
                }
            } else {
                chunk
            };

            if let Err(e) = {
                let mut data_source = current_fetch_context.data_source().borrow_mut();
                data_source.add_buffer_to_queue(DataBuffer::Payload(payload));
                data_source
                    .process_into_player_from_queue(element.player.borrow().as_ref().unwrap())
            } {
                // If we are pushing too much data and we know that we can
                // restart the download later from where we left, we cancel
                // the current request. Otherwise, we continue the request
                // assuming that we may drop some frames.
                if e == PlayerError::EnoughData {
                    current_fetch_context.cancel(CancelReason::Backoff);
                }
                return;
            }
        }

        // <https://html.spec.whatwg.org/multipage/#concept-media-load-resource>
        // While the load is not suspended (see below), every 350ms (±200ms) or for every byte
        // received, whichever is least frequent, queue a media element task given the media element
        // to fire an event named progress at the element.
        if Instant::now() > self.next_progress_event {
            element.queue_media_element_task_to_fire_event(atom!("progress"));
            self.next_progress_event = Instant::now() + Duration::from_millis(350);
        }
    }

    /// Handles the end of the response body: signals end-of-stream to the
    /// media backend on success, runs the spec's failure/fatal steps
    /// otherwise, and submits resource timing.
    fn process_response_eof(
        self,
        cx: &mut js::context::JSContext,
        _: RequestId,
        status: Result<(), NetworkError>,
        timing: ResourceFetchTiming,
    ) {
        let element = self.element.root();

        // <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
        if status.is_ok() && self.fetched_content_length != 0 {
            // => "Once the entire media resource has been fetched..."

            // There are no more chunks of the response body forthcoming, so we can
            // go ahead and notify the media backend not to expect any further data.
            if let Some(ref mut current_fetch_context) = *element.current_fetch_context.borrow_mut()
            {
                // On initial state change READY -> PAUSED the media player perform
                // seek to initial position by event with seek segment (TIME format)
                // while media stack operates in BYTES format and configuring segment
                // start and stop positions without the total size of the stream is not
                // possible. As fallback the media player perform seek with BYTES format
                // and initiate seek request via "seek-data" callback with required offset.
                if self.expected_content_length.is_none() {
                    if let Err(e) = element
                        .player
                        .borrow()
                        .as_ref()
                        .unwrap()
                        .lock()
                        .unwrap()
                        .set_input_size(self.fetched_content_length)
                    {
                        warn!("Could not set player input size {:?}", e);
                    }
                }

                let mut data_source = current_fetch_context.data_source().borrow_mut();

                data_source.add_buffer_to_queue(DataBuffer::EndOfStream);
                let _ = data_source
                    .process_into_player_from_queue(element.player.borrow().as_ref().unwrap());
            }

            // Step 1. Fire an event named progress at the media element.
            element
                .upcast::<EventTarget>()
                .fire_event(atom!("progress"), CanGc::from_cx(cx));

            // Step 2. Set the networkState to NETWORK_IDLE and fire an event named suspend at the
            // media element.
            element.network_state.set(NetworkState::Idle);

            element
                .upcast::<EventTarget>()
                .fire_event(atom!("suspend"), CanGc::from_cx(cx));
        } else if status.is_err() && element.ready_state.get() != ReadyState::HaveNothing {
            // => "If the connection is interrupted after some media data has been received..."
            element.media_data_processing_fatal_steps(MEDIA_ERR_NETWORK, cx);
        } else {
            // => "If the media data can be fetched but is found by inspection to be in an
            // unsupported format, or can otherwise not be rendered at all"
            element.media_data_processing_failure_steps();
        }

        network_listener::submit_timing(&self, &status, &timing, CanGc::from_cx(cx));
    }

    /// Reports CSP violations gathered for this fetch to the global scope.
    fn process_csp_violations(&mut self, _request_id: RequestId, violations: Vec<Violation>) {
        let global = &self.resource_timing_global();
        global.report_csp_violations(violations, None, None);
    }

    /// Whether fetch callbacks should still be delivered: the element must be
    /// in the same generation, have a player, and this must still be the
    /// current, non-cancelled (or only backoff-cancelled) fetch request.
    fn should_invoke(&self) -> bool {
        let element = self.element.root();

        if element.generation_id.get() != self.generation_id || element.player.borrow().is_none() {
            return false;
        }

        let Some(ref current_fetch_context) = *element.current_fetch_context.borrow() else {
            return false;
        };

        // Whether the new fetch request was triggered.
        if current_fetch_context.request_id() != self.request_id {
            return false;
        }

        // Whether the current fetch request was cancelled due to a network or decoding error, or
        // was aborted by the user.
        if let Some(cancel_reason) = current_fetch_context.cancel_reason() {
            if matches!(*cancel_reason, CancelReason::Error | CancelReason::Abort) {
                return false;
            }
        }

        true
    }
}
3954
3955impl ResourceTimingListener for HTMLMediaElementFetchListener {
3956    fn resource_timing_information(&self) -> (InitiatorType, ServoUrl) {
3957        let initiator_type = InitiatorType::LocalName(
3958            self.element
3959                .root()
3960                .upcast::<Element>()
3961                .local_name()
3962                .to_string(),
3963        );
3964        (initiator_type, self.url.clone())
3965    }
3966
3967    fn resource_timing_global(&self) -> DomRoot<GlobalScope> {
3968        self.element.root().owner_document().global()
3969    }
3970}
3971
3972impl HTMLMediaElementFetchListener {
3973    fn new(element: &HTMLMediaElement, request_id: RequestId, url: ServoUrl, offset: u64) -> Self {
3974        Self {
3975            element: Trusted::new(element),
3976            generation_id: element.generation_id.get(),
3977            request_id,
3978            next_progress_event: Instant::now() + Duration::from_millis(350),
3979            url,
3980            expected_content_length: None,
3981            fetched_content_length: 0,
3982            content_length_to_discard: offset,
3983        }
3984    }
3985}
3986
/// The [`HTMLMediaElementEventHandler`] is a structure responsible for handling media events for
/// the [`HTMLMediaElement`] and exists to decouple ownership of the [`HTMLMediaElement`] from IPC
/// router callback.
#[derive(JSTraceable, MallocSizeOf)]
struct HTMLMediaElementEventHandler {
    /// Weak reference to the media element; player events that arrive after
    /// the element is gone are ignored (see `handle_player_event`).
    element: WeakRef<HTMLMediaElement>,
}
3994
#[expect(unsafe_code)]
// SAFETY(review): `WeakRef<HTMLMediaElement>` is not thread-safe; `Send` is
// asserted so the handler can be handed to the IPC router. The `Drop` impl
// below asserts destruction happens on the script thread, and
// `handle_player_event` roots the weak reference there — confirm no other
// thread ever dereferences it.
unsafe impl Send for HTMLMediaElementEventHandler {}
3997
3998impl HTMLMediaElementEventHandler {
3999    fn new(element: &HTMLMediaElement) -> Self {
4000        Self {
4001            element: WeakRef::new(element),
4002        }
4003    }
4004
4005    fn handle_player_event(
4006        &self,
4007        player_id: usize,
4008        event: PlayerEvent,
4009        cx: &mut js::context::JSContext,
4010    ) {
4011        let Some(element) = self.element.root() else {
4012            return;
4013        };
4014
4015        // Abort event processing if the associated media player is outdated.
4016        if element.player_id().is_none_or(|id| id != player_id) {
4017            return;
4018        }
4019
4020        match event {
4021            PlayerEvent::DurationChanged(duration) => element.playback_duration_changed(duration),
4022            PlayerEvent::EndOfStream => element.playback_end(),
4023            PlayerEvent::EnoughData => element.playback_enough_data(),
4024            PlayerEvent::Error(ref error) => element.playback_error(error, cx),
4025            PlayerEvent::MetadataUpdated(ref metadata) => {
4026                element.playback_metadata_updated(metadata, CanGc::from_cx(cx))
4027            },
4028            PlayerEvent::NeedData => element.playback_need_data(),
4029            PlayerEvent::PositionChanged(position) => element.playback_position_changed(position),
4030            PlayerEvent::SeekData(offset, seek_lock) => {
4031                element.fetch_request(Some(offset), Some(seek_lock))
4032            },
4033            PlayerEvent::SeekDone(position) => element.playback_seek_done(position),
4034            PlayerEvent::StateChanged(ref state) => element.playback_state_changed(state),
4035            PlayerEvent::VideoFrameUpdated => element.playback_video_frame_updated(),
4036        }
4037    }
4038}
4039
impl Drop for HTMLMediaElementEventHandler {
    /// Panics (in debug of the invariant) if dropped off the script thread.
    fn drop(&mut self) {
        // The weak reference to the media element is not thread-safe and MUST be deleted on the
        // script thread, which is guaranteed by ownership of the `event handler` in the IPC router
        // callback (queued task to the media element task source) and the media element itself.
        assert_in_script();
    }
}