// script/dom/html/htmlmediaelement.rs
1/* This Source Code Form is subject to the terms of the Mozilla Public
2 * License, v. 2.0. If a copy of the MPL was not distributed with this
3 * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
4
5use std::cell::{Cell, RefCell};
6use std::collections::VecDeque;
7use std::rc::Rc;
8use std::sync::{Arc, Mutex, Weak};
9use std::time::{Duration, Instant};
10use std::{f64, mem};
11
12use content_security_policy::sandboxing_directive::SandboxingFlagSet;
13use dom_struct::dom_struct;
14use embedder_traits::{MediaPositionState, MediaSessionEvent, MediaSessionPlaybackState};
15use euclid::default::Size2D;
16use headers::{ContentLength, ContentRange, HeaderMapExt};
17use html5ever::{LocalName, Prefix, QualName, local_name, ns};
18use http::StatusCode;
19use http::header::{self, HeaderMap, HeaderValue};
20use ipc_channel::ipc::{self};
21use ipc_channel::router::ROUTER;
22use js::context::JSContext;
23use js::realm::{AutoRealm, CurrentRealm};
24use layout_api::MediaFrame;
25use media::{GLPlayerMsg, GLPlayerMsgForward, WindowGLContext};
26use net_traits::request::{Destination, RequestId};
27use net_traits::{
28    CoreResourceThread, FetchMetadata, FilteredMetadata, NetworkError, ResourceFetchTiming,
29};
30use paint_api::{CrossProcessPaintApi, ImageUpdate, SerializableImageData};
31use pixels::RasterImage;
32use script_bindings::codegen::InheritTypes::{
33    ElementTypeId, HTMLElementTypeId, HTMLMediaElementTypeId, NodeTypeId,
34};
35use script_bindings::root::assert_in_script;
36use script_bindings::weakref::WeakRef;
37use servo_base::generic_channel::GenericSharedMemory;
38use servo_base::id::WebViewId;
39use servo_config::pref;
40use servo_media::player::audio::AudioRenderer;
41use servo_media::player::video::{VideoFrame, VideoFrameRenderer};
42use servo_media::player::{PlaybackState, Player, PlayerError, PlayerEvent, SeekLock, StreamType};
43use servo_media::{ClientContextId, ServoMedia, SupportsMediaType};
44use servo_url::ServoUrl;
45use stylo_atoms::Atom;
46use uuid::Uuid;
47use webrender_api::{
48    ExternalImageData, ExternalImageId, ExternalImageType, ImageBufferKind, ImageDescriptor,
49    ImageDescriptorFlags, ImageFormat, ImageKey,
50};
51
52use crate::document_loader::{LoadBlocker, LoadType};
53use crate::dom::attr::Attr;
54use crate::dom::audio::audiotrack::AudioTrack;
55use crate::dom::audio::audiotracklist::AudioTrackList;
56use crate::dom::bindings::cell::DomRefCell;
57use crate::dom::bindings::codegen::Bindings::HTMLMediaElementBinding::{
58    CanPlayTypeResult, HTMLMediaElementConstants, HTMLMediaElementMethods,
59};
60use crate::dom::bindings::codegen::Bindings::MediaErrorBinding::MediaErrorConstants::*;
61use crate::dom::bindings::codegen::Bindings::MediaErrorBinding::MediaErrorMethods;
62use crate::dom::bindings::codegen::Bindings::NavigatorBinding::Navigator_Binding::NavigatorMethods;
63use crate::dom::bindings::codegen::Bindings::NodeBinding::Node_Binding::NodeMethods;
64use crate::dom::bindings::codegen::Bindings::TextTrackBinding::{TextTrackKind, TextTrackMode};
65use crate::dom::bindings::codegen::Bindings::URLBinding::URLMethods;
66use crate::dom::bindings::codegen::Bindings::WindowBinding::Window_Binding::WindowMethods;
67use crate::dom::bindings::codegen::UnionTypes::{
68    MediaStreamOrBlob, VideoTrackOrAudioTrackOrTextTrack,
69};
70use crate::dom::bindings::error::{Error, ErrorResult, Fallible};
71use crate::dom::bindings::inheritance::Castable;
72use crate::dom::bindings::num::Finite;
73use crate::dom::bindings::refcounted::Trusted;
74use crate::dom::bindings::reflector::DomGlobal;
75use crate::dom::bindings::root::{Dom, DomRoot, MutNullableDom};
76use crate::dom::bindings::str::{DOMString, USVString};
77use crate::dom::blob::Blob;
78use crate::dom::csp::{GlobalCspReporting, Violation};
79use crate::dom::document::Document;
80use crate::dom::element::{
81    AttributeMutation, AttributeMutationReason, CustomElementCreationMode, Element, ElementCreator,
82    cors_setting_for_element, reflect_cross_origin_attribute, set_cross_origin_attribute,
83};
84use crate::dom::event::Event;
85use crate::dom::eventtarget::EventTarget;
86use crate::dom::globalscope::GlobalScope;
87use crate::dom::html::htmlelement::HTMLElement;
88use crate::dom::html::htmlsourceelement::HTMLSourceElement;
89use crate::dom::html::htmlvideoelement::HTMLVideoElement;
90use crate::dom::mediaerror::MediaError;
91use crate::dom::mediafragmentparser::MediaFragmentParser;
92use crate::dom::medialist::MediaList;
93use crate::dom::mediastream::MediaStream;
94use crate::dom::node::{Node, NodeDamage, NodeTraits, UnbindContext};
95use crate::dom::performance::performanceresourcetiming::InitiatorType;
96use crate::dom::promise::Promise;
97use crate::dom::texttrack::TextTrack;
98use crate::dom::texttracklist::TextTrackList;
99use crate::dom::timeranges::{TimeRanges, TimeRangesContainer};
100use crate::dom::trackevent::TrackEvent;
101use crate::dom::url::URL;
102use crate::dom::videotrack::VideoTrack;
103use crate::dom::videotracklist::VideoTrackList;
104use crate::dom::virtualmethods::VirtualMethods;
105use crate::fetch::{FetchCanceller, RequestWithGlobalScope, create_a_potential_cors_request};
106use crate::microtask::{Microtask, MicrotaskRunnable};
107use crate::network_listener::{self, FetchResponseListener, ResourceTimingListener};
108use crate::realms::enter_auto_realm;
109use crate::script_runtime::CanGc;
110use crate::script_thread::ScriptThread;
111use crate::task_source::SendableTaskSource;
112
/// A CSS file to style the media controls.
static MEDIA_CONTROL_CSS: &str = include_str!("../../resources/media-controls.css");

/// A JS file to control the media controls.
static MEDIA_CONTROL_JS: &str = include_str!("../../resources/media-controls.js");

/// The media engine may report a seek-done position that differs slightly from the
/// requested position (e.g. snapping to the nearest keyframe), so we use a threshold
/// instead of strict equality. (Unit: seconds.)
const SEEK_POSITION_THRESHOLD: f64 = 0.5;
123
/// Whether the currently held video frame is locked for reading (e.g. while
/// the GL player thread renders from it). A locked frame must not be replaced.
#[derive(MallocSizeOf, PartialEq)]
enum FrameStatus {
    Locked,
    Unlocked,
}
129
/// A video frame paired with its lock status (tuple fields: status, frame).
#[derive(MallocSizeOf)]
struct FrameHolder(
    FrameStatus,
    #[ignore_malloc_size_of = "defined in servo-media"] VideoFrame,
);
135
136impl FrameHolder {
137    fn new(frame: VideoFrame) -> FrameHolder {
138        FrameHolder(FrameStatus::Unlocked, frame)
139    }
140
141    fn lock(&mut self) {
142        if self.0 == FrameStatus::Unlocked {
143            self.0 = FrameStatus::Locked;
144        };
145    }
146
147    fn unlock(&mut self) {
148        if self.0 == FrameStatus::Locked {
149            self.0 = FrameStatus::Unlocked;
150        };
151    }
152
153    fn set(&mut self, new_frame: VideoFrame) {
154        if self.0 == FrameStatus::Unlocked {
155            self.1 = new_frame
156        };
157    }
158
159    fn get(&self) -> (u32, Size2D<i32>, usize) {
160        if self.0 == FrameStatus::Locked {
161            (
162                self.1.get_texture_id(),
163                Size2D::new(self.1.get_width(), self.1.get_height()),
164                0,
165            )
166        } else {
167            unreachable!();
168        }
169    }
170
171    fn get_frame(&self) -> VideoFrame {
172        self.1.clone()
173    }
174}
175
/// Turns decoded video frames from the media player into compositor images.
#[derive(MallocSizeOf)]
pub(crate) struct MediaFrameRenderer {
    /// The webview whose compositor receives the image updates.
    webview_id: WebViewId,
    /// Identifier of the servo-media player, set by `setup()`.
    player_id: Option<usize>,
    /// Identifier assigned by the GL player thread when GL-texture rendering
    /// is available; `None` means frames are uploaded as raw bytes.
    glplayer_id: Option<u64>,
    /// Channel used to add/update/delete images in the compositor.
    paint_api: CrossProcessPaintApi,
    #[ignore_malloc_size_of = "Defined in other crates"]
    player_context: WindowGLContext,
    /// The image currently registered for the latest frame, if any.
    current_frame: Option<MediaFrame>,
    /// Image keys of previously displayed frames; `render()` rotates them
    /// (current -> old -> very old) before deleting, so the compositor never
    /// loses an image it may still be displaying.
    old_frame: Option<ImageKey>,
    very_old_frame: Option<ImageKey>,
    /// Holder for the most recent decoded frame, shared with the GL player
    /// thread via lock/unlock messages.
    current_frame_holder: Option<FrameHolder>,
    /// <https://html.spec.whatwg.org/multipage/#poster-frame>
    poster_frame: Option<MediaFrame>,
}
191
impl MediaFrameRenderer {
    /// Creates a renderer with no player attached and no frames yet.
    fn new(
        webview_id: WebViewId,
        paint_api: CrossProcessPaintApi,
        player_context: WindowGLContext,
    ) -> Self {
        Self {
            webview_id,
            player_id: None,
            glplayer_id: None,
            paint_api,
            player_context,
            current_frame: None,
            old_frame: None,
            very_old_frame: None,
            current_frame_holder: None,
            poster_frame: None,
        }
    }

    /// Associates this renderer with player `player_id` and, when a GL player
    /// thread is available, registers with it and routes its Lock/Unlock
    /// messages to the script thread.
    fn setup(
        &mut self,
        player_id: usize,
        task_source: SendableTaskSource,
        weak_video_renderer: Weak<Mutex<MediaFrameRenderer>>,
    ) {
        self.player_id = Some(player_id);

        // Register with the GL player thread, if any; the thread replies with
        // the id it assigned to this player on the same channel.
        let (glplayer_id, image_receiver) = self
            .player_context
            .glplayer_thread_sender
            .as_ref()
            .map(|sender| {
                let (image_sender, image_receiver) = ipc::channel::<GLPlayerMsgForward>().unwrap();
                sender
                    .send(GLPlayerMsg::RegisterPlayer(image_sender))
                    .unwrap();
                match image_receiver.recv().unwrap() {
                    GLPlayerMsgForward::PlayerId(id) => (Some(id), Some(image_receiver)),
                    _ => unreachable!(),
                }
            })
            .unwrap_or((None, None));

        self.glplayer_id = glplayer_id;

        // No GL player thread: there are no further messages to route.
        let Some(image_receiver) = image_receiver else {
            return;
        };

        // Forward Lock/Unlock requests onto the script task source, where the
        // frame holder can be accessed safely.
        ROUTER.add_typed_route(
            image_receiver,
            Box::new(move |message| {
                let message = message.unwrap();
                let weak_video_renderer = weak_video_renderer.clone();

                task_source.queue(task!(handle_glplayer_message: move || {
                    trace!("GLPlayer message {:?}", message);

                    // The renderer may have been dropped before this task ran.
                    let Some(video_renderer) = weak_video_renderer.upgrade() else {
                        return;
                    };

                    match message {
                        GLPlayerMsgForward::Lock(sender) => {
                            // Lock the current frame and reply with its
                            // texture id and size.
                            if let Some(holder) = video_renderer
                                .lock()
                                .unwrap()
                                .current_frame_holder
                                .as_mut() {
                                    holder.lock();
                                    sender.send(holder.get()).unwrap();
                                };
                        },
                        GLPlayerMsgForward::Unlock() => {
                            if let Some(holder) = video_renderer
                                .lock()
                                .unwrap()
                                .current_frame_holder
                                .as_mut() { holder.unlock() }
                        },
                        _ => (),
                    }
                }));
            }),
        );
    }

    /// Detaches from the player: unregisters from the GL player thread and
    /// deletes every image key this renderer still owns in the compositor.
    fn reset(&mut self) {
        self.player_id = None;

        if let Some(glplayer_id) = self.glplayer_id.take() {
            self.player_context
                .send(GLPlayerMsg::UnregisterPlayer(glplayer_id));
        }

        self.current_frame_holder = None;

        let mut updates = smallvec::smallvec![];

        if let Some(current_frame) = self.current_frame.take() {
            updates.push(ImageUpdate::DeleteImage(current_frame.image_key));
        }

        if let Some(old_image_key) = self.old_frame.take() {
            updates.push(ImageUpdate::DeleteImage(old_image_key));
        }

        if let Some(very_old_image_key) = self.very_old_frame.take() {
            updates.push(ImageUpdate::DeleteImage(very_old_image_key));
        }

        if !updates.is_empty() {
            self.paint_api
                .update_images(self.webview_id.into(), updates);
        }
    }

    /// Records the decoded poster image, if it has an image key, as the
    /// element's poster frame.
    /// <https://html.spec.whatwg.org/multipage/#poster-frame>
    fn set_poster_frame(&mut self, image: Option<Arc<RasterImage>>) {
        self.poster_frame = image.and_then(|image| {
            image.id.map(|image_key| MediaFrame {
                image_key,
                width: image.metadata.width as i32,
                height: image.metadata.height as i32,
            })
        });
    }
}
320
impl Drop for MediaFrameRenderer {
    fn drop(&mut self) {
        // Unregister from the GL player thread and delete any images still
        // registered with the compositor.
        self.reset();
    }
}
326
327impl VideoFrameRenderer for MediaFrameRenderer {
328    fn render(&mut self, frame: VideoFrame) {
329        if self.player_id.is_none() || (frame.is_gl_texture() && self.glplayer_id.is_none()) {
330            return;
331        }
332
333        let mut updates = smallvec::smallvec![];
334
335        if let Some(old_image_key) = mem::replace(&mut self.very_old_frame, self.old_frame.take()) {
336            updates.push(ImageUpdate::DeleteImage(old_image_key));
337        }
338
339        let descriptor = ImageDescriptor::new(
340            frame.get_width(),
341            frame.get_height(),
342            ImageFormat::BGRA8,
343            ImageDescriptorFlags::empty(),
344        );
345
346        match &mut self.current_frame {
347            Some(current_frame)
348                if current_frame.width == frame.get_width() &&
349                    current_frame.height == frame.get_height() =>
350            {
351                if !frame.is_gl_texture() {
352                    updates.push(ImageUpdate::UpdateImage(
353                        current_frame.image_key,
354                        descriptor,
355                        SerializableImageData::Raw(GenericSharedMemory::from_bytes(
356                            &frame.get_data(),
357                        )),
358                        None,
359                    ));
360                }
361
362                self.current_frame_holder
363                    .get_or_insert_with(|| FrameHolder::new(frame.clone()))
364                    .set(frame);
365
366                if let Some(old_image_key) = self.old_frame.take() {
367                    updates.push(ImageUpdate::DeleteImage(old_image_key));
368                }
369            },
370            Some(current_frame) => {
371                self.old_frame = Some(current_frame.image_key);
372
373                let Some(new_image_key) =
374                    self.paint_api.generate_image_key_blocking(self.webview_id)
375                else {
376                    return;
377                };
378
379                /* update current_frame */
380                current_frame.image_key = new_image_key;
381                current_frame.width = frame.get_width();
382                current_frame.height = frame.get_height();
383
384                let image_data = if frame.is_gl_texture() && self.glplayer_id.is_some() {
385                    let texture_target = if frame.is_external_oes() {
386                        ImageBufferKind::TextureExternal
387                    } else {
388                        ImageBufferKind::Texture2D
389                    };
390
391                    SerializableImageData::External(ExternalImageData {
392                        id: ExternalImageId(self.glplayer_id.unwrap()),
393                        channel_index: 0,
394                        image_type: ExternalImageType::TextureHandle(texture_target),
395                        normalized_uvs: false,
396                    })
397                } else {
398                    SerializableImageData::Raw(GenericSharedMemory::from_bytes(&frame.get_data()))
399                };
400
401                self.current_frame_holder
402                    .get_or_insert_with(|| FrameHolder::new(frame.clone()))
403                    .set(frame);
404
405                updates.push(ImageUpdate::AddImage(
406                    new_image_key,
407                    descriptor,
408                    image_data,
409                    false,
410                ));
411            },
412            None => {
413                let Some(image_key) = self.paint_api.generate_image_key_blocking(self.webview_id)
414                else {
415                    return;
416                };
417
418                self.current_frame = Some(MediaFrame {
419                    image_key,
420                    width: frame.get_width(),
421                    height: frame.get_height(),
422                });
423
424                let image_data = if frame.is_gl_texture() && self.glplayer_id.is_some() {
425                    let texture_target = if frame.is_external_oes() {
426                        ImageBufferKind::TextureExternal
427                    } else {
428                        ImageBufferKind::Texture2D
429                    };
430
431                    SerializableImageData::External(ExternalImageData {
432                        id: ExternalImageId(self.glplayer_id.unwrap()),
433                        channel_index: 0,
434                        image_type: ExternalImageType::TextureHandle(texture_target),
435                        normalized_uvs: false,
436                    })
437                } else {
438                    SerializableImageData::Raw(GenericSharedMemory::from_bytes(&frame.get_data()))
439                };
440
441                self.current_frame_holder = Some(FrameHolder::new(frame));
442
443                updates.push(ImageUpdate::AddImage(
444                    image_key, descriptor, image_data, false,
445                ));
446            },
447        }
448        self.paint_api
449            .update_images(self.webview_id.into(), updates);
450    }
451}
452
/// The media provider object assigned through `srcObject`, if any.
/// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
#[cfg_attr(crown, crown::unrooted_must_root_lint::must_root)]
#[derive(JSTraceable, MallocSizeOf)]
enum SrcObject {
    MediaStream(Dom<MediaStream>),
    Blob(Dom<Blob>),
}
459
460impl From<MediaStreamOrBlob> for SrcObject {
461    #[cfg_attr(crown, expect(crown::unrooted_must_root))]
462    fn from(src_object: MediaStreamOrBlob) -> SrcObject {
463        match src_object {
464            MediaStreamOrBlob::Blob(blob) => SrcObject::Blob(Dom::from_ref(&*blob)),
465            MediaStreamOrBlob::MediaStream(stream) => {
466                SrcObject::MediaStream(Dom::from_ref(&*stream))
467            },
468        }
469    }
470}
471
/// Which kind of source the media load algorithm is currently using, if any.
/// Roughly mirrors the "mode" of the resource selection algorithm.
#[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf, PartialEq)]
enum LoadState {
    NotLoaded,
    LoadingFromSrcObject,
    LoadingFromSrcAttribute,
    LoadingFromSourceChild,
    WaitingForSource,
}
480
/// A position in the media element's list of `<source>` children that the
/// resource selection algorithm can resume from.
/// <https://html.spec.whatwg.org/multipage/#loading-the-media-resource:media-element-29>
#[cfg_attr(crown, crown::unrooted_must_root_lint::must_root)]
#[derive(JSTraceable, MallocSizeOf)]
struct SourceChildrenPointer {
    /// The `<source>` element the pointer sits at.
    source_before_pointer: Dom<HTMLSourceElement>,
    /// Whether the pointer position includes `source_before_pointer` itself.
    /// NOTE(review): exact semantics depend on callers outside this view —
    /// confirm against the resource selection algorithm's usage.
    inclusive: bool,
}
488
489impl SourceChildrenPointer {
490    fn new(source_before_pointer: DomRoot<HTMLSourceElement>, inclusive: bool) -> Self {
491        Self {
492            source_before_pointer: source_before_pointer.as_traced(),
493            inclusive,
494        }
495    }
496}
497
/// Generally the presence of the loop attribute should be considered to mean playback has not
/// "ended", as "ended" and "looping" are mutually exclusive.
/// <https://html.spec.whatwg.org/multipage/#ended-playback>
#[derive(Clone, Copy, Debug, PartialEq)]
enum LoopCondition {
    /// Take the `loop` attribute into account when deciding "ended".
    Included,
    /// Decide "ended" as if the `loop` attribute were absent.
    Ignored,
}
506
#[dom_struct]
pub(crate) struct HTMLMediaElement {
    htmlelement: HTMLElement,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
    network_state: Cell<NetworkState>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
    ready_state: Cell<ReadyState>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
    src_object: DomRefCell<Option<SrcObject>>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-currentsrc>
    current_src: DomRefCell<String>,
    /// Incremented whenever tasks associated with this element are cancelled.
    generation_id: Cell<u32>,
    /// <https://html.spec.whatwg.org/multipage/#fire-loadeddata>
    ///
    /// Reset to false every time the load algorithm is invoked.
    fired_loadeddata_event: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-error>
    error: MutNullableDom<MediaError>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-paused>
    paused: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
    default_playback_rate: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
    playback_rate: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#attr-media-autoplay>
    autoplaying: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#delaying-the-load-event-flag>
    delaying_the_load_event_flag: DomRefCell<Option<LoadBlocker>>,
    /// <https://html.spec.whatwg.org/multipage/#list-of-pending-play-promises>
    #[conditional_malloc_size_of]
    pending_play_promises: DomRefCell<Vec<Rc<Promise>>>,
    /// Play promises which are soon to be fulfilled by a queued task.
    #[expect(clippy::type_complexity)]
    #[conditional_malloc_size_of]
    in_flight_play_promises_queue: DomRefCell<VecDeque<(Box<[Rc<Promise>]>, ErrorResult)>>,
    /// The servo-media player backing this element, if one has been created.
    #[ignore_malloc_size_of = "servo_media"]
    #[no_trace]
    player: DomRefCell<Option<Arc<Mutex<dyn Player>>>>,
    /// Renderer turning decoded video frames into compositor images.
    #[conditional_malloc_size_of]
    #[no_trace]
    video_renderer: Arc<Mutex<MediaFrameRenderer>>,
    /// Optional sink for decoded audio, if one was set.
    #[ignore_malloc_size_of = "servo_media"]
    #[no_trace]
    audio_renderer: DomRefCell<Option<Arc<Mutex<dyn AudioRenderer>>>>,
    /// Handler for events coming from the backend player, if set up.
    #[conditional_malloc_size_of]
    #[no_trace]
    event_handler: RefCell<Option<Arc<Mutex<HTMLMediaElementEventHandler>>>>,
    /// <https://html.spec.whatwg.org/multipage/#show-poster-flag>
    show_poster: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-duration>
    duration: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#current-playback-position>
    current_playback_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#official-playback-position>
    official_playback_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#default-playback-start-position>
    default_playback_start_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-volume>
    volume: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-seeking>
    seeking: Cell<bool>,
    /// The latest seek position (in seconds) is used to distinguish whether the seek request was
    /// initiated by a script or by the user agent itself, rather than by the media engine and to
    /// abort other running instance of the `seek` algorithm.
    current_seek_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-muted>
    muted: Cell<bool>,
    /// Loading state from source, if any.
    load_state: Cell<LoadState>,
    /// Position in the list of `<source>` children for the resource selection
    /// algorithm to resume from (see [`SourceChildrenPointer`]).
    source_children_pointer: DomRefCell<Option<SourceChildrenPointer>>,
    /// The `<source>` child currently used by the load algorithm, if any.
    current_source_child: MutNullableDom<HTMLSourceElement>,
    /// URL of the media resource, if any.
    #[no_trace]
    resource_url: DomRefCell<Option<ServoUrl>>,
    /// URL of the media resource, if the resource is set through the src_object attribute and it
    /// is a blob.
    #[no_trace]
    blob_url: DomRefCell<Option<ServoUrl>>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-played>
    played: DomRefCell<TimeRangesContainer>,
    // https://html.spec.whatwg.org/multipage/#dom-media-audiotracks
    audio_tracks_list: MutNullableDom<AudioTrackList>,
    // https://html.spec.whatwg.org/multipage/#dom-media-videotracks
    video_tracks_list: MutNullableDom<VideoTrackList>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-texttracks>
    text_tracks_list: MutNullableDom<TextTrackList>,
    /// Time of last timeupdate notification.
    #[ignore_malloc_size_of = "Defined in std::time"]
    next_timeupdate_event: Cell<Instant>,
    /// Latest fetch request context.
    current_fetch_context: RefCell<Option<HTMLMediaElementFetchContext>>,
    /// Media controls id.
    /// In order to workaround the lack of privileged JS context, we secure the
    /// access to the "privileged" document.servoGetMediaControls(id) API by
    /// keeping a whitelist of media controls identifiers.
    media_controls_id: DomRefCell<Option<String>>,
}
605
/// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
///
/// Mirrors the `NETWORK_*` constants exposed on the DOM interface.
#[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq)]
#[repr(u8)]
pub(crate) enum NetworkState {
    Empty = HTMLMediaElementConstants::NETWORK_EMPTY as u8,
    Idle = HTMLMediaElementConstants::NETWORK_IDLE as u8,
    Loading = HTMLMediaElementConstants::NETWORK_LOADING as u8,
    NoSource = HTMLMediaElementConstants::NETWORK_NO_SOURCE as u8,
}
615
/// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
///
/// Mirrors the `HAVE_*` constants exposed on the DOM interface. Derives
/// `PartialOrd` so readiness levels can be compared with `<`/`>=`.
#[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf, PartialEq, PartialOrd)]
#[repr(u8)]
#[expect(clippy::enum_variant_names)] // Clippy warning silenced here because these names are from the specification.
pub(crate) enum ReadyState {
    HaveNothing = HTMLMediaElementConstants::HAVE_NOTHING as u8,
    HaveMetadata = HTMLMediaElementConstants::HAVE_METADATA as u8,
    HaveCurrentData = HTMLMediaElementConstants::HAVE_CURRENT_DATA as u8,
    HaveFutureData = HTMLMediaElementConstants::HAVE_FUTURE_DATA as u8,
    HaveEnoughData = HTMLMediaElementConstants::HAVE_ENOUGH_DATA as u8,
}
627
/// Whether the current playback position moves forwards or backwards in time.
/// <https://html.spec.whatwg.org/multipage/#direction-of-playback>
#[derive(Clone, Copy, PartialEq)]
enum PlaybackDirection {
    Forwards,
    Backwards,
}
634
635impl HTMLMediaElement {
636    pub(crate) fn new_inherited(
637        tag_name: LocalName,
638        prefix: Option<Prefix>,
639        document: &Document,
640    ) -> Self {
641        Self {
642            htmlelement: HTMLElement::new_inherited(tag_name, prefix, document),
643            network_state: Cell::new(NetworkState::Empty),
644            ready_state: Cell::new(ReadyState::HaveNothing),
645            src_object: Default::default(),
646            current_src: DomRefCell::new("".to_owned()),
647            generation_id: Cell::new(0),
648            fired_loadeddata_event: Cell::new(false),
649            error: Default::default(),
650            paused: Cell::new(true),
651            default_playback_rate: Cell::new(1.0),
652            playback_rate: Cell::new(1.0),
653            muted: Cell::new(false),
654            load_state: Cell::new(LoadState::NotLoaded),
655            source_children_pointer: DomRefCell::new(None),
656            current_source_child: Default::default(),
657            // FIXME(nox): Why is this initialised to true?
658            autoplaying: Cell::new(true),
659            delaying_the_load_event_flag: Default::default(),
660            pending_play_promises: Default::default(),
661            in_flight_play_promises_queue: Default::default(),
662            player: Default::default(),
663            video_renderer: Arc::new(Mutex::new(MediaFrameRenderer::new(
664                document.webview_id(),
665                document.window().paint_api().clone(),
666                document.window().get_player_context(),
667            ))),
668            audio_renderer: Default::default(),
669            event_handler: Default::default(),
670            show_poster: Cell::new(true),
671            duration: Cell::new(f64::NAN),
672            current_playback_position: Cell::new(0.),
673            official_playback_position: Cell::new(0.),
674            default_playback_start_position: Cell::new(0.),
675            volume: Cell::new(1.0),
676            seeking: Cell::new(false),
677            current_seek_position: Cell::new(f64::NAN),
678            resource_url: DomRefCell::new(None),
679            blob_url: DomRefCell::new(None),
680            played: DomRefCell::new(TimeRangesContainer::default()),
681            audio_tracks_list: Default::default(),
682            video_tracks_list: Default::default(),
683            text_tracks_list: Default::default(),
684            next_timeupdate_event: Cell::new(Instant::now() + Duration::from_millis(250)),
685            current_fetch_context: RefCell::new(None),
686            media_controls_id: DomRefCell::new(None),
687        }
688    }
689
    /// Returns the element's current network state.
    /// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
    pub(crate) fn network_state(&self) -> NetworkState {
        self.network_state.get()
    }
693
    /// Returns the element's current ready state.
    /// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
    pub(crate) fn get_ready_state(&self) -> ReadyState {
        self.ready_state.get()
    }
697
698    fn media_type_id(&self) -> HTMLMediaElementTypeId {
699        match self.upcast::<Node>().type_id() {
700            NodeTypeId::Element(ElementTypeId::HTMLElement(
701                HTMLElementTypeId::HTMLMediaElement(media_type_id),
702            )) => media_type_id,
703            _ => unreachable!(),
704        }
705    }
706
    /// Synchronizes the backend servo-media player with the element's desired
    /// playback state: pushes playback rate and volume, then plays or pauses.
    fn update_media_state(&self) {
        // Whether the backend player reports itself as currently playing.
        let is_playing = self
            .player
            .borrow()
            .as_ref()
            .is_some_and(|player| !player.lock().unwrap().paused());

        if self.is_potentially_playing() && !is_playing {
            if let Some(ref player) = *self.player.borrow() {
                let player = player.lock().unwrap();

                // Push the element's rate and volume to the backend before
                // starting playback; failures are logged, not propagated.
                if let Err(error) = player.set_playback_rate(self.playback_rate.get()) {
                    warn!("Could not set the playback rate: {error:?}");
                }
                if let Err(error) = player.set_volume(self.volume.get()) {
                    warn!("Could not set the volume: {error:?}");
                }
                if let Err(error) = player.play() {
                    error!("Could not play media: {error:?}");
                }
            }
        } else if is_playing {
            // NOTE(review): when the element is potentially playing AND the
            // backend is already playing, the first condition is false and
            // this branch pauses the player. Confirm callers only invoke this
            // when a state change is actually desired.
            if let Some(ref player) = *self.player.borrow() {
                if let Err(error) = player.lock().unwrap().pause() {
                    error!("Could not pause player: {error:?}");
                }
            }
        }
    }
736
737    /// Marks that element as delaying the load event or not.
738    ///
739    /// Nothing happens if the element was already delaying the load event and
740    /// we pass true to that method again.
741    ///
742    /// <https://html.spec.whatwg.org/multipage/#delaying-the-load-event-flag>
743    pub(crate) fn delay_load_event(&self, delay: bool, cx: &mut js::context::JSContext) {
744        let blocker = &self.delaying_the_load_event_flag;
745        if delay && blocker.borrow().is_none() {
746            *blocker.borrow_mut() = Some(LoadBlocker::new(&self.owner_document(), LoadType::Media));
747        } else if !delay && blocker.borrow().is_some() {
748            LoadBlocker::terminate(blocker, cx);
749        }
750    }
751
752    /// <https://html.spec.whatwg.org/multipage/#time-marches-on>
753    fn time_marches_on(&self) {
754        // Step 6. If the time was reached through the usual monotonic increase of the current
755        // playback position during normal playback, and if the user agent has not fired a
756        // timeupdate event at the element in the past 15 to 250ms and is not still running event
757        // handlers for such an event, then the user agent must queue a media element task given the
758        // media element to fire an event named timeupdate at the element.
759        if Instant::now() > self.next_timeupdate_event.get() {
760            self.queue_media_element_task_to_fire_event(atom!("timeupdate"));
761            self.next_timeupdate_event
762                .set(Instant::now() + Duration::from_millis(250));
763        }
764    }
765
    /// <https://html.spec.whatwg.org/multipage/#internal-play-steps>
    ///
    /// Shared play logic: kicks off resource selection if needed, rewinds an
    /// ended playback, unpauses, and queues the play/waiting/playing events.
    fn internal_play_steps(&self, cx: &mut js::context::JSContext) {
        // Step 1. If the media element's networkState attribute has the value NETWORK_EMPTY, invoke
        // the media element's resource selection algorithm.
        if self.network_state.get() == NetworkState::Empty {
            self.invoke_resource_selection_algorithm(cx);
        }

        // Step 2. If the playback has ended and the direction of playback is forwards, seek to the
        // earliest possible position of the media resource.
        // Generally "ended" and "looping" are exclusive. Here, the loop attribute is ignored to
        // seek back to start in case loop was set after playback ended.
        // <https://github.com/whatwg/html/issues/4487>
        if self.ended_playback(LoopCondition::Ignored) &&
            self.direction_of_playback() == PlaybackDirection::Forwards
        {
            self.seek(
                self.earliest_possible_position(),
                /* approximate_for_speed */ false,
            );
        }

        // Snapshot the ready state once; both step 3.4 and step 4 consult it.
        let state = self.ready_state.get();

        // Step 3. If the media element's paused attribute is true, then:
        if self.Paused() {
            // Step 3.1. Change the value of paused to false.
            self.paused.set(false);

            // Step 3.2. If the show poster flag is true, set the element's show poster flag to
            // false and run the time marches on steps.
            if self.show_poster.get() {
                self.show_poster.set(false);
                self.time_marches_on();
            }

            // Step 3.3. Queue a media element task given the media element to fire an event named
            // play at the element.
            self.queue_media_element_task_to_fire_event(atom!("play"));

            // Step 3.4. If the media element's readyState attribute has the value HAVE_NOTHING,
            // HAVE_METADATA, or HAVE_CURRENT_DATA, queue a media element task given the media
            // element to fire an event named waiting at the element. Otherwise, the media element's
            // readyState attribute has the value HAVE_FUTURE_DATA or HAVE_ENOUGH_DATA: notify about
            // playing for the element.
            match state {
                ReadyState::HaveNothing |
                ReadyState::HaveMetadata |
                ReadyState::HaveCurrentData => {
                    self.queue_media_element_task_to_fire_event(atom!("waiting"));
                },
                ReadyState::HaveFutureData | ReadyState::HaveEnoughData => {
                    self.notify_about_playing();
                },
            }
        }
        // Step 4. Otherwise, if the media element's readyState attribute has the value
        // HAVE_FUTURE_DATA or HAVE_ENOUGH_DATA, take pending play promises and queue a media
        // element task given the media element to resolve pending play promises with the
        // result.
        else if state == ReadyState::HaveFutureData || state == ReadyState::HaveEnoughData {
            self.take_pending_play_promises(Ok(()));

            let this = Trusted::new(self);
            let generation_id = self.generation_id.get();

            self.owner_global()
                .task_manager()
                .media_element_task_source()
                .queue(task!(resolve_pending_play_promises: move || {
                    let this = this.root();
                    // A later load() invocation makes this task stale; drop it.
                    if generation_id != this.generation_id.get() {
                        return;
                    }

                    this.fulfill_in_flight_play_promises(|| {});
                }));
        }

        // Step 5. Set the media element's can autoplay flag to false.
        self.autoplaying.set(false);

        self.update_media_state();
    }
850
    /// <https://html.spec.whatwg.org/multipage/#internal-pause-steps>
    ///
    /// Shared pause logic: clears the can-autoplay flag, flips `paused`,
    /// aborts pending play promises and queues the timeupdate/pause events.
    fn internal_pause_steps(&self) {
        // Step 1. Set the media element's can autoplay flag to false.
        self.autoplaying.set(false);

        // Step 2. If the media element's paused attribute is false, run the following steps:
        if !self.Paused() {
            // Step 2.1. Change the value of paused to true.
            self.paused.set(true);

            // Step 2.2. Take pending play promises and let promises be the result.
            self.take_pending_play_promises(Err(Error::Abort(None)));

            // Step 2.3. Queue a media element task given the media element and the following steps:
            let this = Trusted::new(self);
            let generation_id = self.generation_id.get();

            self.owner_global()
                .task_manager()
                .media_element_task_source()
                .queue(task!(internal_pause_steps: move || {
                    let this = this.root();
                    // A later load() invocation makes this task stale; drop it.
                    if generation_id != this.generation_id.get() {
                        return;
                    }

                    this.fulfill_in_flight_play_promises(|| {
                        // Step 2.3.1. Fire an event named timeupdate at the element.
                        this.upcast::<EventTarget>().fire_event(atom!("timeupdate"), CanGc::note());

                        // Step 2.3.2. Fire an event named pause at the element.
                        this.upcast::<EventTarget>().fire_event(atom!("pause"), CanGc::note());

                        // Step 2.3.3. Reject pending play promises with promises and an
                        // "AbortError" DOMException.
                        // Done after running this closure in `fulfill_in_flight_play_promises`.
                    });
                }));

            // Step 2.4. Set the official playback position to the current playback position.
            self.official_playback_position
                .set(self.current_playback_position.get());
        }

        self.update_media_state();
    }
897
    /// <https://html.spec.whatwg.org/multipage/#allowed-to-play>
    ///
    /// Always returns true: no autoplay-blocking policy is implemented here,
    /// so media is unconditionally allowed to play.
    fn is_allowed_to_play(&self) -> bool {
        true
    }
902
903    /// <https://html.spec.whatwg.org/multipage/#notify-about-playing>
904    fn notify_about_playing(&self) {
905        // Step 1. Take pending play promises and let promises be the result.
906        self.take_pending_play_promises(Ok(()));
907
908        // Step 2. Queue a media element task given the element and the following steps:
909        let this = Trusted::new(self);
910        let generation_id = self.generation_id.get();
911
912        self.owner_global()
913            .task_manager()
914            .media_element_task_source()
915            .queue(task!(notify_about_playing: move || {
916                let this = this.root();
917                if generation_id != this.generation_id.get() {
918                    return;
919                }
920
921                this.fulfill_in_flight_play_promises(|| {
922                    // Step 2.1. Fire an event named playing at the element.
923                    this.upcast::<EventTarget>().fire_event(atom!("playing"), CanGc::note());
924
925                    // Step 2.2. Resolve pending play promises with promises.
926                    // Done after running this closure in `fulfill_in_flight_play_promises`.
927                });
928            }));
929    }
930
    /// <https://html.spec.whatwg.org/multipage/#ready-states>
    ///
    /// Stores the new ready state and queues the transition events
    /// (loadedmetadata, loadeddata, canplay, canplaythrough, play) mandated
    /// by the spec for this old-state/new-state pair.
    fn change_ready_state(&self, ready_state: ReadyState) {
        let old_ready_state = self.ready_state.get();
        self.ready_state.set(ready_state);

        // While no resource is assigned, no transition events are fired.
        if self.network_state.get() == NetworkState::Empty {
            return;
        }

        // Nothing to announce if the state did not actually change.
        if old_ready_state == ready_state {
            return;
        }

        // Step 1. Apply the first applicable set of substeps from the following list:
        match (old_ready_state, ready_state) {
            // => "If the previous ready state was HAVE_NOTHING, and the new ready state is
            // HAVE_METADATA"
            (ReadyState::HaveNothing, ReadyState::HaveMetadata) => {
                // Queue a media element task given the media element to fire an event named
                // loadedmetadata at the element.
                self.queue_media_element_task_to_fire_event(atom!("loadedmetadata"));
                // No other steps are applicable in this case.
                return;
            },
            // => "If the previous ready state was HAVE_METADATA and the new ready state is
            // HAVE_CURRENT_DATA or greater"
            (ReadyState::HaveMetadata, new) if new >= ReadyState::HaveCurrentData => {
                // If this is the first time this occurs for this media element since the load()
                // algorithm was last invoked, the user agent must queue a media element task given
                // the media element to fire an event named loadeddata at the element.
                if !self.fired_loadeddata_event.get() {
                    self.fired_loadeddata_event.set(true);

                    let this = Trusted::new(self);
                    let generation_id = self.generation_id.get();

                    self.owner_global()
                        .task_manager()
                        .media_element_task_source()
                        .queue(task!(media_reached_current_data: move |cx| {
                            let this = this.root();
                            // A later load() invocation makes this task stale; drop it.
                            if generation_id != this.generation_id.get() {
                                return;
                            }

                            this.upcast::<EventTarget>().fire_event(atom!("loadeddata"), CanGc::from_cx(cx));
                            // Once the readyState attribute reaches HAVE_CURRENT_DATA, after the
                            // loadeddata event has been fired, set the element's
                            // delaying-the-load-event flag to false.
                            this.delay_load_event(false, cx);
                        }));
                }

                // Steps for the transition from HaveMetadata to HaveCurrentData
                // or HaveFutureData also apply here, as per the next match
                // expression.
            },
            (ReadyState::HaveFutureData, new) if new <= ReadyState::HaveCurrentData => {
                // FIXME(nox): Queue a task to fire timeupdate and waiting
                // events if the conditions call from the spec are met.

                // No other steps are applicable in this case.
                return;
            },

            _ => (),
        }

        // The remaining substeps are not mutually exclusive with the match
        // above (except for the arms that returned early), so they are checked
        // unconditionally from here on.

        // => "If the previous ready state was HAVE_CURRENT_DATA or less, and the new ready state is
        // HAVE_FUTURE_DATA or more"
        if old_ready_state <= ReadyState::HaveCurrentData &&
            ready_state >= ReadyState::HaveFutureData
        {
            // The user agent must queue a media element task given the media element to fire an
            // event named canplay at the element.
            self.queue_media_element_task_to_fire_event(atom!("canplay"));

            // If the element's paused attribute is false, the user agent must notify about playing
            // for the element.
            if !self.Paused() {
                self.notify_about_playing();
            }
        }

        // => "If the new ready state is HAVE_ENOUGH_DATA"
        if ready_state == ReadyState::HaveEnoughData {
            // The user agent must queue a media element task given the media element to fire an
            // event named canplaythrough at the element.
            self.queue_media_element_task_to_fire_event(atom!("canplaythrough"));

            // If the element is eligible for autoplay, then the user agent may run the following
            // substeps:
            if self.eligible_for_autoplay() {
                // Step 1. Set the paused attribute to false.
                self.paused.set(false);

                // Step 2. If the element's show poster flag is true, set it to false and run the
                // time marches on steps.
                if self.show_poster.get() {
                    self.show_poster.set(false);
                    self.time_marches_on();
                }

                // Step 3. Queue a media element task given the element to fire an event named play
                // at the element.
                self.queue_media_element_task_to_fire_event(atom!("play"));

                // Step 4. Notify about playing for the element.
                self.notify_about_playing();
            }
        }

        self.update_media_state();
    }
1045
1046    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1047    fn invoke_resource_selection_algorithm(&self, cx: &mut js::context::JSContext) {
1048        // Step 1. Set the element's networkState attribute to the NETWORK_NO_SOURCE value.
1049        self.network_state.set(NetworkState::NoSource);
1050
1051        // Step 2. Set the element's show poster flag to true.
1052        self.show_poster.set(true);
1053
1054        // Step 3. Set the media element's delaying-the-load-event flag to true (this delays the
1055        // load event).
1056        self.delay_load_event(true, cx);
1057
1058        // Step 4. Await a stable state, allowing the task that invoked this algorithm to continue.
1059        // If the resource selection mode in the synchronous section is
1060        // "attribute", the URL of the resource to fetch is relative to the
1061        // media element's node document when the src attribute was last
1062        // changed, which is why we need to pass the base URL in the task
1063        // right here.
1064        let task = MediaElementMicrotask::ResourceSelection {
1065            elem: DomRoot::from_ref(self),
1066            generation_id: self.generation_id.get(),
1067            base_url: self.owner_document().base_url(),
1068        };
1069
1070        // FIXME(nox): This will later call the resource_selection_algorithm_sync
1071        // method from below, if microtasks were trait objects, we would be able
1072        // to put the code directly in this method, without the boilerplate
1073        // indirections.
1074        ScriptThread::await_stable_state(Microtask::MediaElement(task));
1075    }
1076
1077    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1078    fn resource_selection_algorithm_sync(
1079        &self,
1080        base_url: ServoUrl,
1081        cx: &mut js::context::JSContext,
1082    ) {
1083        // TODO Step 5. If the media element's blocked-on-parser flag is false, then populate the
1084        // list of pending text tracks.
1085        // FIXME(ferjm): Implement blocked_on_parser logic
1086        // https://html.spec.whatwg.org/multipage/#blocked-on-parser
1087        // FIXME(nox): Maybe populate the list of pending text tracks.
1088
1089        enum Mode {
1090            Object,
1091            Attribute(String),
1092            Children(DomRoot<HTMLSourceElement>),
1093        }
1094
1095        // Step 6.
1096        let mode = if self.src_object.borrow().is_some() {
1097            // If the media element has an assigned media provider object, then let mode be object.
1098            Mode::Object
1099        } else if let Some(attribute) = self.upcast::<Element>().get_attribute(&local_name!("src"))
1100        {
1101            // Otherwise, if the media element has no assigned media provider object but has a src
1102            // attribute, then let mode be attribute.
1103            Mode::Attribute((**attribute.value()).to_owned())
1104        } else if let Some(source) = self
1105            .upcast::<Node>()
1106            .children()
1107            .find_map(DomRoot::downcast::<HTMLSourceElement>)
1108        {
1109            // Otherwise, if the media element does not have an assigned media provider object and
1110            // does not have a src attribute, but does have a source element child, then let mode be
1111            // children and let candidate be the first such source element child in tree order.
1112            Mode::Children(source)
1113        } else {
1114            // Otherwise, the media element has no assigned media provider object and has neither a
1115            // src attribute nor a source element child:
1116            self.load_state.set(LoadState::NotLoaded);
1117
1118            // Step 6.none.1. Set the networkState to NETWORK_EMPTY.
1119            self.network_state.set(NetworkState::Empty);
1120
1121            // Step 6.none.2. Set the element's delaying-the-load-event flag to false. This stops
1122            // delaying the load event.
1123            self.delay_load_event(false, cx);
1124
1125            // Step 6.none.3. End the synchronous section and return.
1126            return;
1127        };
1128
1129        // Step 7. Set the media element's networkState to NETWORK_LOADING.
1130        self.network_state.set(NetworkState::Loading);
1131
1132        // Step 8. Queue a media element task given the media element to fire an event named
1133        // loadstart at the media element.
1134        self.queue_media_element_task_to_fire_event(atom!("loadstart"));
1135
1136        // Step 9. Run the appropriate steps from the following list:
1137        match mode {
1138            Mode::Object => {
1139                // => "If mode is object"
1140                self.load_from_src_object();
1141            },
1142            Mode::Attribute(src) => {
1143                // => "If mode is attribute"
1144                self.load_from_src_attribute(base_url, &src);
1145            },
1146            Mode::Children(source) => {
1147                // => "Otherwise (mode is children)""
1148                self.load_from_source_child(&source);
1149            },
1150        }
1151    }
1152
1153    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1154    fn load_from_src_object(&self) {
1155        self.load_state.set(LoadState::LoadingFromSrcObject);
1156
1157        // Step 9.object.1. Set the currentSrc attribute to the empty string.
1158        "".clone_into(&mut self.current_src.borrow_mut());
1159
1160        // Step 9.object.3. Run the resource fetch algorithm with the assigned media
1161        // provider object. If that algorithm returns without aborting this one, then the
1162        // load failed.
1163        // Note that the resource fetch algorithm itself takes care of the cleanup in case
1164        // of failure itself.
1165        self.resource_fetch_algorithm(Resource::Object);
1166    }
1167
1168    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1169    fn load_from_src_attribute(&self, base_url: ServoUrl, src: &str) {
1170        self.load_state.set(LoadState::LoadingFromSrcAttribute);
1171
1172        // Step 9.attribute.1. If the src attribute's value is the empty string, then end
1173        // the synchronous section, and jump down to the failed with attribute step below.
1174        if src.is_empty() {
1175            self.queue_dedicated_media_source_failure_steps();
1176            return;
1177        }
1178
1179        // Step 9.attribute.2. Let urlRecord be the result of encoding-parsing a URL given
1180        // the src attribute's value, relative to the media element's node document when the
1181        // src attribute was last changed.
1182        let Ok(url_record) = base_url.join(src) else {
1183            self.queue_dedicated_media_source_failure_steps();
1184            return;
1185        };
1186
1187        // Step 9.attribute.3. If urlRecord is not failure, then set the currentSrc
1188        // attribute to the result of applying the URL serializer to urlRecord.
1189        *self.current_src.borrow_mut() = url_record.as_str().into();
1190
1191        // Step 9.attribute.5. If urlRecord is not failure, then run the resource fetch
1192        // algorithm with urlRecord. If that algorithm returns without aborting this one,
1193        // then the load failed.
1194        // Note that the resource fetch algorithm itself takes care
1195        // of the cleanup in case of failure itself.
1196        self.resource_fetch_algorithm(Resource::Url(url_record));
1197    }
1198
    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    ///
    /// Processes one `<source>` candidate: validates its src/media/type
    /// attributes and, if acceptable, starts fetching it; otherwise jumps to
    /// the failed-with-elements steps.
    fn load_from_source_child(&self, source: &HTMLSourceElement) {
        self.load_state.set(LoadState::LoadingFromSourceChild);

        // Step 9.children.1. Let pointer be a position defined by two adjacent nodes in the media
        // element's child list, treating the start of the list (before the first child in the list,
        // if any) and end of the list (after the last child in the list, if any) as nodes in their
        // own right. One node is the node before pointer, and the other node is the node after
        // pointer. Initially, let pointer be the position between the candidate node and the next
        // node, if there are any, or the end of the list, if it is the last node.
        // The `false` (non-inclusive) flag makes `select_next_source_child` resume
        // its search at the sibling *after* this candidate.
        *self.source_children_pointer.borrow_mut() =
            Some(SourceChildrenPointer::new(DomRoot::from_ref(source), false));

        let element = source.upcast::<Element>();

        // Step 9.children.2. Process candidate: If candidate does not have a src attribute, or if
        // its src attribute's value is the empty string, then end the synchronous section, and jump
        // down to the failed with elements step below.
        let Some(src) = element
            .get_attribute(&local_name!("src"))
            .filter(|attribute| !attribute.value().is_empty())
        else {
            self.load_from_source_child_failure_steps(source);
            return;
        };

        // Step 9.children.3. If candidate has a media attribute whose value does not match the
        // environment, then end the synchronous section, and jump down to the failed with elements
        // step below.
        if let Some(media) = element.get_attribute(&local_name!("media")) {
            if !MediaList::matches_environment(&element.owner_document(), &media.value()) {
                self.load_from_source_child_failure_steps(source);
                return;
            }
        }

        // Step 9.children.4. Let urlRecord be the result of encoding-parsing a URL given
        // candidate's src attribute's value, relative to candidate's node document when the src
        // attribute was last changed.
        let Ok(url_record) = source.owner_document().base_url().join(&src.value()) else {
            // Step 9.children.5. If urlRecord is failure, then end the synchronous section,
            // and jump down to the failed with elements step below.
            self.load_from_source_child_failure_steps(source);
            return;
        };

        // Step 9.children.6. If candidate has a type attribute whose value, when parsed as a MIME
        // type (including any codecs described by the codecs parameter, for types that define that
        // parameter), represents a type that the user agent knows it cannot render, then end the
        // synchronous section, and jump down to the failed with elements step below.
        if let Some(type_) = element.get_attribute(&local_name!("type")) {
            if ServoMedia::get().can_play_type(&type_.value()) == SupportsMediaType::No {
                self.load_from_source_child_failure_steps(source);
                return;
            }
        }

        // Reset the media player before loading the next source child.
        self.reset_media_player();

        // Remember the candidate so `resource_selection_algorithm_failure_steps`
        // can fire `error` at it if the fetch fails.
        self.current_source_child.set(Some(source));

        // Step 9.children.7. Set the currentSrc attribute to the result of applying the URL
        // serializer to urlRecord.
        *self.current_src.borrow_mut() = url_record.as_str().into();

        // Step 9.children.9. Run the resource fetch algorithm with urlRecord. If that
        // algorithm returns without aborting this one, then the load failed.
        // Note that the resource fetch algorithm itself takes care
        // of the cleanup in case of failure itself.
        self.resource_fetch_algorithm(Resource::Url(url_record));
    }
1271
1272    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1273    fn load_from_source_child_failure_steps(&self, source: &HTMLSourceElement) {
1274        // Step 9.children.10. Failed with elements: Queue a media element task given the media
1275        // element to fire an event named error at candidate.
1276        let trusted_this = Trusted::new(self);
1277        let trusted_source = Trusted::new(source);
1278        let generation_id = self.generation_id.get();
1279
1280        self.owner_global()
1281            .task_manager()
1282            .media_element_task_source()
1283            .queue(task!(queue_error_event: move |cx| {
1284                let this = trusted_this.root();
1285                if generation_id != this.generation_id.get() {
1286                    return;
1287                }
1288
1289                let source = trusted_source.root();
1290                source.upcast::<EventTarget>().fire_event(atom!("error"), CanGc::from_cx(cx));
1291            }));
1292
1293        // Step 9.children.11. Await a stable state.
1294        let task = MediaElementMicrotask::SelectNextSourceChild {
1295            elem: DomRoot::from_ref(self),
1296            generation_id: self.generation_id.get(),
1297        };
1298
1299        ScriptThread::await_stable_state(Microtask::MediaElement(task));
1300    }
1301
1302    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1303    fn select_next_source_child(&self, can_gc: CanGc) {
1304        // Step 9.children.12. Forget the media element's media-resource-specific tracks.
1305        self.AudioTracks(can_gc).clear();
1306        self.VideoTracks(can_gc).clear();
1307
1308        // Step 9.children.13. Find next candidate: Let candidate be null.
1309        let mut source_candidate = None;
1310
1311        // Step 9.children.14. Search loop: If the node after pointer is the end of the list, then
1312        // jump to the waiting step below.
1313        // Step 9.children.15. If the node after pointer is a source element, let candidate be that
1314        // element.
1315        // Step 9.children.16. Advance pointer so that the node before pointer is now the node that
1316        // was after pointer, and the node after pointer is the node after the node that used to be
1317        // after pointer, if any.
1318        if let Some(ref source_children_pointer) = *self.source_children_pointer.borrow() {
1319            // Note that shared implementation between opaque types from
1320            // `inclusively_following_siblings` and `following_siblings` if not possible due to
1321            // precise capturing.
1322            if source_children_pointer.inclusive {
1323                for next_sibling in source_children_pointer
1324                    .source_before_pointer
1325                    .upcast::<Node>()
1326                    .inclusively_following_siblings()
1327                {
1328                    if let Some(next_source) = DomRoot::downcast::<HTMLSourceElement>(next_sibling)
1329                    {
1330                        source_candidate = Some(next_source);
1331                        break;
1332                    }
1333                }
1334            } else {
1335                for next_sibling in source_children_pointer
1336                    .source_before_pointer
1337                    .upcast::<Node>()
1338                    .following_siblings()
1339                {
1340                    if let Some(next_source) = DomRoot::downcast::<HTMLSourceElement>(next_sibling)
1341                    {
1342                        source_candidate = Some(next_source);
1343                        break;
1344                    }
1345                }
1346            };
1347        }
1348
1349        // Step 9.children.17. If candidate is null, jump back to the search loop step. Otherwise,
1350        // jump back to the process candidate step.
1351        if let Some(source_candidate) = source_candidate {
1352            self.load_from_source_child(&source_candidate);
1353            return;
1354        }
1355
1356        self.load_state.set(LoadState::WaitingForSource);
1357
1358        *self.source_children_pointer.borrow_mut() = None;
1359
1360        // Step 9.children.18. Waiting: Set the element's networkState attribute to the
1361        // NETWORK_NO_SOURCE value.
1362        self.network_state.set(NetworkState::NoSource);
1363
1364        // Step 9.children.19. Set the element's show poster flag to true.
1365        self.show_poster.set(true);
1366
1367        // Step 9.children.20. Queue a media element task given the media element to set the
1368        // element's delaying-the-load-event flag to false. This stops delaying the load event.
1369        let this = Trusted::new(self);
1370        let generation_id = self.generation_id.get();
1371
1372        self.owner_global()
1373            .task_manager()
1374            .media_element_task_source()
1375            .queue(task!(queue_delay_load_event: move |cx| {
1376                let this = this.root();
1377                if generation_id != this.generation_id.get() {
1378                    return;
1379                }
1380
1381                this.delay_load_event(false, cx);
1382            }));
1383
1384        // Step 9.children.22. Wait until the node after pointer is a node other than the end of the
1385        // list. (This step might wait forever.)
1386    }
1387
1388    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1389    fn resource_selection_algorithm_failure_steps(&self) {
1390        match self.load_state.get() {
1391            LoadState::LoadingFromSrcObject => {
1392                // Step 9.object.4. Failed with media provider: Reaching this step indicates that
1393                // the media resource failed to load. Take pending play promises and queue a media
1394                // element task given the media element to run the dedicated media source failure
1395                // steps with the result.
1396                self.queue_dedicated_media_source_failure_steps();
1397            },
1398            LoadState::LoadingFromSrcAttribute => {
1399                // Step 9.attribute.6. Failed with attribute: Reaching this step indicates that the
1400                // media resource failed to load or that urlRecord is failure. Take pending play
1401                // promises and queue a media element task given the media element to run the
1402                // dedicated media source failure steps with the result.
1403                self.queue_dedicated_media_source_failure_steps();
1404            },
1405            LoadState::LoadingFromSourceChild => {
1406                // Step 9.children.10. Failed with elements: Queue a media element task given the
1407                // media element to fire an event named error at candidate.
1408                if let Some(source) = self.current_source_child.take() {
1409                    self.load_from_source_child_failure_steps(&source);
1410                }
1411            },
1412            _ => {},
1413        }
1414    }
1415
    /// Starts (or restarts) fetching the current media resource.
    ///
    /// `offset` is the byte position to start fetching from (used when seeking
    /// into data that has not been downloaded yet); `None` means from the
    /// beginning. `seek_lock`, when present, is released with `false` if the
    /// fetch cannot be started and with `true` once the previous fetch has
    /// been cancelled and the new one initiated.
    fn fetch_request(&self, offset: Option<u64>, seek_lock: Option<SeekLock>) {
        // Without either a resource URL or a blob URL there is nothing to fetch.
        if self.resource_url.borrow().is_none() && self.blob_url.borrow().is_none() {
            error!("Missing request url");
            if let Some(seek_lock) = seek_lock {
                seek_lock.unlock(/* successful seek */ false);
            }
            self.resource_selection_algorithm_failure_steps();
            return;
        }

        let document = self.owner_document();
        // The request destination depends on whether this is an <audio> or a
        // <video> element.
        let destination = match self.media_type_id() {
            HTMLMediaElementTypeId::HTMLAudioElement => Destination::Audio,
            HTMLMediaElementTypeId::HTMLVideoElement => Destination::Video,
        };
        let mut headers = HeaderMap::new();
        // FIXME(eijebong): Use typed headers once we have a constructor for the range header
        headers.insert(
            header::RANGE,
            HeaderValue::from_str(&format!("bytes={}-", offset.unwrap_or(0))).unwrap(),
        );
        // Prefer the resource URL and fall back to the blob URL; the check at
        // the top guarantees at least one of them is present.
        let url = match self.resource_url.borrow().as_ref() {
            Some(url) => url.clone(),
            None => self.blob_url.borrow().as_ref().unwrap().clone(),
        };

        let cors_setting = cors_setting_for_element(self.upcast());
        let global = self.global();
        let request = create_a_potential_cors_request(
            Some(document.webview_id()),
            url.clone(),
            destination,
            cors_setting,
            None,
            global.get_referrer(),
        )
        .with_global_scope(&global)
        .headers(headers)
        .referrer_policy(document.get_referrer_policy());

        // Cancel any fetch that is still in progress before installing the new
        // fetch context: only data from the new request should reach the
        // element from now on.
        let mut current_fetch_context = self.current_fetch_context.borrow_mut();
        if let Some(ref mut current_fetch_context) = *current_fetch_context {
            current_fetch_context.cancel(CancelReason::Abort);
        }

        *current_fetch_context = Some(HTMLMediaElementFetchContext::new(
            request.id,
            global.core_resource_thread(),
        ));
        let listener =
            HTMLMediaElementFetchListener::new(self, request.id, url, offset.unwrap_or(0));

        self.owner_document().fetch_background(request, listener);

        // Since we cancelled the previous fetch, from now on the media element
        // will only receive response data from the new fetch that's been
        // initiated. This means the player can resume operation, since all subsequent data
        // pushes will originate from the new seek offset.
        if let Some(seek_lock) = seek_lock {
            seek_lock.unlock(/* successful seek */ true);
        }
    }
1478
1479    /// <https://html.spec.whatwg.org/multipage/#eligible-for-autoplay>
1480    fn eligible_for_autoplay(&self) -> bool {
1481        // its can autoplay flag is true;
1482        self.autoplaying.get() &&
1483
1484        // its paused attribute is true;
1485        self.Paused() &&
1486
1487        // it has an autoplay attribute specified;
1488        self.Autoplay() &&
1489
1490        // its node document's active sandboxing flag set does not have the sandboxed automatic
1491        // features browsing context flag set; and
1492        {
1493            let document = self.owner_document();
1494
1495            !document.has_active_sandboxing_flag(
1496                SandboxingFlagSet::SANDBOXED_AUTOMATIC_FEATURES_BROWSING_CONTEXT_FLAG,
1497            )
1498        }
1499
1500        // its node document is allowed to use the "autoplay" feature.
1501        // TODO: Feature policy: https://html.spec.whatwg.org/iframe-embed-object.html#allowed-to-use
1502    }
1503
    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-resource>
    ///
    /// `resource` conveys the mode in use: a remote/blob URL or the element's
    /// assigned media provider object (`srcObject`).
    fn resource_fetch_algorithm(&self, resource: Resource) {
        // Servo-specific: (re)create the media player backend before fetching;
        // fall back to the resource selection failure steps if that fails.
        if let Err(e) = self.create_media_player(&resource) {
            error!("Create media player error {:?}", e);
            self.resource_selection_algorithm_failure_steps();
            return;
        }

        // Steps 1-2.
        // Unapplicable, the `resource` variable already conveys which mode
        // is in use.

        // Step 3.
        // FIXME(nox): Remove all media-resource-specific text tracks.

        // Step 5. Run the appropriate steps from the following list:
        match resource {
            Resource::Url(url) => {
                // Step 5.remote.1. Optionally, run the following substeps. This is the expected
                // behavior if the user agent intends to not attempt to fetch the resource until the
                // user requests it explicitly (e.g. as a way to implement the preload attribute's
                // none keyword).
                if self.Preload() == "none" && !self.autoplaying.get() {
                    // Step 5.remote.1.1. Set the networkState to NETWORK_IDLE.
                    self.network_state.set(NetworkState::Idle);

                    // Step 5.remote.1.2. Queue a media element task given the media element to fire
                    // an event named suspend at the element.
                    self.queue_media_element_task_to_fire_event(atom!("suspend"));

                    // Step 5.remote.1.3. Queue a media element task given the media element to set
                    // the element's delaying-the-load-event flag to false. This stops delaying the
                    // load event.
                    let this = Trusted::new(self);
                    let generation_id = self.generation_id.get();

                    self.owner_global()
                        .task_manager()
                        .media_element_task_source()
                        .queue(task!(queue_delay_load_event: move |cx| {
                            let this = this.root();
                            // Ignore the task if the load algorithm restarted in the meantime.
                            if generation_id != this.generation_id.get() {
                                return;
                            }

                            this.delay_load_event(false, cx);
                        }));

                    // TODO Steps 5.remote.1.4. Wait for the task to be run.
                    // FIXME(nox): Somehow we should wait for the task from previous
                    // step to be ran before continuing.

                    // TODO Steps 5.remote.1.5-5.remote.1.7.
                    // FIXME(nox): Wait for an implementation-defined event and
                    // then continue with the normal set of steps instead of just
                    // returning.
                    return;
                }

                *self.resource_url.borrow_mut() = Some(url);

                // Steps 5.remote.2-5.remote.8
                self.fetch_request(None, None);
            },
            Resource::Object => {
                if let Some(ref src_object) = *self.src_object.borrow() {
                    match src_object {
                        SrcObject::Blob(blob) => {
                            // Blobs are fetched through a generated object URL.
                            let blob_url = URL::CreateObjectURL(&self.global(), blob);
                            *self.blob_url.borrow_mut() =
                                Some(ServoUrl::parse(&blob_url.str()).expect("infallible"));
                            self.fetch_request(None, None);
                        },
                        SrcObject::MediaStream(stream) => {
                            // Hand each stream track to the player; the flag on the
                            // last track tells the player the set is complete.
                            let tracks = &*stream.get_tracks();
                            for (pos, track) in tracks.iter().enumerate() {
                                if self
                                    .player
                                    .borrow()
                                    .as_ref()
                                    .unwrap()
                                    .lock()
                                    .unwrap()
                                    .set_stream(&track.id(), pos == tracks.len() - 1)
                                    .is_err()
                                {
                                    // NOTE(review): on failure the loop still feeds the
                                    // remaining tracks — confirm this is intentional.
                                    self.resource_selection_algorithm_failure_steps();
                                }
                            }
                        },
                    }
                }
            },
        }
    }
1599
    /// Queues a task to run the [dedicated media source failure steps][steps].
    ///
    /// [steps]: https://html.spec.whatwg.org/multipage/#dedicated-media-source-failure-steps
    fn queue_dedicated_media_source_failure_steps(&self) {
        let this = Trusted::new(self);
        let generation_id = self.generation_id.get();
        // Move the pending play promises to the in-flight queue now; the task
        // queued below rejects them (with NotSupportedError) via
        // `fulfill_in_flight_play_promises`.
        self.take_pending_play_promises(Err(Error::NotSupported(None)));
        self.owner_global()
            .task_manager()
            .media_element_task_source()
            .queue(task!(dedicated_media_source_failure_steps: move |cx| {
                let this = this.root();
                // If the load algorithm restarted in the meantime, do nothing:
                // the new invocation drains the in-flight play promises queue
                // itself (media element load algorithm, Step 4).
                if generation_id != this.generation_id.get() {
                    return;
                }

                this.fulfill_in_flight_play_promises(|| {
                    // Step 1. Set the error attribute to the result of creating a MediaError with
                    // MEDIA_ERR_SRC_NOT_SUPPORTED.
                    this.error.set(Some(&*MediaError::new(
                        &this.owner_window(),
                        MEDIA_ERR_SRC_NOT_SUPPORTED, CanGc::from_cx(cx))));

                    // Step 2. Forget the media element's media-resource-specific tracks.
                    this.AudioTracks(CanGc::from_cx(cx)).clear();
                    this.VideoTracks(CanGc::from_cx(cx)).clear();

                    // Step 3. Set the element's networkState attribute to the NETWORK_NO_SOURCE
                    // value.
                    this.network_state.set(NetworkState::NoSource);

                    // Step 4. Set the element's show poster flag to true.
                    this.show_poster.set(true);

                    // Step 5. Fire an event named error at the media element.
                    this.upcast::<EventTarget>().fire_event(atom!("error"), CanGc::from_cx(cx));

                    // Servo-specific: stop any backend playback of the failed resource.
                    if let Some(ref player) = *this.player.borrow() {
                        if let Err(error) = player.lock().unwrap().stop() {
                            error!("Could not stop player: {error:?}");
                        }
                    }

                    // Step 6. Reject pending play promises with promises and a "NotSupportedError"
                    // DOMException.
                    // Done after running this closure in `fulfill_in_flight_play_promises`.
                });

                // Step 7. Set the element's delaying-the-load-event flag to false. This stops
                // delaying the load event.
                this.delay_load_event(false, cx);
            }));
    }
1653
1654    fn in_error_state(&self) -> bool {
1655        self.error.get().is_some()
1656    }
1657
1658    /// <https://html.spec.whatwg.org/multipage/#potentially-playing>
1659    fn is_potentially_playing(&self) -> bool {
1660        !self.paused.get() &&
1661            !self.ended_playback(LoopCondition::Included) &&
1662            self.error.get().is_none() &&
1663            !self.is_blocked_media_element()
1664    }
1665
1666    /// <https://html.spec.whatwg.org/multipage/#blocked-media-element>
1667    fn is_blocked_media_element(&self) -> bool {
1668        self.ready_state.get() <= ReadyState::HaveCurrentData ||
1669            self.is_paused_for_user_interaction() ||
1670            self.is_paused_for_in_band_content()
1671    }
1672
1673    /// <https://html.spec.whatwg.org/multipage/#paused-for-user-interaction>
1674    fn is_paused_for_user_interaction(&self) -> bool {
1675        // FIXME: we will likely be able to fill this placeholder once (if) we
1676        //        implement the MediaSession API.
1677        false
1678    }
1679
1680    /// <https://html.spec.whatwg.org/multipage/#paused-for-in-band-content>
1681    fn is_paused_for_in_band_content(&self) -> bool {
1682        // FIXME: we will likely be able to fill this placeholder once (if) we
1683        //        implement https://github.com/servo/servo/issues/22314
1684        false
1685    }
1686
    /// <https://html.spec.whatwg.org/multipage/#media-element-load-algorithm>
    fn media_element_load_algorithm(&self, cx: &mut js::context::JSContext) {
        // Reset the flag that signals whether loadeddata was ever fired for
        // this invocation of the load algorithm.
        self.fired_loadeddata_event.set(false);

        // TODO Step 1. Set this element's is currently stalled to false.

        // Step 2. Abort any already-running instance of the resource selection algorithm for this
        // element.
        // Bumping the generation id turns every task queued by the previous
        // instance into a no-op (they all check this id before running).
        self.generation_id.set(self.generation_id.get() + 1);

        // Servo-specific: reset the source-selection state machine.
        self.load_state.set(LoadState::NotLoaded);
        *self.source_children_pointer.borrow_mut() = None;
        self.current_source_child.set(None);

        // Step 3. Let pending tasks be a list of all tasks from the media element's media element
        // event task source in one of the task queues.

        // Step 4. For each task in pending tasks that would resolve pending play promises or reject
        // pending play promises, immediately resolve or reject those promises in the order the
        // corresponding tasks were queued.
        while !self.in_flight_play_promises_queue.borrow().is_empty() {
            self.fulfill_in_flight_play_promises(|| ());
        }

        // Step 5. Remove each task in pending tasks from its task queue.
        // Note that each media element's pending event and callback is scheduled with associated
        // generation id and will be aborted eventually (from Step 2).

        let network_state = self.network_state.get();

        // Step 6. If the media element's networkState is set to NETWORK_LOADING or NETWORK_IDLE,
        // queue a media element task given the media element to fire an event named abort at the
        // media element.
        if network_state == NetworkState::Loading || network_state == NetworkState::Idle {
            self.queue_media_element_task_to_fire_event(atom!("abort"));
        }

        // Reset the media player for any previously playing media resource (see Step 11).
        self.reset_media_player();

        // Step 7. If the media element's networkState is not set to NETWORK_EMPTY, then:
        if network_state != NetworkState::Empty {
            // Step 7.1. Queue a media element task given the media element to fire an event named
            // emptied at the media element.
            self.queue_media_element_task_to_fire_event(atom!("emptied"));

            // Step 7.2. If a fetching process is in progress for the media element, the user agent
            // should stop it.
            if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
                current_fetch_context.cancel(CancelReason::Abort);
            }

            // TODO Step 7.3. If the media element's assigned media provider object is a MediaSource
            // object, then detach it.

            // Step 7.4. Forget the media element's media-resource-specific tracks.
            self.AudioTracks(CanGc::from_cx(cx)).clear();
            self.VideoTracks(CanGc::from_cx(cx)).clear();

            // Step 7.5. If readyState is not set to HAVE_NOTHING, then set it to that state.
            if self.ready_state.get() != ReadyState::HaveNothing {
                self.change_ready_state(ReadyState::HaveNothing);
            }

            // Step 7.6. If the paused attribute is false, then:
            if !self.Paused() {
                // Step 7.6.1. Set the paused attribute to true.
                self.paused.set(true);

                // Step 7.6.2. Take pending play promises and reject pending play promises with the
                // result and an "AbortError" DOMException.
                self.take_pending_play_promises(Err(Error::Abort(None)));
                self.fulfill_in_flight_play_promises(|| ());
            }

            // Step 7.7. If seeking is true, set it to false.
            self.seeking.set(false);

            // Servo-specific: clear any in-flight seek target.
            self.current_seek_position.set(f64::NAN);

            // Step 7.8. Set the current playback position to 0.
            // Set the official playback position to 0.
            // If this changed the official playback position, then queue a media element task given
            // the media element to fire an event named timeupdate at the media element.
            self.current_playback_position.set(0.);
            if self.official_playback_position.get() != 0. {
                self.queue_media_element_task_to_fire_event(atom!("timeupdate"));
            }
            self.official_playback_position.set(0.);

            // TODO Step 7.9. Set the timeline offset to Not-a-Number (NaN).

            // Step 7.10. Update the duration attribute to Not-a-Number (NaN).
            self.duration.set(f64::NAN);
        }

        // Step 8. Set the playbackRate attribute to the value of the defaultPlaybackRate attribute.
        self.playback_rate.set(self.default_playback_rate.get());

        // Step 9. Set the error attribute to null and the can autoplay flag to true.
        self.error.set(None);
        self.autoplaying.set(true);

        // Step 10. Invoke the media element's resource selection algorithm.
        self.invoke_resource_selection_algorithm(cx);

        // Step 11. Note: Playback of any previously playing media resource for this element stops.
    }
1797
1798    /// Queue a media element task given the media element to fire an event at the media element.
1799    /// <https://html.spec.whatwg.org/multipage/#queue-a-media-element-task>
1800    fn queue_media_element_task_to_fire_event(&self, name: Atom) {
1801        let this = Trusted::new(self);
1802        let generation_id = self.generation_id.get();
1803
1804        self.owner_global()
1805            .task_manager()
1806            .media_element_task_source()
1807            .queue(task!(queue_event: move |cx| {
1808                let this = this.root();
1809                if generation_id != this.generation_id.get() {
1810                    return;
1811                }
1812
1813                this.upcast::<EventTarget>().fire_event(name, CanGc::from_cx(cx));
1814            }));
1815    }
1816
1817    /// Appends a promise to the list of pending play promises.
1818    fn push_pending_play_promise(&self, promise: &Rc<Promise>) {
1819        self.pending_play_promises
1820            .borrow_mut()
1821            .push(promise.clone());
1822    }
1823
1824    /// Takes the pending play promises.
1825    ///
1826    /// The result with which these promises will be fulfilled is passed here
1827    /// and this method returns nothing because we actually just move the
1828    /// current list of pending play promises to the
1829    /// `in_flight_play_promises_queue` field.
1830    ///
1831    /// Each call to this method must be followed by a call to
1832    /// `fulfill_in_flight_play_promises`, to actually fulfill the promises
1833    /// which were taken and moved to the in-flight queue.
1834    fn take_pending_play_promises(&self, result: ErrorResult) {
1835        let pending_play_promises = std::mem::take(&mut *self.pending_play_promises.borrow_mut());
1836        self.in_flight_play_promises_queue
1837            .borrow_mut()
1838            .push_back((pending_play_promises.into(), result));
1839    }
1840
1841    /// Fulfills the next in-flight play promises queue after running a closure.
1842    ///
1843    /// See the comment on `take_pending_play_promises` for why this method
1844    /// does not take a list of promises to fulfill. Callers cannot just pop
1845    /// the front list off of `in_flight_play_promises_queue` and later fulfill
1846    /// the promises because that would mean putting
1847    /// `#[cfg_attr(crown, expect(crown::unrooted_must_root))]` on even more functions, potentially
1848    /// hiding actual safety bugs.
1849    fn fulfill_in_flight_play_promises<F>(&self, f: F)
1850    where
1851        F: FnOnce(),
1852    {
1853        let (promises, result) = self
1854            .in_flight_play_promises_queue
1855            .borrow_mut()
1856            .pop_front()
1857            .expect("there should be at least one list of in flight play promises");
1858        f();
1859        for promise in &*promises {
1860            match result {
1861                Ok(ref value) => promise.resolve_native(value, CanGc::note()),
1862                Err(ref error) => promise.reject_error(error.clone(), CanGc::note()),
1863            }
1864        }
1865    }
1866
    /// Reacts to a `<source>` element being inserted as a child of this media
    /// element: either invokes the resource selection algorithm (if nothing is
    /// loaded yet), or resumes a selection that is parked waiting for a new
    /// candidate, using `source` as that candidate.
    pub(crate) fn handle_source_child_insertion(
        &self,
        source: &HTMLSourceElement,
        cx: &mut js::context::JSContext,
    ) {
        // <https://html.spec.whatwg.org/multipage/#the-source-element:html-element-insertion-steps>
        // Step 2. If parent is a media element that has no src attribute and whose networkState has
        // the value NETWORK_EMPTY, then invoke that media element's resource selection algorithm.
        if self.upcast::<Element>().has_attribute(&local_name!("src")) {
            return;
        }

        if self.network_state.get() == NetworkState::Empty {
            self.invoke_resource_selection_algorithm(cx);
            return;
        }

        // <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
        // Step 9.children.22. Wait until the node after pointer is a node other than the end of the
        // list. (This step might wait forever.)
        // Only resume if resource selection is actually waiting for a candidate.
        if self.load_state.get() != LoadState::WaitingForSource {
            return;
        }

        self.load_state.set(LoadState::LoadingFromSourceChild);

        // Point the source children pointer at the newly inserted element so
        // that selection resumes from it.
        *self.source_children_pointer.borrow_mut() =
            Some(SourceChildrenPointer::new(DomRoot::from_ref(source), true));

        // Step 9.children.23. Await a stable state.
        // The microtask carries the current generation id so it becomes a
        // no-op if the load algorithm restarts before it runs.
        let task = MediaElementMicrotask::SelectNextSourceChildAfterWait {
            elem: DomRoot::from_ref(self),
            generation_id: self.generation_id.get(),
        };

        ScriptThread::await_stable_state(Microtask::MediaElement(task));
    }
1904
1905    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1906    fn select_next_source_child_after_wait(&self, cx: &mut js::context::JSContext) {
1907        // Step 9.children.24. Set the element's delaying-the-load-event flag back to true (this
1908        // delays the load event again, in case it hasn't been fired yet).
1909        self.delay_load_event(true, cx);
1910
1911        // Step 9.children.25. Set the networkState back to NETWORK_LOADING.
1912        self.network_state.set(NetworkState::Loading);
1913
1914        // Step 9.children.26. Jump back to the find next candidate step above.
1915        self.select_next_source_child(CanGc::from_cx(cx));
1916    }
1917
1918    /// <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
1919    /// => "If the media data cannot be fetched at all, due to network errors..."
1920    /// => "If the media data can be fetched but is found by inspection to be in an unsupported
1921    /// format, or can otherwise not be rendered at all"
1922    fn media_data_processing_failure_steps(&self) {
1923        // Step 1. The user agent should cancel the fetching process.
1924        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
1925            current_fetch_context.cancel(CancelReason::Error);
1926        }
1927
1928        // Step 2. Abort this subalgorithm, returning to the resource selection algorithm.
1929        self.resource_selection_algorithm_failure_steps();
1930    }
1931
    /// <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
    /// => "If the connection is interrupted after some media data has been received..."
    /// => "If the media data is corrupted"
    ///
    /// `error` is the MediaError code to report: MEDIA_ERR_NETWORK for an
    /// interrupted connection or MEDIA_ERR_DECODE for corrupted media data.
    fn media_data_processing_fatal_steps(&self, error: u16, cx: &mut js::context::JSContext) {
        // Servo-specific: drop the source-selection state, since Step 6 aborts
        // the overall resource selection algorithm.
        *self.source_children_pointer.borrow_mut() = None;
        self.current_source_child.set(None);

        // Step 1. The user agent should cancel the fetching process.
        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
            current_fetch_context.cancel(CancelReason::Error);
        }

        // Step 2. Set the error attribute to the result of creating a MediaError with
        // MEDIA_ERR_NETWORK/MEDIA_ERR_DECODE.
        self.error.set(Some(&*MediaError::new(
            &self.owner_window(),
            error,
            CanGc::from_cx(cx),
        )));

        // Step 3. Set the element's networkState attribute to the NETWORK_IDLE value.
        self.network_state.set(NetworkState::Idle);

        // Step 4. Set the element's delaying-the-load-event flag to false. This stops delaying
        // the load event.
        self.delay_load_event(false, cx);

        // Step 5. Fire an event named error at the media element.
        self.upcast::<EventTarget>()
            .fire_event(atom!("error"), CanGc::from_cx(cx));

        // Step 6. Abort the overall resource selection algorithm.
    }
1965
    /// <https://html.spec.whatwg.org/multipage/#dom-media-seek>
    ///
    /// Seeks to `time` (in seconds). `_approximate_for_speed` is currently
    /// unused (see the Step 9 note below). The final steps of the algorithm
    /// run in `seek_end` once the backend signals seek completion.
    fn seek(&self, time: f64, _approximate_for_speed: bool) {
        // Step 1. Set the media element's show poster flag to false.
        self.show_poster.set(false);

        // Step 2. If the media element's readyState is HAVE_NOTHING, return.
        if self.ready_state.get() == ReadyState::HaveNothing {
            return;
        }

        // Step 3. If the element's seeking IDL attribute is true, then another instance of this
        // algorithm is already running. Abort that other instance of the algorithm without waiting
        // for the step that it is running to complete.
        // Clearing the pending seek target effectively abandons any previous seek.
        self.current_seek_position.set(f64::NAN);

        // Step 4. Set the seeking IDL attribute to true.
        self.seeking.set(true);

        // Step 5. If the seek was in response to a DOM method call or setting of an IDL attribute,
        // then continue the script. The remainder of these steps must be run in parallel.

        // Step 6. If the new playback position is later than the end of the media resource, then
        // let it be the end of the media resource instead.
        let time = f64::min(time, self.Duration());

        // Step 7. If the new playback position is less than the earliest possible position, let it
        // be that position instead.
        let time = f64::max(time, self.earliest_possible_position());

        // Step 8. If the (possibly now changed) new playback position is not in one of the ranges
        // given in the seekable attribute, then let it be the position in one of the ranges given
        // in the seekable attribute that is the nearest to the new playback position. If there are
        // no ranges given in the seekable attribute, then set the seeking IDL attribute to false
        // and return.
        let seekable = self.seekable();

        if seekable.is_empty() {
            self.seeking.set(false);
            return;
        }

        // Find the seekable range containing `time`, or otherwise the closest
        // range boundary to it.
        // NOTE(review): the `.abs()` calls assume the backend may report
        // negative range bounds — confirm against the seekable() provider.
        let mut nearest_seekable_position = 0.0;
        let mut in_seekable_range = false;
        let mut nearest_seekable_distance = f64::MAX;
        for i in 0..seekable.len() {
            let start = seekable.start(i).unwrap().abs();
            let end = seekable.end(i).unwrap().abs();
            if time >= start && time <= end {
                nearest_seekable_position = time;
                in_seekable_range = true;
                break;
            } else if time < start {
                let distance = start - time;
                if distance < nearest_seekable_distance {
                    nearest_seekable_distance = distance;
                    nearest_seekable_position = start;
                }
            } else {
                let distance = time - end;
                if distance < nearest_seekable_distance {
                    nearest_seekable_distance = distance;
                    nearest_seekable_position = end;
                }
            }
        }
        let time = if in_seekable_range {
            time
        } else {
            nearest_seekable_position
        };

        // Step 9. If the approximate-for-speed flag is set, adjust the new playback position to a
        // value that will allow for playback to resume promptly. If new playback position before
        // this step is before current playback position, then the adjusted new playback position
        // must also be before the current playback position. Similarly, if the new playback
        // position before this step is after current playback position, then the adjusted new
        // playback position must also be after the current playback position.
        // TODO: Note that servo-media with gstreamer does not support inaccurate seeking for now.

        // Step 10. Queue a media element task given the media element to fire an event named
        // seeking at the element.
        self.queue_media_element_task_to_fire_event(atom!("seeking"));

        // Step 11. Set the current playback position to the new playback position.
        self.current_playback_position.set(time);

        // Ask the media backend to perform the actual seek.
        if let Some(ref player) = *self.player.borrow() {
            if let Err(error) = player.lock().unwrap().seek(time) {
                error!("Could not seek player: {error:?}");
            }
        }

        // Servo-specific: remember the pending seek target so completion can
        // be matched against it.
        self.current_seek_position.set(time);

        // Step 12. Wait until the user agent has established whether or not the media data for the
        // new playback position is available, and, if it is, until it has decoded enough data to
        // play back that position.
        // The rest of the steps are handled when the media engine signals a ready state change or
        // otherwise satisfies seek completion and signals a position change.
    }
2066
    /// <https://html.spec.whatwg.org/multipage/#dom-media-seek>
    ///
    /// Runs the final steps (14 onwards) of the seek algorithm once the media
    /// engine has signalled that the seek operation finished.
    fn seek_end(&self) {
        // Any time the user agent provides a stable state, the official playback position must be
        // set to the current playback position.
        self.official_playback_position
            .set(self.current_playback_position.get());

        // Step 14. Set the seeking IDL attribute to false.
        self.seeking.set(false);

        // Clear the pending seek target; NaN marks that no seek is in flight.
        self.current_seek_position.set(f64::NAN);

        // Step 15. Run the time marches on steps.
        self.time_marches_on();

        // Step 16. Queue a media element task given the media element to fire an event named
        // timeupdate at the element.
        self.queue_media_element_task_to_fire_event(atom!("timeupdate"));

        // Step 17. Queue a media element task given the media element to fire an event named seeked
        // at the element.
        self.queue_media_element_task_to_fire_event(atom!("seeked"));
    }
2090
2091    /// <https://html.spec.whatwg.org/multipage/#poster-frame>
2092    pub(crate) fn set_poster_frame(&self, image: Option<Arc<RasterImage>>) {
2093        if pref!(media_testing_enabled) && image.is_some() {
2094            self.queue_media_element_task_to_fire_event(atom!("postershown"));
2095        }
2096
2097        self.video_renderer.lock().unwrap().set_poster_frame(image);
2098
2099        self.upcast::<Node>().dirty(NodeDamage::Other);
2100    }
2101
2102    fn player_id(&self) -> Option<usize> {
2103        self.player
2104            .borrow()
2105            .as_ref()
2106            .map(|player| player.lock().unwrap().get_id())
2107    }
2108
    /// Creates and stores the `servo-media` player backing this element for
    /// `resource`, wiring up its video/audio renderers and routing its events
    /// back to the script thread.
    ///
    /// Returns `Err(())` when `resource` is `Resource::Object` but the
    /// element's src object has already been cleared.
    fn create_media_player(&self, resource: &Resource) -> Result<(), ()> {
        // Only a MediaStream src object produces a non-seekable stream; every
        // other resource kind is treated as seekable.
        let stream_type = match *resource {
            Resource::Object => {
                if let Some(ref src_object) = *self.src_object.borrow() {
                    match src_object {
                        SrcObject::MediaStream(_) => StreamType::Stream,
                        _ => StreamType::Seekable,
                    }
                } else {
                    return Err(());
                }
            },
            _ => StreamType::Seekable,
        };

        let window = self.owner_window();
        let (action_sender, action_receiver) = ipc::channel::<PlayerEvent>().unwrap();
        // Audio elements never render video frames, so they get no video
        // renderer.
        let video_renderer: Option<Arc<Mutex<dyn VideoFrameRenderer>>> = match self.media_type_id()
        {
            HTMLMediaElementTypeId::HTMLAudioElement => None,
            HTMLMediaElementTypeId::HTMLVideoElement => Some(self.video_renderer.clone()),
        };

        let audio_renderer = self.audio_renderer.borrow().as_ref().cloned();

        let pipeline_id = window.pipeline_id();
        let client_context_id =
            ClientContextId::build(pipeline_id.namespace_id.0, pipeline_id.index.0.get());
        let player = ServoMedia::get().create_player(
            &client_context_id,
            stream_type,
            action_sender,
            video_renderer,
            audio_renderer,
            Box::new(window.get_player_context()),
        );
        // Apply the element's current mute state to the freshly created player
        // and remember its id for event dispatch and renderer setup below.
        let player_id = {
            let player_guard = player.lock().unwrap();

            if let Err(error) = player_guard.set_mute(self.muted.get()) {
                warn!("Could not set mute state: {error:?}");
            }

            player_guard.get_id()
        };

        *self.player.borrow_mut() = Some(player);

        // The router callback below only holds a weak reference to the event
        // handler, so dropping the handler (see `reset_media_player`) also
        // stops event delivery.
        let event_handler = Arc::new(Mutex::new(HTMLMediaElementEventHandler::new(self)));
        let weak_event_handler = Arc::downgrade(&event_handler);
        *self.event_handler.borrow_mut() = Some(event_handler);

        // Forward player events from the IPC router to the script thread's
        // media element task source.
        let task_source = self
            .owner_global()
            .task_manager()
            .media_element_task_source()
            .to_sendable();
        ROUTER.add_typed_route(
            action_receiver,
            Box::new(move |message| {
                let event = message.unwrap();
                let weak_event_handler = weak_event_handler.clone();

                task_source.queue(task!(handle_player_event: move |cx| {
                    trace!("HTMLMediaElement event: {event:?}");

                    let Some(event_handler) = weak_event_handler.upgrade() else {
                        return;
                    };

                    event_handler.lock().unwrap().handle_player_event(player_id, event, cx);
                }));
            }),
        );

        let task_source = self
            .owner_global()
            .task_manager()
            .media_element_task_source()
            .to_sendable();
        let weak_video_renderer = Arc::downgrade(&self.video_renderer);

        self.video_renderer
            .lock()
            .unwrap()
            .setup(player_id, task_source, weak_video_renderer);

        Ok(())
    }
2198
2199    fn reset_media_player(&self) {
2200        if self.player.borrow().is_none() {
2201            return;
2202        }
2203
2204        if let Some(ref player) = *self.player.borrow() {
2205            if let Err(error) = player.lock().unwrap().stop() {
2206                error!("Could not stop player: {error:?}");
2207            }
2208        }
2209
2210        *self.player.borrow_mut() = None;
2211        self.video_renderer.lock().unwrap().reset();
2212        *self.event_handler.borrow_mut() = None;
2213
2214        if let Some(video_element) = self.downcast::<HTMLVideoElement>() {
2215            video_element.set_natural_dimensions(None, None);
2216        }
2217    }
2218
2219    pub(crate) fn set_audio_track(&self, idx: usize, enabled: bool) {
2220        if let Some(ref player) = *self.player.borrow() {
2221            if let Err(error) = player.lock().unwrap().set_audio_track(idx as i32, enabled) {
2222                warn!("Could not set audio track {error:?}");
2223            }
2224        }
2225    }
2226
2227    pub(crate) fn set_video_track(&self, idx: usize, enabled: bool) {
2228        if let Some(ref player) = *self.player.borrow() {
2229            if let Err(error) = player.lock().unwrap().set_video_track(idx as i32, enabled) {
2230                warn!("Could not set video track: {error:?}");
2231            }
2232        }
2233    }
2234
2235    /// <https://html.spec.whatwg.org/multipage/#direction-of-playback>
2236    fn direction_of_playback(&self) -> PlaybackDirection {
2237        // If the element's playbackRate is positive or zero, then the direction of playback is
2238        // forwards. Otherwise, it is backwards.
2239        if self.playback_rate.get() >= 0. {
2240            PlaybackDirection::Forwards
2241        } else {
2242            PlaybackDirection::Backwards
2243        }
2244    }
2245
2246    /// <https://html.spec.whatwg.org/multipage/#ended-playback>
2247    fn ended_playback(&self, loop_condition: LoopCondition) -> bool {
2248        // A media element is said to have ended playback when:
2249
2250        // The element's readyState attribute is HAVE_METADATA or greater, and
2251        if self.ready_state.get() < ReadyState::HaveMetadata {
2252            return false;
2253        }
2254
2255        let playback_position = self.current_playback_position.get();
2256
2257        match self.direction_of_playback() {
2258            // Either: The current playback position is the end of the media resource, and the
2259            // direction of playback is forwards, and the media element does not have a loop
2260            // attribute specified.
2261            PlaybackDirection::Forwards => {
2262                playback_position >= self.Duration() &&
2263                    (loop_condition == LoopCondition::Ignored || !self.Loop())
2264            },
2265            // Or: The current playback position is the earliest possible position, and the
2266            // direction of playback is backwards.
2267            PlaybackDirection::Backwards => playback_position <= self.earliest_possible_position(),
2268        }
2269    }
2270
    /// <https://html.spec.whatwg.org/multipage/#reaches-the-end>
    ///
    /// Runs the "reaches the end" steps for forwards playback: loops back to
    /// the start when the loop attribute is set, otherwise pauses the element
    /// (rejecting pending play promises) and fires the ended event.
    fn end_of_playback_in_forwards_direction(&self) {
        // When the current playback position reaches the end of the media resource when the
        // direction of playback is forwards, then the user agent must follow these steps:

        // Step 1. If the media element has a loop attribute specified, then seek to the earliest
        // possible position of the media resource and return.
        if self.Loop() {
            self.seek(
                self.earliest_possible_position(),
                /* approximate_for_speed */ false,
            );
            return;
        }

        // Step 2. As defined above, the ended IDL attribute starts returning true once the event
        // loop returns to step 1.

        // Step 3. Queue a media element task given the media element and the following steps:
        // Capture the current generation so the queued task bails out if the
        // element was reset before the task runs.
        let this = Trusted::new(self);
        let generation_id = self.generation_id.get();

        self.owner_global()
            .task_manager()
            .media_element_task_source()
            .queue(task!(reaches_the_end_steps: move || {
                let this = this.root();
                if generation_id != this.generation_id.get() {
                    return;
                }

                // Step 3.1. Fire an event named timeupdate at the media element.
                this.upcast::<EventTarget>().fire_event(atom!("timeupdate"), CanGc::note());

                // Step 3.2. If the media element has ended playback, the direction of playback is
                // forwards, and paused is false, then:
                if this.ended_playback(LoopCondition::Included) &&
                    this.direction_of_playback() == PlaybackDirection::Forwards &&
                    !this.Paused() {
                    // Step 3.2.1. Set the paused attribute to true.
                    this.paused.set(true);

                    // Step 3.2.2. Fire an event named pause at the media element.
                    this.upcast::<EventTarget>().fire_event(atom!("pause"), CanGc::note());

                    // Step 3.2.3. Take pending play promises and reject pending play promises with
                    // the result and an "AbortError" DOMException.
                    this.take_pending_play_promises(Err(Error::Abort(None)));
                    this.fulfill_in_flight_play_promises(|| ());
                }

                // Step 3.3. Fire an event named ended at the media element.
                this.upcast::<EventTarget>().fire_event(atom!("ended"), CanGc::note());
            }));

        // <https://html.spec.whatwg.org/multipage/#dom-media-have_current_data>
        self.change_ready_state(ReadyState::HaveCurrentData);
    }
2329
2330    /// <https://html.spec.whatwg.org/multipage/#reaches-the-end>
2331    fn end_of_playback_in_backwards_direction(&self) {
2332        // When the current playback position reaches the earliest possible position of the media
2333        // resource when the direction of playback is backwards, then the user agent must only queue
2334        // a media element task given the media element to fire an event named timeupdate at the
2335        // element.
2336        if self.current_playback_position.get() <= self.earliest_possible_position() {
2337            self.queue_media_element_task_to_fire_event(atom!("timeupdate"));
2338        }
2339    }
2340
2341    fn playback_end(&self) {
2342        // Abort the following steps of the end of playback if seeking is in progress.
2343        if self.seeking.get() {
2344            return;
2345        }
2346
2347        match self.direction_of_playback() {
2348            PlaybackDirection::Forwards => self.end_of_playback_in_forwards_direction(),
2349            PlaybackDirection::Backwards => self.end_of_playback_in_backwards_direction(),
2350        }
2351    }
2352
2353    fn playback_error(&self, error: &str, cx: &mut js::context::JSContext) {
2354        error!("Player error: {:?}", error);
2355
2356        // If we have already flagged an error condition while processing
2357        // the network response, we should silently skip any observable
2358        // errors originating while decoding the erroneous response.
2359        if self.in_error_state() {
2360            return;
2361        }
2362
2363        // <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
2364        if self.ready_state.get() == ReadyState::HaveNothing {
2365            // => "If the media data can be fetched but is found by inspection to be in an
2366            // unsupported format, or can otherwise not be rendered at all"
2367            self.media_data_processing_failure_steps();
2368        } else {
2369            // => "If the media data is corrupted"
2370            self.media_data_processing_fatal_steps(MEDIA_ERR_DECODE, cx);
2371        }
2372    }
2373
    /// Handles the initial metadata notification from the media engine.
    ///
    /// Runs the relevant "media data processing" steps: populates the audio
    /// and video track lists, establishes the playback position and duration,
    /// sets the video element's natural dimensions, advances the ready state
    /// to HAVE_METADATA, performs any initial seek indicated by the default
    /// playback start position or a media fragment, and updates the media
    /// session title.
    fn playback_metadata_updated(
        &self,
        metadata: &servo_media::player::metadata::Metadata,
        can_gc: CanGc,
    ) {
        // The following steps should be run once on the initial `metadata` signal from the media
        // engine.
        if self.ready_state.get() != ReadyState::HaveNothing {
            return;
        }

        // https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list
        // => "If the media resource is found to have an audio track"
        for (i, _track) in metadata.audio_tracks.iter().enumerate() {
            let audio_track_list = self.AudioTracks(can_gc);

            // Step 1. Create an AudioTrack object to represent the audio track.
            // Only the first track is labelled "main".
            let kind = match i {
                0 => DOMString::from("main"),
                _ => DOMString::new(),
            };

            let audio_track = AudioTrack::new(
                self.global().as_window(),
                DOMString::new(),
                kind,
                DOMString::new(),
                DOMString::new(),
                Some(&*audio_track_list),
                can_gc,
            );

            // Steps 2. Update the media element's audioTracks attribute's AudioTrackList object
            // with the new AudioTrack object.
            audio_track_list.add(&audio_track);

            // Step 3. Let enable be unknown.
            // Step 4. If either the media resource or the URL of the current media resource
            // indicate a particular set of audio tracks to enable, or if the user agent has
            // information that would facilitate the selection of specific audio tracks to
            // improve the user's experience, then: if this audio track is one of the ones to
            // enable, then set enable to true, otherwise, set enable to false.
            // Here the indication comes from the media fragment of the resource URL.
            if let Some(servo_url) = self.resource_url.borrow().as_ref() {
                let fragment = MediaFragmentParser::from(servo_url);
                if let Some(id) = fragment.id() {
                    if audio_track.id() == id {
                        audio_track_list.set_enabled(audio_track_list.len() - 1, true);
                    }
                }

                if fragment.tracks().contains(&audio_track.kind().into()) {
                    audio_track_list.set_enabled(audio_track_list.len() - 1, true);
                }
            }

            // Step 5. If enable is still unknown, then, if the media element does not yet have an
            // enabled audio track, then set enable to true, otherwise, set enable to false.
            // Step 6. If enable is true, then enable this audio track, otherwise, do not enable
            // this audio track.
            if audio_track_list.enabled_index().is_none() {
                audio_track_list.set_enabled(audio_track_list.len() - 1, true);
            }

            // Step 7. Fire an event named addtrack at this AudioTrackList object, using TrackEvent,
            // with the track attribute initialized to the new AudioTrack object.
            let event = TrackEvent::new(
                self.global().as_window(),
                atom!("addtrack"),
                false,
                false,
                &Some(VideoTrackOrAudioTrackOrTextTrack::AudioTrack(audio_track)),
                can_gc,
            );

            event
                .upcast::<Event>()
                .fire(audio_track_list.upcast::<EventTarget>(), can_gc);
        }

        // => "If the media resource is found to have a video track"
        for (i, _track) in metadata.video_tracks.iter().enumerate() {
            let video_track_list = self.VideoTracks(can_gc);

            // Step 1. Create a VideoTrack object to represent the video track.
            // Only the first track is labelled "main".
            let kind = match i {
                0 => DOMString::from("main"),
                _ => DOMString::new(),
            };

            let video_track = VideoTrack::new(
                self.global().as_window(),
                DOMString::new(),
                kind,
                DOMString::new(),
                DOMString::new(),
                Some(&*video_track_list),
                can_gc,
            );

            // Steps 2. Update the media element's videoTracks attribute's VideoTrackList object
            // with the new VideoTrack object.
            video_track_list.add(&video_track);

            // Step 3. Let enable be unknown.
            // Step 4. If either the media resource or the URL of the current media resource
            // indicate a particular set of video tracks to enable, or if the user agent has
            // information that would facilitate the selection of specific video tracks to
            // improve the user's experience, then: if this video track is the first such video
            // track, then set enable to true, otherwise, set enable to false.
            if let Some(track) = video_track_list.item(0) {
                if let Some(servo_url) = self.resource_url.borrow().as_ref() {
                    let fragment = MediaFragmentParser::from(servo_url);
                    if let Some(id) = fragment.id() {
                        if track.id() == id {
                            video_track_list.set_selected(0, true);
                        }
                    } else if fragment.tracks().contains(&track.kind().into()) {
                        video_track_list.set_selected(0, true);
                    }
                }
            }

            // Step 5. If enable is still unknown, then, if the media element does not yet have a
            // selected video track, then set enable to true, otherwise, set enable to false.
            // Step 6. If enable is true, then select this track and unselect any previously
            // selected video tracks, otherwise, do not select this video track. If other tracks are
            // unselected, then a change event will be fired.
            if video_track_list.selected_index().is_none() {
                video_track_list.set_selected(video_track_list.len() - 1, true);
            }

            // Step 7. Fire an event named addtrack at this VideoTrackList object, using TrackEvent,
            // with the track attribute initialized to the new VideoTrack object.
            let event = TrackEvent::new(
                self.global().as_window(),
                atom!("addtrack"),
                false,
                false,
                &Some(VideoTrackOrAudioTrackOrTextTrack::VideoTrack(video_track)),
                can_gc,
            );

            event
                .upcast::<Event>()
                .fire(video_track_list.upcast::<EventTarget>(), can_gc);
        }

        // => "Once enough of the media data has been fetched to determine the duration..."

        // TODO Step 1. Establish the media timeline for the purposes of the current playback
        // position and the earliest possible position, based on the media data.

        // TODO Step 2. Update the timeline offset to the date and time that corresponds to the zero
        // time in the media timeline established in the previous step, if any. If no explicit time
        // and date is given by the media resource, the timeline offset must be set to Not-a-Number
        // (NaN).

        // Step 3. Set the current playback position and the official playback position to the
        // earliest possible position.
        let earliest_possible_position = self.earliest_possible_position();
        self.current_playback_position
            .set(earliest_possible_position);
        self.official_playback_position
            .set(earliest_possible_position);

        // Step 4. Update the duration attribute with the time of the last frame of the resource, if
        // known, on the media timeline established above. If it is not known (e.g. a stream that is
        // in principle infinite), update the duration attribute to the value positive Infinity.
        // Note: The user agent will queue a media element task given the media element to fire an
        // event named durationchange at the element at this point.
        self.duration.set(
            metadata
                .duration
                .map_or(f64::INFINITY, |duration| duration.as_secs_f64()),
        );
        self.queue_media_element_task_to_fire_event(atom!("durationchange"));

        // Step 5. For video elements, set the videoWidth and videoHeight attributes, and queue a
        // media element task given the media element to fire an event named resize at the media
        // element.
        if let Some(video_element) = self.downcast::<HTMLVideoElement>() {
            video_element.set_natural_dimensions(Some(metadata.width), Some(metadata.height));
            self.queue_media_element_task_to_fire_event(atom!("resize"));
        }

        // Step 6. Set the readyState attribute to HAVE_METADATA.
        self.change_ready_state(ReadyState::HaveMetadata);

        // Step 7. Let jumped be false.
        let mut jumped = false;

        // Step 8. If the media element's default playback start position is greater than zero, then
        // seek to that time, and let jumped be true.
        if self.default_playback_start_position.get() > 0. {
            self.seek(
                self.default_playback_start_position.get(),
                /* approximate_for_speed */ false,
            );
            jumped = true;
        }

        // Step 9. Set the media element's default playback start position to zero.
        self.default_playback_start_position.set(0.);

        // Step 10. Let the initial playback position be 0.
        // Step 11. If either the media resource or the URL of the current media resource indicate a
        // particular start time, then set the initial playback position to that time and, if jumped
        // is still false, seek to that time.
        if let Some(servo_url) = self.resource_url.borrow().as_ref() {
            let fragment = MediaFragmentParser::from(servo_url);
            if let Some(initial_playback_position) = fragment.start() {
                if initial_playback_position > 0. &&
                    initial_playback_position < self.duration.get() &&
                    !jumped
                {
                    self.seek(
                        initial_playback_position,
                        /* approximate_for_speed */ false,
                    )
                }
            }
        }

        // Step 12. If there is no enabled audio track, then enable an audio track. This will cause
        // a change event to be fired.
        // Step 13. If there is no selected video track, then select a video track. This will cause
        // a change event to be fired.
        // Note that these steps are already handled by the earlier media track processing.

        let global = self.global();
        let window = global.as_window();

        // Update the media session metadata title with the obtained metadata,
        // falling back to the page URL when the resource has no title.
        window.Navigator().MediaSession().update_title(
            metadata
                .title
                .clone()
                .unwrap_or(window.get_url().into_string()),
        );
    }
2614
2615    fn playback_duration_changed(&self, duration: Option<Duration>) {
2616        let duration = duration.map_or(f64::INFINITY, |duration| duration.as_secs_f64());
2617
2618        if self.duration.get() == duration {
2619            return;
2620        }
2621
2622        self.duration.set(duration);
2623
2624        // When the length of the media resource changes to a known value (e.g. from being unknown
2625        // to known, or from a previously established length to a new length), the user agent must
2626        // queue a media element task given the media element to fire an event named durationchange
2627        // at the media element.
2628        // <https://html.spec.whatwg.org/multipage/#offsets-into-the-media-resource:media-resource-22>
2629        self.queue_media_element_task_to_fire_event(atom!("durationchange"));
2630
2631        // If the duration is changed such that the current playback position ends up being greater
2632        // than the time of the end of the media resource, then the user agent must also seek to the
2633        // time of the end of the media resource.
2634        if self.current_playback_position.get() > duration {
2635            self.seek(duration, /* approximate_for_speed */ false);
2636        }
2637    }
2638
    /// Handles a new video frame from the media engine, updating the video
    /// element's natural dimensions and triggering a repaint.
    fn playback_video_frame_updated(&self) {
        // Audio elements have no dimensions to update and nothing to repaint.
        let Some(video_element) = self.downcast::<HTMLVideoElement>() else {
            return;
        };

        // Whenever the natural width or natural height of the video changes (including, for
        // example, because the selected video track was changed), if the element's readyState
        // attribute is not HAVE_NOTHING, the user agent must queue a media element task given
        // the media element to fire an event named resize at the media element.
        // <https://html.spec.whatwg.org/multipage/#concept-video-intrinsic-width>

        // The event for the prerolled frame from media engine could reached us before the media
        // element HAVE_METADATA ready state so subsequent steps will be cancelled.
        if self.ready_state.get() == ReadyState::HaveNothing {
            return;
        }

        if let Some(frame) = self.video_renderer.lock().unwrap().current_frame {
            if video_element
                .set_natural_dimensions(Some(frame.width as u32), Some(frame.height as u32))
            {
                self.queue_media_element_task_to_fire_event(atom!("resize"));
            } else {
                // If the natural dimensions have not been changed, the node should be marked as
                // damaged to force a repaint with the new frame contents.
                self.upcast::<Node>().dirty(NodeDamage::Other);
            }
        }
    }
2668
    /// Handles the media engine's request for more data, either restarting a
    /// previously backed-off fetch or feeding queued bytes into the player.
    fn playback_need_data(&self) {
        // The media engine signals that the source needs more data. If we already have a valid
        // fetch request, we do nothing. Otherwise, if we have no request and the previous request
        // was cancelled because we got an EnoughData event, we restart fetching where we left.
        if let Some(ref current_fetch_context) = *self.current_fetch_context.borrow() {
            if let Some(reason) = current_fetch_context.cancel_reason() {
                // XXX(ferjm) Ideally we should just create a fetch request from
                // where we left. But keeping track of the exact next byte that the
                // media backend expects is not the easiest task, so I'm simply
                // seeking to the current playback position for now which will create
                // a new fetch request for the last rendered frame.
                if *reason == CancelReason::Backoff {
                    self.seek(
                        self.current_playback_position.get(),
                        /* approximate_for_speed */ false,
                    );
                }
                return;
            }
        }

        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
            // Unlock the data source and drain any queued chunks into the
            // player.
            if let Err(e) = {
                let mut data_source = current_fetch_context.data_source().borrow_mut();
                data_source.set_locked(false);
                data_source.process_into_player_from_queue(self.player.borrow().as_ref().unwrap())
            } {
                // If we are pushing too much data and we know that we can
                // restart the download later from where we left, we cancel
                // the current request. Otherwise, we continue the request
                // assuming that we may drop some frames.
                if e == PlayerError::EnoughData {
                    current_fetch_context.cancel(CancelReason::Backoff);
                }
            }
        }
    }
2706
2707    fn playback_enough_data(&self) {
2708        // The media engine signals that the source has enough data and asks us to stop pushing bytes
2709        // to avoid excessive buffer queueing, so we cancel the ongoing fetch request if we are able
2710        // to restart it from where we left. Otherwise, we continue the current fetch request,
2711        // assuming that some frames will be dropped.
2712        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
2713            if current_fetch_context.is_seekable() {
2714                current_fetch_context.cancel(CancelReason::Backoff);
2715            }
2716        }
2717    }
2718
2719    fn playback_position_changed(&self, position: f64) {
2720        // Abort the following steps of the current time update if seeking is in progress.
2721        if self.seeking.get() {
2722            return;
2723        }
2724
2725        let _ = self
2726            .played
2727            .borrow_mut()
2728            .add(self.current_playback_position.get(), position);
2729        self.current_playback_position.set(position);
2730        self.official_playback_position.set(position);
2731        self.time_marches_on();
2732
2733        let media_position_state =
2734            MediaPositionState::new(self.duration.get(), self.playback_rate.get(), position);
2735        debug!(
2736            "Sending media session event set position state {:?}",
2737            media_position_state
2738        );
2739        self.send_media_session_event(MediaSessionEvent::SetPositionState(media_position_state));
2740    }
2741
2742    fn playback_seek_done(&self, position: f64) {
2743        // If the seek was initiated by script or by the user agent itself continue with the
2744        // following steps, otherwise abort.
2745        let delta = (position - self.current_seek_position.get()).abs();
2746        if !self.seeking.get() || delta > SEEK_POSITION_THRESHOLD {
2747            return;
2748        }
2749
2750        // <https://html.spec.whatwg.org/multipage/#dom-media-seek>
2751        // Step 13. Await a stable state.
2752        let task = MediaElementMicrotask::Seeked {
2753            elem: DomRoot::from_ref(self),
2754            generation_id: self.generation_id.get(),
2755        };
2756
2757        ScriptThread::await_stable_state(Microtask::MediaElement(task));
2758    }
2759
2760    fn playback_state_changed(&self, state: &PlaybackState) {
2761        let mut media_session_playback_state = MediaSessionPlaybackState::None_;
2762        match *state {
2763            PlaybackState::Paused => {
2764                media_session_playback_state = MediaSessionPlaybackState::Paused;
2765                if self.ready_state.get() == ReadyState::HaveMetadata {
2766                    self.change_ready_state(ReadyState::HaveEnoughData);
2767                }
2768            },
2769            PlaybackState::Playing => {
2770                media_session_playback_state = MediaSessionPlaybackState::Playing;
2771                if self.ready_state.get() == ReadyState::HaveMetadata {
2772                    self.change_ready_state(ReadyState::HaveEnoughData);
2773                }
2774            },
2775            PlaybackState::Buffering => {
2776                // Do not send the media session playback state change event
2777                // in this case as a None_ state is expected to clean up the
2778                // session.
2779                return;
2780            },
2781            _ => {},
2782        };
2783        debug!(
2784            "Sending media session event playback state changed to {:?}",
2785            media_session_playback_state
2786        );
2787        self.send_media_session_event(MediaSessionEvent::PlaybackStateChange(
2788            media_session_playback_state,
2789        ));
2790    }
2791
2792    fn seekable(&self) -> TimeRangesContainer {
2793        let mut seekable = TimeRangesContainer::default();
2794        if let Some(ref player) = *self.player.borrow() {
2795            let ranges = player.lock().unwrap().seekable();
2796            for range in ranges {
2797                let _ = seekable.add(range.start, range.end);
2798            }
2799        }
2800        seekable
2801    }
2802
2803    /// <https://html.spec.whatwg.org/multipage/#earliest-possible-position>
2804    fn earliest_possible_position(&self) -> f64 {
2805        self.seekable()
2806            .start(0)
2807            .unwrap_or_else(|_| self.current_playback_position.get())
2808    }
2809
    /// Builds the media controls UI inside a UA shadow root attached to this
    /// element: registers a per-element UUID with the document, injects the
    /// controls script (parameterized with that UUID) and the controls
    /// stylesheet into the shadow root, and finally dirties the node so the
    /// controls get laid out.
    fn render_controls(&self, cx: &mut JSContext) {
        if self.upcast::<Element>().is_shadow_host() {
            // Bail out if we are already showing the controls.
            return;
        }

        // FIXME(stevennovaryo): Recheck styling of media element to avoid
        //                       reparsing styles.
        let shadow_root = self.upcast::<Element>().attach_ua_shadow_root(cx, false);
        let document = self.owner_document();
        // The <script> element that will host the controls implementation.
        let script = Element::create(
            cx,
            QualName::new(None, ns!(html), local_name!("script")),
            None,
            &document,
            ElementCreator::ScriptCreated,
            CustomElementCreationMode::Asynchronous,
            None,
        );
        // This is our hacky way to temporarily workaround the lack of a privileged
        // JS context.
        // The media controls UI accesses the document.servoGetMediaControls(id) API
        // to get an instance to the media controls ShadowRoot.
        // `id` needs to match the internally generated UUID assigned to a media element.
        let id = Uuid::new_v4().to_string();
        document.register_media_controls(&id, &shadow_root);
        // Substitute the placeholder in the bundled controls script with the
        // UUID so the script can find this element's shadow root.
        let media_controls_script = MEDIA_CONTROL_JS.replace("@@@id@@@", &id);
        *self.media_controls_id.borrow_mut() = Some(id);
        script
            .upcast::<Node>()
            .set_text_content_for_element(cx, Some(DOMString::from(media_controls_script)));
        if let Err(e) = shadow_root
            .upcast::<Node>()
            .AppendChild(cx, script.upcast::<Node>())
        {
            warn!("Could not render media controls {:?}", e);
            return;
        }

        // The <style> element carrying the controls stylesheet.
        let style = Element::create(
            cx,
            QualName::new(None, ns!(html), local_name!("style")),
            None,
            &document,
            ElementCreator::ScriptCreated,
            CustomElementCreationMode::Asynchronous,
            None,
        );

        style
            .upcast::<Node>()
            .set_text_content_for_element(cx, Some(DOMString::from(MEDIA_CONTROL_CSS)));

        if let Err(e) = shadow_root
            .upcast::<Node>()
            .AppendChild(cx, style.upcast::<Node>())
        {
            warn!("Could not render media controls {:?}", e);
        }

        // Force a relayout so the freshly attached controls become visible.
        self.upcast::<Node>().dirty(NodeDamage::Other);
    }
2872
2873    fn remove_controls(&self) {
2874        if let Some(id) = self.media_controls_id.borrow_mut().take() {
2875            self.owner_document().unregister_media_controls(&id);
2876        }
2877    }
2878
2879    /// Gets the video frame at the current playback position.
2880    pub(crate) fn get_current_frame(&self) -> Option<VideoFrame> {
2881        self.video_renderer
2882            .lock()
2883            .unwrap()
2884            .current_frame_holder
2885            .as_ref()
2886            .map(|holder| holder.get_frame())
2887    }
2888
2889    /// Gets the current frame of the video element to present, if any.
2890    /// <https://html.spec.whatwg.org/multipage/#the-video-element:the-video-element-7>
2891    pub(crate) fn get_current_frame_to_present(&self) -> Option<MediaFrame> {
2892        let (current_frame, poster_frame) = {
2893            let renderer = self.video_renderer.lock().unwrap();
2894            (renderer.current_frame, renderer.poster_frame)
2895        };
2896
2897        // If the show poster flag is set (or there is no current video frame to
2898        // present) AND there is a poster frame, present that.
2899        if (self.show_poster.get() || current_frame.is_none()) && poster_frame.is_some() {
2900            return poster_frame;
2901        }
2902
2903        current_frame
2904    }
2905
2906    /// By default the audio is rendered through the audio sink automatically
2907    /// selected by the servo-media Player instance. However, in some cases, like
2908    /// the WebAudio MediaElementAudioSourceNode, we need to set a custom audio
2909    /// renderer.
2910    pub(crate) fn set_audio_renderer(
2911        &self,
2912        audio_renderer: Option<Arc<Mutex<dyn AudioRenderer>>>,
2913        cx: &mut js::context::JSContext,
2914    ) {
2915        *self.audio_renderer.borrow_mut() = audio_renderer;
2916
2917        let had_player = {
2918            if let Some(ref player) = *self.player.borrow() {
2919                if let Err(error) = player.lock().unwrap().stop() {
2920                    error!("Could not stop player: {error:?}");
2921                }
2922                true
2923            } else {
2924                false
2925            }
2926        };
2927
2928        if had_player {
2929            self.media_element_load_algorithm(cx);
2930        }
2931    }
2932
    /// Registers this element with the window's `MediaSession` and forwards
    /// `event` to it, so the embedder's media session UI stays up to date.
    fn send_media_session_event(&self, event: MediaSessionEvent) {
        let global = self.global();
        let media_session = global.as_window().Navigator().MediaSession();

        // Make sure the session knows which media element the event concerns.
        media_session.register_media_instance(self);

        media_session.send_event(event);
    }
2941
2942    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-resource>
2943    pub(crate) fn origin_is_clean(&self) -> bool {
2944        // Step 5.local (media provider object).
2945        if self.src_object.borrow().is_some() {
2946            // The resource described by the current media resource, if any,
2947            // contains the media data. It is CORS-same-origin.
2948            return true;
2949        }
2950
2951        // Step 5.remote (URL record).
2952        if self.resource_url.borrow().is_some() {
2953            // Update the media data with the contents
2954            // of response's unsafe response obtained in this fashion.
2955            // Response can be CORS-same-origin or CORS-cross-origin;
2956            if let Some(ref current_fetch_context) = *self.current_fetch_context.borrow() {
2957                return current_fetch_context.origin_is_clean();
2958            }
2959        }
2960
2961        true
2962    }
2963}
2964
2965impl HTMLMediaElementMethods<crate::DomTypeHolder> for HTMLMediaElement {
    /// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
    fn NetworkState(&self) -> u16 {
        // The enum discriminants match the NETWORK_* IDL constants.
        self.network_state.get() as u16
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
    fn ReadyState(&self) -> u16 {
        // The enum discriminants match the HAVE_* IDL constants.
        self.ready_state.get() as u16
    }

    // The following IDL attributes simply reflect content attributes; the
    // macros generate the boilerplate getters and setters.

    // https://html.spec.whatwg.org/multipage/#dom-media-autoplay
    make_bool_getter!(Autoplay, "autoplay");
    // https://html.spec.whatwg.org/multipage/#dom-media-autoplay
    make_bool_setter!(SetAutoplay, "autoplay");

    // https://html.spec.whatwg.org/multipage/#attr-media-loop
    make_bool_getter!(Loop, "loop");
    // https://html.spec.whatwg.org/multipage/#attr-media-loop
    make_bool_setter!(SetLoop, "loop");

    // https://html.spec.whatwg.org/multipage/#dom-media-defaultmuted
    // Note: defaultMuted reflects the "muted" content attribute, which is
    // distinct from the muted IDL attribute handled by Muted()/SetMuted().
    make_bool_getter!(DefaultMuted, "muted");
    // https://html.spec.whatwg.org/multipage/#dom-media-defaultmuted
    make_bool_setter!(SetDefaultMuted, "muted");

    // https://html.spec.whatwg.org/multipage/#dom-media-controls
    make_bool_getter!(Controls, "controls");
    // https://html.spec.whatwg.org/multipage/#dom-media-controls
    make_bool_setter!(SetControls, "controls");

    // https://html.spec.whatwg.org/multipage/#dom-media-src
    make_url_getter!(Src, "src");

    // https://html.spec.whatwg.org/multipage/#dom-media-src
    make_url_setter!(SetSrc, "src");
3001
    /// <https://html.spec.whatwg.org/multipage/#dom-media-crossOrigin>
    // Reflects the crossorigin content attribute via the shared helper, which
    // limits the value to the known CORS settings keywords.
    fn GetCrossOrigin(&self) -> Option<DOMString> {
        reflect_cross_origin_attribute(self.upcast::<Element>())
    }
    /// <https://html.spec.whatwg.org/multipage/#dom-media-crossOrigin>
    fn SetCrossOrigin(&self, cx: &mut JSContext, value: Option<DOMString>) {
        set_cross_origin_attribute(cx, self.upcast::<Element>(), value);
    }
3010
3011    /// <https://html.spec.whatwg.org/multipage/#dom-media-muted>
3012    fn Muted(&self) -> bool {
3013        self.muted.get()
3014    }
3015
3016    /// <https://html.spec.whatwg.org/multipage/#dom-media-muted>
3017    fn SetMuted(&self, value: bool) {
3018        if self.muted.get() == value {
3019            return;
3020        }
3021
3022        self.muted.set(value);
3023
3024        if let Some(ref player) = *self.player.borrow() {
3025            if let Err(error) = player.lock().unwrap().set_mute(value) {
3026                warn!("Could not set mute state: {error:?}");
3027            }
3028        }
3029
3030        // The user agent must queue a media element task given the media element to fire an event
3031        // named volumechange at the media element.
3032        self.queue_media_element_task_to_fire_event(atom!("volumechange"));
3033
3034        // Then, if the media element is not allowed to play, the user agent must run the internal
3035        // pause steps for the media element.
3036        if !self.is_allowed_to_play() {
3037            self.internal_pause_steps();
3038        }
3039    }
3040
3041    /// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
3042    fn GetSrcObject(&self) -> Option<MediaStreamOrBlob> {
3043        (*self.src_object.borrow())
3044            .as_ref()
3045            .map(|src_object| match src_object {
3046                SrcObject::Blob(blob) => MediaStreamOrBlob::Blob(DomRoot::from_ref(blob)),
3047                SrcObject::MediaStream(stream) => {
3048                    MediaStreamOrBlob::MediaStream(DomRoot::from_ref(stream))
3049                },
3050            })
3051    }
3052
3053    /// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
3054    fn SetSrcObject(&self, cx: &mut js::context::JSContext, value: Option<MediaStreamOrBlob>) {
3055        *self.src_object.borrow_mut() = value.map(|value| value.into());
3056        self.media_element_load_algorithm(cx);
3057    }
3058
    // https://html.spec.whatwg.org/multipage/#attr-media-preload
    // Missing/Invalid values are user-agent defined.
    // This UA maps both the missing and the invalid value default to "auto".
    make_enumerated_getter!(
        Preload,
        "preload",
        "none" | "metadata" | "auto",
        missing => "auto",
        invalid => "auto"
    );

    // https://html.spec.whatwg.org/multipage/#attr-media-preload
    make_setter!(SetPreload, "preload");
3071
    /// <https://html.spec.whatwg.org/multipage/#dom-media-currentsrc>
    // Returns the URL of the currently selected media resource, or the empty
    // string when no resource has been selected yet.
    fn CurrentSrc(&self) -> USVString {
        USVString(self.current_src.borrow().clone())
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-load>
    fn Load(&self, cx: &mut js::context::JSContext) {
        self.media_element_load_algorithm(cx);
    }
3081
    /// <https://html.spec.whatwg.org/multipage/#dom-navigator-canplaytype>
    // Delegates MIME-type support detection to the servo-media backend and
    // maps the answer to the IDL result ("", "maybe" or "probably").
    fn CanPlayType(&self, type_: DOMString) -> CanPlayTypeResult {
        match ServoMedia::get().can_play_type(&type_.str()) {
            SupportsMediaType::No => CanPlayTypeResult::_empty,
            SupportsMediaType::Maybe => CanPlayTypeResult::Maybe,
            SupportsMediaType::Probably => CanPlayTypeResult::Probably,
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-error>
    fn GetError(&self) -> Option<DomRoot<MediaError>> {
        self.error.get()
    }
3095
3096    /// <https://html.spec.whatwg.org/multipage/#dom-media-play>
3097    fn Play(&self, cx: &mut CurrentRealm) -> Rc<Promise> {
3098        let promise = Promise::new_in_realm(cx);
3099
3100        // TODO Step 1. If the media element is not allowed to play, then return a promise rejected
3101        // with a "NotAllowedError" DOMException.
3102
3103        // Step 2. If the media element's error attribute is not null and its code is
3104        // MEDIA_ERR_SRC_NOT_SUPPORTED, then return a promise rejected with a "NotSupportedError"
3105        // DOMException.
3106        if self
3107            .error
3108            .get()
3109            .is_some_and(|e| e.Code() == MEDIA_ERR_SRC_NOT_SUPPORTED)
3110        {
3111            promise.reject_error(Error::NotSupported(None), CanGc::from_cx(cx));
3112            return promise;
3113        }
3114
3115        // Step 3. Let promise be a new promise and append promise to the list of pending play
3116        // promises.
3117        self.push_pending_play_promise(&promise);
3118
3119        // Step 4. Run the internal play steps for the media element.
3120        self.internal_play_steps(cx);
3121
3122        // Step 5. Return promise.
3123        promise
3124    }
3125
3126    /// <https://html.spec.whatwg.org/multipage/#dom-media-pause>
3127    fn Pause(&self, cx: &mut js::context::JSContext) {
3128        // Step 1. If the media element's networkState attribute has the value NETWORK_EMPTY, invoke
3129        // the media element's resource selection algorithm.
3130        if self.network_state.get() == NetworkState::Empty {
3131            self.invoke_resource_selection_algorithm(cx);
3132        }
3133
3134        // Step 2. Run the internal pause steps for the media element.
3135        self.internal_pause_steps();
3136    }
3137
3138    /// <https://html.spec.whatwg.org/multipage/#dom-media-paused>
3139    fn Paused(&self) -> bool {
3140        self.paused.get()
3141    }
3142
3143    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
3144    fn GetDefaultPlaybackRate(&self) -> Fallible<Finite<f64>> {
3145        Ok(Finite::wrap(self.default_playback_rate.get()))
3146    }
3147
3148    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
3149    fn SetDefaultPlaybackRate(&self, value: Finite<f64>) -> ErrorResult {
3150        // If the given value is not supported by the user agent, then throw a "NotSupportedError"
3151        // DOMException.
3152        let min_allowed = -64.0;
3153        let max_allowed = 64.0;
3154        if *value < min_allowed || *value > max_allowed {
3155            return Err(Error::NotSupported(None));
3156        }
3157
3158        if self.default_playback_rate.get() == *value {
3159            return Ok(());
3160        }
3161
3162        self.default_playback_rate.set(*value);
3163
3164        // The user agent must queue a media element task given the media element to fire an event
3165        // named ratechange at the media element.
3166        self.queue_media_element_task_to_fire_event(atom!("ratechange"));
3167
3168        Ok(())
3169    }
3170
3171    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
3172    fn GetPlaybackRate(&self) -> Fallible<Finite<f64>> {
3173        Ok(Finite::wrap(self.playback_rate.get()))
3174    }
3175
3176    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
3177    fn SetPlaybackRate(&self, value: Finite<f64>) -> ErrorResult {
3178        // The attribute is mutable: on setting, the user agent must follow these steps:
3179
3180        // Step 1. If the given value is not supported by the user agent, then throw a
3181        // "NotSupportedError" DOMException.
3182        let min_allowed = -64.0;
3183        let max_allowed = 64.0;
3184        if *value < min_allowed || *value > max_allowed {
3185            return Err(Error::NotSupported(None));
3186        }
3187
3188        if self.playback_rate.get() == *value {
3189            return Ok(());
3190        }
3191
3192        // Step 2. Set playbackRate to the new value, and if the element is potentially playing,
3193        // change the playback speed.
3194        self.playback_rate.set(*value);
3195
3196        if self.is_potentially_playing() {
3197            if let Some(ref player) = *self.player.borrow() {
3198                if let Err(error) = player.lock().unwrap().set_playback_rate(*value) {
3199                    warn!("Could not set the playback rate: {error:?}");
3200                }
3201            }
3202        }
3203
3204        // The user agent must queue a media element task given the media element to fire an event
3205        // named ratechange at the media element.
3206        self.queue_media_element_task_to_fire_event(atom!("ratechange"));
3207
3208        Ok(())
3209    }
3210
3211    /// <https://html.spec.whatwg.org/multipage/#dom-media-duration>
3212    fn Duration(&self) -> f64 {
3213        self.duration.get()
3214    }
3215
3216    /// <https://html.spec.whatwg.org/multipage/#dom-media-currenttime>
3217    fn CurrentTime(&self) -> Finite<f64> {
3218        Finite::wrap(if self.default_playback_start_position.get() != 0. {
3219            self.default_playback_start_position.get()
3220        } else if self.seeking.get() {
3221            // Note that the other browsers do the similar (by checking `seeking` value or clamp the
3222            // `official` position to the earliest possible position, the duration, and the seekable
3223            // ranges.
3224            // <https://github.com/whatwg/html/issues/11773>
3225            self.current_seek_position.get()
3226        } else {
3227            self.official_playback_position.get()
3228        })
3229    }
3230
3231    /// <https://html.spec.whatwg.org/multipage/#dom-media-currenttime>
3232    fn SetCurrentTime(&self, time: Finite<f64>) {
3233        if self.ready_state.get() == ReadyState::HaveNothing {
3234            self.default_playback_start_position.set(*time);
3235        } else {
3236            self.official_playback_position.set(*time);
3237            self.seek(*time, /* approximate_for_speed */ false);
3238        }
3239    }
3240
3241    /// <https://html.spec.whatwg.org/multipage/#dom-media-seeking>
3242    fn Seeking(&self) -> bool {
3243        self.seeking.get()
3244    }
3245
    /// <https://html.spec.whatwg.org/multipage/#dom-media-ended>
    // Only playback in the forwards direction that has ended (with any `loop`
    // attribute taken into account) reports true here.
    fn Ended(&self) -> bool {
        self.ended_playback(LoopCondition::Included) &&
            self.direction_of_playback() == PlaybackDirection::Forwards
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-fastseek>
    // Seeks with the approximate-for-speed flag set, trading seek precision
    // for speed.
    fn FastSeek(&self, time: Finite<f64>) {
        self.seek(*time, /* approximate_for_speed */ true);
    }
3256
3257    /// <https://html.spec.whatwg.org/multipage/#dom-media-played>
3258    fn Played(&self, can_gc: CanGc) -> DomRoot<TimeRanges> {
3259        TimeRanges::new(
3260            self.global().as_window(),
3261            self.played.borrow().clone(),
3262            can_gc,
3263        )
3264    }
3265
3266    /// <https://html.spec.whatwg.org/multipage/#dom-media-seekable>
3267    fn Seekable(&self, can_gc: CanGc) -> DomRoot<TimeRanges> {
3268        TimeRanges::new(self.global().as_window(), self.seekable(), can_gc)
3269    }
3270
3271    /// <https://html.spec.whatwg.org/multipage/#dom-media-buffered>
3272    fn Buffered(&self, can_gc: CanGc) -> DomRoot<TimeRanges> {
3273        let mut buffered = TimeRangesContainer::default();
3274        if let Some(ref player) = *self.player.borrow() {
3275            let ranges = player.lock().unwrap().buffered();
3276            for range in ranges {
3277                let _ = buffered.add(range.start, range.end);
3278            }
3279        }
3280        TimeRanges::new(self.global().as_window(), buffered, can_gc)
3281    }
3282
    /// <https://html.spec.whatwg.org/multipage/#dom-media-audiotracks>
    // The track list is created lazily on first access and cached thereafter.
    fn AudioTracks(&self, can_gc: CanGc) -> DomRoot<AudioTrackList> {
        let window = self.owner_window();
        self.audio_tracks_list
            .or_init(|| AudioTrackList::new(&window, &[], Some(self), can_gc))
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-videotracks>
    // Lazily created and cached, like AudioTracks above.
    fn VideoTracks(&self, can_gc: CanGc) -> DomRoot<VideoTrackList> {
        let window = self.owner_window();
        self.video_tracks_list
            .or_init(|| VideoTrackList::new(&window, &[], Some(self), can_gc))
    }

    /// <https://html.spec.whatwg.org/multipage/#dom-media-texttracks>
    // Lazily created and cached; note the text track list is not linked back
    // to this media element, unlike the audio/video track lists.
    fn TextTracks(&self, can_gc: CanGc) -> DomRoot<TextTrackList> {
        let window = self.owner_window();
        self.text_tracks_list
            .or_init(|| TextTrackList::new(&window, &[], can_gc))
    }
3303
    /// <https://html.spec.whatwg.org/multipage/#dom-media-addtexttrack>
    // Creates a new hidden text track with the given kind, label and language,
    // appends it to this element's text track list and returns it.
    fn AddTextTrack(
        &self,
        kind: TextTrackKind,
        label: DOMString,
        language: DOMString,
        can_gc: CanGc,
    ) -> DomRoot<TextTrack> {
        let window = self.owner_window();
        // Step 1 & 2
        // Create a new TextTrack object with an empty id and hidden mode.
        // FIXME(#22314, dlrobertson) set the ready state to Loaded
        let track = TextTrack::new(
            &window,
            "".into(),
            kind,
            label,
            language,
            TextTrackMode::Hidden,
            None,
            can_gc,
        );
        // Step 3 & 4
        self.TextTracks(can_gc).add(&track);
        // Step 5
        DomRoot::from_ref(&track)
    }
3330
3331    /// <https://html.spec.whatwg.org/multipage/#dom-media-volume>
3332    fn GetVolume(&self) -> Fallible<Finite<f64>> {
3333        Ok(Finite::wrap(self.volume.get()))
3334    }
3335
3336    /// <https://html.spec.whatwg.org/multipage/#dom-media-volume>
3337    fn SetVolume(&self, value: Finite<f64>) -> ErrorResult {
3338        // If the new value is outside the range 0.0 to 1.0 inclusive, then, on setting, an
3339        // "IndexSizeError" DOMException must be thrown instead.
3340        let minimum_volume = 0.0;
3341        let maximum_volume = 1.0;
3342        if *value < minimum_volume || *value > maximum_volume {
3343            return Err(Error::IndexSize(None));
3344        }
3345
3346        if self.volume.get() == *value {
3347            return Ok(());
3348        }
3349
3350        self.volume.set(*value);
3351
3352        if let Some(ref player) = *self.player.borrow() {
3353            if let Err(error) = player.lock().unwrap().set_volume(*value) {
3354                warn!("Could not set the volume: {error:?}");
3355            }
3356        }
3357
3358        // The user agent must queue a media element task given the media element to fire an event
3359        // named volumechange at the media element.
3360        self.queue_media_element_task_to_fire_event(atom!("volumechange"));
3361
3362        // Then, if the media element is not allowed to play, the user agent must run the internal
3363        // pause steps for the media element.
3364        if !self.is_allowed_to_play() {
3365            self.internal_pause_steps();
3366        }
3367
3368        Ok(())
3369    }
3370}
3371
3372impl VirtualMethods for HTMLMediaElement {
    /// Delegates any virtual methods not overridden here to the HTMLElement base.
    fn super_type(&self) -> Option<&dyn VirtualMethods> {
        Some(self.upcast::<HTMLElement>() as &dyn VirtualMethods)
    }
3376
    /// Reacts to mutations of the content attributes this element cares
    /// about: `muted` (initial IDL state), `src` (triggers the load
    /// algorithm) and `controls` (shows or hides the controls UI).
    fn attribute_mutated(
        &self,
        cx: &mut js::context::JSContext,
        attr: &Attr,
        mutation: AttributeMutation,
    ) {
        self.super_type()
            .unwrap()
            .attribute_mutated(cx, attr, mutation);

        match *attr.local_name() {
            local_name!("muted") => {
                // <https://html.spec.whatwg.org/multipage/#dom-media-muted>
                // When a media element is created, if the element has a muted content attribute
                // specified, then the muted IDL attribute should be set to true.
                // Only cloning and parsing count as element creation here;
                // later script mutations of the attribute must not touch the
                // IDL attribute.
                if let AttributeMutation::Set(
                    _,
                    AttributeMutationReason::ByCloning | AttributeMutationReason::ByParser,
                ) = mutation
                {
                    self.SetMuted(true);
                }
            },
            local_name!("src") => {
                // <https://html.spec.whatwg.org/multipage/#location-of-the-media-resource>
                // If a src attribute of a media element is set or changed, the user agent must invoke
                // the media element's media element load algorithm (Removing the src attribute does
                // not do this, even if there are source elements present).
                if !mutation.is_removal() {
                    self.media_element_load_algorithm(cx);
                }
            },
            local_name!("controls") => {
                // Attribute present (any value) shows the UA controls;
                // removing it tears them down.
                if mutation.new_value(attr).is_some() {
                    self.render_controls(cx);
                } else {
                    self.remove_controls();
                }
            },
            _ => (),
        };
    }
3419
3420    /// <https://html.spec.whatwg.org/multipage/#playing-the-media-resource:remove-an-element-from-a-document>
3421    fn unbind_from_tree(&self, context: &UnbindContext, can_gc: CanGc) {
3422        self.super_type().unwrap().unbind_from_tree(context, can_gc);
3423
3424        self.remove_controls();
3425
3426        if context.tree_connected {
3427            let task = MediaElementMicrotask::PauseIfNotInDocument {
3428                elem: DomRoot::from_ref(self),
3429            };
3430            ScriptThread::await_stable_state(Microtask::MediaElement(task));
3431        }
3432    }
3433
3434    fn adopting_steps(&self, cx: &mut JSContext, old_doc: &Document) {
3435        self.super_type().unwrap().adopting_steps(cx, old_doc);
3436
3437        // Note that media control id should be adopting between documents so "privileged"
3438        // document.servoGetMediaControls(id) API is keeping access to the whitelist of media
3439        // controls identifiers.
3440        if let Some(id) = &*self.media_controls_id.borrow() {
3441            let Some(shadow_root) = self.upcast::<Element>().shadow_root() else {
3442                error!("Missing media controls shadow root");
3443                return;
3444            };
3445
3446            old_doc.unregister_media_controls(id);
3447            self.owner_document()
3448                .register_media_controls(id, &shadow_root);
3449        }
3450    }
3451}
3452
/// Microtasks queued by a media element to be run at a stable state.
/// Variants carrying a `generation_id` are dropped at execution time when the
/// element has since started a new load/seek generation.
#[derive(JSTraceable, MallocSizeOf)]
pub(crate) enum MediaElementMicrotask {
    /// Continue the resource selection algorithm, resolving candidate URLs
    /// against `base_url`.
    ResourceSelection {
        elem: DomRoot<HTMLMediaElement>,
        generation_id: u32,
        #[no_trace]
        base_url: ServoUrl,
    },
    /// Run the internal pause steps if the element is no longer connected.
    PauseIfNotInDocument {
        elem: DomRoot<HTMLMediaElement>,
    },
    /// Finish a seek operation (spec "seek" algorithm, step 13 onwards).
    Seeked {
        elem: DomRoot<HTMLMediaElement>,
        generation_id: u32,
    },
    /// Try the next <source> child during resource selection.
    SelectNextSourceChild {
        elem: DomRoot<HTMLMediaElement>,
        generation_id: u32,
    },
    /// Resume source selection after waiting for new <source> children.
    SelectNextSourceChildAfterWait {
        elem: DomRoot<HTMLMediaElement>,
        generation_id: u32,
    },
}
3477
3478impl MicrotaskRunnable for MediaElementMicrotask {
3479    fn handler(&self, cx: &mut js::context::JSContext) {
3480        match self {
3481            &MediaElementMicrotask::ResourceSelection {
3482                ref elem,
3483                generation_id,
3484                ref base_url,
3485            } => {
3486                if generation_id == elem.generation_id.get() {
3487                    elem.resource_selection_algorithm_sync(base_url.clone(), cx);
3488                }
3489            },
3490            MediaElementMicrotask::PauseIfNotInDocument { elem } => {
3491                if !elem.upcast::<Node>().is_connected() {
3492                    elem.internal_pause_steps();
3493                }
3494            },
3495            &MediaElementMicrotask::Seeked {
3496                ref elem,
3497                generation_id,
3498            } => {
3499                if generation_id == elem.generation_id.get() {
3500                    elem.seek_end();
3501                }
3502            },
3503            &MediaElementMicrotask::SelectNextSourceChild {
3504                ref elem,
3505                generation_id,
3506            } => {
3507                if generation_id == elem.generation_id.get() {
3508                    elem.select_next_source_child(CanGc::from_cx(cx));
3509                }
3510            },
3511            &MediaElementMicrotask::SelectNextSourceChildAfterWait {
3512                ref elem,
3513                generation_id,
3514            } => {
3515                if generation_id == elem.generation_id.get() {
3516                    elem.select_next_source_child_after_wait(cx);
3517                }
3518            },
3519        }
3520    }
3521
3522    fn enter_realm<'cx>(&self, cx: &'cx mut js::context::JSContext) -> AutoRealm<'cx> {
3523        match self {
3524            &MediaElementMicrotask::ResourceSelection { ref elem, .. } |
3525            &MediaElementMicrotask::PauseIfNotInDocument { ref elem } |
3526            &MediaElementMicrotask::Seeked { ref elem, .. } |
3527            &MediaElementMicrotask::SelectNextSourceChild { ref elem, .. } |
3528            &MediaElementMicrotask::SelectNextSourceChildAfterWait { ref elem, .. } => {
3529                enter_auto_realm(cx, &**elem)
3530            },
3531        }
3532    }
3533}
3534
/// The kind of media resource chosen by the resource selection algorithm.
enum Resource {
    /// A media provider object (presumably one assigned via `srcObject`) —
    /// NOTE(review): confirm against the resource selection caller.
    Object,
    /// A URL to fetch the media data from.
    Url(ServoUrl),
}
3539
/// A unit of data queued for delivery to the media player backend.
#[derive(Debug, MallocSizeOf, PartialEq)]
enum DataBuffer {
    /// A chunk of fetched media bytes.
    Payload(Vec<u8>),
    /// Marker indicating that no further data will follow.
    EndOfStream,
}
3545
/// Holds media data received from the network until the media player is ready
/// to consume it.
#[derive(MallocSizeOf)]
struct BufferedDataSource {
    /// During initial setup and seeking (including clearing the buffer queue
    /// and resetting the end-of-stream state), the data source should be locked and
    /// any request for processing should be ignored until the media player informs us
    /// via the NeedData event that it is ready to accept incoming data.
    locked: Cell<bool>,
    /// Temporary storage for incoming data.
    buffers: VecDeque<DataBuffer>,
}
3556
3557impl BufferedDataSource {
3558    fn new() -> BufferedDataSource {
3559        BufferedDataSource {
3560            locked: Cell::new(true),
3561            buffers: VecDeque::default(),
3562        }
3563    }
3564
3565    fn set_locked(&self, locked: bool) {
3566        self.locked.set(locked)
3567    }
3568
3569    fn add_buffer_to_queue(&mut self, buffer: DataBuffer) {
3570        debug_assert_ne!(
3571            self.buffers.back(),
3572            Some(&DataBuffer::EndOfStream),
3573            "The media backend not expects any further data after end of stream"
3574        );
3575
3576        self.buffers.push_back(buffer);
3577    }
3578
3579    fn process_into_player_from_queue(
3580        &mut self,
3581        player: &Arc<Mutex<dyn Player>>,
3582    ) -> Result<(), PlayerError> {
3583        // Early out if any request for processing should be ignored.
3584        if self.locked.get() {
3585            return Ok(());
3586        }
3587
3588        while let Some(buffer) = self.buffers.pop_front() {
3589            match buffer {
3590                DataBuffer::Payload(payload) => {
3591                    if let Err(error) = player.lock().unwrap().push_data(payload) {
3592                        warn!("Could not push input data to player: {error:?}");
3593                        return Err(error);
3594                    }
3595                },
3596                DataBuffer::EndOfStream => {
3597                    if let Err(error) = player.lock().unwrap().end_of_stream() {
3598                        warn!("Could not signal EOS to player: {error:?}");
3599                        return Err(error);
3600                    }
3601                },
3602            }
3603        }
3604
3605        Ok(())
3606    }
3607
3608    fn reset(&mut self) {
3609        self.locked.set(true);
3610        self.buffers.clear();
3611    }
3612}
3613
/// Indicates the reason why a fetch request was cancelled.
#[derive(Debug, MallocSizeOf, PartialEq)]
enum CancelReason {
    /// We were asked to stop pushing data to the player.
    Backoff,
    /// An error occurred while fetching the media data.
    Error,
    /// The fetching process is aborted by the user.
    Abort,
}
3624
/// Bookkeeping for an in-flight media resource fetch.
#[derive(MallocSizeOf)]
pub(crate) struct HTMLMediaElementFetchContext {
    /// The fetch request id.
    request_id: RequestId,
    /// Some if the request has been cancelled.
    cancel_reason: Option<CancelReason>,
    /// Indicates whether the fetched stream is seekable.
    is_seekable: bool,
    /// Indicates whether the fetched stream is origin clean.
    origin_clean: bool,
    /// The buffered data source to be processed by the media backend.
    data_source: RefCell<BufferedDataSource>,
    /// Fetch canceller. Allows cancelling the current fetch request by
    /// manually calling its .cancel() method or automatically on Drop.
    fetch_canceller: FetchCanceller,
}
3641
3642impl HTMLMediaElementFetchContext {
3643    fn new(
3644        request_id: RequestId,
3645        core_resource_thread: CoreResourceThread,
3646    ) -> HTMLMediaElementFetchContext {
3647        HTMLMediaElementFetchContext {
3648            request_id,
3649            cancel_reason: None,
3650            is_seekable: false,
3651            origin_clean: true,
3652            data_source: RefCell::new(BufferedDataSource::new()),
3653            fetch_canceller: FetchCanceller::new(request_id, false, core_resource_thread),
3654        }
3655    }
3656
3657    fn request_id(&self) -> RequestId {
3658        self.request_id
3659    }
3660
3661    fn is_seekable(&self) -> bool {
3662        self.is_seekable
3663    }
3664
3665    fn set_seekable(&mut self, seekable: bool) {
3666        self.is_seekable = seekable;
3667    }
3668
3669    fn origin_is_clean(&self) -> bool {
3670        self.origin_clean
3671    }
3672
3673    fn set_origin_clean(&mut self, origin_clean: bool) {
3674        self.origin_clean = origin_clean;
3675    }
3676
3677    fn data_source(&self) -> &RefCell<BufferedDataSource> {
3678        &self.data_source
3679    }
3680
3681    fn cancel(&mut self, reason: CancelReason) {
3682        if self.cancel_reason.is_some() {
3683            return;
3684        }
3685        self.cancel_reason = Some(reason);
3686        self.data_source.borrow_mut().reset();
3687        self.fetch_canceller.abort();
3688    }
3689
3690    fn cancel_reason(&self) -> &Option<CancelReason> {
3691        &self.cancel_reason
3692    }
3693}
3694
/// Fetch listener that routes media resource data from the network to the
/// media element and its player.
struct HTMLMediaElementFetchListener {
    /// The element that initiated the request.
    element: Trusted<HTMLMediaElement>,
    /// The generation of the media element when this fetch started.
    generation_id: u32,
    /// The fetch request id.
    request_id: RequestId,
    /// Earliest time at which the next `progress` event may be fired;
    /// progress notifications are throttled to roughly one per 350ms.
    next_progress_event: Instant,
    /// Url for the resource.
    url: ServoUrl,
    /// Expected content length of the media asset being fetched or played.
    expected_content_length: Option<u64>,
    /// Number of bytes of the media asset fetched so far.
    fetched_content_length: u64,
    /// Discarded content length from the network for the ongoing
    /// request if range requests are not supported. Seek requests set it
    /// to the required position (in bytes).
    content_length_to_discard: u64,
}
3715
impl FetchResponseListener for HTMLMediaElementFetchListener {
    // Media fetches have no request body to stream; nothing to do here.
    fn process_request_body(&mut self, _: RequestId) {}

    /// Handles the response metadata: runs the failure branches of the media
    /// data processing steps on a non-success status, records seekability and
    /// origin-cleanness on the current fetch context, and forwards the
    /// expected content length (from Content-Range/Content-Length) to the
    /// player.
    fn process_response(
        &mut self,
        cx: &mut js::context::JSContext,
        _: RequestId,
        metadata: Result<FetchMetadata, NetworkError>,
    ) {
        let element = self.element.root();

        // A filtered response is only origin-clean for the `Basic` and `Cors`
        // filters; other filtered responses taint the origin.
        let (metadata, origin_clean) = match metadata {
            Ok(fetch_metadata) => match fetch_metadata {
                FetchMetadata::Unfiltered(metadata) => (Some(metadata), true),
                FetchMetadata::Filtered { filtered, unsafe_ } => (
                    Some(unsafe_),
                    matches!(
                        filtered,
                        FilteredMetadata::Basic(_) | FilteredMetadata::Cors(_)
                    ),
                ),
            },
            // Network errors are handled via the !status_is_success path below.
            Err(_) => (None, true),
        };

        // A 206 Partial Content status means the server honored our range
        // request, so the stream can be seeked with further range requests.
        let (status_is_success, is_seekable) =
            metadata.as_ref().map_or((false, false), |metadata| {
                let status = &metadata.status;
                (status.is_success(), *status == StatusCode::PARTIAL_CONTENT)
            });

        // <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
        if !status_is_success {
            if element.ready_state.get() == ReadyState::HaveNothing {
                // => "If the media data cannot be fetched at all, due to network errors..."
                element.media_data_processing_failure_steps();
            } else {
                // => "If the connection is interrupted after some media data has been received..."
                element.media_data_processing_fatal_steps(MEDIA_ERR_NETWORK, cx);
            }
            return;
        }

        // Record what we just learned on the fetch context, if one is still
        // active for this element.
        if let Some(ref mut current_fetch_context) = *element.current_fetch_context.borrow_mut() {
            current_fetch_context.set_seekable(is_seekable);
            current_fetch_context.set_origin_clean(origin_clean);
        }

        if let Some(metadata) = metadata.as_ref() {
            if let Some(headers) = metadata.headers.as_ref() {
                // For range requests we get the size of the media asset from the Content-Range
                // header. Otherwise, we get it from the Content-Length header.
                let content_length =
                    if let Some(content_range) = headers.typed_get::<ContentRange>() {
                        content_range.bytes_len()
                    } else {
                        headers
                            .typed_get::<ContentLength>()
                            .map(|content_length| content_length.0)
                    };

                // We only set the expected input size if it changes.
                if content_length != self.expected_content_length {
                    if let Some(content_length) = content_length {
                        self.expected_content_length = Some(content_length);
                    }
                }
            }
        }

        // Explicit media player initialization with live/seekable source.
        if let Some(expected_content_length) = self.expected_content_length {
            if let Err(e) = element
                .player
                .borrow()
                .as_ref()
                .unwrap()
                .lock()
                .unwrap()
                .set_input_size(expected_content_length)
            {
                warn!("Could not set player input size {:?}", e);
            }
        }
    }

    /// Feeds one chunk of the response body to the player via the buffered
    /// data source, discarding leading bytes when a seek on a non-seekable
    /// stream requires skipping ahead, and fires `progress` at most every
    /// 350ms.
    fn process_response_chunk(
        &mut self,
        _: &mut js::context::JSContext,
        _: RequestId,
        chunk: Vec<u8>,
    ) {
        let element = self.element.root();

        self.fetched_content_length += chunk.len() as u64;

        // If an error was received previously, we skip processing the payload.
        if let Some(ref mut current_fetch_context) = *element.current_fetch_context.borrow_mut() {
            if let Some(CancelReason::Backoff) = current_fetch_context.cancel_reason() {
                return;
            }

            // Discard chunk of the response body if fetch context doesn't support range requests.
            let payload = if !current_fetch_context.is_seekable() &&
                self.content_length_to_discard != 0
            {
                if chunk.len() as u64 > self.content_length_to_discard {
                    // The chunk straddles the discard boundary: drop the
                    // prefix and keep the remainder.
                    let shrink_chunk = chunk[self.content_length_to_discard as usize..].to_vec();
                    self.content_length_to_discard = 0;
                    shrink_chunk
                } else {
                    // Completely discard this response chunk.
                    self.content_length_to_discard -= chunk.len() as u64;
                    return;
                }
            } else {
                chunk
            };

            if let Err(e) = {
                let mut data_source = current_fetch_context.data_source().borrow_mut();
                data_source.add_buffer_to_queue(DataBuffer::Payload(payload));
                data_source
                    .process_into_player_from_queue(element.player.borrow().as_ref().unwrap())
            } {
                // If we are pushing too much data and we know that we can
                // restart the download later from where we left, we cancel
                // the current request. Otherwise, we continue the request
                // assuming that we may drop some frames.
                if e == PlayerError::EnoughData {
                    current_fetch_context.cancel(CancelReason::Backoff);
                }
                return;
            }
        }

        // <https://html.spec.whatwg.org/multipage/#concept-media-load-resource>
        // While the load is not suspended (see below), every 350ms (±200ms) or for every byte
        // received, whichever is least frequent, queue a media element task given the media element
        // to fire an event named progress at the element.
        if Instant::now() > self.next_progress_event {
            element.queue_media_element_task_to_fire_event(atom!("progress"));
            self.next_progress_event = Instant::now() + Duration::from_millis(350);
        }
    }

    /// Finalizes the fetch: on success, signals end-of-stream to the player
    /// and fires `progress`/`suspend` per the spec; on failure, runs the
    /// appropriate branch of the media data processing steps. Always submits
    /// resource timing.
    fn process_response_eof(
        self,
        cx: &mut js::context::JSContext,
        _: RequestId,
        status: Result<(), NetworkError>,
        timing: ResourceFetchTiming,
    ) {
        let element = self.element.root();

        // <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
        if status.is_ok() && self.fetched_content_length != 0 {
            // => "Once the entire media resource has been fetched..."

            // There are no more chunks of the response body forthcoming, so we can
            // go ahead and notify the media backend not to expect any further data.
            if let Some(ref mut current_fetch_context) = *element.current_fetch_context.borrow_mut()
            {
                // On initial state change READY -> PAUSED the media player perform
                // seek to initial position by event with seek segment (TIME format)
                // while media stack operates in BYTES format and configuring segment
                // start and stop positions without the total size of the stream is not
                // possible. As fallback the media player perform seek with BYTES format
                // and initiate seek request via "seek-data" callback with required offset.
                if self.expected_content_length.is_none() {
                    if let Err(e) = element
                        .player
                        .borrow()
                        .as_ref()
                        .unwrap()
                        .lock()
                        .unwrap()
                        .set_input_size(self.fetched_content_length)
                    {
                        warn!("Could not set player input size {:?}", e);
                    }
                }

                let mut data_source = current_fetch_context.data_source().borrow_mut();

                data_source.add_buffer_to_queue(DataBuffer::EndOfStream);
                let _ = data_source
                    .process_into_player_from_queue(element.player.borrow().as_ref().unwrap());
            }

            // Step 1. Fire an event named progress at the media element.
            element
                .upcast::<EventTarget>()
                .fire_event(atom!("progress"), CanGc::from_cx(cx));

            // Step 2. Set the networkState to NETWORK_IDLE and fire an event named suspend at the
            // media element.
            element.network_state.set(NetworkState::Idle);

            element
                .upcast::<EventTarget>()
                .fire_event(atom!("suspend"), CanGc::from_cx(cx));
        } else if status.is_err() && element.ready_state.get() != ReadyState::HaveNothing {
            // => "If the connection is interrupted after some media data has been received..."
            element.media_data_processing_fatal_steps(MEDIA_ERR_NETWORK, cx);
        } else {
            // => "If the media data can be fetched but is found by inspection to be in an
            // unsupported format, or can otherwise not be rendered at all"
            element.media_data_processing_failure_steps();
        }

        network_listener::submit_timing(cx, &self, &status, &timing);
    }

    /// Reports Content-Security-Policy violations raised by this fetch to the
    /// element's global scope.
    fn process_csp_violations(&mut self, _request_id: RequestId, violations: Vec<Violation>) {
        let global = &self.resource_timing_global();
        global.report_csp_violations(violations, None, None);
    }

    /// Whether fetch callbacks should still be delivered: the element must be
    /// on the same load generation, still have a player, and the current
    /// fetch context must match this listener's request and not have been
    /// cancelled by an error or user abort. A `Backoff` cancellation does not
    /// suppress callbacks.
    fn should_invoke(&self) -> bool {
        let element = self.element.root();

        if element.generation_id.get() != self.generation_id || element.player.borrow().is_none() {
            return false;
        }

        let Some(ref current_fetch_context) = *element.current_fetch_context.borrow() else {
            return false;
        };

        // Whether the new fetch request was triggered.
        if current_fetch_context.request_id() != self.request_id {
            return false;
        }

        // Whether the current fetch request was cancelled due to a network or decoding error, or
        // was aborted by the user.
        if let Some(cancel_reason) = current_fetch_context.cancel_reason() {
            if matches!(*cancel_reason, CancelReason::Error | CancelReason::Abort) {
                return false;
            }
        }

        true
    }
}
3962
3963impl ResourceTimingListener for HTMLMediaElementFetchListener {
3964    fn resource_timing_information(&self) -> (InitiatorType, ServoUrl) {
3965        let initiator_type = InitiatorType::LocalName(
3966            self.element
3967                .root()
3968                .upcast::<Element>()
3969                .local_name()
3970                .to_string(),
3971        );
3972        (initiator_type, self.url.clone())
3973    }
3974
3975    fn resource_timing_global(&self) -> DomRoot<GlobalScope> {
3976        self.element.root().owner_document().global()
3977    }
3978}
3979
3980impl HTMLMediaElementFetchListener {
3981    fn new(element: &HTMLMediaElement, request_id: RequestId, url: ServoUrl, offset: u64) -> Self {
3982        Self {
3983            element: Trusted::new(element),
3984            generation_id: element.generation_id.get(),
3985            request_id,
3986            next_progress_event: Instant::now() + Duration::from_millis(350),
3987            url,
3988            expected_content_length: None,
3989            fetched_content_length: 0,
3990            content_length_to_discard: offset,
3991        }
3992    }
3993}
3994
/// The [`HTMLMediaElementEventHandler`] is a structure responsible for handling media events for
/// the [`HTMLMediaElement`] and exists to decouple ownership of the [`HTMLMediaElement`] from IPC
/// router callback.
#[derive(JSTraceable, MallocSizeOf)]
struct HTMLMediaElementEventHandler {
    /// Weak reference to the element; it is rooted on each event, so a
    /// collected element simply stops receiving events.
    element: WeakRef<HTMLMediaElement>,
}
4002
#[expect(unsafe_code)]
// SAFETY(review): `WeakRef<HTMLMediaElement>` is not inherently thread-safe.
// This impl appears to rely on the handler only ever being rooted and dropped
// on the script thread (see the `assert_in_script` in the `Drop` impl below),
// even though the handler is moved into the IPC router callback — confirm
// that all router callbacks uphold this invariant.
unsafe impl Send for HTMLMediaElementEventHandler {}
4005
4006impl HTMLMediaElementEventHandler {
4007    fn new(element: &HTMLMediaElement) -> Self {
4008        Self {
4009            element: WeakRef::new(element),
4010        }
4011    }
4012
4013    fn handle_player_event(
4014        &self,
4015        player_id: usize,
4016        event: PlayerEvent,
4017        cx: &mut js::context::JSContext,
4018    ) {
4019        let Some(element) = self.element.root() else {
4020            return;
4021        };
4022
4023        // Abort event processing if the associated media player is outdated.
4024        if element.player_id().is_none_or(|id| id != player_id) {
4025            return;
4026        }
4027
4028        match event {
4029            PlayerEvent::DurationChanged(duration) => element.playback_duration_changed(duration),
4030            PlayerEvent::EndOfStream => element.playback_end(),
4031            PlayerEvent::EnoughData => element.playback_enough_data(),
4032            PlayerEvent::Error(ref error) => element.playback_error(error, cx),
4033            PlayerEvent::MetadataUpdated(ref metadata) => {
4034                element.playback_metadata_updated(metadata, CanGc::from_cx(cx))
4035            },
4036            PlayerEvent::NeedData => element.playback_need_data(),
4037            PlayerEvent::PositionChanged(position) => element.playback_position_changed(position),
4038            PlayerEvent::SeekData(offset, seek_lock) => {
4039                element.fetch_request(Some(offset), Some(seek_lock))
4040            },
4041            PlayerEvent::SeekDone(position) => element.playback_seek_done(position),
4042            PlayerEvent::StateChanged(ref state) => element.playback_state_changed(state),
4043            PlayerEvent::VideoFrameUpdated => element.playback_video_frame_updated(),
4044        }
4045    }
4046}
4047
impl Drop for HTMLMediaElementEventHandler {
    /// Asserts that destruction happens on the script thread, which is what
    /// makes the `unsafe impl Send` above sound.
    fn drop(&mut self) {
        // The weak reference to the media element is not thread-safe and MUST be deleted on the
        // script thread, which is guaranteed by ownership of the `event handler` in the IPC router
        // callback (queued task to the media element task source) and the media element itself.
        assert_in_script();
    }
}