script/dom/html/htmlmediaelement.rs

/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/. */

use std::cell::{Cell, RefCell};
use std::collections::VecDeque;
use std::rc::Rc;
use std::sync::{Arc, Mutex};
use std::time::{Duration, Instant};
use std::{f64, mem};

use compositing_traits::{CrossProcessCompositorApi, ImageUpdate, SerializableImageData};
use content_security_policy::sandboxing_directive::SandboxingFlagSet;
use dom_struct::dom_struct;
use embedder_traits::{MediaPositionState, MediaSessionEvent, MediaSessionPlaybackState};
use euclid::default::Size2D;
use headers::{ContentLength, ContentRange, HeaderMapExt};
use html5ever::{LocalName, Prefix, QualName, local_name, ns};
use http::StatusCode;
use http::header::{self, HeaderMap, HeaderValue};
use ipc_channel::ipc::{self, IpcSharedMemory, channel};
use ipc_channel::router::ROUTER;
use js::jsapi::JSAutoRealm;
use layout_api::MediaFrame;
use media::{GLPlayerMsg, GLPlayerMsgForward, WindowGLContext};
use net_traits::request::{Destination, RequestId};
use net_traits::{
    CoreResourceThread, FetchMetadata, FetchResponseListener, FilteredMetadata, NetworkError,
    ResourceFetchTiming, ResourceTimingType,
};
use pixels::RasterImage;
use script_bindings::codegen::GenericBindings::TimeRangesBinding::TimeRangesMethods;
use script_bindings::codegen::InheritTypes::{
    ElementTypeId, HTMLElementTypeId, HTMLMediaElementTypeId, NodeTypeId,
};
use servo_config::pref;
use servo_media::player::audio::AudioRenderer;
use servo_media::player::video::{VideoFrame, VideoFrameRenderer};
use servo_media::player::{PlaybackState, Player, PlayerError, PlayerEvent, SeekLock, StreamType};
use servo_media::{ClientContextId, ServoMedia, SupportsMediaType};
use servo_url::ServoUrl;
use stylo_atoms::Atom;
use webrender_api::{
    ExternalImageData, ExternalImageId, ExternalImageType, ImageBufferKind, ImageDescriptor,
    ImageDescriptorFlags, ImageFormat, ImageKey,
};

use crate::document_loader::{LoadBlocker, LoadType};
use crate::dom::attr::Attr;
use crate::dom::audio::audiotrack::AudioTrack;
use crate::dom::audio::audiotracklist::AudioTrackList;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::HTMLMediaElementBinding::{
    CanPlayTypeResult, HTMLMediaElementConstants, HTMLMediaElementMethods,
};
use crate::dom::bindings::codegen::Bindings::MediaErrorBinding::MediaErrorConstants::*;
use crate::dom::bindings::codegen::Bindings::MediaErrorBinding::MediaErrorMethods;
use crate::dom::bindings::codegen::Bindings::NavigatorBinding::Navigator_Binding::NavigatorMethods;
use crate::dom::bindings::codegen::Bindings::NodeBinding::Node_Binding::NodeMethods;
use crate::dom::bindings::codegen::Bindings::TextTrackBinding::{TextTrackKind, TextTrackMode};
use crate::dom::bindings::codegen::Bindings::URLBinding::URLMethods;
use crate::dom::bindings::codegen::Bindings::WindowBinding::Window_Binding::WindowMethods;
use crate::dom::bindings::codegen::UnionTypes::{
    MediaStreamOrBlob, VideoTrackOrAudioTrackOrTextTrack,
};
use crate::dom::bindings::error::{Error, ErrorResult, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::DomGlobal;
use crate::dom::bindings::root::{Dom, DomRoot, MutNullableDom};
use crate::dom::bindings::str::{DOMString, USVString};
use crate::dom::blob::Blob;
use crate::dom::csp::{GlobalCspReporting, Violation};
use crate::dom::document::Document;
use crate::dom::element::{
    AttributeMutation, CustomElementCreationMode, Element, ElementCreator,
    cors_setting_for_element, reflect_cross_origin_attribute, set_cross_origin_attribute,
};
use crate::dom::event::Event;
use crate::dom::eventtarget::EventTarget;
use crate::dom::globalscope::GlobalScope;
use crate::dom::html::htmlelement::HTMLElement;
use crate::dom::html::htmlsourceelement::HTMLSourceElement;
use crate::dom::html::htmlvideoelement::HTMLVideoElement;
use crate::dom::mediaerror::MediaError;
use crate::dom::mediafragmentparser::MediaFragmentParser;
use crate::dom::medialist::MediaList;
use crate::dom::mediastream::MediaStream;
use crate::dom::node::{Node, NodeDamage, NodeTraits, UnbindContext};
use crate::dom::performance::performanceresourcetiming::InitiatorType;
use crate::dom::promise::Promise;
use crate::dom::texttrack::TextTrack;
use crate::dom::texttracklist::TextTrackList;
use crate::dom::timeranges::{TimeRanges, TimeRangesContainer};
use crate::dom::trackevent::TrackEvent;
use crate::dom::url::URL;
use crate::dom::videotrack::VideoTrack;
use crate::dom::videotracklist::VideoTrackList;
use crate::dom::virtualmethods::VirtualMethods;
use crate::fetch::{FetchCanceller, create_a_potential_cors_request};
use crate::microtask::{Microtask, MicrotaskRunnable};
use crate::network_listener::{self, PreInvoke, ResourceTimingListener};
use crate::realms::{InRealm, enter_realm};
use crate::script_runtime::CanGc;
use crate::script_thread::ScriptThread;

/// A CSS file to style the media controls.
static MEDIA_CONTROL_CSS: &str = include_str!("../../resources/media-controls.css");

/// A JS file to control the media controls.
static MEDIA_CONTROL_JS: &str = include_str!("../../resources/media-controls.js");

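/// Whether the frame held by a `FrameHolder` may currently be replaced
/// (`Unlocked`) or is in use and must stay stable (`Locked`).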
#[derive(MallocSizeOf, PartialEq)]
enum FrameStatus {
    Locked,
    Unlocked,
}

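/// The most recently received `VideoFrame` together with its lock status.
/// The frame is only replaced while unlocked, and its texture id and size may
/// only be queried while locked.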
#[derive(MallocSizeOf)]
struct FrameHolder(
    FrameStatus,
    #[ignore_malloc_size_of = "defined in servo-media"] VideoFrame,
);

impl FrameHolder {
    fn new(frame: VideoFrame) -> FrameHolder {
        FrameHolder(FrameStatus::Unlocked, frame)
    }

    fn lock(&mut self) {
        if self.0 == FrameStatus::Unlocked {
            self.0 = FrameStatus::Locked;
        };
    }

    fn unlock(&mut self) {
        if self.0 == FrameStatus::Locked {
            self.0 = FrameStatus::Unlocked;
        };
    }

    fn set(&mut self, new_frame: VideoFrame) {
        if self.0 == FrameStatus::Unlocked {
            self.1 = new_frame
        };
    }

    fn get(&self) -> (u32, Size2D<i32>, usize) {
        if self.0 == FrameStatus::Locked {
            (
                self.1.get_texture_id(),
                Size2D::new(self.1.get_width(), self.1.get_height()),
                0,
            )
        } else {
            unreachable!();
        }
    }

    fn get_frame(&self) -> VideoFrame {
        self.1.clone()
    }
}

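/// Uploads the frames produced by the player to the compositor as WebRender
/// images, keeping the image keys of the previous couple of frames alive
/// until their deletion has been submitted to the compositor.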
#[derive(MallocSizeOf)]
pub(crate) struct MediaFrameRenderer {
    player_id: Option<u64>,
    compositor_api: CrossProcessCompositorApi,
    current_frame: Option<MediaFrame>,
    old_frame: Option<ImageKey>,
    very_old_frame: Option<ImageKey>,
    current_frame_holder: Option<FrameHolder>,
    /// <https://html.spec.whatwg.org/multipage/#poster-frame>
    poster_frame: Option<MediaFrame>,
}

impl MediaFrameRenderer {
    fn new(compositor_api: CrossProcessCompositorApi) -> Self {
        Self {
            player_id: None,
            compositor_api,
            current_frame: None,
            old_frame: None,
            very_old_frame: None,
            current_frame_holder: None,
            poster_frame: None,
        }
    }

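    /// Stores the decoded poster image, if any, as the `MediaFrame` used for
    /// the element's poster frame.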
    fn set_poster_frame(&mut self, image: Option<Arc<RasterImage>>) {
        self.poster_frame = image.and_then(|image| {
            image.id.map(|image_key| MediaFrame {
                image_key,
                width: image.metadata.width as i32,
                height: image.metadata.height as i32,
            })
        });
    }
}

impl VideoFrameRenderer for MediaFrameRenderer {
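    /// Pushes the new frame to the compositor. A same-sized non-GL frame
    /// updates the existing WebRender image in place; a first frame or a
    /// change in size registers a new image key; GL-backed frames are
    /// published as external texture handles when a player id is available.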
    fn render(&mut self, frame: VideoFrame) {
        let mut updates = smallvec::smallvec![];

        if let Some(old_image_key) = mem::replace(&mut self.very_old_frame, self.old_frame.take()) {
            updates.push(ImageUpdate::DeleteImage(old_image_key));
        }

        let descriptor = ImageDescriptor::new(
            frame.get_width(),
            frame.get_height(),
            ImageFormat::BGRA8,
            ImageDescriptorFlags::empty(),
        );

        match &mut self.current_frame {
            Some(current_frame)
                if current_frame.width == frame.get_width() &&
                    current_frame.height == frame.get_height() =>
            {
                if !frame.is_gl_texture() {
                    updates.push(ImageUpdate::UpdateImage(
                        current_frame.image_key,
                        descriptor,
                        SerializableImageData::Raw(IpcSharedMemory::from_bytes(&frame.get_data())),
                        None,
                    ));
                }

                self.current_frame_holder
                    .get_or_insert_with(|| FrameHolder::new(frame.clone()))
                    .set(frame);

                if let Some(old_image_key) = self.old_frame.take() {
                    updates.push(ImageUpdate::DeleteImage(old_image_key));
                }
            },
            Some(current_frame) => {
                self.old_frame = Some(current_frame.image_key);

                let Some(new_image_key) = self.compositor_api.generate_image_key_blocking() else {
                    return;
                };

                /* update current_frame */
                current_frame.image_key = new_image_key;
                current_frame.width = frame.get_width();
                current_frame.height = frame.get_height();

                let image_data = if frame.is_gl_texture() && self.player_id.is_some() {
                    let texture_target = if frame.is_external_oes() {
                        ImageBufferKind::TextureExternal
                    } else {
                        ImageBufferKind::Texture2D
                    };

                    SerializableImageData::External(ExternalImageData {
                        id: ExternalImageId(self.player_id.unwrap()),
                        channel_index: 0,
                        image_type: ExternalImageType::TextureHandle(texture_target),
                        normalized_uvs: false,
                    })
                } else {
                    SerializableImageData::Raw(IpcSharedMemory::from_bytes(&frame.get_data()))
                };

                self.current_frame_holder
                    .get_or_insert_with(|| FrameHolder::new(frame.clone()))
                    .set(frame);

                updates.push(ImageUpdate::AddImage(new_image_key, descriptor, image_data));
            },
            None => {
                let Some(image_key) = self.compositor_api.generate_image_key_blocking() else {
                    return;
                };

                self.current_frame = Some(MediaFrame {
                    image_key,
                    width: frame.get_width(),
                    height: frame.get_height(),
                });

                let image_data = if frame.is_gl_texture() && self.player_id.is_some() {
                    let texture_target = if frame.is_external_oes() {
                        ImageBufferKind::TextureExternal
                    } else {
                        ImageBufferKind::Texture2D
                    };

                    SerializableImageData::External(ExternalImageData {
                        id: ExternalImageId(self.player_id.unwrap()),
                        channel_index: 0,
                        image_type: ExternalImageType::TextureHandle(texture_target),
                        normalized_uvs: false,
                    })
                } else {
                    SerializableImageData::Raw(IpcSharedMemory::from_bytes(&frame.get_data()))
                };

                self.current_frame_holder = Some(FrameHolder::new(frame));

                updates.push(ImageUpdate::AddImage(image_key, descriptor, image_data));
            },
        }
        self.compositor_api.update_images(updates);
    }
}

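/// The element's assigned media provider object: either a `MediaStream` or a
/// `Blob`.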
#[cfg_attr(crown, crown::unrooted_must_root_lint::must_root)]
#[derive(JSTraceable, MallocSizeOf)]
enum SrcObject {
    MediaStream(Dom<MediaStream>),
    Blob(Dom<Blob>),
}

impl From<MediaStreamOrBlob> for SrcObject {
    #[cfg_attr(crown, allow(crown::unrooted_must_root))]
    fn from(src_object: MediaStreamOrBlob) -> SrcObject {
        match src_object {
            MediaStreamOrBlob::Blob(blob) => SrcObject::Blob(Dom::from_ref(&*blob)),
            MediaStreamOrBlob::MediaStream(stream) => {
                SrcObject::MediaStream(Dom::from_ref(&*stream))
            },
        }
    }
}

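/// Which branch of the resource selection algorithm the element is currently
/// loading from, if any.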
#[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf, PartialEq)]
enum LoadState {
    NotLoaded,
    LoadingFromSrcObject,
    LoadingFromSrcAttribute,
    LoadingFromSourceChild,
    WaitingForSource,
}

/// <https://html.spec.whatwg.org/multipage/#loading-the-media-resource:media-element-29>
#[cfg_attr(crown, crown::unrooted_must_root_lint::must_root)]
#[derive(JSTraceable, MallocSizeOf)]
struct SourceChildrenPointer {
    source_before_pointer: Dom<HTMLSourceElement>,
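    /// Whether `source_before_pointer` itself is still a candidate when
    /// searching for the next source element.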
    inclusive: bool,
}

impl SourceChildrenPointer {
    fn new(source_before_pointer: DomRoot<HTMLSourceElement>, inclusive: bool) -> Self {
        Self {
            source_before_pointer: source_before_pointer.as_traced(),
            inclusive,
        }
    }
}

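/// Data that requires cleanup when the element is dropped: the `Drop`
/// implementation unregisters the player (by id) from the window's GL player
/// context.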
#[derive(JSTraceable, MallocSizeOf)]
struct DroppableHtmlMediaElement {
    /// Player Id reported by the player thread.
    player_id: Cell<u64>,
    #[ignore_malloc_size_of = "Defined in other crates"]
    #[no_trace]
    player_context: WindowGLContext,
}

impl DroppableHtmlMediaElement {
    fn new(player_id: Cell<u64>, player_context: WindowGLContext) -> Self {
        Self {
            player_id,
            player_context,
        }
    }

    pub(crate) fn set_player_id(&self, id: u64) {
        self.player_id.set(id);
    }
}

impl Drop for DroppableHtmlMediaElement {
    fn drop(&mut self) {
        self.player_context
            .send(GLPlayerMsg::UnregisterPlayer(self.player_id.get()));
    }
}

#[dom_struct]
#[allow(non_snake_case)]
pub(crate) struct HTMLMediaElement {
    htmlelement: HTMLElement,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
    network_state: Cell<NetworkState>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
    ready_state: Cell<ReadyState>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
    src_object: DomRefCell<Option<SrcObject>>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-currentsrc>
    current_src: DomRefCell<String>,
    /// Incremented whenever tasks associated with this element are cancelled.
    generation_id: Cell<u32>,
    /// <https://html.spec.whatwg.org/multipage/#fire-loadeddata>
    ///
    /// Reset to false every time the load algorithm is invoked.
    fired_loadeddata_event: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-error>
    error: MutNullableDom<MediaError>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-paused>
    paused: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
    defaultPlaybackRate: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
    playbackRate: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#attr-media-autoplay>
    autoplaying: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#delaying-the-load-event-flag>
    delaying_the_load_event_flag: DomRefCell<Option<LoadBlocker>>,
    /// <https://html.spec.whatwg.org/multipage/#list-of-pending-play-promises>
    #[conditional_malloc_size_of]
    pending_play_promises: DomRefCell<Vec<Rc<Promise>>>,
    /// Play promises which are soon to be fulfilled by a queued task.
    #[allow(clippy::type_complexity)]
    #[conditional_malloc_size_of]
    in_flight_play_promises_queue: DomRefCell<VecDeque<(Box<[Rc<Promise>]>, ErrorResult)>>,
    #[ignore_malloc_size_of = "servo_media"]
    #[no_trace]
    player: DomRefCell<Option<Arc<Mutex<dyn Player>>>>,
    #[conditional_malloc_size_of]
    #[no_trace]
    video_renderer: Arc<Mutex<MediaFrameRenderer>>,
    #[ignore_malloc_size_of = "servo_media"]
    #[no_trace]
    audio_renderer: DomRefCell<Option<Arc<Mutex<dyn AudioRenderer>>>>,
    /// <https://html.spec.whatwg.org/multipage/#show-poster-flag>
    show_poster: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-duration>
    duration: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#official-playback-position>
    playback_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#default-playback-start-position>
    default_playback_start_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-volume>
    volume: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-seeking>
    seeking: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-muted>
    muted: Cell<bool>,
    /// Loading state from source, if any.
    load_state: Cell<LoadState>,
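    /// The pointer into the media element's source element children used by
    /// the resource selection algorithm, if loading from source children.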
    source_children_pointer: DomRefCell<Option<SourceChildrenPointer>>,
    current_source_child: MutNullableDom<HTMLSourceElement>,
    /// URL of the media resource, if any.
    #[no_trace]
    resource_url: DomRefCell<Option<ServoUrl>>,
    /// URL of the media resource, if the resource is set through the src_object attribute and it
    /// is a blob.
    #[no_trace]
    blob_url: DomRefCell<Option<ServoUrl>>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-played>
    played: DomRefCell<TimeRangesContainer>,
    // https://html.spec.whatwg.org/multipage/#dom-media-audiotracks
    audio_tracks_list: MutNullableDom<AudioTrackList>,
    // https://html.spec.whatwg.org/multipage/#dom-media-videotracks
    video_tracks_list: MutNullableDom<VideoTrackList>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-texttracks>
    text_tracks_list: MutNullableDom<TextTrackList>,
    /// Earliest time at which the next timeupdate event may be queued.
    #[ignore_malloc_size_of = "Defined in std::time"]
    next_timeupdate_event: Cell<Instant>,
    /// Latest fetch request context.
    current_fetch_context: RefCell<Option<HTMLMediaElementFetchContext>>,
    /// Media controls id.
    /// In order to work around the lack of a privileged JS context, we secure
    /// access to the "privileged" document.servoGetMediaControls(id) API by
    /// keeping a whitelist of media controls identifiers.
    media_controls_id: DomRefCell<Option<String>>,
    droppable: DroppableHtmlMediaElement,
}

/// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
#[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq)]
#[repr(u8)]
pub(crate) enum NetworkState {
    Empty = HTMLMediaElementConstants::NETWORK_EMPTY as u8,
    Idle = HTMLMediaElementConstants::NETWORK_IDLE as u8,
    Loading = HTMLMediaElementConstants::NETWORK_LOADING as u8,
    NoSource = HTMLMediaElementConstants::NETWORK_NO_SOURCE as u8,
}

/// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
#[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf, PartialEq, PartialOrd)]
#[repr(u8)]
#[allow(clippy::enum_variant_names)] // Clippy warning silenced here because these names are from the specification.
pub(crate) enum ReadyState {
    HaveNothing = HTMLMediaElementConstants::HAVE_NOTHING as u8,
    HaveMetadata = HTMLMediaElementConstants::HAVE_METADATA as u8,
    HaveCurrentData = HTMLMediaElementConstants::HAVE_CURRENT_DATA as u8,
    HaveFutureData = HTMLMediaElementConstants::HAVE_FUTURE_DATA as u8,
    HaveEnoughData = HTMLMediaElementConstants::HAVE_ENOUGH_DATA as u8,
}

impl HTMLMediaElement {
    pub(crate) fn new_inherited(
        tag_name: LocalName,
        prefix: Option<Prefix>,
        document: &Document,
    ) -> Self {
        Self {
            htmlelement: HTMLElement::new_inherited(tag_name, prefix, document),
            network_state: Cell::new(NetworkState::Empty),
            ready_state: Cell::new(ReadyState::HaveNothing),
            src_object: Default::default(),
            current_src: DomRefCell::new("".to_owned()),
            generation_id: Cell::new(0),
            fired_loadeddata_event: Cell::new(false),
            error: Default::default(),
            paused: Cell::new(true),
            defaultPlaybackRate: Cell::new(1.0),
            playbackRate: Cell::new(1.0),
            muted: Cell::new(false),
            load_state: Cell::new(LoadState::NotLoaded),
            source_children_pointer: DomRefCell::new(None),
            current_source_child: Default::default(),
            // FIXME(nox): Why is this initialised to true?
            autoplaying: Cell::new(true),
            delaying_the_load_event_flag: Default::default(),
            pending_play_promises: Default::default(),
            in_flight_play_promises_queue: Default::default(),
            player: Default::default(),
            video_renderer: Arc::new(Mutex::new(MediaFrameRenderer::new(
                document.window().compositor_api().clone(),
            ))),
            audio_renderer: Default::default(),
            show_poster: Cell::new(true),
            duration: Cell::new(f64::NAN),
            playback_position: Cell::new(0.),
            default_playback_start_position: Cell::new(0.),
            volume: Cell::new(1.0),
            seeking: Cell::new(false),
            resource_url: DomRefCell::new(None),
            blob_url: DomRefCell::new(None),
            played: DomRefCell::new(TimeRangesContainer::default()),
            audio_tracks_list: Default::default(),
            video_tracks_list: Default::default(),
            text_tracks_list: Default::default(),
            next_timeupdate_event: Cell::new(Instant::now() + Duration::from_millis(250)),
            current_fetch_context: RefCell::new(None),
            media_controls_id: DomRefCell::new(None),
            droppable: DroppableHtmlMediaElement::new(
                Cell::new(0),
                document.window().get_player_context(),
            ),
        }
    }

    pub(crate) fn network_state(&self) -> NetworkState {
        self.network_state.get()
    }

    pub(crate) fn get_ready_state(&self) -> ReadyState {
        self.ready_state.get()
    }

    fn media_type_id(&self) -> HTMLMediaElementTypeId {
        match self.upcast::<Node>().type_id() {
            NodeTypeId::Element(ElementTypeId::HTMLElement(
                HTMLElementTypeId::HTMLMediaElement(media_type_id),
            )) => media_type_id,
            _ => unreachable!(),
        }
    }

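    /// Applies the element's playbackRate to the player and starts playback,
    /// logging a warning on failure.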
    fn play_media(&self) {
        if let Some(ref player) = *self.player.borrow() {
            if let Err(e) = player.lock().unwrap().set_rate(self.playbackRate.get()) {
                warn!("Could not set the playback rate {:?}", e);
            }
            if let Err(e) = player.lock().unwrap().play() {
                warn!("Could not play media {:?}", e);
            }
        }
    }

    /// Marks the element as delaying the load event or not.
    ///
    /// Nothing happens if the element was already delaying the load event and
    /// we pass true to this method again.
    ///
    /// <https://html.spec.whatwg.org/multipage/#delaying-the-load-event-flag>
    pub(crate) fn delay_load_event(&self, delay: bool, can_gc: CanGc) {
        let blocker = &self.delaying_the_load_event_flag;
        if delay && blocker.borrow().is_none() {
            *blocker.borrow_mut() = Some(LoadBlocker::new(&self.owner_document(), LoadType::Media));
        } else if !delay && blocker.borrow().is_some() {
            LoadBlocker::terminate(blocker, can_gc);
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#time-marches-on>
    fn time_marches_on(&self) {
        // Step 6.
        if Instant::now() > self.next_timeupdate_event.get() {
            self.owner_global()
                .task_manager()
                .media_element_task_source()
                .queue_simple_event(self.upcast(), atom!("timeupdate"));
            self.next_timeupdate_event
                .set(Instant::now() + Duration::from_millis(350));
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#internal-pause-steps>
    fn internal_pause_steps(&self) {
        // Step 1.
        self.autoplaying.set(false);

        // Step 2.
        if !self.Paused() {
            // Step 2.1.
            self.paused.set(true);

            // Step 2.2.
            self.take_pending_play_promises(Err(Error::Abort));

            // Step 2.3.
            let this = Trusted::new(self);
            let generation_id = self.generation_id.get();
            self.owner_global()
                .task_manager()
                .media_element_task_source()
                .queue(task!(internal_pause_steps: move || {
                    let this = this.root();
                    if generation_id != this.generation_id.get() {
                        return;
                    }

                    this.fulfill_in_flight_play_promises(|| {
                        // Step 2.3.1.
                        this.upcast::<EventTarget>().fire_event(atom!("timeupdate"), CanGc::note());

                        // Step 2.3.2.
                        this.upcast::<EventTarget>().fire_event(atom!("pause"), CanGc::note());

                        if let Some(ref player) = *this.player.borrow() {
                            if let Err(e) = player.lock().unwrap().pause() {
                                eprintln!("Could not pause player {:?}", e);
                            }
                        }

                        // Step 2.3.3.
                        // Done after running this closure in
                        // `fulfill_in_flight_play_promises`.
                    });
                }));

            // Step 2.4.
            // FIXME(nox): Set the official playback position to the current
            // playback position.
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#allowed-to-play>
    fn is_allowed_to_play(&self) -> bool {
        true
    }

    /// <https://html.spec.whatwg.org/multipage/#notify-about-playing>
    fn notify_about_playing(&self) {
        // Step 1.
        self.take_pending_play_promises(Ok(()));

        // Step 2.
        let this = Trusted::new(self);
        let generation_id = self.generation_id.get();
        self.owner_global()
            .task_manager()
            .media_element_task_source()
            .queue(task!(notify_about_playing: move || {
                let this = this.root();
                if generation_id != this.generation_id.get() {
                    return;
                }

                this.fulfill_in_flight_play_promises(|| {
                    // Step 2.1.
                    this.upcast::<EventTarget>().fire_event(atom!("playing"), CanGc::note());
                    this.play_media();

                    // Step 2.2.
                    // Done after running this closure in
                    // `fulfill_in_flight_play_promises`.
                });

            }));
    }

    /// <https://html.spec.whatwg.org/multipage/#ready-states>
    fn change_ready_state(&self, ready_state: ReadyState) {
        let old_ready_state = self.ready_state.get();
        self.ready_state.set(ready_state);

        if self.network_state.get() == NetworkState::Empty {
            return;
        }

        if old_ready_state == ready_state {
            return;
        }

        let owner_global = self.owner_global();
        let task_manager = owner_global.task_manager();
        let task_source = task_manager.media_element_task_source();

        // Step 1.
        match (old_ready_state, ready_state) {
            (ReadyState::HaveNothing, ReadyState::HaveMetadata) => {
                task_source.queue_simple_event(self.upcast(), atom!("loadedmetadata"));
                // No other steps are applicable in this case.
                return;
            },
            (ReadyState::HaveMetadata, new) if new >= ReadyState::HaveCurrentData => {
                if !self.fired_loadeddata_event.get() {
                    self.fired_loadeddata_event.set(true);
                    let this = Trusted::new(self);
                    task_source.queue(task!(media_reached_current_data: move || {
                        let this = this.root();
                        this.upcast::<EventTarget>().fire_event(atom!("loadeddata"), CanGc::note());
                        this.delay_load_event(false, CanGc::note());
                    }));
                }

                // Steps for the transition from HaveMetadata to HaveCurrentData
                // or HaveFutureData also apply here, as per the next match
                // expression.
            },
            (ReadyState::HaveFutureData, new) if new <= ReadyState::HaveCurrentData => {
                // FIXME(nox): Queue a task to fire timeupdate and waiting
                // events if the conditions called for by the spec are met.

                // No other steps are applicable in this case.
                return;
            },

            _ => (),
        }

        if old_ready_state <= ReadyState::HaveCurrentData &&
            ready_state >= ReadyState::HaveFutureData
        {
            task_source.queue_simple_event(self.upcast(), atom!("canplay"));

            if !self.Paused() {
                self.notify_about_playing();
            }
        }

        if ready_state == ReadyState::HaveEnoughData {
            // FIXME(nox): Review this block.
            if self.eligible_for_autoplay() {
                // Step 1
                self.paused.set(false);
                // Step 2
                if self.show_poster.get() {
                    self.show_poster.set(false);
                    self.time_marches_on();
                }
                // Step 3
                task_source.queue_simple_event(self.upcast(), atom!("play"));
                // Step 4
                self.notify_about_playing();
                // Step 5
                self.autoplaying.set(false);
            }

            // FIXME(nox): According to the spec, this should come *before* the
            // "play" event.
            task_source.queue_simple_event(self.upcast(), atom!("canplaythrough"));
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn invoke_resource_selection_algorithm(&self, can_gc: CanGc) {
        // Step 1. Set the element's networkState attribute to the NETWORK_NO_SOURCE value.
        self.network_state.set(NetworkState::NoSource);

        // Step 2. Set the element's show poster flag to true.
        self.show_poster.set(true);

        // Step 3. Set the media element's delaying-the-load-event flag to true (this delays the
        // load event).
        self.delay_load_event(true, can_gc);

        // Step 4. Await a stable state, allowing the task that invoked this algorithm to continue.
        // If the resource selection mode in the synchronous section is
        // "attribute", the URL of the resource to fetch is relative to the
        // media element's node document when the src attribute was last
        // changed, which is why we need to pass the base URL in the task
        // right here.
        let task = MediaElementMicrotask::ResourceSelection {
            elem: DomRoot::from_ref(self),
            generation_id: self.generation_id.get(),
            base_url: self.owner_document().base_url(),
        };

        // FIXME(nox): This will later call the resource_selection_algorithm_sync
        // method below; if microtasks were trait objects, we would be able
        // to put the code directly in this method, without the boilerplate
        // indirections.
        ScriptThread::await_stable_state(Microtask::MediaElement(task));
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn resource_selection_algorithm_sync(&self, base_url: ServoUrl, can_gc: CanGc) {
        // TODO Step 5. If the media element's blocked-on-parser flag is false, then populate the
        // list of pending text tracks.
        // FIXME(ferjm): Implement blocked_on_parser logic
        // https://html.spec.whatwg.org/multipage/#blocked-on-parser
        // FIXME(nox): Maybe populate the list of pending text tracks.

        enum Mode {
            Object,
            Attribute(String),
            Children(DomRoot<HTMLSourceElement>),
        }

        // Step 6.
        let mode = if self.src_object.borrow().is_some() {
            // If the media element has an assigned media provider object, then let mode be object.
            Mode::Object
        } else if let Some(attribute) = self
            .upcast::<Element>()
            .get_attribute(&ns!(), &local_name!("src"))
        {
            // Otherwise, if the media element has no assigned media provider object but has a src
            // attribute, then let mode be attribute.
            Mode::Attribute((**attribute.value()).to_owned())
        } else if let Some(source) = self
            .upcast::<Node>()
            .children()
            .filter_map(DomRoot::downcast::<HTMLSourceElement>)
            .next()
        {
            // Otherwise, if the media element does not have an assigned media provider object and
            // does not have a src attribute, but does have a source element child, then let mode be
            // children and let candidate be the first such source element child in tree order.
            Mode::Children(source)
        } else {
            // Otherwise, the media element has no assigned media provider object and has neither a
            // src attribute nor a source element child:
            self.load_state.set(LoadState::NotLoaded);

            // Step 6.none.1. Set the networkState to NETWORK_EMPTY.
            self.network_state.set(NetworkState::Empty);

            // Step 6.none.2. Set the element's delaying-the-load-event flag to false. This stops
            // delaying the load event.
            self.delay_load_event(false, can_gc);

            // Step 6.none.3. End the synchronous section and return.
            return;
        };

        // Step 7. Set the media element's networkState to NETWORK_LOADING.
        self.network_state.set(NetworkState::Loading);

        // Step 8. Queue a media element task given the media element to fire an event named
        // loadstart at the media element.
        self.queue_media_element_task_to_fire_event(atom!("loadstart"));

        // Step 9. Run the appropriate steps from the following list:
        match mode {
            Mode::Object => {
                // => "If mode is object"
                self.load_from_src_object();
            },
            Mode::Attribute(src) => {
                // => "If mode is attribute"
                self.load_from_src_attribute(base_url, &src);
            },
            Mode::Children(source) => {
                // => "Otherwise (mode is children)"
                self.load_from_source_child(&source);
            },
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn load_from_src_object(&self) {
        self.load_state.set(LoadState::LoadingFromSrcObject);

        // Step 9.object.1. Set the currentSrc attribute to the empty string.
        "".clone_into(&mut self.current_src.borrow_mut());

        // Step 9.object.3. Run the resource fetch algorithm with the assigned media
        // provider object. If that algorithm returns without aborting this one, then the
        // load failed.
        // Note that the resource fetch algorithm itself takes care of the cleanup in case
        // of failure itself.
        self.resource_fetch_algorithm(Resource::Object);
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn load_from_src_attribute(&self, base_url: ServoUrl, src: &str) {
        self.load_state.set(LoadState::LoadingFromSrcAttribute);

        // Step 9.attribute.1. If the src attribute's value is the empty string, then end
        // the synchronous section, and jump down to the failed with attribute step below.
        if src.is_empty() {
            self.queue_dedicated_media_source_failure_steps();
            return;
        }

        // Step 9.attribute.2. Let urlRecord be the result of encoding-parsing a URL given
        // the src attribute's value, relative to the media element's node document when the
        // src attribute was last changed.
        let Ok(url_record) = base_url.join(src) else {
            self.queue_dedicated_media_source_failure_steps();
            return;
        };

        // Step 9.attribute.3. If urlRecord is not failure, then set the currentSrc
        // attribute to the result of applying the URL serializer to urlRecord.
        *self.current_src.borrow_mut() = url_record.as_str().into();

        // Step 9.attribute.5. If urlRecord is not failure, then run the resource fetch
        // algorithm with urlRecord. If that algorithm returns without aborting this one,
        // then the load failed.
        // Note that the resource fetch algorithm itself takes care
        // of the cleanup in case of failure itself.
        self.resource_fetch_algorithm(Resource::Url(url_record));
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn load_from_source_child(&self, source: &HTMLSourceElement) {
        self.load_state.set(LoadState::LoadingFromSourceChild);

        // Step 9.children.1. Let pointer be a position defined by two adjacent nodes in the media
        // element's child list, treating the start of the list (before the first child in the list,
        // if any) and end of the list (after the last child in the list, if any) as nodes in their
        // own right. One node is the node before pointer, and the other node is the node after
        // pointer. Initially, let pointer be the position between the candidate node and the next
        // node, if there are any, or the end of the list, if it is the last node.
        *self.source_children_pointer.borrow_mut() =
            Some(SourceChildrenPointer::new(DomRoot::from_ref(source), false));

        let element = source.upcast::<Element>();

        // Step 9.children.2. Process candidate: If candidate does not have a src attribute, or if
        // its src attribute's value is the empty string, then end the synchronous section, and jump
        // down to the failed with elements step below.
        let Some(src) = element
            .get_attribute(&ns!(), &local_name!("src"))
            .filter(|attribute| !attribute.value().is_empty())
        else {
            self.load_from_source_child_failure_steps(source);
            return;
        };

        // Step 9.children.3. If candidate has a media attribute whose value does not match the
        // environment, then end the synchronous section, and jump down to the failed with elements
        // step below.
        if let Some(media) = element.get_attribute(&ns!(), &local_name!("media")) {
            if !MediaList::matches_environment(&element.owner_document(), &media.value()) {
                self.load_from_source_child_failure_steps(source);
                return;
            }
        }

        // Step 9.children.4. Let urlRecord be the result of encoding-parsing a URL given
        // candidate's src attribute's value, relative to candidate's node document when the src
        // attribute was last changed.
        let Ok(url_record) = source.owner_document().base_url().join(&src.value()) else {
            // Step 9.children.5. If urlRecord is failure, then end the synchronous section,
            // and jump down to the failed with elements step below.
            self.load_from_source_child_failure_steps(source);
            return;
        };

        // Step 9.children.6. If candidate has a type attribute whose value, when parsed as a MIME
        // type (including any codecs described by the codecs parameter, for types that define that
        // parameter), represents a type that the user agent knows it cannot render, then end the
        // synchronous section, and jump down to the failed with elements step below.
        if let Some(type_) = element.get_attribute(&ns!(), &local_name!("type")) {
            if ServoMedia::get().can_play_type(&type_.value()) == SupportsMediaType::No {
                self.load_from_source_child_failure_steps(source);
                return;
            }
        }

        self.current_source_child.set(Some(source));

        // Step 9.children.7. Set the currentSrc attribute to the result of applying the URL
        // serializer to urlRecord.
        *self.current_src.borrow_mut() = url_record.as_str().into();

        // Step 9.children.9. Run the resource fetch algorithm with urlRecord. If that
        // algorithm returns without aborting this one, then the load failed.
        // Note that the resource fetch algorithm itself takes care
        // of the cleanup in case of failure itself.
        self.resource_fetch_algorithm(Resource::Url(url_record));
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn load_from_source_child_failure_steps(&self, source: &HTMLSourceElement) {
        // Step 9.children.10. Failed with elements: Queue a media element task given the media
        // element to fire an event named error at candidate.
        let trusted_this = Trusted::new(self);
        let trusted_source = Trusted::new(source);
        let generation_id = self.generation_id.get();

        self.owner_global()
            .task_manager()
            .media_element_task_source()
            .queue(task!(queue_error_event: move || {
                let this = trusted_this.root();
                if generation_id != this.generation_id.get() {
                    return;
                }

                let source = trusted_source.root();
                source.upcast::<EventTarget>().fire_event(atom!("error"), CanGc::note());
            }));

        // Step 9.children.11. Await a stable state.
        let task = MediaElementMicrotask::SelectNextSourceChild {
            elem: DomRoot::from_ref(self),
            generation_id: self.generation_id.get(),
        };

        ScriptThread::await_stable_state(Microtask::MediaElement(task));
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn select_next_source_child(&self, can_gc: CanGc) {
        // Step 9.children.12. Forget the media element's media-resource-specific tracks.
        self.AudioTracks(can_gc).clear();
        self.VideoTracks(can_gc).clear();

        // Step 9.children.13. Find next candidate: Let candidate be null.
        let mut source_candidate = None;

        // Step 9.children.14. Search loop: If the node after pointer is the end of the list, then
        // jump to the waiting step below.
        // Step 9.children.15. If the node after pointer is a source element, let candidate be that
        // element.
        // Step 9.children.16. Advance pointer so that the node before pointer is now the node that
        // was after pointer, and the node after pointer is the node after the node that used to be
        // after pointer, if any.
        if let Some(ref source_children_pointer) = *self.source_children_pointer.borrow() {
            // Note that sharing the implementation between the opaque types returned by
            // `inclusively_following_siblings` and `following_siblings` is not possible
            // due to precise capturing.
            if source_children_pointer.inclusive {
                for next_sibling in source_children_pointer
                    .source_before_pointer
                    .upcast::<Node>()
                    .inclusively_following_siblings()
                {
                    if let Some(next_source) = DomRoot::downcast::<HTMLSourceElement>(next_sibling)
                    {
                        source_candidate = Some(next_source);
                        break;
                    }
                }
            } else {
                for next_sibling in source_children_pointer
                    .source_before_pointer
                    .upcast::<Node>()
                    .following_siblings()
                {
                    if let Some(next_source) = DomRoot::downcast::<HTMLSourceElement>(next_sibling)
                    {
                        source_candidate = Some(next_source);
                        break;
                    }
                }
            };
        }

        // Step 9.children.17. If candidate is null, jump back to the search loop step. Otherwise,
        // jump back to the process candidate step.
        if let Some(source_candidate) = source_candidate {
            self.load_from_source_child(&source_candidate);
            return;
        }

        self.load_state.set(LoadState::WaitingForSource);

        *self.source_children_pointer.borrow_mut() = None;

        // Step 9.children.18. Waiting: Set the element's networkState attribute to the
        // NETWORK_NO_SOURCE value.
        self.network_state.set(NetworkState::NoSource);

        // Step 9.children.19. Set the element's show poster flag to true.
        self.show_poster.set(true);

        // Step 9.children.20. Queue a media element task given the media element to set the
        // element's delaying-the-load-event flag to false. This stops delaying the load event.
        let this = Trusted::new(self);
        let generation_id = self.generation_id.get();

        self.owner_global()
            .task_manager()
            .media_element_task_source()
            .queue(task!(queue_delay_load_event: move || {
                let this = this.root();
                if generation_id != this.generation_id.get() {
                    return;
                }

                this.delay_load_event(false, CanGc::note());
            }));

        // Step 9.children.22. Wait until the node after pointer is a node other than the end of the
        // list. (This step might wait forever.)
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn resource_selection_algorithm_failure_steps(&self) {
        match self.load_state.get() {
            LoadState::LoadingFromSrcObject => {
                // Step 9.object.4. Failed with media provider: Reaching this step indicates that
                // the media resource failed to load. Take pending play promises and queue a media
                // element task given the media element to run the dedicated media source failure
                // steps with the result.
                self.queue_dedicated_media_source_failure_steps();
            },
            LoadState::LoadingFromSrcAttribute => {
                // Step 9.attribute.6. Failed with attribute: Reaching this step indicates that the
                // media resource failed to load or that urlRecord is failure. Take pending play
                // promises and queue a media element task given the media element to run the
                // dedicated media source failure steps with the result.
                self.queue_dedicated_media_source_failure_steps();
            },
            LoadState::LoadingFromSourceChild => {
                // Step 9.children.10. Failed with elements: Queue a media element task given the
                // media element to fire an event named error at candidate.
                if let Some(source) = self.current_source_child.take() {
                    self.load_from_source_child_failure_steps(&source);
                }
            },
            _ => {},
        }
    }

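    /// Starts a fetch of the current resource or blob URL, optionally from the
    /// given byte offset (used when seeking). Any previous fetch is cancelled
    /// first, and the optional `SeekLock` is released once the new fetch has
    /// been initiated (or on failure).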
    fn fetch_request(&self, offset: Option<u64>, seek_lock: Option<SeekLock>) {
        if self.resource_url.borrow().is_none() && self.blob_url.borrow().is_none() {
            eprintln!("Missing request url");
            if let Some(seek_lock) = seek_lock {
                seek_lock.unlock(/* successful seek */ false);
            }
            self.resource_selection_algorithm_failure_steps();
            return;
        }

        let document = self.owner_document();
        let destination = match self.media_type_id() {
            HTMLMediaElementTypeId::HTMLAudioElement => Destination::Audio,
            HTMLMediaElementTypeId::HTMLVideoElement => Destination::Video,
        };
        let mut headers = HeaderMap::new();
        // FIXME(eijebong): Use typed headers once we have a constructor for the range header
        headers.insert(
            header::RANGE,
            HeaderValue::from_str(&format!("bytes={}-", offset.unwrap_or(0))).unwrap(),
        );
        let url = match self.resource_url.borrow().as_ref() {
            Some(url) => url.clone(),
            None => self.blob_url.borrow().as_ref().unwrap().clone(),
        };

        let cors_setting = cors_setting_for_element(self.upcast());
        let global = self.global();
        let request = create_a_potential_cors_request(
            Some(document.webview_id()),
            url.clone(),
            destination,
            cors_setting,
            None,
            global.get_referrer(),
            document.insecure_requests_policy(),
            document.has_trustworthy_ancestor_or_current_origin(),
            global.policy_container(),
        )
        .headers(headers)
        .origin(document.origin().immutable().clone())
        .pipeline_id(Some(self.global().pipeline_id()))
        .referrer_policy(document.get_referrer_policy());

        let mut current_fetch_context = self.current_fetch_context.borrow_mut();
        if let Some(ref mut current_fetch_context) = *current_fetch_context {
            current_fetch_context.cancel(CancelReason::Abort);
        }

        *current_fetch_context = Some(HTMLMediaElementFetchContext::new(
            request.id,
            global.core_resource_thread(),
        ));
        let listener =
            HTMLMediaElementFetchListener::new(self, request.id, url.clone(), offset.unwrap_or(0));

        self.owner_document().fetch_background(request, listener);

        // Since we cancelled the previous fetch, from now on the media element
        // will only receive response data from the new fetch that's been
        // initiated. This means the player can resume operation, since all subsequent data
        // pushes will originate from the new seek offset.
        if let Some(seek_lock) = seek_lock {
            seek_lock.unlock(/* successful seek */ true);
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#eligible-for-autoplay>
    fn eligible_for_autoplay(&self) -> bool {
        // its can autoplay flag is true;
        self.autoplaying.get() &&

        // its paused attribute is true;
        self.Paused() &&

        // it has an autoplay attribute specified;
        self.Autoplay() &&

        // its node document's active sandboxing flag set does not have the sandboxed automatic
        // features browsing context flag set; and
        {
            let document = self.owner_document();

            !document.has_active_sandboxing_flag(
                SandboxingFlagSet::SANDBOXED_AUTOMATIC_FEATURES_BROWSING_CONTEXT_FLAG,
            )
        }

        // its node document is allowed to use the "autoplay" feature.
        // TODO: Feature policy: https://html.spec.whatwg.org/iframe-embed-object.html#allowed-to-use
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-resource>
    fn resource_fetch_algorithm(&self, resource: Resource) {
        if let Err(e) = self.setup_media_player(&resource) {
            eprintln!("Setup media player error {:?}", e);
            self.resource_selection_algorithm_failure_steps();
            return;
        }

        // Steps 1-2.
        // Not applicable: the `resource` variable already conveys which mode
        // is in use.
1245
1246        // Step 3.
1247        // FIXME(nox): Remove all media-resource-specific text tracks.
1248
1249        // Step 5. Run the appropriate steps from the following list:
1250        match resource {
1251            Resource::Url(url) => {
1252                // Step 5.remote.1. Optionally, run the following substeps. This is the expected
1253                // behavior if the user agent intends to not attempt to fetch the resource until the
1254                // user requests it explicitly (e.g. as a way to implement the preload attribute's
1255                // none keyword).
1256                if self.Preload() == "none" && !self.autoplaying.get() {
1257                    // Step 5.remote.1.1. Set the networkState to NETWORK_IDLE.
1258                    self.network_state.set(NetworkState::Idle);
1259
1260                    // Step 5.remote.1.2. Queue a media element task given the media element to fire
1261                    // an event named suspend at the element.
1262                    self.queue_media_element_task_to_fire_event(atom!("suspend"));
1263
1264                    // Step 5.remote.1.3. Queue a media element task given the media element to set
1265                    // the element's delaying-the-load-event flag to false. This stops delaying the
1266                    // load event.
1267                    let this = Trusted::new(self);
1268                    let generation_id = self.generation_id.get();
1269
1270                    self.owner_global()
1271                        .task_manager()
1272                        .media_element_task_source()
1273                        .queue(task!(queue_delay_load_event: move || {
1274                            let this = this.root();
1275                            if generation_id != this.generation_id.get() {
1276                                return;
1277                            }
1278
1279                            this.delay_load_event(false, CanGc::note());
1280                        }));
1281
1282                    // TODO Step 5.remote.1.4. Wait for the task to be run.
1283                    // FIXME(nox): Somehow we should wait for the task from the previous
1284                    // step to be run before continuing.
1285
1286                    // TODO Steps 5.remote.1.5-5.remote.1.7.
1287                    // FIXME(nox): Wait for an implementation-defined event and
1288                    // then continue with the normal set of steps instead of just
1289                    // returning.
1290                    return;
1291                }
1292
1293                *self.resource_url.borrow_mut() = Some(url);
1294
1295                // Steps 5.remote.2-5.remote.8
1296                self.fetch_request(None, None);
1297            },
1298            Resource::Object => {
1299                if let Some(ref src_object) = *self.src_object.borrow() {
1300                    match src_object {
1301                        SrcObject::Blob(blob) => {
1302                            let blob_url = URL::CreateObjectURL(&self.global(), blob);
1303                            *self.blob_url.borrow_mut() =
1304                                Some(ServoUrl::parse(&blob_url.str()).expect("infallible"));
1305                            self.fetch_request(None, None);
1306                        },
1307                        SrcObject::MediaStream(stream) => {
1308                            let tracks = &*stream.get_tracks();
1309                            for (pos, track) in tracks.iter().enumerate() {
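                                // The boolean passed to `set_stream` is true only for the
                                // final track; presumably it tells the player that no more
                                // streams will follow.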
1310                                if self
1311                                    .player
1312                                    .borrow()
1313                                    .as_ref()
1314                                    .unwrap()
1315                                    .lock()
1316                                    .unwrap()
1317                                    .set_stream(&track.id(), pos == tracks.len() - 1)
1318                                    .is_err()
1319                                {
1320                                    self.resource_selection_algorithm_failure_steps();
1321                                }
1322                            }
1323                        },
1324                    }
1325                }
1326            },
1327        }
1328    }
1329
1330    /// Queues a task to run the [dedicated media source failure steps][steps].
1331    ///
1332    /// [steps]: https://html.spec.whatwg.org/multipage/#dedicated-media-source-failure-steps
1333    fn queue_dedicated_media_source_failure_steps(&self) {
1334        let this = Trusted::new(self);
1335        let generation_id = self.generation_id.get();
1336        self.take_pending_play_promises(Err(Error::NotSupported));
1337        self.owner_global()
1338            .task_manager()
1339            .media_element_task_source()
1340            .queue(task!(dedicated_media_source_failure_steps: move || {
1341                let this = this.root();
1342                if generation_id != this.generation_id.get() {
1343                    return;
1344                }
1345
1346                this.fulfill_in_flight_play_promises(|| {
1347                    // Step 1. Set the error attribute to the result of creating a MediaError with
1348                    // MEDIA_ERR_SRC_NOT_SUPPORTED.
1349                    this.error.set(Some(&*MediaError::new(
1350                        &this.owner_window(),
1351                        MEDIA_ERR_SRC_NOT_SUPPORTED, CanGc::note())));
1352
1353                    // Step 2. Forget the media element's media-resource-specific tracks.
1354                    this.AudioTracks(CanGc::note()).clear();
1355                    this.VideoTracks(CanGc::note()).clear();
1356
1357                    // Step 3. Set the element's networkState attribute to the NETWORK_NO_SOURCE
1358                    // value.
1359                    this.network_state.set(NetworkState::NoSource);
1360
1361                    // Step 4. Set the element's show poster flag to true.
1362                    this.show_poster.set(true);
1363
1364                    // Step 5. Fire an event named error at the media element.
1365                    this.upcast::<EventTarget>().fire_event(atom!("error"), CanGc::note());
1366
1367                    if let Some(ref player) = *this.player.borrow() {
1368                        if let Err(e) = player.lock().unwrap().stop() {
1369                            eprintln!("Could not stop player {:?}", e);
1370                        }
1371                    }
1372
1373                    // Step 6. Reject pending play promises with promises and a "NotSupportedError"
1374                    // DOMException.
1375                    // Done after running this closure in `fulfill_in_flight_play_promises`.
1376                });
1377
1378                // Step 7. Set the element's delaying-the-load-event flag to false. This stops
1379                // delaying the load event.
1380                this.delay_load_event(false, CanGc::note());
1381            }));
1382    }
1383
1384    fn in_error_state(&self) -> bool {
1385        self.error.get().is_some()
1386    }
1387
1388    /// <https://html.spec.whatwg.org/multipage/#potentially-playing>
1389    fn is_potentially_playing(&self) -> bool {
1390        !self.paused.get() &&
1391            !self.Ended() &&
1392            self.error.get().is_none() &&
1393            !self.is_blocked_media_element()
1394    }
1395
1396    /// <https://html.spec.whatwg.org/multipage/#blocked-media-element>
1397    fn is_blocked_media_element(&self) -> bool {
1398        self.ready_state.get() <= ReadyState::HaveCurrentData ||
1399            self.is_paused_for_user_interaction() ||
1400            self.is_paused_for_in_band_content()
1401    }
1402
1403    /// <https://html.spec.whatwg.org/multipage/#paused-for-user-interaction>
1404    fn is_paused_for_user_interaction(&self) -> bool {
1405        // FIXME: we will likely be able to fill this placeholder once (if) we
1406        //        implement the MediaSession API.
1407        false
1408    }
1409
1410    /// <https://html.spec.whatwg.org/multipage/#paused-for-in-band-content>
1411    fn is_paused_for_in_band_content(&self) -> bool {
1412        // FIXME: we will likely be able to fill this placeholder once (if) we
1413        //        implement https://github.com/servo/servo/issues/22314
1414        false
1415    }
1416
1417    /// <https://html.spec.whatwg.org/multipage/#media-element-load-algorithm>
1418    fn media_element_load_algorithm(&self, can_gc: CanGc) {
1419        // Reset the flag that signals whether loadeddata was ever fired for
1420        // this invocation of the load algorithm.
1421        self.fired_loadeddata_event.set(false);
1422
1423        // TODO Step 1. Set this element's is currently stalled to false.
1424
1425        // Step 2. Abort any already-running instance of the resource selection algorithm for this
1426        // element.
1427        self.generation_id.set(self.generation_id.get() + 1);
1428
1429        self.load_state.set(LoadState::NotLoaded);
1430        *self.source_children_pointer.borrow_mut() = None;
1431        self.current_source_child.set(None);
1432
1433        // Step 3. Let pending tasks be a list of all tasks from the media element's media element
1434        // event task source in one of the task queues.
1435
1436        // Step 4. For each task in pending tasks that would resolve pending play promises or reject
1437        // pending play promises, immediately resolve or reject those promises in the order the
1438        // corresponding tasks were queued.
1439        while !self.in_flight_play_promises_queue.borrow().is_empty() {
1440            self.fulfill_in_flight_play_promises(|| ());
1441        }
1442
1443        // Step 5. Remove each task in pending tasks from its task queue.
1444        // Note that each of the media element's pending events and callbacks is scheduled with an
1445        // associated generation id and will eventually be aborted (see Step 2).
1446
1447        let network_state = self.network_state.get();
1448
1449        // Step 6. If the media element's networkState is set to NETWORK_LOADING or NETWORK_IDLE,
1450        // queue a media element task given the media element to fire an event named abort at the
1451        // media element.
1452        if network_state == NetworkState::Loading || network_state == NetworkState::Idle {
1453            self.queue_media_element_task_to_fire_event(atom!("abort"));
1454        }
1455
1456        // Step 7. If the media element's networkState is not set to NETWORK_EMPTY, then:
1457        if network_state != NetworkState::Empty {
1458            // Step 7.1. Queue a media element task given the media element to fire an event named
1459            // emptied at the media element.
1460            self.queue_media_element_task_to_fire_event(atom!("emptied"));
1461
1462            // Step 7.2. If a fetching process is in progress for the media element, the user agent
1463            // should stop it.
1464            if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
1465                current_fetch_context.cancel(CancelReason::Abort);
1466            }
1467
1468            // TODO Step 7.3. If the media element's assigned media provider object is a MediaSource
1469            // object, then detach it.
1470
1471            // Step 7.4. Forget the media element's media-resource-specific tracks.
1472            self.AudioTracks(can_gc).clear();
1473            self.VideoTracks(can_gc).clear();
1474
1475            // Step 7.5. If readyState is not set to HAVE_NOTHING, then set it to that state.
1476            if self.ready_state.get() != ReadyState::HaveNothing {
1477                self.change_ready_state(ReadyState::HaveNothing);
1478            }
1479
1480            // Step 7.6. If the paused attribute is false, then:
1481            if !self.Paused() {
1482                // Step 7.6.1. Set the paused attribute to true.
1483                self.paused.set(true);
1484
1485                // Step 7.6.2. Take pending play promises and reject pending play promises with the
1486                // result and an "AbortError" DOMException.
1487                self.take_pending_play_promises(Err(Error::Abort));
1488                self.fulfill_in_flight_play_promises(|| ());
1489            }
1490
1491            // Step 7.7. If seeking is true, set it to false.
1492            self.seeking.set(false);
1493
1494            // Step 7.8. Set the current playback position to 0.
1495            // Set the official playback position to 0.
1496            // If this changed the official playback position, then queue a media element task given
1497            // the media element to fire an event named timeupdate at the media element.
1498            if self.playback_position.get() != 0. {
1499                self.queue_media_element_task_to_fire_event(atom!("timeupdate"));
1500            }
1501            self.playback_position.set(0.);
1502
1503            // TODO Step 7.9. Set the timeline offset to Not-a-Number (NaN).
1504
1505            // Step 7.10. Update the duration attribute to Not-a-Number (NaN).
1506            self.duration.set(f64::NAN);
1507        }
1508
1509        // Step 8. Set the playbackRate attribute to the value of the defaultPlaybackRate attribute.
1510        self.playbackRate.set(self.defaultPlaybackRate.get());
1511
1512        // Step 9. Set the error attribute to null and the can autoplay flag to true.
1513        self.error.set(None);
1514        self.autoplaying.set(true);
1515
1516        // Step 10. Invoke the media element's resource selection algorithm.
1517        self.invoke_resource_selection_algorithm(can_gc);
1518
1519        // TODO Step 11. Note: Playback of any previously playing media resource for this element
1520        // stops.
1521    }
1522
1523    /// Queue a media element task given the media element to fire an event at the media element.
1524    /// <https://html.spec.whatwg.org/multipage/#queue-a-media-element-task>
1525    fn queue_media_element_task_to_fire_event(&self, name: Atom) {
1526        let this = Trusted::new(self);
1527        let generation_id = self.generation_id.get();
1528
1529        self.owner_global()
1530            .task_manager()
1531            .media_element_task_source()
1532            .queue(task!(queue_event: move || {
1533                let this = this.root();
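                // Drop the event if the element's load algorithm has been re-invoked
                // (i.e. the generation id changed) since this task was queued.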
1534                if generation_id != this.generation_id.get() {
1535                    return;
1536                }
1537
1538                this.upcast::<EventTarget>().fire_event(name, CanGc::note());
1539            }));
1540    }
1541
1542    /// Appends a promise to the list of pending play promises.
1543    fn push_pending_play_promise(&self, promise: &Rc<Promise>) {
1544        self.pending_play_promises
1545            .borrow_mut()
1546            .push(promise.clone());
1547    }
1548
1549    /// Takes the pending play promises.
1550    ///
1551    /// The result with which these promises will be fulfilled is passed here
1552    /// and this method returns nothing because we actually just move the
1553    /// current list of pending play promises to the
1554    /// `in_flight_play_promises_queue` field.
1555    ///
1556    /// Each call to this method must be followed by a call to
1557    /// `fulfill_in_flight_play_promises`, to actually fulfill the promises
1558    /// which were taken and moved to the in-flight queue.
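    ///
    /// A typical pairing, as used elsewhere in this file (illustrative sketch only):
    ///
    /// ```ignore
    /// // Reject whatever play() promises are currently pending with an AbortError.
    /// self.take_pending_play_promises(Err(Error::Abort));
    /// self.fulfill_in_flight_play_promises(|| ());
    /// ```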
1559    fn take_pending_play_promises(&self, result: ErrorResult) {
1560        let pending_play_promises = std::mem::take(&mut *self.pending_play_promises.borrow_mut());
1561        self.in_flight_play_promises_queue
1562            .borrow_mut()
1563            .push_back((pending_play_promises.into(), result));
1564    }
1565
1566    /// Fulfills the next in-flight play promises queue after running a closure.
1567    ///
1568    /// See the comment on `take_pending_play_promises` for why this method
1569    /// does not take a list of promises to fulfill. Callers cannot just pop
1570    /// the front list off of `in_flight_play_promises_queue` and later fulfill
1571    /// the promises because that would mean putting
1572    /// `#[cfg_attr(crown, allow(crown::unrooted_must_root))]` on even more functions, potentially
1573    /// hiding actual safety bugs.
1574    #[cfg_attr(crown, allow(crown::unrooted_must_root))]
1575    fn fulfill_in_flight_play_promises<F>(&self, f: F)
1576    where
1577        F: FnOnce(),
1578    {
1579        let (promises, result) = self
1580            .in_flight_play_promises_queue
1581            .borrow_mut()
1582            .pop_front()
1583            .expect("there should be at least one list of in flight play promises");
1584        f();
1585        for promise in &*promises {
1586            match result {
1587                Ok(ref value) => promise.resolve_native(value, CanGc::note()),
1588                Err(ref error) => promise.reject_error(error.clone(), CanGc::note()),
1589            }
1590        }
1591    }
1592
1593    pub(crate) fn handle_source_child_insertion(&self, source: &HTMLSourceElement, can_gc: CanGc) {
1594        // <https://html.spec.whatwg.org/multipage/#the-source-element:html-element-insertion-steps>
1595        // Step 2. If parent is a media element that has no src attribute and whose networkState has
1596        // the value NETWORK_EMPTY, then invoke that media element's resource selection algorithm.
1597        if self.upcast::<Element>().has_attribute(&local_name!("src")) {
1598            return;
1599        }
1600
1601        if self.network_state.get() == NetworkState::Empty {
1602            self.invoke_resource_selection_algorithm(can_gc);
1603            return;
1604        }
1605
1606        // <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1607        // Step 9.children.22. Wait until the node after pointer is a node other than the end of the
1608        // list. (This step might wait forever.)
1609        if self.load_state.get() != LoadState::WaitingForSource {
1610            return;
1611        }
1612
1613        self.load_state.set(LoadState::LoadingFromSourceChild);
1614
1615        *self.source_children_pointer.borrow_mut() =
1616            Some(SourceChildrenPointer::new(DomRoot::from_ref(source), true));
1617
1618        // Step 9.children.23. Await a stable state.
1619        let task = MediaElementMicrotask::SelectNextSourceChildAfterWait {
1620            elem: DomRoot::from_ref(self),
1621            generation_id: self.generation_id.get(),
1622        };
1623
1624        ScriptThread::await_stable_state(Microtask::MediaElement(task));
1625    }
1626
1627    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1628    fn select_next_source_child_after_wait(&self, can_gc: CanGc) {
1629        // Step 9.children.24. Set the element's delaying-the-load-event flag back to true (this
1630        // delays the load event again, in case it hasn't been fired yet).
1631        self.delay_load_event(true, can_gc);
1632
1633        // Step 9.children.25. Set the networkState back to NETWORK_LOADING.
1634        self.network_state.set(NetworkState::Loading);
1635
1636        // Step 9.children.26. Jump back to the find next candidate step above.
1637        self.select_next_source_child(can_gc);
1638    }
1639
1640    /// <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
1641    /// => "If the media data cannot be fetched at all, due to network errors..."
1642    /// => "If the media data can be fetched but is found by inspection to be in an unsupported
1643    /// format, or can otherwise not be rendered at all"
1644    fn media_data_processing_failure_steps(&self) {
1645        // Step 1. The user agent should cancel the fetching process.
1646        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
1647            current_fetch_context.cancel(CancelReason::Error);
1648        }
1649
1650        // Step 2. Abort this subalgorithm, returning to the resource selection algorithm.
1651        self.resource_selection_algorithm_failure_steps();
1652    }
1653
1654    /// <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
1655    /// => "If the connection is interrupted after some media data has been received..."
1656    /// => "If the media data is corrupted"
1657    fn media_data_processing_fatal_steps(&self, error: u16, can_gc: CanGc) {
1658        *self.source_children_pointer.borrow_mut() = None;
1659        self.current_source_child.set(None);
1660
1661        // Step 1. The user agent should cancel the fetching process.
1662        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
1663            current_fetch_context.cancel(CancelReason::Error);
1664        }
1665
1666        // Step 2. Set the error attribute to the result of creating a MediaError with
1667        // MEDIA_ERR_NETWORK/MEDIA_ERR_DECODE.
1668        self.error
1669            .set(Some(&*MediaError::new(&self.owner_window(), error, can_gc)));
1670
1671        // Step 3. Set the element's networkState attribute to the NETWORK_IDLE value.
1672        self.network_state.set(NetworkState::Idle);
1673
1674        // Step 4. Set the element's delaying-the-load-event flag to false. This stops delaying
1675        // the load event.
1676        self.delay_load_event(false, can_gc);
1677
1678        // Step 5. Fire an event named error at the media element.
1679        self.upcast::<EventTarget>()
1680            .fire_event(atom!("error"), can_gc);
1681
1682        // Step 6. Abort the overall resource selection algorithm.
1683    }
1684
1685    /// <https://html.spec.whatwg.org/multipage/#dom-media-seek>
1686    fn seek(&self, time: f64, _approximate_for_speed: bool, can_gc: CanGc) {
1687        // Step 1.
1688        self.show_poster.set(false);
1689
1690        // Step 2.
1691        if self.ready_state.get() == ReadyState::HaveNothing {
1692            return;
1693        }
1694
1695        // Step 3.
1696        // The fetch request associated with this seek already takes
1697        // care of cancelling any previous requests.
1698
1699        // Step 4.
1700        // The flag will be cleared when the media engine tells us the seek was done.
1701        self.seeking.set(true);
1702
1703        // Step 5.
1704        // XXX(ferjm) The rest of the steps should be run in parallel, so seeking cancellation
1705        //            can be done properly. No other browser does it yet anyway.
1706
1707        // Step 6.
1708        let time = f64::min(time, self.Duration());
1709
1710        // Step 7.
1711        let time = f64::max(time, 0.);
1712
1713        // Step 8.
1714        let seekable = self.Seekable(can_gc);
1715        if seekable.Length() == 0 {
1716            self.seeking.set(false);
1717            return;
1718        }
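        // If the requested time falls outside every seekable range, clamp it to the
        // nearest boundary of the closest range (the loop below tracks the smallest
        // distance seen so far).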
1719        let mut nearest_seekable_position = 0.0;
1720        let mut in_seekable_range = false;
1721        let mut nearest_seekable_distance = f64::MAX;
1722        for i in 0..seekable.Length() {
1723            let start = seekable.Start(i).unwrap().abs();
1724            let end = seekable.End(i).unwrap().abs();
1725            if time >= start && time <= end {
1726                nearest_seekable_position = time;
1727                in_seekable_range = true;
1728                break;
1729            } else if time < start {
1730                let distance = start - time;
1731                if distance < nearest_seekable_distance {
1732                    nearest_seekable_distance = distance;
1733                    nearest_seekable_position = start;
1734                }
1735            } else {
1736                let distance = time - end;
1737                if distance < nearest_seekable_distance {
1738                    nearest_seekable_distance = distance;
1739                    nearest_seekable_position = end;
1740                }
1741            }
1742        }
1743        let time = if in_seekable_range {
1744            time
1745        } else {
1746            nearest_seekable_position
1747        };
1748
1749        // Step 9.
1750        // servo-media with gstreamer does not support inaccurate seeking for now.
1751
1752        // Step 10.
1753        self.owner_global()
1754            .task_manager()
1755            .media_element_task_source()
1756            .queue_simple_event(self.upcast(), atom!("seeking"));
1757
1758        // Step 11.
1759        if let Some(ref player) = *self.player.borrow() {
1760            if let Err(e) = player.lock().unwrap().seek(time) {
1761                eprintln!("Seek error {:?}", e);
1762            }
1763        }
1764
1765        // The rest of the steps are handled when the media engine signals a
1766        // ready state change, or otherwise completes the seek and signals a
1767        // position change.
1768    }
1769
1770    /// <https://html.spec.whatwg.org/multipage/#dom-media-seek>
1771    fn seek_end(&self) {
1772        // Step 14.
1773        self.seeking.set(false);
1774
1775        // Step 15.
1776        self.time_marches_on();
1777
1778        // Step 16.
1779        let global = self.owner_global();
1780        let task_manager = global.task_manager();
1781        let task_source = task_manager.media_element_task_source();
1782        task_source.queue_simple_event(self.upcast(), atom!("timeupdate"));
1783
1784        // Step 17.
1785        task_source.queue_simple_event(self.upcast(), atom!("seeked"));
1786    }
1787
1788    fn set_player_id(&self, player_id: u64) {
1789        self.droppable.set_player_id(player_id);
1790    }
1791
1792    /// <https://html.spec.whatwg.org/multipage/#poster-frame>
1793    pub(crate) fn set_poster_frame(&self, image: Option<Arc<RasterImage>>) {
1794        let queue_postershown_event = pref!(media_testing_enabled) && image.is_some();
1795
1796        self.video_renderer.lock().unwrap().set_poster_frame(image);
1797
1798        self.upcast::<Node>().dirty(NodeDamage::Other);
1799
1800        if queue_postershown_event {
1801            self.owner_global()
1802                .task_manager()
1803                .media_element_task_source()
1804                .queue_simple_event(self.upcast(), atom!("postershown"));
1805        }
1806    }
1807
1808    fn setup_media_player(&self, resource: &Resource) -> Result<(), ()> {
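        // A MediaStream srcObject is played back as a non-seekable stream;
        // any other resource is treated as seekable.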
1809        let stream_type = match *resource {
1810            Resource::Object => {
1811                if let Some(ref src_object) = *self.src_object.borrow() {
1812                    match src_object {
1813                        SrcObject::MediaStream(_) => StreamType::Stream,
1814                        _ => StreamType::Seekable,
1815                    }
1816                } else {
1817                    return Err(());
1818                }
1819            },
1820            _ => StreamType::Seekable,
1821        };
1822
1823        let window = self.owner_window();
1824        let (action_sender, action_receiver) = ipc::channel::<PlayerEvent>().unwrap();
1825        let video_renderer: Option<Arc<Mutex<dyn VideoFrameRenderer>>> = match self.media_type_id()
1826        {
1827            HTMLMediaElementTypeId::HTMLAudioElement => None,
1828            HTMLMediaElementTypeId::HTMLVideoElement => Some(self.video_renderer.clone()),
1829        };
1830
1831        let audio_renderer = self.audio_renderer.borrow().as_ref().cloned();
1832
1833        let pipeline_id = window.pipeline_id();
1834        let client_context_id =
1835            ClientContextId::build(pipeline_id.namespace_id.0, pipeline_id.index.0.get());
1836        let player = ServoMedia::get().create_player(
1837            &client_context_id,
1838            stream_type,
1839            action_sender,
1840            video_renderer,
1841            audio_renderer,
1842            Box::new(window.get_player_context()),
1843        );
1844        let player_id = {
1845            let player_guard = player.lock().unwrap();
1846
1847            if let Err(e) = player_guard.set_mute(self.muted.get()) {
1848                log::warn!("Could not set mute state: {:?}", e);
1849            }
1850
1851            player_guard.get_id()
1852        };
1853
1854        *self.player.borrow_mut() = Some(player);
1855
1856        let trusted_node = Trusted::new(self);
1857        let task_source = self
1858            .owner_global()
1859            .task_manager()
1860            .media_element_task_source()
1861            .to_sendable();
1862        ROUTER.add_typed_route(
1863            action_receiver,
1864            Box::new(move |message| {
1865                let event = message.unwrap();
1866                trace!("Player event {:?}", event);
1867                let this = trusted_node.clone();
1868                task_source.queue(task!(handle_player_event: move || {
1869                    this.root().handle_player_event(player_id, &event, CanGc::note());
1870                }));
1871            }),
1872        );
1873
1874        // GLPlayer thread setup
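        // Registering with the GLPlayer thread (when one is available) yields the id
        // used to identify this player's GL frames plus a channel for frame lock/unlock
        // messages; otherwise we fall back to player id 0 and no channel.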
1875        let (player_id, image_receiver) = window
1876            .get_player_context()
1877            .glplayer_thread_sender
1878            .map(|pipeline| {
1879                let (image_sender, image_receiver) = channel().unwrap();
1880                pipeline
1881                    .send(GLPlayerMsg::RegisterPlayer(image_sender))
1882                    .unwrap();
1883                match image_receiver.recv().unwrap() {
1884                    GLPlayerMsgForward::PlayerId(id) => (id, Some(image_receiver)),
1885                    _ => unreachable!(),
1886                }
1887            })
1888            .unwrap_or((0, None));
1889
1890        self.set_player_id(player_id);
1891        self.video_renderer.lock().unwrap().player_id = Some(player_id);
1892
1893        if let Some(image_receiver) = image_receiver {
1894            let trusted_node = Trusted::new(self);
1895            let task_source = self
1896                .owner_global()
1897                .task_manager()
1898                .media_element_task_source()
1899                .to_sendable();
1900            ROUTER.add_typed_route(
1901                image_receiver,
1902                Box::new(move |message| {
1903                    let msg = message.unwrap();
1904                    let this = trusted_node.clone();
1905                    task_source.queue(task!(handle_glplayer_message: move || {
1906                        trace!("GLPlayer message {:?}", msg);
1907                        let video_renderer = this.root().video_renderer.clone();
1908
1909                        match msg {
1910                            GLPlayerMsgForward::Lock(sender) => {
1911                                if let Some(holder) = video_renderer
1912                                    .lock()
1913                                    .unwrap()
1914                                    .current_frame_holder
1915                                    .as_mut() {
1916                                        holder.lock();
1917                                        sender.send(holder.get()).unwrap();
1918                                    };
1919                            },
1920                            GLPlayerMsgForward::Unlock() => {
1921                                if let Some(holder) = video_renderer
1922                                    .lock()
1923                                    .unwrap()
1924                                    .current_frame_holder
1925                                    .as_mut() { holder.unlock() }
1926                            },
1927                            _ => (),
1928                        }
1929                    }));
1930                }),
1931            );
1932        }
1933
1934        Ok(())
1935    }
1936
1937    pub(crate) fn set_audio_track(&self, idx: usize, enabled: bool) {
1938        if let Some(ref player) = *self.player.borrow() {
1939            if let Err(err) = player.lock().unwrap().set_audio_track(idx as i32, enabled) {
1940                warn!("Could not set audio track {:#?}", err);
1941            }
1942        }
1943    }
1944
1945    pub(crate) fn set_video_track(&self, idx: usize, enabled: bool) {
1946        if let Some(ref player) = *self.player.borrow() {
1947            if let Err(err) = player.lock().unwrap().set_video_track(idx as i32, enabled) {
1948                warn!("Could not set video track {:#?}", err);
1949            }
1950        }
1951    }
1952
1953    fn end_of_playback_in_forwards_direction(&self, can_gc: CanGc) {
1954        // Step 1. If the media element has a loop attribute specified, then seek to the earliest
1955        // possible position of the media resource and return.
1956        if self.Loop() {
1957            self.seek(
1958                self.earliest_possible_position(),
1959                /* approximate_for_speed*/ false,
1960                can_gc,
1961            );
1962            return;
1963        }
1964        // Step 2. The ended IDL attribute starts returning true once the event loop returns to
1965        // step 1.
1966        // The **ended playback** condition is implemented inside
1967        // the HTMLMediaElementMethods::Ended method.
1968
1969        // Step 3. Queue a media element task given the media element and the following steps:
1970        let this = Trusted::new(self);
1971
1972        self.owner_global()
1973            .task_manager()
1974            .media_element_task_source()
1975            .queue(task!(reaches_the_end_steps: move || {
1976                let this = this.root();
1977                // Step 3.1. Fire an event named timeupdate at the media element
1978                this.upcast::<EventTarget>().fire_event(atom!("timeupdate"), CanGc::note());
1979
1980                // Step 3.2. If the media element has ended playback, the direction of playback is
1981                // forwards, and paused is false, then:
1982                if this.Ended() && !this.Paused() {
1983                    // Step 3.2.1. Set the paused attribute to true
1984                    this.paused.set(true);
1985
1986                    // Step 3.2.2. Fire an event named pause at the media element
1987                    this.upcast::<EventTarget>().fire_event(atom!("pause"), CanGc::note());
1988
1989                    // Step 3.2.3. Take pending play promises and reject pending play promises with
1990                    // the result and an "AbortError" DOMException
1991                    this.take_pending_play_promises(Err(Error::Abort));
1992                    this.fulfill_in_flight_play_promises(|| ());
1993                }
1994
1995                // Step 3.3. Fire an event named ended at the media element.
1996                this.upcast::<EventTarget>().fire_event(atom!("ended"), CanGc::note());
1997            }));
1998
1999        // https://html.spec.whatwg.org/multipage/#dom-media-have_current_data
2000        self.change_ready_state(ReadyState::HaveCurrentData);
2001    }
2002
2003    fn playback_end(&self, can_gc: CanGc) {
2004        // https://html.spec.whatwg.org/multipage/#reaches-the-end
2005        match self.direction_of_playback() {
2006            PlaybackDirection::Forwards => self.end_of_playback_in_forwards_direction(can_gc),
2007
2008            PlaybackDirection::Backwards => {
2009                if self.playback_position.get() <= self.earliest_possible_position() {
2010                    self.owner_global()
2011                        .task_manager()
2012                        .media_element_task_source()
2013                        .queue_simple_event(self.upcast(), atom!("ended"));
2014                }
2015            },
2016        }
2017    }
2018
2019    fn playback_error(&self, error: &str, can_gc: CanGc) {
2020        error!("Player error: {:?}", error);
2021
2022        // If we have already flagged an error condition while processing
2023        // the network response, we should silently skip any observable
2024        // errors originating while decoding the erroneous response.
2025        if self.in_error_state() {
2026            return;
2027        }
2028
2029        // <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
2030        if self.ready_state.get() == ReadyState::HaveNothing {
2031            // => "If the media data can be fetched but is found by inspection to be in an
2032            // unsupported format, or can otherwise not be rendered at all"
2033            self.media_data_processing_failure_steps();
2034        } else {
2035            // => "If the media data is corrupted"
2036            self.media_data_processing_fatal_steps(MEDIA_ERR_DECODE, can_gc);
2037        }
2038    }
2039
2040    fn playback_metadata_updated(
2041        &self,
2042        metadata: &servo_media::player::metadata::Metadata,
2043        can_gc: CanGc,
2044    ) {
2045        // https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list
2046        // => If the media resource is found to have an audio track
2047        if !metadata.audio_tracks.is_empty() {
2048            for (i, _track) in metadata.audio_tracks.iter().enumerate() {
2049                // Step 1.
2050                let kind = match i {
2051                    0 => DOMString::from("main"),
2052                    _ => DOMString::new(),
2053                };
2054                let window = self.owner_window();
2055                let audio_track = AudioTrack::new(
2056                    &window,
2057                    DOMString::new(),
2058                    kind,
2059                    DOMString::new(),
2060                    DOMString::new(),
2061                    Some(&*self.AudioTracks(can_gc)),
2062                    can_gc,
2063                );
2064
2065                // Steps 2. & 3.
2066                self.AudioTracks(can_gc).add(&audio_track);
2067
2068                // Step 4
2069                if let Some(servo_url) = self.resource_url.borrow().as_ref() {
2070                    let fragment = MediaFragmentParser::from(servo_url);
2071                    if let Some(id) = fragment.id() {
2072                        if audio_track.id() == id {
2073                            self.AudioTracks(can_gc)
2074                                .set_enabled(self.AudioTracks(can_gc).len() - 1, true);
2075                        }
2076                    }
2077
2078                    if fragment.tracks().contains(&audio_track.kind().into()) {
2079                        self.AudioTracks(can_gc)
2080                            .set_enabled(self.AudioTracks(can_gc).len() - 1, true);
2081                    }
2082                }
2083
2084                // Steps 5 & 6.
2085                if self.AudioTracks(can_gc).enabled_index().is_none() {
2086                    self.AudioTracks(can_gc)
2087                        .set_enabled(self.AudioTracks(can_gc).len() - 1, true);
2088                }
2089
2090                // Steps 7.
2091                let event = TrackEvent::new(
2092                    self.global().as_window(),
2093                    atom!("addtrack"),
2094                    false,
2095                    false,
2096                    &Some(VideoTrackOrAudioTrackOrTextTrack::AudioTrack(audio_track)),
2097                    can_gc,
2098                );
2099
2100                event
2101                    .upcast::<Event>()
2102                    .fire(self.upcast::<EventTarget>(), can_gc);
2103            }
2104        }
2105
2106        // => If the media resource is found to have a video track
2107        if !metadata.video_tracks.is_empty() {
2108            for (i, _track) in metadata.video_tracks.iter().enumerate() {
2109                // Step 1.
2110                let kind = match i {
2111                    0 => DOMString::from("main"),
2112                    _ => DOMString::new(),
2113                };
2114                let window = self.owner_window();
2115                let video_track = VideoTrack::new(
2116                    &window,
2117                    DOMString::new(),
2118                    kind,
2119                    DOMString::new(),
2120                    DOMString::new(),
2121                    Some(&*self.VideoTracks(can_gc)),
2122                    can_gc,
2123                );
2124
2125                // Steps 2. & 3.
2126                self.VideoTracks(can_gc).add(&video_track);
2127
2128                // Step 4.
2129                if let Some(track) = self.VideoTracks(can_gc).item(0) {
2130                    if let Some(servo_url) = self.resource_url.borrow().as_ref() {
2131                        let fragment = MediaFragmentParser::from(servo_url);
2132                        if let Some(id) = fragment.id() {
2133                            if track.id() == id {
2134                                self.VideoTracks(can_gc).set_selected(0, true);
2135                            }
2136                        } else if fragment.tracks().contains(&track.kind().into()) {
2137                            self.VideoTracks(can_gc).set_selected(0, true);
2138                        }
2139                    }
2140                }
2141
2142                // Step 5. & 6.
2143                if self.VideoTracks(can_gc).selected_index().is_none() {
2144                    self.VideoTracks(can_gc)
2145                        .set_selected(self.VideoTracks(can_gc).len() - 1, true);
2146                }
2147
2148                // Steps 7.
2149                let event = TrackEvent::new(
2150                    self.global().as_window(),
2151                    atom!("addtrack"),
2152                    false,
2153                    false,
2154                    &Some(VideoTrackOrAudioTrackOrTextTrack::VideoTrack(video_track)),
2155                    can_gc,
2156                );
2157
2158                event
2159                    .upcast::<Event>()
2160                    .fire(self.upcast::<EventTarget>(), can_gc);
2161            }
2162        }
2163
2164        // => "Once enough of the media data has been fetched to determine the duration..."
2165        // Step 1.
2166        // servo-media owns the media timeline.
2167
2168        // Step 2.
2169        // XXX(ferjm) Update the timeline offset.
2170
2171        // Step 3.
2172        self.playback_position.set(0.);
2173
2174        // Step 4.
2175        let previous_duration = self.duration.get();
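        // Note: `as_secs()` below truncates any sub-second precision of the reported
        // duration; a missing duration is treated as infinite (e.g. a live stream).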
2176        if let Some(duration) = metadata.duration {
2177            self.duration.set(duration.as_secs() as f64);
2178        } else {
2179            self.duration.set(f64::INFINITY);
2180        }
2181        if previous_duration != self.duration.get() {
2182            self.owner_global()
2183                .task_manager()
2184                .media_element_task_source()
2185                .queue_simple_event(self.upcast(), atom!("durationchange"));
2186        }
2187
2188        // Step 5.
2189        self.handle_resize(Some(metadata.width), Some(metadata.height));
2190
2191        // Step 6.
2192        self.change_ready_state(ReadyState::HaveMetadata);
2193
2194        // Step 7.
2195        let mut jumped = false;
2196
2197        // Step 8.
2198        if self.default_playback_start_position.get() > 0. {
2199            self.seek(
2200                self.default_playback_start_position.get(),
2201                /* approximate_for_speed*/ false,
2202                can_gc,
2203            );
2204            jumped = true;
2205        }
2206
2207        // Step 9.
2208        self.default_playback_start_position.set(0.);
2209
2210        // Steps 10 and 11.
2211        if let Some(servo_url) = self.resource_url.borrow().as_ref() {
2212            let fragment = MediaFragmentParser::from(servo_url);
2213            if let Some(start) = fragment.start() {
2214                if start > 0. && start < self.duration.get() {
2215                    self.playback_position.set(start);
2216                    if !jumped {
2217                        self.seek(self.playback_position.get(), false, can_gc)
2218                    }
2219                }
2220            }
2221        }
2222
2223        // Steps 12 & 13 are already handled by the earlier media track processing.
2224
2225        // We wait until we have metadata to render the controls, so we render them
2226        // with the appropriate size.
2227        if self.Controls() {
2228            self.render_controls(can_gc);
2229        }
2230
2231        let global = self.global();
2232        let window = global.as_window();
2233
2234        // Update the media session metadata title with the obtained metadata.
2235        window.Navigator().MediaSession().update_title(
2236            metadata
2237                .title
2238                .clone()
2239                .unwrap_or(window.get_url().into_string()),
2240        );
2241    }
2242
2243    fn playback_video_frame_updated(&self) {
2244        // Check if the frame was resized
2245        if let Some(frame) = self.video_renderer.lock().unwrap().current_frame {
2246            self.handle_resize(Some(frame.width as u32), Some(frame.height as u32));
2247        }
2248    }
2249
2250    fn playback_need_data(&self, can_gc: CanGc) {
2251        // The player needs more data.
2252        // If we already have a valid fetch request, we do nothing.
2253        // Otherwise, if we have no request and the previous request was
2254        // cancelled because we got an EnoughData event, we restart
2255        // fetching where we left off.
2256        if let Some(ref current_fetch_context) = *self.current_fetch_context.borrow() {
2257            if let Some(reason) = current_fetch_context.cancel_reason() {
2258                // XXX(ferjm) Ideally we should just create a fetch request from
2259                // where we left off. But keeping track of the exact next byte that the
2260                // media backend expects is not the easiest task, so I'm simply
2261                // seeking to the current playback position for now which will create
2262                // a new fetch request for the last rendered frame.
2263                if *reason == CancelReason::Backoff {
2264                    self.seek(self.playback_position.get(), false, can_gc);
2265                }
2266                return;
2267            }
2268        }
2269
2270        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
2271            if let Err(e) = {
2272                let mut data_source = current_fetch_context.data_source().borrow_mut();
2273                data_source.set_locked(false);
2274                data_source.process_into_player_from_queue(self.player.borrow().as_ref().unwrap())
2275            } {
2276                // If we are pushing too much data and we know that we can
2277                // restart the download later from where we left off, we cancel
2278                // the current request. Otherwise, we continue the request
2279                // assuming that we may drop some frames.
2280                if e == PlayerError::EnoughData {
2281                    current_fetch_context.cancel(CancelReason::Backoff);
2282                }
2283            }
2284        }
2285    }
2286
2287    fn playback_enough_data(&self) {
2288        self.change_ready_state(ReadyState::HaveEnoughData);
2289
2290        // The player has enough data and it is asking us to stop pushing
2291        // bytes, so we cancel the ongoing fetch request iff we are able
2292        // to restart it from where we left off. Otherwise, we continue the
2293        // current fetch request, assuming that some frames will be dropped.
2294        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
2295            if current_fetch_context.is_seekable() {
2296                current_fetch_context.cancel(CancelReason::Backoff);
2297            }
2298        }
2299    }
2300
2301    fn playback_position_changed(&self, position: u64) {
2302        let position = position as f64;
2303        let _ = self
2304            .played
2305            .borrow_mut()
2306            .add(self.playback_position.get(), position);
2307        self.playback_position.set(position);
2308        self.time_marches_on();
2309        let media_position_state =
2310            MediaPositionState::new(self.duration.get(), self.playbackRate.get(), position);
2311        debug!(
2312            "Sending media session event set position state {:?}",
2313            media_position_state
2314        );
2315        self.send_media_session_event(MediaSessionEvent::SetPositionState(media_position_state));
2316    }
2317
2318    fn playback_seek_done(&self) {
2319        // Continuation of
2320        // https://html.spec.whatwg.org/multipage/#dom-media-seek
2321
2322        // Step 13.
2323        let task = MediaElementMicrotask::Seeked {
2324            elem: DomRoot::from_ref(self),
2325            generation_id: self.generation_id.get(),
2326        };
2327        ScriptThread::await_stable_state(Microtask::MediaElement(task));
2328    }
2329
2330    fn playback_state_changed(&self, state: &PlaybackState) {
2331        let mut media_session_playback_state = MediaSessionPlaybackState::None_;
2332        match *state {
2333            PlaybackState::Paused => {
2334                media_session_playback_state = MediaSessionPlaybackState::Paused;
2335                if self.ready_state.get() == ReadyState::HaveMetadata {
2336                    self.change_ready_state(ReadyState::HaveEnoughData);
2337                }
2338            },
2339            PlaybackState::Playing => {
2340                media_session_playback_state = MediaSessionPlaybackState::Playing;
2341            },
2342            PlaybackState::Buffering => {
2343                // Do not send the media session playback state change event
2344                // in this case as a None_ state is expected to clean up the
2345                // session.
2346                return;
2347            },
2348            _ => {},
2349        };
2350        debug!(
2351            "Sending media session event playback state changed to {:?}",
2352            media_session_playback_state
2353        );
2354        self.send_media_session_event(MediaSessionEvent::PlaybackStateChange(
2355            media_session_playback_state,
2356        ));
2357    }
2358
2359    fn handle_player_event(&self, player_id: usize, event: &PlayerEvent, can_gc: CanGc) {
2360        // Ignore the asynchronous event from previous player.
2361        if self
2362            .player
2363            .borrow()
2364            .as_ref()
2365            .is_none_or(|player| player.lock().unwrap().get_id() != player_id)
2366        {
2367            return;
2368        }
2369
2370        match *event {
2371            PlayerEvent::EndOfStream => self.playback_end(can_gc),
2372            PlayerEvent::Error(ref error) => self.playback_error(error, can_gc),
2373            PlayerEvent::VideoFrameUpdated => self.playback_video_frame_updated(),
2374            PlayerEvent::MetadataUpdated(ref metadata) => {
2375                self.playback_metadata_updated(metadata, can_gc)
2376            },
2377            PlayerEvent::NeedData => self.playback_need_data(can_gc),
2378            PlayerEvent::EnoughData => self.playback_enough_data(),
2379            PlayerEvent::PositionChanged(position) => self.playback_position_changed(position),
2380            PlayerEvent::SeekData(p, ref seek_lock) => {
2381                self.fetch_request(Some(p), Some(seek_lock.clone()))
2382            },
2383            PlayerEvent::SeekDone(_) => self.playback_seek_done(),
2384            PlayerEvent::StateChanged(ref state) => self.playback_state_changed(state),
2385        }
2386    }
2387
2388    /// <https://html.spec.whatwg.org/multipage/#earliest-possible-position>
2389    fn earliest_possible_position(&self) -> f64 {
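        // The earliest position is the start of the first played range; if nothing has
        // been played yet, fall back to the current playback position.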
2390        self.played
2391            .borrow()
2392            .start(0)
2393            .unwrap_or_else(|_| self.playback_position.get())
2394    }
2395
2396    fn render_controls(&self, can_gc: CanGc) {
2397        let element = self.htmlelement.upcast::<Element>();
2398        if self.ready_state.get() < ReadyState::HaveMetadata || element.is_shadow_host() {
2399            // Bail out if we have no metadata yet or
2400            // if we are already showing the controls.
2401            return;
2402        }
2403        // FIXME(stevennovaryo): Recheck styling of media element to avoid
2404        //                       reparsing styles.
2405        let shadow_root = self
2406            .upcast::<Element>()
2407            .attach_ua_shadow_root(false, can_gc);
2408        let document = self.owner_document();
2409        let script = Element::create(
2410            QualName::new(None, ns!(html), local_name!("script")),
2411            None,
2412            &document,
2413            ElementCreator::ScriptCreated,
2414            CustomElementCreationMode::Asynchronous,
2415            None,
2416            can_gc,
2417        );
2418        // This is our hacky way to temporarily work around the lack of a privileged
2419        // JS context.
2420        // The media controls UI accesses the document.servoGetMediaControls(id) API
2421        // to get a reference to the media controls ShadowRoot.
2422        // `id` needs to match the internally generated UUID assigned to a media element.
2423        let id = document.register_media_controls(&shadow_root);
2424        let media_controls_script = MEDIA_CONTROL_JS.replace("@@@id@@@", &id);
2425        *self.media_controls_id.borrow_mut() = Some(id);
2426        script
2427            .upcast::<Node>()
2428            .set_text_content_for_element(Some(DOMString::from(media_controls_script)), can_gc);
2429        if let Err(e) = shadow_root
2430            .upcast::<Node>()
2431            .AppendChild(script.upcast::<Node>(), can_gc)
2432        {
2433            warn!("Could not render media controls {:?}", e);
2434            return;
2435        }
2436
2437        let style = Element::create(
2438            QualName::new(None, ns!(html), local_name!("style")),
2439            None,
2440            &document,
2441            ElementCreator::ScriptCreated,
2442            CustomElementCreationMode::Asynchronous,
2443            None,
2444            can_gc,
2445        );
2446
2447        style
2448            .upcast::<Node>()
2449            .set_text_content_for_element(Some(DOMString::from(MEDIA_CONTROL_CSS)), can_gc);
2450
2451        if let Err(e) = shadow_root
2452            .upcast::<Node>()
2453            .AppendChild(style.upcast::<Node>(), can_gc)
2454        {
2455            warn!("Could not render media controls {:?}", e);
2456        }
2457
2458        self.upcast::<Node>().dirty(NodeDamage::Other);
2459    }
2460
2461    fn remove_controls(&self) {
2462        if let Some(id) = self.media_controls_id.borrow_mut().take() {
2463            self.owner_document().unregister_media_controls(&id);
2464        }
2465    }
2466
2467    /// Gets the video frame at the current playback position.
2468    pub(crate) fn get_current_frame(&self) -> Option<VideoFrame> {
2469        self.video_renderer
2470            .lock()
2471            .unwrap()
2472            .current_frame_holder
2473            .as_ref()
2474            .map(|holder| holder.get_frame())
2475    }
2476
2477    /// Gets the current frame of the video element to present, if any.
2478    /// <https://html.spec.whatwg.org/multipage/#the-video-element:the-video-element-7>
2479    pub(crate) fn get_current_frame_to_present(&self) -> Option<MediaFrame> {
2480        let (current_frame, poster_frame) = {
2481            let renderer = self.video_renderer.lock().unwrap();
2482            (renderer.current_frame, renderer.poster_frame)
2483        };
2484
2485        // If the show poster flag is set (or there is no current video frame to
2486        // present) AND there is a poster frame, present that.
2487        if (self.show_poster.get() || current_frame.is_none()) && poster_frame.is_some() {
2488            return poster_frame;
2489        }
2490
2491        current_frame
2492    }
2493
2494    pub(crate) fn clear_current_frame_data(&self) {
2495        self.handle_resize(None, None);
2496        self.video_renderer.lock().unwrap().current_frame = None;
2497    }
2498
2499    fn handle_resize(&self, width: Option<u32>, height: Option<u32>) {
2500        if let Some(video_elem) = self.downcast::<HTMLVideoElement>() {
2501            video_elem.resize(width, height);
2502            self.upcast::<Node>().dirty(NodeDamage::Other);
2503        }
2504    }
2505
2506    /// By default the audio is rendered through the audio sink automatically
2507    /// selected by the servo-media Player instance. However, in some cases, like
2508    /// the WebAudio MediaElementAudioSourceNode, we need to set a custom audio
2509    /// renderer.
2510    pub(crate) fn set_audio_renderer(
2511        &self,
2512        audio_renderer: Arc<Mutex<dyn AudioRenderer>>,
2513        can_gc: CanGc,
2514    ) {
2515        *self.audio_renderer.borrow_mut() = Some(audio_renderer);
2516        if let Some(ref player) = *self.player.borrow() {
2517            if let Err(e) = player.lock().unwrap().stop() {
2518                eprintln!("Could not stop player {:?}", e);
2519            }
2520            self.media_element_load_algorithm(can_gc);
2521        }
2522    }
2523
2524    fn send_media_session_event(&self, event: MediaSessionEvent) {
2525        let global = self.global();
2526        let media_session = global.as_window().Navigator().MediaSession();
2527
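        // Make sure this element is registered as the media session's current
        // media instance before the event is sent.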
2528        media_session.register_media_instance(self);
2529
2530        media_session.send_event(event);
2531    }
2532
2533    pub(crate) fn set_duration(&self, duration: f64) {
2534        self.duration.set(duration);
2535    }
2536
2537    pub(crate) fn reset(&self) {
2538        if let Some(ref player) = *self.player.borrow() {
2539            if let Err(e) = player.lock().unwrap().stop() {
2540                eprintln!("Could not stop player {:?}", e);
2541            }
2542        }
2543    }
2544
2545    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-resource>
2546    pub(crate) fn origin_is_clean(&self) -> bool {
2547        // Step 5.local (media provider object).
2548        if self.src_object.borrow().is_some() {
2549            // The resource described by the current media resource, if any,
2550            // contains the media data. It is CORS-same-origin.
2551            return true;
2552        }
2553
2554        // Step 5.remote (URL record).
2555        if self.resource_url.borrow().is_some() {
2556            // Update the media data with the contents
2557            // of response's unsafe response obtained in this fashion.
2558            // Response can be CORS-same-origin or CORS-cross-origin;
2559            if let Some(ref current_fetch_context) = *self.current_fetch_context.borrow() {
2560                return current_fetch_context.origin_is_clean();
2561            }
2562        }
2563
2564        true
2565    }
2566}
2567
2568// XXX Placeholder for [https://github.com/servo/servo/issues/22293]
2569#[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq)]
2570enum PlaybackDirection {
2571    Forwards,
2572    #[allow(dead_code)]
2573    Backwards,
2574}
2575
2576// XXX Placeholder implementations for:
2577//
2578// - https://github.com/servo/servo/issues/22293
2579impl HTMLMediaElement {
2580    /// <https://github.com/servo/servo/issues/22293>
2581    fn direction_of_playback(&self) -> PlaybackDirection {
2582        PlaybackDirection::Forwards
2583    }
2584}
2585
2586impl HTMLMediaElementMethods<crate::DomTypeHolder> for HTMLMediaElement {
2587    /// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
2588    fn NetworkState(&self) -> u16 {
2589        self.network_state.get() as u16
2590    }
2591
2592    /// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
2593    fn ReadyState(&self) -> u16 {
2594        self.ready_state.get() as u16
2595    }
2596
2597    // https://html.spec.whatwg.org/multipage/#dom-media-autoplay
2598    make_bool_getter!(Autoplay, "autoplay");
2599    // https://html.spec.whatwg.org/multipage/#dom-media-autoplay
2600    make_bool_setter!(SetAutoplay, "autoplay");
2601
2602    // https://html.spec.whatwg.org/multipage/#attr-media-loop
2603    make_bool_getter!(Loop, "loop");
2604    // https://html.spec.whatwg.org/multipage/#attr-media-loop
2605    make_bool_setter!(SetLoop, "loop");
2606
2607    // https://html.spec.whatwg.org/multipage/#dom-media-defaultmuted
2608    make_bool_getter!(DefaultMuted, "muted");
2609    // https://html.spec.whatwg.org/multipage/#dom-media-defaultmuted
2610    make_bool_setter!(SetDefaultMuted, "muted");
2611
2612    // https://html.spec.whatwg.org/multipage/#dom-media-controls
2613    make_bool_getter!(Controls, "controls");
2614    // https://html.spec.whatwg.org/multipage/#dom-media-controls
2615    make_bool_setter!(SetControls, "controls");
2616
2617    // https://html.spec.whatwg.org/multipage/#dom-media-src
2618    make_url_getter!(Src, "src");
2619
2620    // https://html.spec.whatwg.org/multipage/#dom-media-src
2621    make_url_setter!(SetSrc, "src");
2622
2623    /// <https://html.spec.whatwg.org/multipage/#dom-media-crossOrigin>
2624    fn GetCrossOrigin(&self) -> Option<DOMString> {
2625        reflect_cross_origin_attribute(self.upcast::<Element>())
2626    }
2627    /// <https://html.spec.whatwg.org/multipage/#dom-media-crossOrigin>
2628    fn SetCrossOrigin(&self, value: Option<DOMString>, can_gc: CanGc) {
2629        set_cross_origin_attribute(self.upcast::<Element>(), value, can_gc);
2630    }
2631
2632    /// <https://html.spec.whatwg.org/multipage/#dom-media-muted>
2633    fn Muted(&self) -> bool {
2634        self.muted.get()
2635    }
2636
2637    /// <https://html.spec.whatwg.org/multipage/#dom-media-muted>
2638    fn SetMuted(&self, value: bool) {
2639        if self.muted.get() == value {
2640            return;
2641        }
2642
2643        if let Some(ref player) = *self.player.borrow() {
2644            let _ = player.lock().unwrap().set_mute(value);
2645        }
2646
2647        self.muted.set(value);
2648        self.owner_global()
2649            .task_manager()
2650            .media_element_task_source()
2651            .queue_simple_event(self.upcast(), atom!("volumechange"));
2652        if !self.is_allowed_to_play() {
2653            self.internal_pause_steps();
2654        }
2655    }
2656
2657    /// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
2658    fn GetSrcObject(&self) -> Option<MediaStreamOrBlob> {
2659        (*self.src_object.borrow())
2660            .as_ref()
2661            .map(|src_object| match src_object {
2662                SrcObject::Blob(blob) => MediaStreamOrBlob::Blob(DomRoot::from_ref(blob)),
2663                SrcObject::MediaStream(stream) => {
2664                    MediaStreamOrBlob::MediaStream(DomRoot::from_ref(stream))
2665                },
2666            })
2667    }
2668
2669    /// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
2670    fn SetSrcObject(&self, value: Option<MediaStreamOrBlob>, can_gc: CanGc) {
2671        *self.src_object.borrow_mut() = value.map(|value| value.into());
2672        self.media_element_load_algorithm(can_gc);
2673    }
2674
2675    // https://html.spec.whatwg.org/multipage/#attr-media-preload
2676    // Missing/Invalid values are user-agent defined.
2677    make_enumerated_getter!(
2678        Preload,
2679        "preload",
2680        "none" | "metadata" | "auto",
2681        missing => "auto",
2682        invalid => "auto"
2683    );
2684
2685    // https://html.spec.whatwg.org/multipage/#attr-media-preload
2686    make_setter!(SetPreload, "preload");
2687
2688    /// <https://html.spec.whatwg.org/multipage/#dom-media-currentsrc>
2689    fn CurrentSrc(&self) -> USVString {
2690        USVString(self.current_src.borrow().clone())
2691    }
2692
2693    /// <https://html.spec.whatwg.org/multipage/#dom-media-load>
2694    fn Load(&self, can_gc: CanGc) {
2695        self.media_element_load_algorithm(can_gc);
2696    }
2697
2698    /// <https://html.spec.whatwg.org/multipage/#dom-navigator-canplaytype>
2699    fn CanPlayType(&self, type_: DOMString) -> CanPlayTypeResult {
2700        match ServoMedia::get().can_play_type(&type_.str()) {
2701            SupportsMediaType::No => CanPlayTypeResult::_empty,
2702            SupportsMediaType::Maybe => CanPlayTypeResult::Maybe,
2703            SupportsMediaType::Probably => CanPlayTypeResult::Probably,
2704        }
2705    }
2706
2707    /// <https://html.spec.whatwg.org/multipage/#dom-media-error>
2708    fn GetError(&self) -> Option<DomRoot<MediaError>> {
2709        self.error.get()
2710    }
2711
2712    /// <https://html.spec.whatwg.org/multipage/#dom-media-play>
2713    fn Play(&self, comp: InRealm, can_gc: CanGc) -> Rc<Promise> {
2714        let promise = Promise::new_in_current_realm(comp, can_gc);
2715        // Step 1.
2716        // FIXME(nox): Reject promise if not allowed to play.
2717
2718        // Step 2.
2719        if self
2720            .error
2721            .get()
2722            .is_some_and(|e| e.Code() == MEDIA_ERR_SRC_NOT_SUPPORTED)
2723        {
2724            promise.reject_error(Error::NotSupported, can_gc);
2725            return promise;
2726        }
2727
2728        // Step 3.
2729        self.push_pending_play_promise(&promise);
2730
2731        // Step 4.
2732        if self.network_state.get() == NetworkState::Empty {
2733            self.invoke_resource_selection_algorithm(can_gc);
2734        }
2735
2736        // Step 5.
2737        if self.Ended() && self.direction_of_playback() == PlaybackDirection::Forwards {
2738            self.seek(
2739                self.earliest_possible_position(),
2740                /* approximate_for_speed */ false,
2741                can_gc,
2742            );
2743        }
2744
2745        let state = self.ready_state.get();
2746
2747        let global = self.owner_global();
2748        let task_manager = global.task_manager();
2749        let task_source = task_manager.media_element_task_source();
2750        if self.Paused() {
2751            // Step 6.1.
2752            self.paused.set(false);
2753
2754            // Step 6.2.
2755            if self.show_poster.get() {
2756                self.show_poster.set(false);
2757                self.time_marches_on();
2758            }
2759
2760            // Step 6.3.
2761            task_source.queue_simple_event(self.upcast(), atom!("play"));
2762
2763            // Step 6.4.
2764            match state {
2765                ReadyState::HaveNothing |
2766                ReadyState::HaveMetadata |
2767                ReadyState::HaveCurrentData => {
2768                    task_source.queue_simple_event(self.upcast(), atom!("waiting"));
2769                },
2770                ReadyState::HaveFutureData | ReadyState::HaveEnoughData => {
2771                    self.notify_about_playing();
2772                },
2773            }
2774        } else if state == ReadyState::HaveFutureData || state == ReadyState::HaveEnoughData {
2775            // Step 7.
2776            self.take_pending_play_promises(Ok(()));
2777            let this = Trusted::new(self);
2778            let generation_id = self.generation_id.get();
2779            task_source.queue(task!(resolve_pending_play_promises: move || {
2780                let this = this.root();
2781                if generation_id != this.generation_id.get() {
2782                    return;
2783                }
2784
2785                this.fulfill_in_flight_play_promises(|| {
2786                    this.play_media();
2787                });
2788            }));
2789        }
2790
2791        // Step 8.
2792        self.autoplaying.set(false);
2793
2794        // Step 9.
2795        promise
2796    }
2797
2798    /// <https://html.spec.whatwg.org/multipage/#dom-media-pause>
2799    fn Pause(&self, can_gc: CanGc) {
2800        // Step 1
2801        if self.network_state.get() == NetworkState::Empty {
2802            self.invoke_resource_selection_algorithm(can_gc);
2803        }
2804
2805        // Step 2
2806        self.internal_pause_steps();
2807    }
2808
2809    /// <https://html.spec.whatwg.org/multipage/#dom-media-paused>
2810    fn Paused(&self) -> bool {
2811        self.paused.get()
2812    }
2813
2814    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
2815    fn GetDefaultPlaybackRate(&self) -> Fallible<Finite<f64>> {
2816        Ok(Finite::wrap(self.defaultPlaybackRate.get()))
2817    }
2818
2819    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
2820    fn SetDefaultPlaybackRate(&self, value: Finite<f64>) -> ErrorResult {
2821        let min_allowed = -64.0;
2822        let max_allowed = 64.0;
2823        if *value < min_allowed || *value > max_allowed {
2824            return Err(Error::NotSupported);
2825        }
2826
2827        if *value != self.defaultPlaybackRate.get() {
2828            self.defaultPlaybackRate.set(*value);
2829            self.queue_media_element_task_to_fire_event(atom!("ratechange"));
2830        }
2831
2832        Ok(())
2833    }
2834
2835    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
2836    fn GetPlaybackRate(&self) -> Fallible<Finite<f64>> {
2837        Ok(Finite::wrap(self.playbackRate.get()))
2838    }
2839
2840    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
2841    fn SetPlaybackRate(&self, value: Finite<f64>) -> ErrorResult {
2842        let min_allowed = -64.0;
2843        let max_allowed = 64.0;
2844        if *value < min_allowed || *value > max_allowed {
2845            return Err(Error::NotSupported);
2846        }
2847
2848        if *value != self.playbackRate.get() {
2849            self.playbackRate.set(*value);
2850            self.queue_media_element_task_to_fire_event(atom!("ratechange"));
2851            if self.is_potentially_playing() {
2852                if let Some(ref player) = *self.player.borrow() {
2853                    if let Err(e) = player.lock().unwrap().set_rate(*value) {
2854                        warn!("Could not set the playback rate {:?}", e);
2855                    }
2856                }
2857            }
2858        }
2859
2860        Ok(())
2861    }
2862
2863    /// <https://html.spec.whatwg.org/multipage/#dom-media-duration>
2864    fn Duration(&self) -> f64 {
2865        self.duration.get()
2866    }
2867
2868    /// <https://html.spec.whatwg.org/multipage/#dom-media-currenttime>
2869    fn CurrentTime(&self) -> Finite<f64> {
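        // Per spec: return the default playback start position unless it is zero,
        // in which case return the official playback position.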
2870        Finite::wrap(if self.default_playback_start_position.get() != 0. {
2871            self.default_playback_start_position.get()
2872        } else {
2873            self.playback_position.get()
2874        })
2875    }
2876
2877    /// <https://html.spec.whatwg.org/multipage/#dom-media-currenttime>
2878    fn SetCurrentTime(&self, time: Finite<f64>, can_gc: CanGc) {
2879        if self.ready_state.get() == ReadyState::HaveNothing {
2880            self.default_playback_start_position.set(*time);
2881        } else {
2882            self.playback_position.set(*time);
2883            self.seek(*time, /* approximate_for_speed */ false, can_gc);
2884        }
2885    }
2886
2887    /// <https://html.spec.whatwg.org/multipage/#dom-media-seeking>
2888    fn Seeking(&self) -> bool {
2889        self.seeking.get()
2890    }
2891
2892    /// <https://html.spec.whatwg.org/multipage/#ended-playback>
2893    fn Ended(&self) -> bool {
2894        if self.ready_state.get() < ReadyState::HaveMetadata {
2895            return false;
2896        }
2897
2898        let playback_pos = self.playback_position.get();
2899
2900        match self.direction_of_playback() {
2901            PlaybackDirection::Forwards => playback_pos >= self.Duration() && !self.Loop(),
2902            PlaybackDirection::Backwards => playback_pos <= self.earliest_possible_position(),
2903        }
2904    }
2905
2906    /// <https://html.spec.whatwg.org/multipage/#dom-media-fastseek>
2907    fn FastSeek(&self, time: Finite<f64>, can_gc: CanGc) {
2908        self.seek(*time, /* approximate_for_speed */ true, can_gc);
2909    }
2910
2911    /// <https://html.spec.whatwg.org/multipage/#dom-media-played>
2912    fn Played(&self, can_gc: CanGc) -> DomRoot<TimeRanges> {
2913        TimeRanges::new(
2914            self.global().as_window(),
2915            self.played.borrow().clone(),
2916            can_gc,
2917        )
2918    }
2919
2920    /// <https://html.spec.whatwg.org/multipage/#dom-media-seekable>
2921    fn Seekable(&self, can_gc: CanGc) -> DomRoot<TimeRanges> {
2922        let mut seekable = TimeRangesContainer::default();
2923        if let Some(ref player) = *self.player.borrow() {
2924            if let Ok(ranges) = player.lock().unwrap().seekable() {
2925                for range in ranges {
2926                    let _ = seekable.add(range.start, range.end);
2927                }
2928            }
2929        }
2930        TimeRanges::new(self.global().as_window(), seekable, can_gc)
2931    }
2932
2933    /// <https://html.spec.whatwg.org/multipage/#dom-media-buffered>
2934    fn Buffered(&self, can_gc: CanGc) -> DomRoot<TimeRanges> {
2935        let mut buffered = TimeRangesContainer::default();
2936        if let Some(ref player) = *self.player.borrow() {
2937            if let Ok(ranges) = player.lock().unwrap().buffered() {
2938                for range in ranges {
2939                    let _ = buffered.add(range.start, range.end);
2940                }
2941            }
2942        }
2943        TimeRanges::new(self.global().as_window(), buffered, can_gc)
2944    }
2945
2946    /// <https://html.spec.whatwg.org/multipage/#dom-media-audiotracks>
2947    fn AudioTracks(&self, can_gc: CanGc) -> DomRoot<AudioTrackList> {
2948        let window = self.owner_window();
2949        self.audio_tracks_list
2950            .or_init(|| AudioTrackList::new(&window, &[], Some(self), can_gc))
2951    }
2952
2953    /// <https://html.spec.whatwg.org/multipage/#dom-media-videotracks>
2954    fn VideoTracks(&self, can_gc: CanGc) -> DomRoot<VideoTrackList> {
2955        let window = self.owner_window();
2956        self.video_tracks_list
2957            .or_init(|| VideoTrackList::new(&window, &[], Some(self), can_gc))
2958    }
2959
2960    /// <https://html.spec.whatwg.org/multipage/#dom-media-texttracks>
2961    fn TextTracks(&self, can_gc: CanGc) -> DomRoot<TextTrackList> {
2962        let window = self.owner_window();
2963        self.text_tracks_list
2964            .or_init(|| TextTrackList::new(&window, &[], can_gc))
2965    }
2966
2967    /// <https://html.spec.whatwg.org/multipage/#dom-media-addtexttrack>
2968    fn AddTextTrack(
2969        &self,
2970        kind: TextTrackKind,
2971        label: DOMString,
2972        language: DOMString,
2973        can_gc: CanGc,
2974    ) -> DomRoot<TextTrack> {
2975        let window = self.owner_window();
2976        // Step 1 & 2
2977        // FIXME(#22314, dlrobertson) set the ready state to Loaded
2978        let track = TextTrack::new(
2979            &window,
2980            "".into(),
2981            kind,
2982            label,
2983            language,
2984            TextTrackMode::Hidden,
2985            None,
2986            can_gc,
2987        );
2988        // Step 3 & 4
2989        self.TextTracks(can_gc).add(&track);
2990        // Step 5
2991        DomRoot::from_ref(&track)
2992    }
2993
2994    /// <https://html.spec.whatwg.org/multipage/#dom-media-volume>
2995    fn GetVolume(&self) -> Fallible<Finite<f64>> {
2996        Ok(Finite::wrap(self.volume.get()))
2997    }
2998
2999    /// <https://html.spec.whatwg.org/multipage/#dom-media-volume>
3000    fn SetVolume(&self, value: Finite<f64>) -> ErrorResult {
3001        let minimum_volume = 0.0;
3002        let maximum_volume = 1.0;
3003        if *value < minimum_volume || *value > maximum_volume {
3004            return Err(Error::IndexSize);
3005        }
3006
3007        if *value != self.volume.get() {
3008            self.volume.set(*value);
3009            if let Some(player) = self.player.borrow().as_ref() {
3010                let _ = player.lock().unwrap().set_volume(*value);
3011            }
3012            self.owner_global()
3013                .task_manager()
3014                .media_element_task_source()
3015                .queue_simple_event(self.upcast(), atom!("volumechange"));
3016            if !self.is_allowed_to_play() {
3017                self.internal_pause_steps();
3018            }
3019        }
3020
3021        Ok(())
3022    }
3023}
3024
3025impl VirtualMethods for HTMLMediaElement {
3026    fn super_type(&self) -> Option<&dyn VirtualMethods> {
3027        Some(self.upcast::<HTMLElement>() as &dyn VirtualMethods)
3028    }
3029
3030    fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation, can_gc: CanGc) {
3031        self.super_type()
3032            .unwrap()
3033            .attribute_mutated(attr, mutation, can_gc);
3034
3035        match *attr.local_name() {
3036            local_name!("muted") => {
3037                self.SetMuted(mutation.new_value(attr).is_some());
3038            },
3039            local_name!("src") => {
3040                // <https://html.spec.whatwg.org/multipage/#location-of-the-media-resource>
3041                // If a src attribute of a media element is set or changed, the user agent must invoke
3042                // the media element's media element load algorithm (Removing the src attribute does
3043                // not do this, even if there are source elements present).
3044                if mutation.new_value(attr).is_none() {
3045                    self.clear_current_frame_data();
3046                    return;
3047                }
3048                self.media_element_load_algorithm(can_gc);
3049            },
3050            local_name!("controls") => {
3051                if mutation.new_value(attr).is_some() {
3052                    self.render_controls(can_gc);
3053                } else {
3054                    self.remove_controls();
3055                }
3056            },
3057            _ => (),
3058        };
3059    }
3060
3061    /// <https://html.spec.whatwg.org/multipage/#playing-the-media-resource:remove-an-element-from-a-document>
3062    fn unbind_from_tree(&self, context: &UnbindContext, can_gc: CanGc) {
3063        self.super_type().unwrap().unbind_from_tree(context, can_gc);
3064
3065        self.remove_controls();
3066
3067        if context.tree_connected {
3068            let task = MediaElementMicrotask::PauseIfNotInDocument {
3069                elem: DomRoot::from_ref(self),
3070            };
3071            ScriptThread::await_stable_state(Microtask::MediaElement(task));
3072        }
3073    }
3074}
3075
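/// Deferred media element work that runs as a microtask at a stable state.
/// Variants that carry a generation id are ignored if the element has been
/// reloaded since the task was queued.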
3076#[derive(JSTraceable, MallocSizeOf)]
3077pub(crate) enum MediaElementMicrotask {
3078    ResourceSelection {
3079        elem: DomRoot<HTMLMediaElement>,
3080        generation_id: u32,
3081        #[no_trace]
3082        base_url: ServoUrl,
3083    },
3084    PauseIfNotInDocument {
3085        elem: DomRoot<HTMLMediaElement>,
3086    },
3087    Seeked {
3088        elem: DomRoot<HTMLMediaElement>,
3089        generation_id: u32,
3090    },
3091    SelectNextSourceChild {
3092        elem: DomRoot<HTMLMediaElement>,
3093        generation_id: u32,
3094    },
3095    SelectNextSourceChildAfterWait {
3096        elem: DomRoot<HTMLMediaElement>,
3097        generation_id: u32,
3098    },
3099}
3100
3101impl MicrotaskRunnable for MediaElementMicrotask {
3102    fn handler(&self, can_gc: CanGc) {
3103        match self {
3104            &MediaElementMicrotask::ResourceSelection {
3105                ref elem,
3106                generation_id,
3107                ref base_url,
3108            } => {
3109                if generation_id == elem.generation_id.get() {
3110                    elem.resource_selection_algorithm_sync(base_url.clone(), can_gc);
3111                }
3112            },
3113            MediaElementMicrotask::PauseIfNotInDocument { elem } => {
3114                if !elem.upcast::<Node>().is_connected() {
3115                    elem.internal_pause_steps();
3116                }
3117            },
3118            &MediaElementMicrotask::Seeked {
3119                ref elem,
3120                generation_id,
3121            } => {
3122                if generation_id == elem.generation_id.get() {
3123                    elem.seek_end();
3124                }
3125            },
3126            &MediaElementMicrotask::SelectNextSourceChild {
3127                ref elem,
3128                generation_id,
3129            } => {
3130                if generation_id == elem.generation_id.get() {
3131                    elem.select_next_source_child(can_gc);
3132                }
3133            },
3134            &MediaElementMicrotask::SelectNextSourceChildAfterWait {
3135                ref elem,
3136                generation_id,
3137            } => {
3138                if generation_id == elem.generation_id.get() {
3139                    elem.select_next_source_child_after_wait(can_gc);
3140                }
3141            },
3142        }
3143    }
3144
3145    fn enter_realm(&self) -> JSAutoRealm {
3146        match self {
3147            &MediaElementMicrotask::ResourceSelection { ref elem, .. } |
3148            &MediaElementMicrotask::PauseIfNotInDocument { ref elem } |
3149            &MediaElementMicrotask::Seeked { ref elem, .. } |
3150            &MediaElementMicrotask::SelectNextSourceChild { ref elem, .. } |
3151            &MediaElementMicrotask::SelectNextSourceChildAfterWait { ref elem, .. } => {
3152                enter_realm(&**elem)
3153            },
3154        }
3155    }
3156}
3157
3158enum Resource {
3159    Object,
3160    Url(ServoUrl),
3161}
3162
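/// A chunk of fetched media data, or a marker signalling the end of the
/// network response.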
3163#[derive(Debug, MallocSizeOf, PartialEq)]
3164enum DataBuffer {
3165    Payload(Vec<u8>),
3166    EndOfStream,
3167}
3168
3169#[derive(MallocSizeOf)]
3170struct BufferedDataSource {
3171    /// During initial setup and seeking (including clearing the buffer queue
3172    /// and resetting the end-of-stream state), the data source should be locked and
3173    /// any request for processing should be ignored until the media player informs us
3174    /// via the NeedData event that it is ready to accept incoming data.
3175    locked: Cell<bool>,
3176    /// Temporary storage for incoming data.
3177    buffers: VecDeque<DataBuffer>,
3178}
3179
3180impl BufferedDataSource {
3181    fn new() -> BufferedDataSource {
3182        BufferedDataSource {
3183            locked: Cell::new(true),
3184            buffers: VecDeque::default(),
3185        }
3186    }
3187
3188    fn set_locked(&self, locked: bool) {
3189        self.locked.set(locked)
3190    }
3191
3192    fn add_buffer_to_queue(&mut self, buffer: DataBuffer) {
3193        debug_assert_ne!(
3194            self.buffers.back(),
3195            Some(&DataBuffer::EndOfStream),
3196            "The media backend not expects any further data after end of stream"
3197        );
3198
3199        self.buffers.push_back(buffer);
3200    }
3201
3202    fn process_into_player_from_queue(
3203        &mut self,
3204        player: &Arc<Mutex<dyn Player>>,
3205    ) -> Result<(), PlayerError> {
3206        // Early return: while the data source is locked, requests for processing are ignored.
3207        if self.locked.get() {
3208            return Ok(());
3209        }
3210
3211        while let Some(buffer) = self.buffers.pop_front() {
3212            match buffer {
3213                DataBuffer::Payload(payload) => {
3214                    if let Err(e) = player.lock().unwrap().push_data(payload) {
3215                        warn!("Could not push input data to player {:?}", e);
3216                        return Err(e);
3217                    }
3218                },
3219                DataBuffer::EndOfStream => {
3220                    if let Err(e) = player.lock().unwrap().end_of_stream() {
3221                        warn!("Could not signal EOS to player {:?}", e);
3222                        return Err(e);
3223                    }
3224                },
3225            }
3226        }
3227
3228        Ok(())
3229    }
3230
3231    fn reset(&mut self) {
3232        self.locked.set(true);
3233        self.buffers.clear();
3234    }
3235}
3236
3237/// Indicates the reason why a fetch request was cancelled.
3238#[derive(Debug, MallocSizeOf, PartialEq)]
3239enum CancelReason {
3240    /// We were asked to stop pushing data to the player.
3241    Backoff,
3242    /// An error occurred while fetching the media data.
3243    Error,
3244    /// The fetching process was aborted by the user.
3245    Abort,
3246}
3247
3248#[derive(MallocSizeOf)]
3249pub(crate) struct HTMLMediaElementFetchContext {
3250    /// The fetch request id.
3251    request_id: RequestId,
3252    /// Some if the request has been cancelled.
3253    cancel_reason: Option<CancelReason>,
3254    /// Indicates whether the fetched stream is seekable.
3255    is_seekable: bool,
3256    /// Indicates whether the fetched stream is origin clean.
3257    origin_clean: bool,
3258    /// The buffered data source to be processed by the media backend.
3259    data_source: RefCell<BufferedDataSource>,
3260    /// Fetch canceller. Allows cancelling the current fetch request by
3261    /// manually calling its .cancel() method or automatically on Drop.
3262    fetch_canceller: FetchCanceller,
3263}
3264
3265impl HTMLMediaElementFetchContext {
3266    fn new(
3267        request_id: RequestId,
3268        core_resource_thread: CoreResourceThread,
3269    ) -> HTMLMediaElementFetchContext {
3270        HTMLMediaElementFetchContext {
3271            request_id,
3272            cancel_reason: None,
3273            is_seekable: false,
3274            origin_clean: true,
3275            data_source: RefCell::new(BufferedDataSource::new()),
3276            fetch_canceller: FetchCanceller::new(request_id, core_resource_thread.clone()),
3277        }
3278    }
3279
3280    fn request_id(&self) -> RequestId {
3281        self.request_id
3282    }
3283
3284    fn is_seekable(&self) -> bool {
3285        self.is_seekable
3286    }
3287
3288    fn set_seekable(&mut self, seekable: bool) {
3289        self.is_seekable = seekable;
3290    }
3291
3292    fn origin_is_clean(&self) -> bool {
3293        self.origin_clean
3294    }
3295
3296    fn set_origin_clean(&mut self, origin_clean: bool) {
3297        self.origin_clean = origin_clean;
3298    }
3299
3300    fn data_source(&self) -> &RefCell<BufferedDataSource> {
3301        &self.data_source
3302    }
3303
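    /// Cancels the ongoing fetch at most once, clears any buffered data and
    /// records the reason so that late responses can be ignored.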
3304    fn cancel(&mut self, reason: CancelReason) {
3305        if self.cancel_reason.is_some() {
3306            return;
3307        }
3308        self.cancel_reason = Some(reason);
3309        self.data_source.borrow_mut().reset();
3310        self.fetch_canceller.cancel();
3311    }
3312
3313    fn cancel_reason(&self) -> &Option<CancelReason> {
3314        &self.cancel_reason
3315    }
3316}
3317
3318struct HTMLMediaElementFetchListener {
3319    /// The element that initiated the request.
3320    element: Trusted<HTMLMediaElement>,
3321    /// The generation of the media element when this fetch started.
3322    generation_id: u32,
3323    /// The fetch request id.
3324    request_id: RequestId,
3325    /// Earliest time at which the next progress event may be fired.
3326    next_progress_event: Instant,
3327    /// Timing data for this resource.
3328    resource_timing: ResourceFetchTiming,
3329    /// Url for the resource.
3330    url: ServoUrl,
3331    /// Expected content length of the media asset being fetched or played.
3332    expected_content_length: Option<u64>,
3333    /// Number of bytes of the media asset fetched so far.
3334    fetched_content_length: u64,
3335    /// Number of bytes of the ongoing network response to discard when range
3336    /// requests are not supported. Seek requests set it to the required
3337    /// offset (in bytes).
3338    content_length_to_discard: u64,
3339}
3340
3341impl FetchResponseListener for HTMLMediaElementFetchListener {
3342    fn process_request_body(&mut self, _: RequestId) {}
3343
3344    fn process_request_eof(&mut self, _: RequestId) {}
3345
3346    fn process_response(&mut self, _: RequestId, metadata: Result<FetchMetadata, NetworkError>) {
3347        let element = self.element.root();
3348
3349        let (metadata, origin_clean) = match metadata {
3350            Ok(fetch_metadata) => match fetch_metadata {
3351                FetchMetadata::Unfiltered(metadata) => (Some(metadata), true),
3352                FetchMetadata::Filtered { filtered, unsafe_ } => (
3353                    Some(unsafe_),
3354                    matches!(
3355                        filtered,
3356                        FilteredMetadata::Basic(_) | FilteredMetadata::Cors(_)
3357                    ),
3358                ),
3359            },
3360            Err(_) => (None, true),
3361        };
3362
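        // A 206 (Partial Content) status indicates that the server supports range
        // requests, so the fetched stream is treated as seekable.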
3363        let (status_is_success, is_seekable) =
3364            metadata.as_ref().map_or((false, false), |metadata| {
3365                let status = &metadata.status;
3366                (status.is_success(), *status == StatusCode::PARTIAL_CONTENT)
3367            });
3368
3369        // <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
3370        if !status_is_success {
3371            if element.ready_state.get() == ReadyState::HaveNothing {
3372                // => "If the media data cannot be fetched at all, due to network errors..."
3373                element.media_data_processing_failure_steps();
3374            } else {
3375                // => "If the connection is interrupted after some media data has been received..."
3376                element.media_data_processing_fatal_steps(MEDIA_ERR_NETWORK, CanGc::note());
3377            }
3378            return;
3379        }
3380
3381        if let Some(ref mut current_fetch_context) = *element.current_fetch_context.borrow_mut() {
3382            current_fetch_context.set_seekable(is_seekable);
3383            current_fetch_context.set_origin_clean(origin_clean);
3384        }
3385
3386        if let Some(metadata) = metadata.as_ref() {
3387            if let Some(headers) = metadata.headers.as_ref() {
3388                // For range requests we get the size of the media asset from the Content-Range
3389                // header. Otherwise, we get it from the Content-Length header.
3390                let content_length =
3391                    if let Some(content_range) = headers.typed_get::<ContentRange>() {
3392                        content_range.bytes_len()
3393                    } else {
3394                        headers
3395                            .typed_get::<ContentLength>()
3396                            .map(|content_length| content_length.0)
3397                    };
3398
3399                // We only set the expected input size if it changes.
3400                if content_length != self.expected_content_length {
3401                    if let Some(content_length) = content_length {
3402                        self.expected_content_length = Some(content_length);
3403                    }
3404                }
3405            }
3406        }
3407
3408        // If the expected content length is known, initialize the player's input size so it can treat the source as seekable rather than live.
3409        if let Some(expected_content_length) = self.expected_content_length {
3410            if let Err(e) = element
3411                .player
3412                .borrow()
3413                .as_ref()
3414                .unwrap()
3415                .lock()
3416                .unwrap()
3417                .set_input_size(expected_content_length)
3418            {
3419                warn!("Could not set player input size {:?}", e);
3420            }
3421        }
3422    }
3423
3424    fn process_response_chunk(&mut self, _: RequestId, chunk: Vec<u8>) {
3425        let element = self.element.root();
3426
3427        self.fetched_content_length += chunk.len() as u64;
3428
3429        // If the fetch was previously cancelled to back off, skip processing the payload.
3430        if let Some(ref mut current_fetch_context) = *element.current_fetch_context.borrow_mut() {
3431            if let Some(CancelReason::Backoff) = current_fetch_context.cancel_reason() {
3432                return;
3433            }
3434
3435            // Discard the leading part of the response body if the fetch context doesn't support range requests.
3436            let payload = if !current_fetch_context.is_seekable() &&
3437                self.content_length_to_discard != 0
3438            {
3439                if chunk.len() as u64 > self.content_length_to_discard {
3440                    let shrink_chunk = chunk[self.content_length_to_discard as usize..].to_vec();
3441                    self.content_length_to_discard = 0;
3442                    shrink_chunk
3443                } else {
3444                    // Completely discard this response chunk.
3445                    self.content_length_to_discard -= chunk.len() as u64;
3446                    return;
3447                }
3448            } else {
3449                chunk
3450            };
3451
3452            if let Err(e) = {
3453                let mut data_source = current_fetch_context.data_source().borrow_mut();
3454                data_source.add_buffer_to_queue(DataBuffer::Payload(payload));
3455                data_source
3456                    .process_into_player_from_queue(element.player.borrow().as_ref().unwrap())
3457            } {
3458                // If we are pushing too much data and we know that we can
3459                // restart the download later from where we left off, we cancel
3460                // the current request. Otherwise, we continue the request
3461                // assuming that we may drop some frames.
3462                if e == PlayerError::EnoughData {
3463                    current_fetch_context.cancel(CancelReason::Backoff);
3464                }
3465                return;
3466            }
3467        }
3468
3469        // https://html.spec.whatwg.org/multipage/#concept-media-load-resource step 4,
3470        // => "If mode is remote" step 2
3471        if Instant::now() > self.next_progress_event {
3472            element
3473                .owner_global()
3474                .task_manager()
3475                .media_element_task_source()
3476                .queue_simple_event(element.upcast(), atom!("progress"));
3477            self.next_progress_event = Instant::now() + Duration::from_millis(350);
3478        }
3479    }
3480
3481    fn process_response_eof(
3482        &mut self,
3483        _: RequestId,
3484        status: Result<ResourceFetchTiming, NetworkError>,
3485    ) {
3486        let element = self.element.root();
3487
3488        // <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
3489        if status.is_ok() && self.fetched_content_length != 0 {
3490            // => "Once the entire media resource has been fetched..."
3491
3492            // There are no more chunks of the response body forthcoming, so we can
3493            // go ahead and notify the media backend not to expect any further data.
3494            if let Some(ref mut current_fetch_context) = *element.current_fetch_context.borrow_mut()
3495            {
3496                // On the initial READY -> PAUSED state change, the media player performs a
3497                // seek to the initial position via an event with a TIME-format seek segment,
3498                // while the media stack operates in BYTES format; configuring the segment
3499                // start and stop positions is not possible without the total size of the
3500                // stream. As a fallback, the media player performs the seek in BYTES format
3501                // and initiates a seek request via the "seek-data" callback with the required offset.
3502                if self.expected_content_length.is_none() {
3503                    if let Err(e) = element
3504                        .player
3505                        .borrow()
3506                        .as_ref()
3507                        .unwrap()
3508                        .lock()
3509                        .unwrap()
3510                        .set_input_size(self.fetched_content_length)
3511                    {
3512                        warn!("Could not set player input size {:?}", e);
3513                    }
3514                }
3515
3516                let mut data_source = current_fetch_context.data_source().borrow_mut();
3517
3518                data_source.add_buffer_to_queue(DataBuffer::EndOfStream);
3519                let _ = data_source
3520                    .process_into_player_from_queue(element.player.borrow().as_ref().unwrap());
3521            }
3522
3523            // Step 1. Fire an event named progress at the media element.
3524            element
3525                .upcast::<EventTarget>()
3526                .fire_event(atom!("progress"), CanGc::note());
3527
3528            // Step 2. Set the networkState to NETWORK_IDLE and fire an event named suspend at the
3529            // media element.
3530            element.network_state.set(NetworkState::Idle);
3531
3532            element
3533                .upcast::<EventTarget>()
3534                .fire_event(atom!("suspend"), CanGc::note());
3535        } else if status.is_err() && element.ready_state.get() != ReadyState::HaveNothing {
3536            // => "If the connection is interrupted after some media data has been received..."
3537            element.media_data_processing_fatal_steps(MEDIA_ERR_NETWORK, CanGc::note());
3538        } else {
3539            // => "If the media data can be fetched but is found by inspection to be in an
3540            // unsupported format, or can otherwise not be rendered at all"
3541            element.media_data_processing_failure_steps();
3542        }
3543    }
3544
3545    fn resource_timing_mut(&mut self) -> &mut ResourceFetchTiming {
3546        &mut self.resource_timing
3547    }
3548
3549    fn resource_timing(&self) -> &ResourceFetchTiming {
3550        &self.resource_timing
3551    }
3552
3553    fn submit_resource_timing(&mut self) {
3554        network_listener::submit_timing(self, CanGc::note())
3555    }
3556
3557    fn process_csp_violations(&mut self, _request_id: RequestId, violations: Vec<Violation>) {
3558        let global = &self.resource_timing_global();
3559        global.report_csp_violations(violations, None, None);
3560    }
3561}
3562
3563impl ResourceTimingListener for HTMLMediaElementFetchListener {
3564    fn resource_timing_information(&self) -> (InitiatorType, ServoUrl) {
3565        let initiator_type = InitiatorType::LocalName(
3566            self.element
3567                .root()
3568                .upcast::<Element>()
3569                .local_name()
3570                .to_string(),
3571        );
3572        (initiator_type, self.url.clone())
3573    }
3574
3575    fn resource_timing_global(&self) -> DomRoot<GlobalScope> {
3576        self.element.root().owner_document().global()
3577    }
3578}
3579
3580impl PreInvoke for HTMLMediaElementFetchListener {
3581    fn should_invoke(&self) -> bool {
3582        let element = self.element.root();
3583
3584        if element.generation_id.get() != self.generation_id || element.player.borrow().is_none() {
3585            return false;
3586        }
3587
3588        let Some(ref current_fetch_context) = *element.current_fetch_context.borrow() else {
3589            return false;
3590        };
3591
3592        // Ignore responses if a newer fetch request has been triggered.
3593        if current_fetch_context.request_id() != self.request_id {
3594            return false;
3595        }
3596
3597        // Ignore responses if the current fetch request was cancelled due to a network or
3598        // decoding error, or was aborted by the user.
3599        if let Some(cancel_reason) = current_fetch_context.cancel_reason() {
3600            if matches!(*cancel_reason, CancelReason::Error | CancelReason::Abort) {
3601                return false;
3602            }
3603        }
3604
3605        true
3606    }
3607}
3608
3609impl HTMLMediaElementFetchListener {
3610    fn new(element: &HTMLMediaElement, request_id: RequestId, url: ServoUrl, offset: u64) -> Self {
3611        Self {
3612            element: Trusted::new(element),
3613            generation_id: element.generation_id.get(),
3614            request_id,
3615            next_progress_event: Instant::now() + Duration::from_millis(350),
3616            resource_timing: ResourceFetchTiming::new(ResourceTimingType::Resource),
3617            url,
3618            expected_content_length: None,
3619            fetched_content_length: 0,
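            // If the server does not support range requests, the fetch restarts from
            // byte 0 and the first `offset` bytes are discarded as they arrive.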
3620            content_length_to_discard: offset,
3621        }
3622    }
3623}