script/dom/html/htmlmediaelement.rs

/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/. */

use std::cell::{Cell, RefCell};
use std::collections::VecDeque;
use std::rc::Rc;
use std::sync::{Arc, Mutex, Weak};
use std::time::{Duration, Instant};
use std::{f64, mem};

use compositing_traits::{CrossProcessCompositorApi, ImageUpdate, SerializableImageData};
use content_security_policy::sandboxing_directive::SandboxingFlagSet;
use dom_struct::dom_struct;
use embedder_traits::{MediaPositionState, MediaSessionEvent, MediaSessionPlaybackState};
use euclid::default::Size2D;
use headers::{ContentLength, ContentRange, HeaderMapExt};
use html5ever::{LocalName, Prefix, QualName, local_name, ns};
use http::StatusCode;
use http::header::{self, HeaderMap, HeaderValue};
use ipc_channel::ipc::{self, IpcSharedMemory};
use ipc_channel::router::ROUTER;
use js::jsapi::JSAutoRealm;
use layout_api::MediaFrame;
use media::{GLPlayerMsg, GLPlayerMsgForward, WindowGLContext};
use net_traits::request::{Destination, RequestId};
use net_traits::{
    CoreResourceThread, FetchMetadata, FilteredMetadata, NetworkError, ResourceFetchTiming,
};
use pixels::RasterImage;
use script_bindings::codegen::InheritTypes::{
    ElementTypeId, HTMLElementTypeId, HTMLMediaElementTypeId, NodeTypeId,
};
use servo_config::pref;
use servo_media::player::audio::AudioRenderer;
use servo_media::player::video::{VideoFrame, VideoFrameRenderer};
use servo_media::player::{PlaybackState, Player, PlayerError, PlayerEvent, SeekLock, StreamType};
use servo_media::{ClientContextId, ServoMedia, SupportsMediaType};
use servo_url::ServoUrl;
use stylo_atoms::Atom;
use uuid::Uuid;
use webrender_api::{
    ExternalImageData, ExternalImageId, ExternalImageType, ImageBufferKind, ImageDescriptor,
    ImageDescriptorFlags, ImageFormat, ImageKey,
};

use crate::document_loader::{LoadBlocker, LoadType};
use crate::dom::attr::Attr;
use crate::dom::audio::audiotrack::AudioTrack;
use crate::dom::audio::audiotracklist::AudioTrackList;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::HTMLMediaElementBinding::{
    CanPlayTypeResult, HTMLMediaElementConstants, HTMLMediaElementMethods,
};
use crate::dom::bindings::codegen::Bindings::MediaErrorBinding::MediaErrorConstants::*;
use crate::dom::bindings::codegen::Bindings::MediaErrorBinding::MediaErrorMethods;
use crate::dom::bindings::codegen::Bindings::NavigatorBinding::Navigator_Binding::NavigatorMethods;
use crate::dom::bindings::codegen::Bindings::NodeBinding::Node_Binding::NodeMethods;
use crate::dom::bindings::codegen::Bindings::TextTrackBinding::{TextTrackKind, TextTrackMode};
use crate::dom::bindings::codegen::Bindings::URLBinding::URLMethods;
use crate::dom::bindings::codegen::Bindings::WindowBinding::Window_Binding::WindowMethods;
use crate::dom::bindings::codegen::UnionTypes::{
    MediaStreamOrBlob, VideoTrackOrAudioTrackOrTextTrack,
};
use crate::dom::bindings::error::{Error, ErrorResult, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::DomGlobal;
use crate::dom::bindings::root::{Dom, DomRoot, MutNullableDom};
use crate::dom::bindings::str::{DOMString, USVString};
use crate::dom::blob::Blob;
use crate::dom::csp::{GlobalCspReporting, Violation};
use crate::dom::document::Document;
use crate::dom::element::{
    AttributeMutation, AttributeMutationReason, CustomElementCreationMode, Element, ElementCreator,
    cors_setting_for_element, reflect_cross_origin_attribute, set_cross_origin_attribute,
};
use crate::dom::event::Event;
use crate::dom::eventtarget::EventTarget;
use crate::dom::globalscope::GlobalScope;
use crate::dom::html::htmlelement::HTMLElement;
use crate::dom::html::htmlsourceelement::HTMLSourceElement;
use crate::dom::html::htmlvideoelement::HTMLVideoElement;
use crate::dom::mediaerror::MediaError;
use crate::dom::mediafragmentparser::MediaFragmentParser;
use crate::dom::medialist::MediaList;
use crate::dom::mediastream::MediaStream;
use crate::dom::node::{Node, NodeDamage, NodeTraits, UnbindContext};
use crate::dom::performance::performanceresourcetiming::InitiatorType;
use crate::dom::promise::Promise;
use crate::dom::texttrack::TextTrack;
use crate::dom::texttracklist::TextTrackList;
use crate::dom::timeranges::{TimeRanges, TimeRangesContainer};
use crate::dom::trackevent::TrackEvent;
use crate::dom::url::URL;
use crate::dom::videotrack::VideoTrack;
use crate::dom::videotracklist::VideoTrackList;
use crate::dom::virtualmethods::VirtualMethods;
use crate::fetch::{FetchCanceller, create_a_potential_cors_request};
use crate::microtask::{Microtask, MicrotaskRunnable};
use crate::network_listener::{self, FetchResponseListener, ResourceTimingListener};
use crate::realms::{InRealm, enter_realm};
use crate::script_runtime::CanGc;
use crate::script_thread::ScriptThread;
use crate::task_source::SendableTaskSource;

/// A CSS file to style the media controls.
static MEDIA_CONTROL_CSS: &str = include_str!("../../resources/media-controls.css");

/// A JS file to control the media controls.
static MEDIA_CONTROL_JS: &str = include_str!("../../resources/media-controls.js");

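/// Whether the current video frame is locked for reading by the GLPlayer
/// thread (see [`FrameHolder`]).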
#[derive(MallocSizeOf, PartialEq)]
enum FrameStatus {
    Locked,
    Unlocked,
}

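/// The latest [`VideoFrame`] produced by the player, tagged with a
/// [`FrameStatus`] so that the frame is not replaced while the GLPlayer thread
/// is reading its texture.
///
/// The lock/get/unlock sequence below is a rough sketch of what the GLPlayer
/// message handler installed in [`MediaFrameRenderer::setup`] does; it is
/// illustrative only, not a public API:
///
/// ```ignore
/// holder.lock();
/// let (texture_id, size, _) = holder.get();
/// // ... use the texture id and size ...
/// holder.unlock();
/// ```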
#[derive(MallocSizeOf)]
struct FrameHolder(
    FrameStatus,
    #[ignore_malloc_size_of = "defined in servo-media"] VideoFrame,
);

impl FrameHolder {
    fn new(frame: VideoFrame) -> FrameHolder {
        FrameHolder(FrameStatus::Unlocked, frame)
    }

    fn lock(&mut self) {
        if self.0 == FrameStatus::Unlocked {
            self.0 = FrameStatus::Locked;
        };
    }

    fn unlock(&mut self) {
        if self.0 == FrameStatus::Locked {
            self.0 = FrameStatus::Unlocked;
        };
    }

    fn set(&mut self, new_frame: VideoFrame) {
        if self.0 == FrameStatus::Unlocked {
            self.1 = new_frame
        };
    }

    fn get(&self) -> (u32, Size2D<i32>, usize) {
        if self.0 == FrameStatus::Locked {
            (
                self.1.get_texture_id(),
                Size2D::new(self.1.get_width(), self.1.get_height()),
                0,
            )
        } else {
            unreachable!();
        }
    }

    fn get_frame(&self) -> VideoFrame {
        self.1.clone()
    }
}

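/// Turns the [`VideoFrame`]s produced by the servo-media player into WebRender
/// images via the cross-process compositor API, keeping the previous two image
/// keys alive until the compositor has processed the corresponding updates.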
#[derive(MallocSizeOf)]
pub(crate) struct MediaFrameRenderer {
    player_id: Option<usize>,
    glplayer_id: Option<u64>,
    compositor_api: CrossProcessCompositorApi,
    #[ignore_malloc_size_of = "Defined in other crates"]
    player_context: WindowGLContext,
    current_frame: Option<MediaFrame>,
    old_frame: Option<ImageKey>,
    very_old_frame: Option<ImageKey>,
    current_frame_holder: Option<FrameHolder>,
    /// <https://html.spec.whatwg.org/multipage/#poster-frame>
    poster_frame: Option<MediaFrame>,
}

impl MediaFrameRenderer {
    fn new(compositor_api: CrossProcessCompositorApi, player_context: WindowGLContext) -> Self {
        Self {
            player_id: None,
            glplayer_id: None,
            compositor_api,
            player_context,
            current_frame: None,
            old_frame: None,
            very_old_frame: None,
            current_frame_holder: None,
            poster_frame: None,
        }
    }

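    /// Registers this renderer with the GLPlayer thread, if one is available,
    /// and routes the resulting lock/unlock messages back to the current
    /// [`FrameHolder`] on the provided task source.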
    fn setup(
        &mut self,
        player_id: usize,
        task_source: SendableTaskSource,
        weak_video_renderer: Weak<Mutex<MediaFrameRenderer>>,
    ) {
        self.player_id = Some(player_id);

        let (glplayer_id, image_receiver) = self
            .player_context
            .glplayer_thread_sender
            .as_ref()
            .map(|sender| {
                let (image_sender, image_receiver) = ipc::channel::<GLPlayerMsgForward>().unwrap();
                sender
                    .send(GLPlayerMsg::RegisterPlayer(image_sender))
                    .unwrap();
                match image_receiver.recv().unwrap() {
                    GLPlayerMsgForward::PlayerId(id) => (Some(id), Some(image_receiver)),
                    _ => unreachable!(),
                }
            })
            .unwrap_or((None, None));

        self.glplayer_id = glplayer_id;

        let Some(image_receiver) = image_receiver else {
            return;
        };

        ROUTER.add_typed_route(
            image_receiver,
            Box::new(move |message| {
                let message = message.unwrap();
                let weak_video_renderer = weak_video_renderer.clone();

                task_source.queue(task!(handle_glplayer_message: move || {
                    trace!("GLPlayer message {:?}", message);

                    let Some(video_renderer) = weak_video_renderer.upgrade() else {
                        return;
                    };

                    match message {
                        GLPlayerMsgForward::Lock(sender) => {
                            if let Some(holder) = video_renderer
                                .lock()
                                .unwrap()
                                .current_frame_holder
                                .as_mut() {
                                    holder.lock();
                                    sender.send(holder.get()).unwrap();
                                };
                        },
                        GLPlayerMsgForward::Unlock() => {
                            if let Some(holder) = video_renderer
                                .lock()
                                .unwrap()
                                .current_frame_holder
                                .as_mut() { holder.unlock() }
                        },
                        _ => (),
                    }
                }));
            }),
        );
    }

    fn reset(&mut self) {
        self.player_id = None;

        if let Some(glplayer_id) = self.glplayer_id.take() {
            self.player_context
                .send(GLPlayerMsg::UnregisterPlayer(glplayer_id));
        }

        self.current_frame_holder = None;

        let mut updates = smallvec::smallvec![];

        if let Some(current_frame) = self.current_frame.take() {
            updates.push(ImageUpdate::DeleteImage(current_frame.image_key));
        }

        if let Some(old_image_key) = self.old_frame.take() {
            updates.push(ImageUpdate::DeleteImage(old_image_key));
        }

        if let Some(very_old_image_key) = self.very_old_frame.take() {
            updates.push(ImageUpdate::DeleteImage(very_old_image_key));
        }

        if !updates.is_empty() {
            self.compositor_api.update_images(updates);
        }
    }

    fn set_poster_frame(&mut self, image: Option<Arc<RasterImage>>) {
        self.poster_frame = image.and_then(|image| {
            image.id.map(|image_key| MediaFrame {
                image_key,
                width: image.metadata.width as i32,
                height: image.metadata.height as i32,
            })
        });
    }
}

impl Drop for MediaFrameRenderer {
    fn drop(&mut self) {
        self.reset();
    }
}

impl VideoFrameRenderer for MediaFrameRenderer {
    fn render(&mut self, frame: VideoFrame) {
        if self.player_id.is_none() || (frame.is_gl_texture() && self.glplayer_id.is_none()) {
            return;
        }

        let mut updates = smallvec::smallvec![];

        if let Some(old_image_key) = mem::replace(&mut self.very_old_frame, self.old_frame.take()) {
            updates.push(ImageUpdate::DeleteImage(old_image_key));
        }

        let descriptor = ImageDescriptor::new(
            frame.get_width(),
            frame.get_height(),
            ImageFormat::BGRA8,
            ImageDescriptorFlags::empty(),
        );

        match &mut self.current_frame {
            Some(current_frame)
                if current_frame.width == frame.get_width() &&
                    current_frame.height == frame.get_height() =>
            {
                if !frame.is_gl_texture() {
                    updates.push(ImageUpdate::UpdateImage(
                        current_frame.image_key,
                        descriptor,
                        SerializableImageData::Raw(IpcSharedMemory::from_bytes(&frame.get_data())),
                        None,
                    ));
                }

                self.current_frame_holder
                    .get_or_insert_with(|| FrameHolder::new(frame.clone()))
                    .set(frame);

                if let Some(old_image_key) = self.old_frame.take() {
                    updates.push(ImageUpdate::DeleteImage(old_image_key));
                }
            },
            Some(current_frame) => {
                self.old_frame = Some(current_frame.image_key);

                let Some(new_image_key) = self.compositor_api.generate_image_key_blocking() else {
                    return;
                };

                // Update current_frame.
                current_frame.image_key = new_image_key;
                current_frame.width = frame.get_width();
                current_frame.height = frame.get_height();

                let image_data = if frame.is_gl_texture() && self.glplayer_id.is_some() {
                    let texture_target = if frame.is_external_oes() {
                        ImageBufferKind::TextureExternal
                    } else {
                        ImageBufferKind::Texture2D
                    };

                    SerializableImageData::External(ExternalImageData {
                        id: ExternalImageId(self.glplayer_id.unwrap()),
                        channel_index: 0,
                        image_type: ExternalImageType::TextureHandle(texture_target),
                        normalized_uvs: false,
                    })
                } else {
                    SerializableImageData::Raw(IpcSharedMemory::from_bytes(&frame.get_data()))
                };

                self.current_frame_holder
                    .get_or_insert_with(|| FrameHolder::new(frame.clone()))
                    .set(frame);

                updates.push(ImageUpdate::AddImage(new_image_key, descriptor, image_data));
            },
            None => {
                let Some(image_key) = self.compositor_api.generate_image_key_blocking() else {
                    return;
                };

                self.current_frame = Some(MediaFrame {
                    image_key,
                    width: frame.get_width(),
                    height: frame.get_height(),
                });

                let image_data = if frame.is_gl_texture() && self.glplayer_id.is_some() {
                    let texture_target = if frame.is_external_oes() {
                        ImageBufferKind::TextureExternal
                    } else {
                        ImageBufferKind::Texture2D
                    };

                    SerializableImageData::External(ExternalImageData {
                        id: ExternalImageId(self.glplayer_id.unwrap()),
                        channel_index: 0,
                        image_type: ExternalImageType::TextureHandle(texture_target),
                        normalized_uvs: false,
                    })
                } else {
                    SerializableImageData::Raw(IpcSharedMemory::from_bytes(&frame.get_data()))
                };

                self.current_frame_holder = Some(FrameHolder::new(frame));

                updates.push(ImageUpdate::AddImage(image_key, descriptor, image_data));
            },
        }
        self.compositor_api.update_images(updates);
    }
}

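/// The assigned media provider object, if any: either a [`MediaStream`] or a
/// [`Blob`].
///
/// <https://html.spec.whatwg.org/multipage/#media-provider-object>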
#[cfg_attr(crown, crown::unrooted_must_root_lint::must_root)]
#[derive(JSTraceable, MallocSizeOf)]
enum SrcObject {
    MediaStream(Dom<MediaStream>),
    Blob(Dom<Blob>),
}

impl From<MediaStreamOrBlob> for SrcObject {
    #[cfg_attr(crown, allow(crown::unrooted_must_root))]
    fn from(src_object: MediaStreamOrBlob) -> SrcObject {
        match src_object {
            MediaStreamOrBlob::Blob(blob) => SrcObject::Blob(Dom::from_ref(&*blob)),
            MediaStreamOrBlob::MediaStream(stream) => {
                SrcObject::MediaStream(Dom::from_ref(&*stream))
            },
        }
    }
}

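/// Which branch of the resource selection algorithm this element is currently
/// loading from, if any.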
#[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf, PartialEq)]
enum LoadState {
    NotLoaded,
    LoadingFromSrcObject,
    LoadingFromSrcAttribute,
    LoadingFromSourceChild,
    WaitingForSource,
}

/// <https://html.spec.whatwg.org/multipage/#loading-the-media-resource:media-element-29>
#[cfg_attr(crown, crown::unrooted_must_root_lint::must_root)]
#[derive(JSTraceable, MallocSizeOf)]
struct SourceChildrenPointer {
    source_before_pointer: Dom<HTMLSourceElement>,
    inclusive: bool,
}

impl SourceChildrenPointer {
    fn new(source_before_pointer: DomRoot<HTMLSourceElement>, inclusive: bool) -> Self {
        Self {
            source_before_pointer: source_before_pointer.as_traced(),
            inclusive,
        }
    }
}

#[dom_struct]
#[allow(non_snake_case)]
pub(crate) struct HTMLMediaElement {
    htmlelement: HTMLElement,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
    network_state: Cell<NetworkState>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
    ready_state: Cell<ReadyState>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
    src_object: DomRefCell<Option<SrcObject>>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-currentsrc>
    current_src: DomRefCell<String>,
    /// Incremented whenever tasks associated with this element are cancelled.
    generation_id: Cell<u32>,
    /// <https://html.spec.whatwg.org/multipage/#fire-loadeddata>
    ///
    /// Reset to false every time the load algorithm is invoked.
    fired_loadeddata_event: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-error>
    error: MutNullableDom<MediaError>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-paused>
    paused: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
    defaultPlaybackRate: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
    playbackRate: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#attr-media-autoplay>
    autoplaying: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#delaying-the-load-event-flag>
    delaying_the_load_event_flag: DomRefCell<Option<LoadBlocker>>,
    /// <https://html.spec.whatwg.org/multipage/#list-of-pending-play-promises>
    #[conditional_malloc_size_of]
    pending_play_promises: DomRefCell<Vec<Rc<Promise>>>,
    /// Play promises which are soon to be fulfilled by a queued task.
    #[allow(clippy::type_complexity)]
    #[conditional_malloc_size_of]
    in_flight_play_promises_queue: DomRefCell<VecDeque<(Box<[Rc<Promise>]>, ErrorResult)>>,
    #[ignore_malloc_size_of = "servo_media"]
    #[no_trace]
    player: DomRefCell<Option<Arc<Mutex<dyn Player>>>>,
    #[conditional_malloc_size_of]
    #[no_trace]
    video_renderer: Arc<Mutex<MediaFrameRenderer>>,
    #[ignore_malloc_size_of = "servo_media"]
    #[no_trace]
    audio_renderer: DomRefCell<Option<Arc<Mutex<dyn AudioRenderer>>>>,
    /// <https://html.spec.whatwg.org/multipage/#show-poster-flag>
    show_poster: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-duration>
    duration: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#current-playback-position>
    current_playback_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#official-playback-position>
    official_playback_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#default-playback-start-position>
    default_playback_start_position: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-volume>
    volume: Cell<f64>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-seeking>
    seeking: Cell<bool>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-muted>
    muted: Cell<bool>,
    /// Loading state from source, if any.
    load_state: Cell<LoadState>,
    source_children_pointer: DomRefCell<Option<SourceChildrenPointer>>,
    current_source_child: MutNullableDom<HTMLSourceElement>,
    /// URL of the media resource, if any.
    #[no_trace]
    resource_url: DomRefCell<Option<ServoUrl>>,
    /// URL of the media resource, if the resource is set through the src_object attribute and it
    /// is a blob.
    #[no_trace]
    blob_url: DomRefCell<Option<ServoUrl>>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-played>
    played: DomRefCell<TimeRangesContainer>,
    // https://html.spec.whatwg.org/multipage/#dom-media-audiotracks
    audio_tracks_list: MutNullableDom<AudioTrackList>,
    // https://html.spec.whatwg.org/multipage/#dom-media-videotracks
    video_tracks_list: MutNullableDom<VideoTrackList>,
    /// <https://html.spec.whatwg.org/multipage/#dom-media-texttracks>
    text_tracks_list: MutNullableDom<TextTrackList>,
    /// Earliest time at which the next `timeupdate` event may be fired.
    #[ignore_malloc_size_of = "Defined in std::time"]
    next_timeupdate_event: Cell<Instant>,
    /// Latest fetch request context.
    current_fetch_context: RefCell<Option<HTMLMediaElementFetchContext>>,
    /// Media controls id.
    /// In order to work around the lack of a privileged JS context, we secure
    /// access to the "privileged" document.servoGetMediaControls(id) API by
    /// keeping a whitelist of media controls identifiers.
    media_controls_id: DomRefCell<Option<String>>,
}

/// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
#[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq)]
#[repr(u8)]
pub(crate) enum NetworkState {
    Empty = HTMLMediaElementConstants::NETWORK_EMPTY as u8,
    Idle = HTMLMediaElementConstants::NETWORK_IDLE as u8,
    Loading = HTMLMediaElementConstants::NETWORK_LOADING as u8,
    NoSource = HTMLMediaElementConstants::NETWORK_NO_SOURCE as u8,
}

/// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
#[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf, PartialEq, PartialOrd)]
#[repr(u8)]
#[allow(clippy::enum_variant_names)] // Clippy warning silenced here because these names are from the specification.
pub(crate) enum ReadyState {
    HaveNothing = HTMLMediaElementConstants::HAVE_NOTHING as u8,
    HaveMetadata = HTMLMediaElementConstants::HAVE_METADATA as u8,
    HaveCurrentData = HTMLMediaElementConstants::HAVE_CURRENT_DATA as u8,
    HaveFutureData = HTMLMediaElementConstants::HAVE_FUTURE_DATA as u8,
    HaveEnoughData = HTMLMediaElementConstants::HAVE_ENOUGH_DATA as u8,
}

/// <https://html.spec.whatwg.org/multipage/#direction-of-playback>
#[derive(Clone, Copy, PartialEq)]
enum PlaybackDirection {
    Forwards,
    Backwards,
}

impl HTMLMediaElement {
    pub(crate) fn new_inherited(
        tag_name: LocalName,
        prefix: Option<Prefix>,
        document: &Document,
    ) -> Self {
        Self {
            htmlelement: HTMLElement::new_inherited(tag_name, prefix, document),
            network_state: Cell::new(NetworkState::Empty),
            ready_state: Cell::new(ReadyState::HaveNothing),
            src_object: Default::default(),
            current_src: DomRefCell::new("".to_owned()),
            generation_id: Cell::new(0),
            fired_loadeddata_event: Cell::new(false),
            error: Default::default(),
            paused: Cell::new(true),
            defaultPlaybackRate: Cell::new(1.0),
            playbackRate: Cell::new(1.0),
            muted: Cell::new(false),
            load_state: Cell::new(LoadState::NotLoaded),
            source_children_pointer: DomRefCell::new(None),
            current_source_child: Default::default(),
            // FIXME(nox): Why is this initialised to true?
            autoplaying: Cell::new(true),
            delaying_the_load_event_flag: Default::default(),
            pending_play_promises: Default::default(),
            in_flight_play_promises_queue: Default::default(),
            player: Default::default(),
            video_renderer: Arc::new(Mutex::new(MediaFrameRenderer::new(
                document.window().compositor_api().clone(),
                document.window().get_player_context(),
            ))),
            audio_renderer: Default::default(),
            show_poster: Cell::new(true),
            duration: Cell::new(f64::NAN),
            current_playback_position: Cell::new(0.),
            official_playback_position: Cell::new(0.),
            default_playback_start_position: Cell::new(0.),
            volume: Cell::new(1.0),
            seeking: Cell::new(false),
            resource_url: DomRefCell::new(None),
            blob_url: DomRefCell::new(None),
            played: DomRefCell::new(TimeRangesContainer::default()),
            audio_tracks_list: Default::default(),
            video_tracks_list: Default::default(),
            text_tracks_list: Default::default(),
            next_timeupdate_event: Cell::new(Instant::now() + Duration::from_millis(250)),
            current_fetch_context: RefCell::new(None),
            media_controls_id: DomRefCell::new(None),
        }
    }

    pub(crate) fn network_state(&self) -> NetworkState {
        self.network_state.get()
    }

    pub(crate) fn get_ready_state(&self) -> ReadyState {
        self.ready_state.get()
    }

    fn media_type_id(&self) -> HTMLMediaElementTypeId {
        match self.upcast::<Node>().type_id() {
            NodeTypeId::Element(ElementTypeId::HTMLElement(
                HTMLElementTypeId::HTMLMediaElement(media_type_id),
            )) => media_type_id,
            _ => unreachable!(),
        }
    }

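    /// Applies the current `playbackRate` to the underlying player and asks it
    /// to start playing, logging a warning if either operation fails.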
    fn play_media(&self) {
        if let Some(ref player) = *self.player.borrow() {
            if let Err(e) = player.lock().unwrap().set_rate(self.playbackRate.get()) {
                warn!("Could not set the playback rate {:?}", e);
            }
            if let Err(e) = player.lock().unwrap().play() {
                warn!("Could not play media {:?}", e);
            }
        }
    }

    /// Marks that element as delaying the load event or not.
    ///
    /// Nothing happens if the element was already delaying the load event and
    /// we pass true to that method again.
    ///
    /// <https://html.spec.whatwg.org/multipage/#delaying-the-load-event-flag>
    pub(crate) fn delay_load_event(&self, delay: bool, can_gc: CanGc) {
        let blocker = &self.delaying_the_load_event_flag;
        if delay && blocker.borrow().is_none() {
            *blocker.borrow_mut() = Some(LoadBlocker::new(&self.owner_document(), LoadType::Media));
        } else if !delay && blocker.borrow().is_some() {
            LoadBlocker::terminate(blocker, can_gc);
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#time-marches-on>
    fn time_marches_on(&self) {
        // Step 6.
        if Instant::now() > self.next_timeupdate_event.get() {
            self.owner_global()
                .task_manager()
                .media_element_task_source()
                .queue_simple_event(self.upcast(), atom!("timeupdate"));
            self.next_timeupdate_event
                .set(Instant::now() + Duration::from_millis(350));
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#internal-play-steps>
    fn internal_play_steps(&self, can_gc: CanGc) {
        // Step 1. If the media element's networkState attribute has the value NETWORK_EMPTY, invoke
        // the media element's resource selection algorithm.
        if self.network_state.get() == NetworkState::Empty {
            self.invoke_resource_selection_algorithm(can_gc);
        }

        // Step 2. If the playback has ended and the direction of playback is forwards, seek to the
        // earliest possible position of the media resource.
        if self.ended_playback() && self.direction_of_playback() == PlaybackDirection::Forwards {
            self.seek(
                self.earliest_possible_position(),
                /* approximate_for_speed */ false,
            );
        }

        let state = self.ready_state.get();

        // Step 3. If the media element's paused attribute is true, then:
        if self.Paused() {
            // Step 3.1. Change the value of paused to false.
            self.paused.set(false);

            // Step 3.2. If the show poster flag is true, set the element's show poster flag to
            // false and run the time marches on steps.
            if self.show_poster.get() {
                self.show_poster.set(false);
                self.time_marches_on();
            }

            // Step 3.3. Queue a media element task given the media element to fire an event named
            // play at the element.
            self.queue_media_element_task_to_fire_event(atom!("play"));

            // Step 3.4. If the media element's readyState attribute has the value HAVE_NOTHING,
            // HAVE_METADATA, or HAVE_CURRENT_DATA, queue a media element task given the media
            // element to fire an event named waiting at the element. Otherwise, the media element's
            // readyState attribute has the value HAVE_FUTURE_DATA or HAVE_ENOUGH_DATA: notify about
            // playing for the element.
            match state {
                ReadyState::HaveNothing |
                ReadyState::HaveMetadata |
                ReadyState::HaveCurrentData => {
                    self.queue_media_element_task_to_fire_event(atom!("waiting"));
                },
                ReadyState::HaveFutureData | ReadyState::HaveEnoughData => {
                    self.notify_about_playing();
                },
            }
        }
        // Step 4. Otherwise, if the media element's readyState attribute has the value
        // HAVE_FUTURE_DATA or HAVE_ENOUGH_DATA, take pending play promises and queue a media
        // element task given the media element to resolve pending play promises with the
        // result.
        else if state == ReadyState::HaveFutureData || state == ReadyState::HaveEnoughData {
            self.take_pending_play_promises(Ok(()));

            let this = Trusted::new(self);
            let generation_id = self.generation_id.get();

            self.owner_global()
                .task_manager()
                .media_element_task_source()
                .queue(task!(resolve_pending_play_promises: move || {
                    let this = this.root();
                    if generation_id != this.generation_id.get() {
                        return;
                    }

                    this.fulfill_in_flight_play_promises(|| {
                        this.play_media();
                    });
                }));
        }

        // Step 5. Set the media element's can autoplay flag to false.
        self.autoplaying.set(false);
    }

    /// <https://html.spec.whatwg.org/multipage/#internal-pause-steps>
    fn internal_pause_steps(&self) {
        // Step 1. Set the media element's can autoplay flag to false.
        self.autoplaying.set(false);

        // Step 2. If the media element's paused attribute is false, run the following steps:
        if !self.Paused() {
            // Step 2.1. Change the value of paused to true.
            self.paused.set(true);

            // Step 2.2. Take pending play promises and let promises be the result.
            self.take_pending_play_promises(Err(Error::Abort));

            // Step 2.3. Queue a media element task given the media element and the following steps:
            let this = Trusted::new(self);
            let generation_id = self.generation_id.get();

            self.owner_global()
                .task_manager()
                .media_element_task_source()
                .queue(task!(internal_pause_steps: move || {
                    let this = this.root();
                    if generation_id != this.generation_id.get() {
                        return;
                    }

                    this.fulfill_in_flight_play_promises(|| {
                        // Step 2.3.1. Fire an event named timeupdate at the element.
                        this.upcast::<EventTarget>().fire_event(atom!("timeupdate"), CanGc::note());

                        // Step 2.3.2. Fire an event named pause at the element.
                        this.upcast::<EventTarget>().fire_event(atom!("pause"), CanGc::note());

                        if let Some(ref player) = *this.player.borrow() {
                            if let Err(e) = player.lock().unwrap().pause() {
                                error!("Could not pause player {:?}", e);
                            }
                        }

                        // Step 2.3.3. Reject pending play promises with promises and an
                        // "AbortError" DOMException.
                        // Done after running this closure in `fulfill_in_flight_play_promises`.
                    });
                }));

            // Step 2.4. Set the official playback position to the current playback position.
            self.official_playback_position
                .set(self.current_playback_position.get());
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#allowed-to-play>
    fn is_allowed_to_play(&self) -> bool {
        true
    }

    /// <https://html.spec.whatwg.org/multipage/#notify-about-playing>
    fn notify_about_playing(&self) {
        // Step 1. Take pending play promises and let promises be the result.
        self.take_pending_play_promises(Ok(()));

        // Step 2. Queue a media element task given the element and the following steps:
        let this = Trusted::new(self);
        let generation_id = self.generation_id.get();

        self.owner_global()
            .task_manager()
            .media_element_task_source()
            .queue(task!(notify_about_playing: move || {
                let this = this.root();
                if generation_id != this.generation_id.get() {
                    return;
                }

                this.fulfill_in_flight_play_promises(|| {
                    // Step 2.1. Fire an event named playing at the element.
                    this.upcast::<EventTarget>().fire_event(atom!("playing"), CanGc::note());
                    this.play_media();

                    // Step 2.2. Resolve pending play promises with promises.
                    // Done after running this closure in `fulfill_in_flight_play_promises`.
                });
            }));
    }

    /// <https://html.spec.whatwg.org/multipage/#ready-states>
    fn change_ready_state(&self, ready_state: ReadyState) {
        let old_ready_state = self.ready_state.get();
        self.ready_state.set(ready_state);

        if self.network_state.get() == NetworkState::Empty {
            return;
        }

        if old_ready_state == ready_state {
            return;
        }

        // Step 1. Apply the first applicable set of substeps from the following list:
        match (old_ready_state, ready_state) {
            // => "If the previous ready state was HAVE_NOTHING, and the new ready state is
            // HAVE_METADATA"
            (ReadyState::HaveNothing, ReadyState::HaveMetadata) => {
                // Queue a media element task given the media element to fire an event named
                // loadedmetadata at the element.
                self.queue_media_element_task_to_fire_event(atom!("loadedmetadata"));
                // No other steps are applicable in this case.
                return;
            },
            // => "If the previous ready state was HAVE_METADATA and the new ready state is
            // HAVE_CURRENT_DATA or greater"
            (ReadyState::HaveMetadata, new) if new >= ReadyState::HaveCurrentData => {
                // If this is the first time this occurs for this media element since the load()
                // algorithm was last invoked, the user agent must queue a media element task given
                // the media element to fire an event named loadeddata at the element.
                if !self.fired_loadeddata_event.get() {
                    self.fired_loadeddata_event.set(true);

                    let this = Trusted::new(self);
                    let generation_id = self.generation_id.get();

                    self.owner_global()
                        .task_manager()
                        .media_element_task_source()
                        .queue(task!(media_reached_current_data: move || {
                            let this = this.root();
                            if generation_id != this.generation_id.get() {
                                return;
                            }

                            this.upcast::<EventTarget>().fire_event(atom!("loadeddata"), CanGc::note());
                            // Once the readyState attribute reaches HAVE_CURRENT_DATA, after the
                            // loadeddata event has been fired, set the element's
                            // delaying-the-load-event flag to false.
                            this.delay_load_event(false, CanGc::note());
                        }));
                }

                // The steps for the transition from HaveMetadata to
                // HaveCurrentData or HaveFutureData also apply here; they are
                // handled by the checks that follow this match expression.
            },
            (ReadyState::HaveFutureData, new) if new <= ReadyState::HaveCurrentData => {
                // FIXME(nox): Queue a task to fire timeupdate and waiting
                // events if the conditions called for by the spec are met.

                // No other steps are applicable in this case.
                return;
            },

            _ => (),
        }

        // => "If the previous ready state was HAVE_CURRENT_DATA or less, and the new ready state is
        // HAVE_FUTURE_DATA or more"
        if old_ready_state <= ReadyState::HaveCurrentData &&
            ready_state >= ReadyState::HaveFutureData
        {
            // The user agent must queue a media element task given the media element to fire an
            // event named canplay at the element.
            self.queue_media_element_task_to_fire_event(atom!("canplay"));

            // If the element's paused attribute is false, the user agent must notify about playing
            // for the element.
            if !self.Paused() {
                self.notify_about_playing();
            }
        }

        // => "If the new ready state is HAVE_ENOUGH_DATA"
        if ready_state == ReadyState::HaveEnoughData {
            if self.eligible_for_autoplay() {
                // Step 1. Set the paused attribute to false.
                self.paused.set(false);

                // Step 2. If the element's show poster flag is true, set it to false and run the
                // time marches on steps.
                if self.show_poster.get() {
                    self.show_poster.set(false);
                    self.time_marches_on();
                }

                // Step 3. Queue a media element task given the element to fire an event named play
                // at the element.
                self.queue_media_element_task_to_fire_event(atom!("play"));

                // Step 4. Notify about playing for the element.
                self.notify_about_playing();
            }

            // FIXME(nox): According to the spec, this should come *before* the
            // "play" event.
            self.queue_media_element_task_to_fire_event(atom!("canplaythrough"));
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn invoke_resource_selection_algorithm(&self, can_gc: CanGc) {
        // Step 1. Set the element's networkState attribute to the NETWORK_NO_SOURCE value.
        self.network_state.set(NetworkState::NoSource);

        // Step 2. Set the element's show poster flag to true.
        self.show_poster.set(true);

        // Step 3. Set the media element's delaying-the-load-event flag to true (this delays the
        // load event).
        self.delay_load_event(true, can_gc);

        // Step 4. Await a stable state, allowing the task that invoked this algorithm to continue.
        // If the resource selection mode in the synchronous section is
        // "attribute", the URL of the resource to fetch is relative to the
        // media element's node document when the src attribute was last
        // changed, which is why we need to pass the base URL in the task
        // right here.
        let task = MediaElementMicrotask::ResourceSelection {
            elem: DomRoot::from_ref(self),
            generation_id: self.generation_id.get(),
            base_url: self.owner_document().base_url(),
        };

        // FIXME(nox): This will later call the resource_selection_algorithm_sync
        // method from below, if microtasks were trait objects, we would be able
        // to put the code directly in this method, without the boilerplate
        // indirections.
        ScriptThread::await_stable_state(Microtask::MediaElement(task));
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn resource_selection_algorithm_sync(&self, base_url: ServoUrl, can_gc: CanGc) {
        // TODO Step 5. If the media element's blocked-on-parser flag is false, then populate the
        // list of pending text tracks.
        // FIXME(ferjm): Implement blocked_on_parser logic
        // https://html.spec.whatwg.org/multipage/#blocked-on-parser
        // FIXME(nox): Maybe populate the list of pending text tracks.

        enum Mode {
            Object,
            Attribute(String),
            Children(DomRoot<HTMLSourceElement>),
        }

        // Step 6.
        let mode = if self.src_object.borrow().is_some() {
            // If the media element has an assigned media provider object, then let mode be object.
            Mode::Object
        } else if let Some(attribute) = self
            .upcast::<Element>()
            .get_attribute(&ns!(), &local_name!("src"))
        {
            // Otherwise, if the media element has no assigned media provider object but has a src
            // attribute, then let mode be attribute.
            Mode::Attribute((**attribute.value()).to_owned())
        } else if let Some(source) = self
            .upcast::<Node>()
            .children()
            .find_map(DomRoot::downcast::<HTMLSourceElement>)
        {
            // Otherwise, if the media element does not have an assigned media provider object and
            // does not have a src attribute, but does have a source element child, then let mode be
            // children and let candidate be the first such source element child in tree order.
            Mode::Children(source)
        } else {
            // Otherwise, the media element has no assigned media provider object and has neither a
            // src attribute nor a source element child:
            self.load_state.set(LoadState::NotLoaded);

            // Step 6.none.1. Set the networkState to NETWORK_EMPTY.
            self.network_state.set(NetworkState::Empty);

            // Step 6.none.2. Set the element's delaying-the-load-event flag to false. This stops
            // delaying the load event.
            self.delay_load_event(false, can_gc);

            // Step 6.none.3. End the synchronous section and return.
            return;
        };

        // Step 7. Set the media element's networkState to NETWORK_LOADING.
        self.network_state.set(NetworkState::Loading);

        // Step 8. Queue a media element task given the media element to fire an event named
        // loadstart at the media element.
        self.queue_media_element_task_to_fire_event(atom!("loadstart"));

        // Step 9. Run the appropriate steps from the following list:
        match mode {
            Mode::Object => {
                // => "If mode is object"
                self.load_from_src_object();
            },
            Mode::Attribute(src) => {
                // => "If mode is attribute"
                self.load_from_src_attribute(base_url, &src);
            },
            Mode::Children(source) => {
                // => "Otherwise (mode is children)"
                self.load_from_source_child(&source);
            },
        }
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn load_from_src_object(&self) {
        self.load_state.set(LoadState::LoadingFromSrcObject);

        // Step 9.object.1. Set the currentSrc attribute to the empty string.
        "".clone_into(&mut self.current_src.borrow_mut());

        // Step 9.object.3. Run the resource fetch algorithm with the assigned media
        // provider object. If that algorithm returns without aborting this one, then the
        // load failed.
        // Note that the resource fetch algorithm itself takes care of the cleanup in case
        // of failure itself.
        self.resource_fetch_algorithm(Resource::Object);
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn load_from_src_attribute(&self, base_url: ServoUrl, src: &str) {
        self.load_state.set(LoadState::LoadingFromSrcAttribute);

        // Step 9.attribute.1. If the src attribute's value is the empty string, then end
        // the synchronous section, and jump down to the failed with attribute step below.
        if src.is_empty() {
            self.queue_dedicated_media_source_failure_steps();
            return;
        }

        // Step 9.attribute.2. Let urlRecord be the result of encoding-parsing a URL given
        // the src attribute's value, relative to the media element's node document when the
        // src attribute was last changed.
        let Ok(url_record) = base_url.join(src) else {
            self.queue_dedicated_media_source_failure_steps();
            return;
        };

        // Step 9.attribute.3. If urlRecord is not failure, then set the currentSrc
        // attribute to the result of applying the URL serializer to urlRecord.
        *self.current_src.borrow_mut() = url_record.as_str().into();

        // Step 9.attribute.5. If urlRecord is not failure, then run the resource fetch
        // algorithm with urlRecord. If that algorithm returns without aborting this one,
        // then the load failed.
        // Note that the resource fetch algorithm itself takes care
        // of the cleanup in case of failure itself.
        self.resource_fetch_algorithm(Resource::Url(url_record));
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn load_from_source_child(&self, source: &HTMLSourceElement) {
        self.load_state.set(LoadState::LoadingFromSourceChild);

        // Step 9.children.1. Let pointer be a position defined by two adjacent nodes in the media
        // element's child list, treating the start of the list (before the first child in the list,
        // if any) and end of the list (after the last child in the list, if any) as nodes in their
        // own right. One node is the node before pointer, and the other node is the node after
        // pointer. Initially, let pointer be the position between the candidate node and the next
        // node, if there are any, or the end of the list, if it is the last node.
        *self.source_children_pointer.borrow_mut() =
            Some(SourceChildrenPointer::new(DomRoot::from_ref(source), false));

        let element = source.upcast::<Element>();

        // Step 9.children.2. Process candidate: If candidate does not have a src attribute, or if
        // its src attribute's value is the empty string, then end the synchronous section, and jump
        // down to the failed with elements step below.
        let Some(src) = element
            .get_attribute(&ns!(), &local_name!("src"))
            .filter(|attribute| !attribute.value().is_empty())
        else {
            self.load_from_source_child_failure_steps(source);
            return;
        };

        // Step 9.children.3. If candidate has a media attribute whose value does not match the
        // environment, then end the synchronous section, and jump down to the failed with elements
        // step below.
        if let Some(media) = element.get_attribute(&ns!(), &local_name!("media")) {
            if !MediaList::matches_environment(&element.owner_document(), &media.value()) {
                self.load_from_source_child_failure_steps(source);
                return;
            }
        }

        // Step 9.children.4. Let urlRecord be the result of encoding-parsing a URL given
        // candidate's src attribute's value, relative to candidate's node document when the src
        // attribute was last changed.
        let Ok(url_record) = source.owner_document().base_url().join(&src.value()) else {
            // Step 9.children.5. If urlRecord is failure, then end the synchronous section,
            // and jump down to the failed with elements step below.
            self.load_from_source_child_failure_steps(source);
            return;
        };

        // Step 9.children.6. If candidate has a type attribute whose value, when parsed as a MIME
        // type (including any codecs described by the codecs parameter, for types that define that
        // parameter), represents a type that the user agent knows it cannot render, then end the
        // synchronous section, and jump down to the failed with elements step below.
        if let Some(type_) = element.get_attribute(&ns!(), &local_name!("type")) {
            if ServoMedia::get().can_play_type(&type_.value()) == SupportsMediaType::No {
                self.load_from_source_child_failure_steps(source);
                return;
            }
        }

        // Reset the media player before loading the next source child.
        self.reset_media_player();

        self.current_source_child.set(Some(source));

        // Step 9.children.7. Set the currentSrc attribute to the result of applying the URL
        // serializer to urlRecord.
        *self.current_src.borrow_mut() = url_record.as_str().into();

        // Step 9.children.9. Run the resource fetch algorithm with urlRecord. If that
        // algorithm returns without aborting this one, then the load failed.
        // Note that the resource fetch algorithm itself takes care
        // of the cleanup in case of failure itself.
        self.resource_fetch_algorithm(Resource::Url(url_record));
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn load_from_source_child_failure_steps(&self, source: &HTMLSourceElement) {
        // Step 9.children.10. Failed with elements: Queue a media element task given the media
        // element to fire an event named error at candidate.
        let trusted_this = Trusted::new(self);
        let trusted_source = Trusted::new(source);
        let generation_id = self.generation_id.get();

        self.owner_global()
            .task_manager()
            .media_element_task_source()
            .queue(task!(queue_error_event: move || {
                let this = trusted_this.root();
                if generation_id != this.generation_id.get() {
                    return;
                }

                let source = trusted_source.root();
                source.upcast::<EventTarget>().fire_event(atom!("error"), CanGc::note());
            }));

        // Step 9.children.11. Await a stable state.
        let task = MediaElementMicrotask::SelectNextSourceChild {
            elem: DomRoot::from_ref(self),
            generation_id: self.generation_id.get(),
        };

        ScriptThread::await_stable_state(Microtask::MediaElement(task));
    }

    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
    fn select_next_source_child(&self, can_gc: CanGc) {
        // Step 9.children.12. Forget the media element's media-resource-specific tracks.
        self.AudioTracks(can_gc).clear();
        self.VideoTracks(can_gc).clear();

        // Step 9.children.13. Find next candidate: Let candidate be null.
        let mut source_candidate = None;

        // Step 9.children.14. Search loop: If the node after pointer is the end of the list, then
        // jump to the waiting step below.
        // Step 9.children.15. If the node after pointer is a source element, let candidate be that
        // element.
        // Step 9.children.16. Advance pointer so that the node before pointer is now the node that
        // was after pointer, and the node after pointer is the node after the node that used to be
        // after pointer, if any.
        if let Some(ref source_children_pointer) = *self.source_children_pointer.borrow() {
            // Note that a shared implementation for the opaque types returned by
            // `inclusively_following_siblings` and `following_siblings` is not
            // possible due to precise capturing.
1248            if source_children_pointer.inclusive {
1249                for next_sibling in source_children_pointer
1250                    .source_before_pointer
1251                    .upcast::<Node>()
1252                    .inclusively_following_siblings()
1253                {
1254                    if let Some(next_source) = DomRoot::downcast::<HTMLSourceElement>(next_sibling)
1255                    {
1256                        source_candidate = Some(next_source);
1257                        break;
1258                    }
1259                }
1260            } else {
1261                for next_sibling in source_children_pointer
1262                    .source_before_pointer
1263                    .upcast::<Node>()
1264                    .following_siblings()
1265                {
1266                    if let Some(next_source) = DomRoot::downcast::<HTMLSourceElement>(next_sibling)
1267                    {
1268                        source_candidate = Some(next_source);
1269                        break;
1270                    }
1271                }
1272            };
1273        }
1274
1275        // Step 9.children.17. If candidate is null, jump back to the search loop step. Otherwise,
1276        // jump back to the process candidate step.
1277        if let Some(source_candidate) = source_candidate {
1278            self.load_from_source_child(&source_candidate);
1279            return;
1280        }
1281
1282        self.load_state.set(LoadState::WaitingForSource);
1283
1284        *self.source_children_pointer.borrow_mut() = None;
1285
1286        // Step 9.children.18. Waiting: Set the element's networkState attribute to the
1287        // NETWORK_NO_SOURCE value.
1288        self.network_state.set(NetworkState::NoSource);
1289
1290        // Step 9.children.19. Set the element's show poster flag to true.
1291        self.show_poster.set(true);
1292
1293        // Step 9.children.20. Queue a media element task given the media element to set the
1294        // element's delaying-the-load-event flag to false. This stops delaying the load event.
1295        let this = Trusted::new(self);
1296        let generation_id = self.generation_id.get();
1297
1298        self.owner_global()
1299            .task_manager()
1300            .media_element_task_source()
1301            .queue(task!(queue_delay_load_event: move || {
1302                let this = this.root();
1303                if generation_id != this.generation_id.get() {
1304                    return;
1305                }
1306
1307                this.delay_load_event(false, CanGc::note());
1308            }));
1309
1310        // Step 9.children.22. Wait until the node after pointer is a node other than the end of the
1311        // list. (This step might wait forever.)
1312    }
1313
1314    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1315    fn resource_selection_algorithm_failure_steps(&self) {
1316        match self.load_state.get() {
1317            LoadState::LoadingFromSrcObject => {
1318                // Step 9.object.4. Failed with media provider: Reaching this step indicates that
1319                // the media resource failed to load. Take pending play promises and queue a media
1320                // element task given the media element to run the dedicated media source failure
1321                // steps with the result.
1322                self.queue_dedicated_media_source_failure_steps();
1323            },
1324            LoadState::LoadingFromSrcAttribute => {
1325                // Step 9.attribute.6. Failed with attribute: Reaching this step indicates that the
1326                // media resource failed to load or that urlRecord is failure. Take pending play
1327                // promises and queue a media element task given the media element to run the
1328                // dedicated media source failure steps with the result.
1329                self.queue_dedicated_media_source_failure_steps();
1330            },
1331            LoadState::LoadingFromSourceChild => {
1332                // Step 9.children.10. Failed with elements: Queue a media element task given the
1333                // media element to fire an event named error at candidate.
1334                if let Some(source) = self.current_source_child.take() {
1335                    self.load_from_source_child_failure_steps(&source);
1336                }
1337            },
1338            _ => {},
1339        }
1340    }
1341
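    /// Initiates a fetch of the current media resource with a `Range` request
    /// starting at `offset` bytes (0 if none is given), cancelling any fetch that
    /// is already in flight. When a `seek_lock` is provided, it is unlocked with
    /// the outcome of the seek once the new fetch has been set up.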
1342    fn fetch_request(&self, offset: Option<u64>, seek_lock: Option<SeekLock>) {
1343        if self.resource_url.borrow().is_none() && self.blob_url.borrow().is_none() {
1344            error!("Missing request url");
1345            if let Some(seek_lock) = seek_lock {
1346                seek_lock.unlock(/* successful seek */ false);
1347            }
1348            self.resource_selection_algorithm_failure_steps();
1349            return;
1350        }
1351
1352        let document = self.owner_document();
1353        let destination = match self.media_type_id() {
1354            HTMLMediaElementTypeId::HTMLAudioElement => Destination::Audio,
1355            HTMLMediaElementTypeId::HTMLVideoElement => Destination::Video,
1356        };
1357        let mut headers = HeaderMap::new();
1358        // FIXME(eijebong): Use typed headers once we have a constructor for the range header
1359        headers.insert(
1360            header::RANGE,
1361            HeaderValue::from_str(&format!("bytes={}-", offset.unwrap_or(0))).unwrap(),
1362        );
1363        let url = match self.resource_url.borrow().as_ref() {
1364            Some(url) => url.clone(),
1365            None => self.blob_url.borrow().as_ref().unwrap().clone(),
1366        };
1367
1368        let cors_setting = cors_setting_for_element(self.upcast());
1369        let global = self.global();
1370        let request = create_a_potential_cors_request(
1371            Some(document.webview_id()),
1372            url.clone(),
1373            destination,
1374            cors_setting,
1375            None,
1376            global.get_referrer(),
1377            document.insecure_requests_policy(),
1378            document.has_trustworthy_ancestor_or_current_origin(),
1379            global.policy_container(),
1380        )
1381        .headers(headers)
1382        .origin(document.origin().immutable().clone())
1383        .pipeline_id(Some(self.global().pipeline_id()))
1384        .referrer_policy(document.get_referrer_policy());
1385
1386        let mut current_fetch_context = self.current_fetch_context.borrow_mut();
1387        if let Some(ref mut current_fetch_context) = *current_fetch_context {
1388            current_fetch_context.cancel(CancelReason::Abort);
1389        }
1390
1391        *current_fetch_context = Some(HTMLMediaElementFetchContext::new(
1392            request.id,
1393            global.core_resource_thread(),
1394        ));
1395        let listener =
1396            HTMLMediaElementFetchListener::new(self, request.id, url.clone(), offset.unwrap_or(0));
1397
1398        self.owner_document().fetch_background(request, listener);
1399
1400        // Since we cancelled the previous fetch, from now on the media element
1401        // will only receive response data from the new fetch that's been
1402        // initiated. This means the player can resume operation, since all subsequent data
1403        // pushes will originate from the new seek offset.
1404        if let Some(seek_lock) = seek_lock {
1405            seek_lock.unlock(/* successful seek */ true);
1406        }
1407    }
1408
1409    /// <https://html.spec.whatwg.org/multipage/#eligible-for-autoplay>
1410    fn eligible_for_autoplay(&self) -> bool {
1411        // its can autoplay flag is true;
1412        self.autoplaying.get() &&
1413
1414        // its paused attribute is true;
1415        self.Paused() &&
1416
1417        // it has an autoplay attribute specified;
1418        self.Autoplay() &&
1419
1420        // its node document's active sandboxing flag set does not have the sandboxed automatic
1421        // features browsing context flag set; and
1422        {
1423            let document = self.owner_document();
1424
1425            !document.has_active_sandboxing_flag(
1426                SandboxingFlagSet::SANDBOXED_AUTOMATIC_FEATURES_BROWSING_CONTEXT_FLAG,
1427            )
1428        }
1429
1430        // its node document is allowed to use the "autoplay" feature.
1431        // TODO: Feature policy: https://html.spec.whatwg.org/iframe-embed-object.html#allowed-to-use
1432    }
1433
1434    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-resource>
1435    fn resource_fetch_algorithm(&self, resource: Resource) {
1436        if let Err(e) = self.create_media_player(&resource) {
1437            error!("Create media player error {:?}", e);
1438            self.resource_selection_algorithm_failure_steps();
1439            return;
1440        }
1441
1442        // Steps 1-2.
1443        // Not applicable: the `resource` variable already conveys which mode
1444        // is in use.
1445
1446        // Step 3.
1447        // FIXME(nox): Remove all media-resource-specific text tracks.
1448
1449        // Step 5. Run the appropriate steps from the following list:
1450        match resource {
1451            Resource::Url(url) => {
1452                // Step 5.remote.1. Optionally, run the following substeps. This is the expected
1453                // behavior if the user agent intends to not attempt to fetch the resource until the
1454                // user requests it explicitly (e.g. as a way to implement the preload attribute's
1455                // none keyword).
1456                if self.Preload() == "none" && !self.autoplaying.get() {
1457                    // Step 5.remote.1.1. Set the networkState to NETWORK_IDLE.
1458                    self.network_state.set(NetworkState::Idle);
1459
1460                    // Step 5.remote.1.2. Queue a media element task given the media element to fire
1461                    // an event named suspend at the element.
1462                    self.queue_media_element_task_to_fire_event(atom!("suspend"));
1463
1464                    // Step 5.remote.1.3. Queue a media element task given the media element to set
1465                    // the element's delaying-the-load-event flag to false. This stops delaying the
1466                    // load event.
1467                    let this = Trusted::new(self);
1468                    let generation_id = self.generation_id.get();
1469
1470                    self.owner_global()
1471                        .task_manager()
1472                        .media_element_task_source()
1473                        .queue(task!(queue_delay_load_event: move || {
1474                            let this = this.root();
1475                            if generation_id != this.generation_id.get() {
1476                                return;
1477                            }
1478
1479                            this.delay_load_event(false, CanGc::note());
1480                        }));
1481
1482                    // TODO Step 5.remote.1.4. Wait for the task to be run.
1483                    // FIXME(nox): Somehow we should wait for the task from the previous
1484                    // step to be run before continuing.
1485
1486                    // TODO Steps 5.remote.1.5-5.remote.1.7.
1487                    // FIXME(nox): Wait for an implementation-defined event and
1488                    // then continue with the normal set of steps instead of just
1489                    // returning.
1490                    return;
1491                }
1492
1493                *self.resource_url.borrow_mut() = Some(url);
1494
1495                // Steps 5.remote.2-5.remote.8
1496                self.fetch_request(None, None);
1497            },
1498            Resource::Object => {
1499                if let Some(ref src_object) = *self.src_object.borrow() {
1500                    match src_object {
1501                        SrcObject::Blob(blob) => {
1502                            let blob_url = URL::CreateObjectURL(&self.global(), blob);
1503                            *self.blob_url.borrow_mut() =
1504                                Some(ServoUrl::parse(&blob_url.str()).expect("infallible"));
1505                            self.fetch_request(None, None);
1506                        },
1507                        SrcObject::MediaStream(stream) => {
1508                            let tracks = &*stream.get_tracks();
1509                            for (pos, track) in tracks.iter().enumerate() {
1510                                if self
1511                                    .player
1512                                    .borrow()
1513                                    .as_ref()
1514                                    .unwrap()
1515                                    .lock()
1516                                    .unwrap()
1517                                    .set_stream(&track.id(), pos == tracks.len() - 1)
1518                                    .is_err()
1519                                {
1520                                    self.resource_selection_algorithm_failure_steps();
1521                                }
1522                            }
1523                        },
1524                    }
1525                }
1526            },
1527        }
1528    }
1529
1530    /// Queues a task to run the [dedicated media source failure steps][steps].
1531    ///
1532    /// [steps]: https://html.spec.whatwg.org/multipage/#dedicated-media-source-failure-steps
1533    fn queue_dedicated_media_source_failure_steps(&self) {
1534        let this = Trusted::new(self);
1535        let generation_id = self.generation_id.get();
1536        self.take_pending_play_promises(Err(Error::NotSupported));
1537        self.owner_global()
1538            .task_manager()
1539            .media_element_task_source()
1540            .queue(task!(dedicated_media_source_failure_steps: move || {
1541                let this = this.root();
1542                if generation_id != this.generation_id.get() {
1543                    return;
1544                }
1545
1546                this.fulfill_in_flight_play_promises(|| {
1547                    // Step 1. Set the error attribute to the result of creating a MediaError with
1548                    // MEDIA_ERR_SRC_NOT_SUPPORTED.
1549                    this.error.set(Some(&*MediaError::new(
1550                        &this.owner_window(),
1551                        MEDIA_ERR_SRC_NOT_SUPPORTED, CanGc::note())));
1552
1553                    // Step 2. Forget the media element's media-resource-specific tracks.
1554                    this.AudioTracks(CanGc::note()).clear();
1555                    this.VideoTracks(CanGc::note()).clear();
1556
1557                    // Step 3. Set the element's networkState attribute to the NETWORK_NO_SOURCE
1558                    // value.
1559                    this.network_state.set(NetworkState::NoSource);
1560
1561                    // Step 4. Set the element's show poster flag to true.
1562                    this.show_poster.set(true);
1563
1564                    // Step 5. Fire an event named error at the media element.
1565                    this.upcast::<EventTarget>().fire_event(atom!("error"), CanGc::note());
1566
1567                    if let Some(ref player) = *this.player.borrow() {
1568                        if let Err(e) = player.lock().unwrap().stop() {
1569                            error!("Could not stop player {:?}", e);
1570                        }
1571                    }
1572
1573                    // Step 6. Reject pending play promises with promises and a "NotSupportedError"
1574                    // DOMException.
1575                    // Done after running this closure in `fulfill_in_flight_play_promises`.
1576                });
1577
1578                // Step 7. Set the element's delaying-the-load-event flag to false. This stops
1579                // delaying the load event.
1580                this.delay_load_event(false, CanGc::note());
1581            }));
1582    }
1583
1584    fn in_error_state(&self) -> bool {
1585        self.error.get().is_some()
1586    }
1587
1588    /// <https://html.spec.whatwg.org/multipage/#potentially-playing>
1589    fn is_potentially_playing(&self) -> bool {
1590        !self.paused.get() &&
1591            !self.ended_playback() &&
1592            self.error.get().is_none() &&
1593            !self.is_blocked_media_element()
1594    }
1595
1596    /// <https://html.spec.whatwg.org/multipage/#blocked-media-element>
1597    fn is_blocked_media_element(&self) -> bool {
1598        self.ready_state.get() <= ReadyState::HaveCurrentData ||
1599            self.is_paused_for_user_interaction() ||
1600            self.is_paused_for_in_band_content()
1601    }
1602
1603    /// <https://html.spec.whatwg.org/multipage/#paused-for-user-interaction>
1604    fn is_paused_for_user_interaction(&self) -> bool {
1605        // FIXME: we will likely be able to fill this placeholder once (if) we
1606        //        implement the MediaSession API.
1607        false
1608    }
1609
1610    /// <https://html.spec.whatwg.org/multipage/#paused-for-in-band-content>
1611    fn is_paused_for_in_band_content(&self) -> bool {
1612        // FIXME: we will likely be able to fill this placeholder once (if) we
1613        //        implement https://github.com/servo/servo/issues/22314
1614        false
1615    }
1616
1617    /// <https://html.spec.whatwg.org/multipage/#media-element-load-algorithm>
1618    fn media_element_load_algorithm(&self, can_gc: CanGc) {
1619        // Reset the flag that signals whether loadeddata was ever fired for
1620        // this invocation of the load algorithm.
1621        self.fired_loadeddata_event.set(false);
1622
1623        // TODO Step 1. Set this element's is currently stalled to false.
1624
1625        // Step 2. Abort any already-running instance of the resource selection algorithm for this
1626        // element.
1627        self.generation_id.set(self.generation_id.get() + 1);
1628
1629        self.load_state.set(LoadState::NotLoaded);
1630        *self.source_children_pointer.borrow_mut() = None;
1631        self.current_source_child.set(None);
1632
1633        // Step 3. Let pending tasks be a list of all tasks from the media element's media element
1634        // event task source in one of the task queues.
1635
1636        // Step 4. For each task in pending tasks that would resolve pending play promises or reject
1637        // pending play promises, immediately resolve or reject those promises in the order the
1638        // corresponding tasks were queued.
1639        while !self.in_flight_play_promises_queue.borrow().is_empty() {
1640            self.fulfill_in_flight_play_promises(|| ());
1641        }
1642
1643        // Step 5. Remove each task in pending tasks from its task queue.
1644        // Note that each media element's pending event and callback is scheduled with associated
1645        // generation id and will be aborted eventually (from Step 2).
1646
1647        let network_state = self.network_state.get();
1648
1649        // Step 6. If the media element's networkState is set to NETWORK_LOADING or NETWORK_IDLE,
1650        // queue a media element task given the media element to fire an event named abort at the
1651        // media element.
1652        if network_state == NetworkState::Loading || network_state == NetworkState::Idle {
1653            self.queue_media_element_task_to_fire_event(atom!("abort"));
1654        }
1655
1656        // Reset the media player for any previously playing media resource (see Step 11).
1657        self.reset_media_player();
1658
1659        // Step 7. If the media element's networkState is not set to NETWORK_EMPTY, then:
1660        if network_state != NetworkState::Empty {
1661            // Step 7.1. Queue a media element task given the media element to fire an event named
1662            // emptied at the media element.
1663            self.queue_media_element_task_to_fire_event(atom!("emptied"));
1664
1665            // Step 7.2. If a fetching process is in progress for the media element, the user agent
1666            // should stop it.
1667            if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
1668                current_fetch_context.cancel(CancelReason::Abort);
1669            }
1670
1671            // TODO Step 7.3. If the media element's assigned media provider object is a MediaSource
1672            // object, then detach it.
1673
1674            // Step 7.4. Forget the media element's media-resource-specific tracks.
1675            self.AudioTracks(can_gc).clear();
1676            self.VideoTracks(can_gc).clear();
1677
1678            // Step 7.5. If readyState is not set to HAVE_NOTHING, then set it to that state.
1679            if self.ready_state.get() != ReadyState::HaveNothing {
1680                self.change_ready_state(ReadyState::HaveNothing);
1681            }
1682
1683            // Step 7.6. If the paused attribute is false, then:
1684            if !self.Paused() {
1685                // Step 7.6.1. Set the paused attribute to true.
1686                self.paused.set(true);
1687
1688                // Step 7.6.2. Take pending play promises and reject pending play promises with the
1689                // result and an "AbortError" DOMException.
1690                self.take_pending_play_promises(Err(Error::Abort));
1691                self.fulfill_in_flight_play_promises(|| ());
1692            }
1693
1694            // Step 7.7. If seeking is true, set it to false.
1695            self.seeking.set(false);
1696
1697            // Step 7.8. Set the current playback position to 0.
1698            // Set the official playback position to 0.
1699            // If this changed the official playback position, then queue a media element task given
1700            // the media element to fire an event named timeupdate at the media element.
1701            self.current_playback_position.set(0.);
1702            if self.official_playback_position.get() != 0. {
1703                self.queue_media_element_task_to_fire_event(atom!("timeupdate"));
1704            }
1705            self.official_playback_position.set(0.);
1706
1707            // TODO Step 7.9. Set the timeline offset to Not-a-Number (NaN).
1708
1709            // Step 7.10. Update the duration attribute to Not-a-Number (NaN).
1710            self.duration.set(f64::NAN);
1711        }
1712
1713        // Step 8. Set the playbackRate attribute to the value of the defaultPlaybackRate attribute.
1714        self.playbackRate.set(self.defaultPlaybackRate.get());
1715
1716        // Step 9. Set the error attribute to null and the can autoplay flag to true.
1717        self.error.set(None);
1718        self.autoplaying.set(true);
1719
1720        // Step 10. Invoke the media element's resource selection algorithm.
1721        self.invoke_resource_selection_algorithm(can_gc);
1722
1723        // Step 11. Note: Playback of any previously playing media resource for this element stops.
1724    }
1725
1726    /// Queue a media element task given the media element to fire an event at the media element.
1727    /// <https://html.spec.whatwg.org/multipage/#queue-a-media-element-task>
1728    fn queue_media_element_task_to_fire_event(&self, name: Atom) {
1729        let this = Trusted::new(self);
1730        let generation_id = self.generation_id.get();
1731
1732        self.owner_global()
1733            .task_manager()
1734            .media_element_task_source()
1735            .queue(task!(queue_event: move || {
1736                let this = this.root();
1737                if generation_id != this.generation_id.get() {
1738                    return;
1739                }
1740
1741                this.upcast::<EventTarget>().fire_event(name, CanGc::note());
1742            }));
1743    }
1744
1745    /// Appends a promise to the list of pending play promises.
1746    fn push_pending_play_promise(&self, promise: &Rc<Promise>) {
1747        self.pending_play_promises
1748            .borrow_mut()
1749            .push(promise.clone());
1750    }
1751
1752    /// Takes the pending play promises.
1753    ///
1754    /// The result with which these promises will be fulfilled is passed here
1755    /// and this method returns nothing because we actually just move the
1756    /// current list of pending play promises to the
1757    /// `in_flight_play_promises_queue` field.
1758    ///
1759    /// Each call to this method must be followed by a call to
1760    /// `fulfill_in_flight_play_promises`, to actually fulfill the promises
1761    /// which were taken and moved to the in-flight queue.
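    ///
    /// A typical pairing, as used elsewhere in this file:
    ///
    /// ```ignore
    /// self.take_pending_play_promises(Err(Error::Abort));
    /// self.fulfill_in_flight_play_promises(|| ());
    /// ```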
1762    fn take_pending_play_promises(&self, result: ErrorResult) {
1763        let pending_play_promises = std::mem::take(&mut *self.pending_play_promises.borrow_mut());
1764        self.in_flight_play_promises_queue
1765            .borrow_mut()
1766            .push_back((pending_play_promises.into(), result));
1767    }
1768
1769    /// Fulfills the next in-flight play promises queue after running a closure.
1770    ///
1771    /// See the comment on `take_pending_play_promises` for why this method
1772    /// does not take a list of promises to fulfill. Callers cannot just pop
1773    /// the front list off of `in_flight_play_promises_queue` and later fulfill
1774    /// the promises because that would mean putting
1775    /// `#[cfg_attr(crown, allow(crown::unrooted_must_root))]` on even more functions, potentially
1776    /// hiding actual safety bugs.
1777    #[cfg_attr(crown, allow(crown::unrooted_must_root))]
1778    fn fulfill_in_flight_play_promises<F>(&self, f: F)
1779    where
1780        F: FnOnce(),
1781    {
1782        let (promises, result) = self
1783            .in_flight_play_promises_queue
1784            .borrow_mut()
1785            .pop_front()
1786            .expect("there should be at least one list of in flight play promises");
1787        f();
1788        for promise in &*promises {
1789            match result {
1790                Ok(ref value) => promise.resolve_native(value, CanGc::note()),
1791                Err(ref error) => promise.reject_error(error.clone(), CanGc::note()),
1792            }
1793        }
1794    }
1795
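    /// Insertion steps for a newly inserted `<source>` child: either invokes the
    /// resource selection algorithm (when the element has no `src` attribute and
    /// its networkState is NETWORK_EMPTY) or resumes a load that is currently
    /// waiting for a source child to appear.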
1796    pub(crate) fn handle_source_child_insertion(&self, source: &HTMLSourceElement, can_gc: CanGc) {
1797        // <https://html.spec.whatwg.org/multipage/#the-source-element:html-element-insertion-steps>
1798        // Step 2. If parent is a media element that has no src attribute and whose networkState has
1799        // the value NETWORK_EMPTY, then invoke that media element's resource selection algorithm.
1800        if self.upcast::<Element>().has_attribute(&local_name!("src")) {
1801            return;
1802        }
1803
1804        if self.network_state.get() == NetworkState::Empty {
1805            self.invoke_resource_selection_algorithm(can_gc);
1806            return;
1807        }
1808
1809        // <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1810        // Step 9.children.22. Wait until the node after pointer is a node other than the end of the
1811        // list. (This step might wait forever.)
1812        if self.load_state.get() != LoadState::WaitingForSource {
1813            return;
1814        }
1815
1816        self.load_state.set(LoadState::LoadingFromSourceChild);
1817
1818        *self.source_children_pointer.borrow_mut() =
1819            Some(SourceChildrenPointer::new(DomRoot::from_ref(source), true));
1820
1821        // Step 9.children.23. Await a stable state.
1822        let task = MediaElementMicrotask::SelectNextSourceChildAfterWait {
1823            elem: DomRoot::from_ref(self),
1824            generation_id: self.generation_id.get(),
1825        };
1826
1827        ScriptThread::await_stable_state(Microtask::MediaElement(task));
1828    }
1829
1830    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm>
1831    fn select_next_source_child_after_wait(&self, can_gc: CanGc) {
1832        // Step 9.children.24. Set the element's delaying-the-load-event flag back to true (this
1833        // delays the load event again, in case it hasn't been fired yet).
1834        self.delay_load_event(true, can_gc);
1835
1836        // Step 9.children.25. Set the networkState back to NETWORK_LOADING.
1837        self.network_state.set(NetworkState::Loading);
1838
1839        // Step 9.children.26. Jump back to the find next candidate step above.
1840        self.select_next_source_child(can_gc);
1841    }
1842
1843    /// <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
1844    /// => "If the media data cannot be fetched at all, due to network errors..."
1845    /// => "If the media data can be fetched but is found by inspection to be in an unsupported
1846    /// format, or can otherwise not be rendered at all"
1847    fn media_data_processing_failure_steps(&self) {
1848        // Step 1. The user agent should cancel the fetching process.
1849        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
1850            current_fetch_context.cancel(CancelReason::Error);
1851        }
1852
1853        // Step 2. Abort this subalgorithm, returning to the resource selection algorithm.
1854        self.resource_selection_algorithm_failure_steps();
1855    }
1856
1857    /// <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
1858    /// => "If the connection is interrupted after some media data has been received..."
1859    /// => "If the media data is corrupted"
1860    fn media_data_processing_fatal_steps(&self, error: u16, can_gc: CanGc) {
1861        *self.source_children_pointer.borrow_mut() = None;
1862        self.current_source_child.set(None);
1863
1864        // Step 1. The user agent should cancel the fetching process.
1865        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
1866            current_fetch_context.cancel(CancelReason::Error);
1867        }
1868
1869        // Step 2. Set the error attribute to the result of creating a MediaError with
1870        // MEDIA_ERR_NETWORK/MEDIA_ERR_DECODE.
1871        self.error
1872            .set(Some(&*MediaError::new(&self.owner_window(), error, can_gc)));
1873
1874        // Step 3. Set the element's networkState attribute to the NETWORK_IDLE value.
1875        self.network_state.set(NetworkState::Idle);
1876
1877        // Step 4. Set the element's delaying-the-load-event flag to false. This stops delaying
1878        // the load event.
1879        self.delay_load_event(false, can_gc);
1880
1881        // Step 5. Fire an event named error at the media element.
1882        self.upcast::<EventTarget>()
1883            .fire_event(atom!("error"), can_gc);
1884
1885        // Step 6. Abort the overall resource selection algorithm.
1886    }
1887
1888    /// <https://html.spec.whatwg.org/multipage/#dom-media-seek>
1889    fn seek(&self, time: f64, _approximate_for_speed: bool) {
1890        // Step 1. Set the media element's show poster flag to false.
1891        self.show_poster.set(false);
1892
1893        // Step 2. If the media element's readyState is HAVE_NOTHING, return.
1894        if self.ready_state.get() == ReadyState::HaveNothing {
1895            return;
1896        }
1897
1898        // TODO Step 3. If the element's seeking IDL attribute is true, then another instance of
1899        // this algorithm is already running. Abort that other instance of the algorithm without
1900        // waiting for the step that it is running to complete.
1901
1902        // Step 4. Set the seeking IDL attribute to true.
1903        self.seeking.set(true);
1904
1905        // Step 5. If the seek was in response to a DOM method call or setting of an IDL attribute,
1906        // then continue the script. The remainder of these steps must be run in parallel.
1907
1908        // Step 6. If the new playback position is later than the end of the media resource, then
1909        // let it be the end of the media resource instead.
1910        let time = f64::min(time, self.Duration());
1911
1912        // Step 7. If the new playback position is less than the earliest possible position, let it
1913        // be that position instead.
1914        let time = f64::max(time, self.earliest_possible_position());
1915
1916        // Step 8. If the (possibly now changed) new playback position is not in one of the ranges
1917        // given in the seekable attribute, then let it be the position in one of the ranges given
1918        // in the seekable attribute that is the nearest to the new playback position. If there are
1919        // no ranges given in the seekable attribute, then set the seeking IDL attribute to false
1920        // and return.
1921        let seekable = self.seekable();
1922
1923        if seekable.is_empty() {
1924            self.seeking.set(false);
1925            return;
1926        }
1927
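        // Scan the seekable ranges: if `time` already falls inside one, keep it;
        // otherwise remember the boundary closest to `time` so the new playback
        // position can be clamped to the nearest seekable point.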
1928        let mut nearest_seekable_position = 0.0;
1929        let mut in_seekable_range = false;
1930        let mut nearest_seekable_distance = f64::MAX;
1931        for i in 0..seekable.len() {
1932            let start = seekable.start(i).unwrap().abs();
1933            let end = seekable.end(i).unwrap().abs();
1934            if time >= start && time <= end {
1935                nearest_seekable_position = time;
1936                in_seekable_range = true;
1937                break;
1938            } else if time < start {
1939                let distance = start - time;
1940                if distance < nearest_seekable_distance {
1941                    nearest_seekable_distance = distance;
1942                    nearest_seekable_position = start;
1943                }
1944            } else {
1945                let distance = time - end;
1946                if distance < nearest_seekable_distance {
1947                    nearest_seekable_distance = distance;
1948                    nearest_seekable_position = end;
1949                }
1950            }
1951        }
1952        let time = if in_seekable_range {
1953            time
1954        } else {
1955            nearest_seekable_position
1956        };
1957
1958        // Step 9. If the approximate-for-speed flag is set, adjust the new playback position to a
1959        // value that will allow for playback to resume promptly. If new playback position before
1960        // this step is before current playback position, then the adjusted new playback position
1961        // must also be before the current playback position. Similarly, if the new playback
1962        // position before this step is after current playback position, then the adjusted new
1963        // playback position must also be after the current playback position.
1964        // TODO: Note that servo-media with gstreamer does not support inaccurate seeking for now.
1965
1966        // Step 10. Queue a media element task given the media element to fire an event named
1967        // seeking at the element.
1968        self.queue_media_element_task_to_fire_event(atom!("seeking"));
1969
1970        // Step 11. Set the current playback position to the new playback position.
1971        self.current_playback_position.set(time);
1972
1973        if let Some(ref player) = *self.player.borrow() {
1974            if let Err(e) = player.lock().unwrap().seek(time) {
1975                error!("Seek error {:?}", e);
1976            }
1977        }
1978
1979        // Step 12. Wait until the user agent has established whether or not the media data for the
1980        // new playback position is available, and, if it is, until it has decoded enough data to
1981        // play back that position.
1982        // The rest of the steps are handled when the media engine signals a ready state change or
1983        // otherwise satisfies seek completion and signals a position change.
1984    }
1985
1986    /// <https://html.spec.whatwg.org/multipage/#dom-media-seek>
1987    fn seek_end(&self) {
1988        // Any time the user agent provides a stable state, the official playback position must be
1989        // set to the current playback position.
1990        self.official_playback_position
1991            .set(self.current_playback_position.get());
1992
1993        // Step 14. Set the seeking IDL attribute to false.
1994        self.seeking.set(false);
1995
1996        // Step 15. Run the time marches on steps.
1997        self.time_marches_on();
1998
1999        // Step 16. Queue a media element task given the media element to fire an event named
2000        // timeupdate at the element.
2001        self.queue_media_element_task_to_fire_event(atom!("timeupdate"));
2002
2003        // Step 17. Queue a media element task given the media element to fire an event named seeked
2004        // at the element.
2005        self.queue_media_element_task_to_fire_event(atom!("seeked"));
2006    }
2007
2008    /// <https://html.spec.whatwg.org/multipage/#poster-frame>
2009    pub(crate) fn set_poster_frame(&self, image: Option<Arc<RasterImage>>) {
2010        let queue_postershown_event = pref!(media_testing_enabled) && image.is_some();
2011
2012        self.video_renderer.lock().unwrap().set_poster_frame(image);
2013
2014        self.upcast::<Node>().dirty(NodeDamage::Other);
2015
2016        if queue_postershown_event {
2017            self.owner_global()
2018                .task_manager()
2019                .media_element_task_source()
2020                .queue_simple_event(self.upcast(), atom!("postershown"));
2021        }
2022    }
2023
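    /// Creates the servo-media player for the given resource, hooking up the
    /// element's video and audio renderers and routing player events back to the
    /// media element task source on the script thread.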
2024    fn create_media_player(&self, resource: &Resource) -> Result<(), ()> {
2025        let stream_type = match *resource {
2026            Resource::Object => {
2027                if let Some(ref src_object) = *self.src_object.borrow() {
2028                    match src_object {
2029                        SrcObject::MediaStream(_) => StreamType::Stream,
2030                        _ => StreamType::Seekable,
2031                    }
2032                } else {
2033                    return Err(());
2034                }
2035            },
2036            _ => StreamType::Seekable,
2037        };
2038
2039        let window = self.owner_window();
2040        let (action_sender, action_receiver) = ipc::channel::<PlayerEvent>().unwrap();
2041        let video_renderer: Option<Arc<Mutex<dyn VideoFrameRenderer>>> = match self.media_type_id()
2042        {
2043            HTMLMediaElementTypeId::HTMLAudioElement => None,
2044            HTMLMediaElementTypeId::HTMLVideoElement => Some(self.video_renderer.clone()),
2045        };
2046
2047        let audio_renderer = self.audio_renderer.borrow().as_ref().cloned();
2048
2049        let pipeline_id = window.pipeline_id();
2050        let client_context_id =
2051            ClientContextId::build(pipeline_id.namespace_id.0, pipeline_id.index.0.get());
2052        let player = ServoMedia::get().create_player(
2053            &client_context_id,
2054            stream_type,
2055            action_sender,
2056            video_renderer,
2057            audio_renderer,
2058            Box::new(window.get_player_context()),
2059        );
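        // Propagate the element's current muted state to the freshly created
        // player and capture its id for event routing and renderer setup.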
2060        let player_id = {
2061            let player_guard = player.lock().unwrap();
2062
2063            if let Err(e) = player_guard.set_mute(self.muted.get()) {
2064                log::warn!("Could not set mute state: {:?}", e);
2065            }
2066
2067            player_guard.get_id()
2068        };
2069
2070        *self.player.borrow_mut() = Some(player);
2071
2072        let trusted_node = Trusted::new(self);
2073        let task_source = self
2074            .owner_global()
2075            .task_manager()
2076            .media_element_task_source()
2077            .to_sendable();
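        // Forward player events arriving on the IPC channel to the media element
        // task source so that `handle_player_event` runs on the script thread for
        // the player that emitted them.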
2078        ROUTER.add_typed_route(
2079            action_receiver,
2080            Box::new(move |message| {
2081                let event = message.unwrap();
2082                trace!("Player event {:?}", event);
2083                let this = trusted_node.clone();
2084                task_source.queue(task!(handle_player_event: move || {
2085                    this.root().handle_player_event(player_id, &event, CanGc::note());
2086                }));
2087            }),
2088        );
2089
2090        let task_source = self
2091            .owner_global()
2092            .task_manager()
2093            .media_element_task_source()
2094            .to_sendable();
2095        let weak_video_renderer = Arc::downgrade(&self.video_renderer);
2096
2097        self.video_renderer
2098            .lock()
2099            .unwrap()
2100            .setup(player_id, task_source, weak_video_renderer);
2101
2102        Ok(())
2103    }
2104
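    /// Stops and drops the current player, if any, then resets the video renderer
    /// and clears the last reported video dimensions.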
2105    fn reset_media_player(&self) {
2106        if self.player.borrow().is_none() {
2107            return;
2108        }
2109
2110        if let Some(ref player) = *self.player.borrow() {
2111            if let Err(e) = player.lock().unwrap().stop() {
2112                error!("Could not stop player {:?}", e);
2113            }
2114        }
2115
2116        *self.player.borrow_mut() = None;
2117        self.video_renderer.lock().unwrap().reset();
2118        self.handle_resize(None, None);
2119    }
2120
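    /// Enables or disables the audio track at `idx` on the underlying player, if
    /// one exists.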
2121    pub(crate) fn set_audio_track(&self, idx: usize, enabled: bool) {
2122        if let Some(ref player) = *self.player.borrow() {
2123            if let Err(err) = player.lock().unwrap().set_audio_track(idx as i32, enabled) {
2124                warn!("Could not set audio track {:#?}", err);
2125            }
2126        }
2127    }
2128
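    /// Enables or disables the video track at `idx` on the underlying player, if
    /// one exists.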
2129    pub(crate) fn set_video_track(&self, idx: usize, enabled: bool) {
2130        if let Some(ref player) = *self.player.borrow() {
2131            if let Err(err) = player.lock().unwrap().set_video_track(idx as i32, enabled) {
2132                warn!("Could not set video track {:#?}", err);
2133            }
2134        }
2135    }
2136
2137    /// <https://html.spec.whatwg.org/multipage/#direction-of-playback>
2138    fn direction_of_playback(&self) -> PlaybackDirection {
2139        // If the element's playbackRate is positive or zero, then the direction of playback is
2140        // forwards. Otherwise, it is backwards.
2141        if self.playbackRate.get() >= 0. {
2142            PlaybackDirection::Forwards
2143        } else {
2144            PlaybackDirection::Backwards
2145        }
2146    }
2147
2148    /// <https://html.spec.whatwg.org/multipage/#ended-playback>
2149    fn ended_playback(&self) -> bool {
2150        // A media element is said to have ended playback when:
2151
2152        // The element's readyState attribute is HAVE_METADATA or greater, and
2153        if self.ready_state.get() < ReadyState::HaveMetadata {
2154            return false;
2155        }
2156
2157        let playback_position = self.current_playback_position.get();
2158
2159        match self.direction_of_playback() {
2160            // Either: The current playback position is the end of the media resource, and the
2161            // direction of playback is forwards, and the media element does not have a loop
2162            // attribute specified.
2163            PlaybackDirection::Forwards => playback_position >= self.Duration() && !self.Loop(),
2164            // Or: The current playback position is the earliest possible position, and the
2165            // direction of playback is backwards.
2166            PlaybackDirection::Backwards => playback_position <= self.earliest_possible_position(),
2167        }
2168    }
2169
2170    /// <https://html.spec.whatwg.org/multipage/#reaches-the-end>
2171    fn end_of_playback_in_forwards_direction(&self) {
2172        // When the current playback position reaches the end of the media resource when the
2173        // direction of playback is forwards, then the user agent must follow these steps:
2174
2175        // Step 1. If the media element has a loop attribute specified, then seek to the earliest
2176        // possible position of the media resource and return.
2177        if self.Loop() {
2178            self.seek(
2179                self.earliest_possible_position(),
2180                /* approximate_for_speed */ false,
2181            );
2182            return;
2183        }
2184
2185        // Step 2. As defined above, the ended IDL attribute starts returning true once the event
2186        // loop returns to step 1.
2187
2188        // Step 3. Queue a media element task given the media element and the following steps:
2189        let this = Trusted::new(self);
2190        let generation_id = self.generation_id.get();
2191
2192        self.owner_global()
2193            .task_manager()
2194            .media_element_task_source()
2195            .queue(task!(reaches_the_end_steps: move || {
2196                let this = this.root();
2197                if generation_id != this.generation_id.get() {
2198                    return;
2199                }
2200
2201                // Step 3.1. Fire an event named timeupdate at the media element.
2202                this.upcast::<EventTarget>().fire_event(atom!("timeupdate"), CanGc::note());
2203
2204                // Step 3.2. If the media element has ended playback, the direction of playback is
2205                // forwards, and paused is false, then:
2206                if this.ended_playback() &&
2207                    this.direction_of_playback() == PlaybackDirection::Forwards &&
2208                    !this.Paused() {
2209                    // Step 3.2.1. Set the paused attribute to true.
2210                    this.paused.set(true);
2211
2212                    // Step 3.2.2. Fire an event named pause at the media element.
2213                    this.upcast::<EventTarget>().fire_event(atom!("pause"), CanGc::note());
2214
2215                    // Step 3.2.3. Take pending play promises and reject pending play promises with
2216                    // the result and an "AbortError" DOMException.
2217                    this.take_pending_play_promises(Err(Error::Abort));
2218                    this.fulfill_in_flight_play_promises(|| ());
2219                }
2220
2221                // Step 3.3. Fire an event named ended at the media element.
2222                this.upcast::<EventTarget>().fire_event(atom!("ended"), CanGc::note());
2223            }));
2224
2225        // <https://html.spec.whatwg.org/multipage/#dom-media-have_current_data>
2226        self.change_ready_state(ReadyState::HaveCurrentData);
2227    }
2228
2229    /// <https://html.spec.whatwg.org/multipage/#reaches-the-end>
2230    fn end_of_playback_in_backwards_direction(&self) {
2231        // When the current playback position reaches the earliest possible position of the media
2232        // resource when the direction of playback is backwards, then the user agent must only queue
2233        // a media element task given the media element to fire an event named timeupdate at the
2234        // element.
2235        if self.current_playback_position.get() <= self.earliest_possible_position() {
2236            self.queue_media_element_task_to_fire_event(atom!("timeupdate"));
2237        }
2238    }
2239
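    /// Handles the player reaching the end of playback, dispatching to the
    /// forwards or backwards "reaches the end" steps depending on the current
    /// direction of playback.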
2240    fn playback_end(&self) {
2241        match self.direction_of_playback() {
2242            PlaybackDirection::Forwards => self.end_of_playback_in_forwards_direction(),
2243            PlaybackDirection::Backwards => self.end_of_playback_in_backwards_direction(),
2244        }
2245    }
2246
2247    fn playback_error(&self, error: &str, can_gc: CanGc) {
2248        error!("Player error: {:?}", error);
2249
2250        // If we have already flagged an error condition while processing
2251        // the network response, we should silently skip any observable
2252        // errors originating while decoding the erroneous response.
2253        if self.in_error_state() {
2254            return;
2255        }
2256
2257        // <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
2258        if self.ready_state.get() == ReadyState::HaveNothing {
2259            // => "If the media data can be fetched but is found by inspection to be in an
2260            // unsupported format, or can otherwise not be rendered at all"
2261            self.media_data_processing_failure_steps();
2262        } else {
2263            // => "If the media data is corrupted"
2264            self.media_data_processing_fatal_steps(MEDIA_ERR_DECODE, can_gc);
2265        }
2266    }
2267
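    /// Handles a metadata update from the player: creates the media-resource-specific
    /// audio and video tracks, establishes the duration and playback positions, moves
    /// readyState to HAVE_METADATA, applies any media fragment start time, and updates
    /// the media session title.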
2268    fn playback_metadata_updated(
2269        &self,
2270        metadata: &servo_media::player::metadata::Metadata,
2271        can_gc: CanGc,
2272    ) {
2273        // https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list
2274        // => If the media resource is found to have an audio track
2275        if !metadata.audio_tracks.is_empty() {
2276            for (i, _track) in metadata.audio_tracks.iter().enumerate() {
2277                // Step 1.
2278                let kind = match i {
2279                    0 => DOMString::from("main"),
2280                    _ => DOMString::new(),
2281                };
2282                let window = self.owner_window();
2283                let audio_track = AudioTrack::new(
2284                    &window,
2285                    DOMString::new(),
2286                    kind,
2287                    DOMString::new(),
2288                    DOMString::new(),
2289                    Some(&*self.AudioTracks(can_gc)),
2290                    can_gc,
2291                );
2292
2293                // Steps 2. & 3.
2294                self.AudioTracks(can_gc).add(&audio_track);
2295
2296                // Step 4
2297                if let Some(servo_url) = self.resource_url.borrow().as_ref() {
2298                    let fragment = MediaFragmentParser::from(servo_url);
2299                    if let Some(id) = fragment.id() {
2300                        if audio_track.id() == id {
2301                            self.AudioTracks(can_gc)
2302                                .set_enabled(self.AudioTracks(can_gc).len() - 1, true);
2303                        }
2304                    }
2305
2306                    if fragment.tracks().contains(&audio_track.kind().into()) {
2307                        self.AudioTracks(can_gc)
2308                            .set_enabled(self.AudioTracks(can_gc).len() - 1, true);
2309                    }
2310                }
2311
2312                // Step 5. & 6.
2313                if self.AudioTracks(can_gc).enabled_index().is_none() {
2314                    self.AudioTracks(can_gc)
2315                        .set_enabled(self.AudioTracks(can_gc).len() - 1, true);
2316                }
2317
2318                // Steps 7.
2319                let event = TrackEvent::new(
2320                    self.global().as_window(),
2321                    atom!("addtrack"),
2322                    false,
2323                    false,
2324                    &Some(VideoTrackOrAudioTrackOrTextTrack::AudioTrack(audio_track)),
2325                    can_gc,
2326                );
2327
2328                event
2329                    .upcast::<Event>()
2330                    .fire(self.upcast::<EventTarget>(), can_gc);
2331            }
2332        }
2333
2334        // => If the media resource is found to have a video track
2335        if !metadata.video_tracks.is_empty() {
2336            for (i, _track) in metadata.video_tracks.iter().enumerate() {
2337                // Step 1.
2338                let kind = match i {
2339                    0 => DOMString::from("main"),
2340                    _ => DOMString::new(),
2341                };
2342                let window = self.owner_window();
2343                let video_track = VideoTrack::new(
2344                    &window,
2345                    DOMString::new(),
2346                    kind,
2347                    DOMString::new(),
2348                    DOMString::new(),
2349                    Some(&*self.VideoTracks(can_gc)),
2350                    can_gc,
2351                );
2352
2353                // Steps 2. & 3.
2354                self.VideoTracks(can_gc).add(&video_track);
2355
2356                // Step 4.
2357                if let Some(track) = self.VideoTracks(can_gc).item(0) {
2358                    if let Some(servo_url) = self.resource_url.borrow().as_ref() {
2359                        let fragment = MediaFragmentParser::from(servo_url);
2360                        if let Some(id) = fragment.id() {
2361                            if track.id() == id {
2362                                self.VideoTracks(can_gc).set_selected(0, true);
2363                            }
2364                        } else if fragment.tracks().contains(&track.kind().into()) {
2365                            self.VideoTracks(can_gc).set_selected(0, true);
2366                        }
2367                    }
2368                }
2369
2370                // Step 5. & 6.
2371                if self.VideoTracks(can_gc).selected_index().is_none() {
2372                    self.VideoTracks(can_gc)
2373                        .set_selected(self.VideoTracks(can_gc).len() - 1, true);
2374                }
2375
2376                // Steps 7.
2377                let event = TrackEvent::new(
2378                    self.global().as_window(),
2379                    atom!("addtrack"),
2380                    false,
2381                    false,
2382                    &Some(VideoTrackOrAudioTrackOrTextTrack::VideoTrack(video_track)),
2383                    can_gc,
2384                );
2385
2386                event
2387                    .upcast::<Event>()
2388                    .fire(self.upcast::<EventTarget>(), can_gc);
2389            }
2390        }
2391
2392        // => "Once enough of the media data has been fetched to determine the duration..."
2393        // TODO Step 1. Establish the media timeline for the purposes of the current playback
2394        // position and the earliest possible position, based on the media data.
2395
2396        // TODO Step 2. Update the timeline offset to the date and time that corresponds to the zero
2397        // time in the media timeline established in the previous step, if any. If no explicit time
2398        // and date is given by the media resource, the timeline offset must be set to Not-a-Number
2399        // (NaN).
2400
2401        // Step 3. Set the current playback position and the official playback position to the
2402        // earliest possible position.
2403        let earliest_possible_position = self.earliest_possible_position();
2404        self.current_playback_position
2405            .set(earliest_possible_position);
2406        self.official_playback_position
2407            .set(earliest_possible_position);
2408
2409        // Step 4. Update the duration attribute with the time of the last frame of the resource, if
2410        // known, on the media timeline established above. If it is not known (e.g. a stream that is
2411        // in principle infinite), update the duration attribute to the value positive Infinity.
2412        // Note: The user agent will queue a media element task given the media element to fire an
2413        // event named durationchange at the element at this point.
2414        let previous_duration = self.duration.get();
2415        if let Some(duration) = metadata.duration {
2416            self.duration.set(duration.as_secs_f64());
2417        } else {
2418            self.duration.set(f64::INFINITY);
2419        }
2420        if previous_duration != self.duration.get() {
2421            self.queue_media_element_task_to_fire_event(atom!("durationchange"));
2422        }
2423
2424        // Step 5. For video elements, set the videoWidth and videoHeight attributes, and queue a
2425        // media element task given the media element to fire an event named resize at the media
2426        // element.
2427        self.handle_resize(Some(metadata.width), Some(metadata.height));
2428
2429        // Step 6. Set the readyState attribute to HAVE_METADATA.
2430        self.change_ready_state(ReadyState::HaveMetadata);
2431
2432        // Step 7. Let jumped be false.
2433        let mut jumped = false;
2434
2435        // Step 8. If the media element's default playback start position is greater than zero, then
2436        // seek to that time, and let jumped be true.
2437        if self.default_playback_start_position.get() > 0. {
2438            self.seek(
2439                self.default_playback_start_position.get(),
2440                /* approximate_for_speed */ false,
2441            );
2442            jumped = true;
2443        }
2444
2445        // Step 9. Set the media element's default playback start position to zero.
2446        self.default_playback_start_position.set(0.);
2447
2448        // Step 10. Let the initial playback position be 0.
2449        // Step 11. If either the media resource or the URL of the current media resource indicate a
2450        // particular start time, then set the initial playback position to that time and, if jumped
2451        // is still false, seek to that time.
2452        if let Some(servo_url) = self.resource_url.borrow().as_ref() {
2453            let fragment = MediaFragmentParser::from(servo_url);
2454            if let Some(initial_playback_position) = fragment.start() {
2455                if initial_playback_position > 0. &&
2456                    initial_playback_position < self.duration.get() &&
2457                    !jumped
2458                {
2459                    self.seek(
2460                        initial_playback_position,
2461                        /* approximate_for_speed */ false,
2462                    )
2463                }
2464            }
2465        }
2466
2467        // Step 12. If there is no enabled audio track, then enable an audio track. This will cause
2468        // a change event to be fired.
2469        // Step 13. If there is no selected video track, then select a video track. This will cause
2470        // a change event to be fired.
2471        // Note that these steps are already handled by the earlier media track processing.
2472
2473        let global = self.global();
2474        let window = global.as_window();
2475
2476        // Update the media session metadata title with the obtained metadata.
2477        window.Navigator().MediaSession().update_title(
2478            metadata
2479                .title
2480                .clone()
2481                .unwrap_or(window.get_url().into_string()),
2482        );
2483    }
2484
2485    fn playback_video_frame_updated(&self) {
2486        // Check if the frame was resized
2487        if let Some(frame) = self.video_renderer.lock().unwrap().current_frame {
2488            self.handle_resize(Some(frame.width as u32), Some(frame.height as u32));
2489        }
2490    }
2491
2492    fn playback_need_data(&self) {
2493        // The player needs more data.
2494        // If we already have a valid fetch request, we do nothing.
2495        // Otherwise, if we have no request and the previous request was
2496        // cancelled because we got an EnoughData event, we restart
2497        // fetching where we left off.
2498        if let Some(ref current_fetch_context) = *self.current_fetch_context.borrow() {
2499            if let Some(reason) = current_fetch_context.cancel_reason() {
2500                // XXX(ferjm) Ideally we would create a fetch request that resumes from
2501                // where we left off. But keeping track of the exact next byte that the
2502                // media backend expects is not trivial, so for now we simply seek to the
2503                // current playback position, which creates a new fetch request for the
2504                // last rendered frame.
2505                if *reason == CancelReason::Backoff {
2506                    self.seek(
2507                        self.current_playback_position.get(),
2508                        /* approximate_for_speed */ false,
2509                    );
2510                }
2511                return;
2512            }
2513        }
2514
2515        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
2516            if let Err(e) = {
2517                let mut data_source = current_fetch_context.data_source().borrow_mut();
2518                data_source.set_locked(false);
2519                data_source.process_into_player_from_queue(self.player.borrow().as_ref().unwrap())
2520            } {
2521                // If we are pushing too much data and we know that we can
2522                // restart the download later from where we left off, we cancel
2523                // the current request. Otherwise, we continue the request
2524                // assuming that we may drop some frames.
2525                if e == PlayerError::EnoughData {
2526                    current_fetch_context.cancel(CancelReason::Backoff);
2527                }
2528            }
2529        }
2530    }
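    // Hedged sketch (illustrative only; `FetchState` and `on_need_data` are hypothetical):
    // the NeedData handling above boils down to a small decision table keyed on the cancel
    // reason of the current fetch, with `CancelReason::Backoff` marking a fetch that was
    // cancelled deliberately and can be resumed.
    //
    //     enum FetchState { Active, Cancelled(CancelReason) }
    //
    //     fn on_need_data(state: &FetchState) -> &'static str {
    //         match state {
    //             // A fetch cancelled for backoff is restarted by seeking to the current
    //             // playback position, which issues a fresh request.
    //             FetchState::Cancelled(CancelReason::Backoff) => "seek to restart the fetch",
    //             // Fetches cancelled because of an error or an abort are left alone.
    //             FetchState::Cancelled(_) => "do nothing",
    //             // An active fetch unlocks the data source and drains queued buffers.
    //             FetchState::Active => "drain buffered data into the player",
    //         }
    //     }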
2531
2532    fn playback_enough_data(&self) {
2533        self.change_ready_state(ReadyState::HaveEnoughData);
2534
2535        // The player has enough data and it is asking us to stop pushing
2536        // bytes, so we cancel the ongoing fetch request if and only if we are
2537        // able to restart it from where we left off. Otherwise, we continue the
2538        // current fetch request, assuming that some frames will be dropped.
2539        if let Some(ref mut current_fetch_context) = *self.current_fetch_context.borrow_mut() {
2540            if current_fetch_context.is_seekable() {
2541                current_fetch_context.cancel(CancelReason::Backoff);
2542            }
2543        }
2544    }
2545
2546    fn playback_position_changed(&self, position: f64) {
2547        let _ = self
2548            .played
2549            .borrow_mut()
2550            .add(self.current_playback_position.get(), position);
2551        self.current_playback_position.set(position);
2552        self.official_playback_position.set(position);
2553        self.time_marches_on();
2554
2555        let media_position_state =
2556            MediaPositionState::new(self.duration.get(), self.playbackRate.get(), position);
2557        debug!(
2558            "Sending media session event set position state {:?}",
2559            media_position_state
2560        );
2561        self.send_media_session_event(MediaSessionEvent::SetPositionState(media_position_state));
2562    }
2563
2564    fn playback_seek_done(&self) {
2565        // <https://html.spec.whatwg.org/multipage/#dom-media-seek>
2566        // Step 13. Await a stable state.
2567        let task = MediaElementMicrotask::Seeked {
2568            elem: DomRoot::from_ref(self),
2569            generation_id: self.generation_id.get(),
2570        };
2571
2572        ScriptThread::await_stable_state(Microtask::MediaElement(task));
2573    }
2574
2575    fn playback_state_changed(&self, state: &PlaybackState) {
2576        let mut media_session_playback_state = MediaSessionPlaybackState::None_;
2577        match *state {
2578            PlaybackState::Paused => {
2579                media_session_playback_state = MediaSessionPlaybackState::Paused;
2580                if self.ready_state.get() == ReadyState::HaveMetadata {
2581                    self.change_ready_state(ReadyState::HaveEnoughData);
2582                }
2583            },
2584            PlaybackState::Playing => {
2585                media_session_playback_state = MediaSessionPlaybackState::Playing;
2586            },
2587            PlaybackState::Buffering => {
2588                // Do not send the media session playback state change event
2589                // in this case as a None_ state is expected to clean up the
2590                // session.
2591                return;
2592            },
2593            _ => {},
2594        };
2595        debug!(
2596            "Sending media session event playback state changed to {:?}",
2597            media_session_playback_state
2598        );
2599        self.send_media_session_event(MediaSessionEvent::PlaybackStateChange(
2600            media_session_playback_state,
2601        ));
2602    }
2603
2604    fn handle_player_event(&self, player_id: usize, event: &PlayerEvent, can_gc: CanGc) {
2605        // Ignore asynchronous events from a previous player.
2606        if self
2607            .player
2608            .borrow()
2609            .as_ref()
2610            .is_none_or(|player| player.lock().unwrap().get_id() != player_id)
2611        {
2612            return;
2613        }
2614
2615        match *event {
2616            PlayerEvent::EndOfStream => self.playback_end(),
2617            PlayerEvent::Error(ref error) => self.playback_error(error, can_gc),
2618            PlayerEvent::VideoFrameUpdated => self.playback_video_frame_updated(),
2619            PlayerEvent::MetadataUpdated(ref metadata) => {
2620                self.playback_metadata_updated(metadata, can_gc)
2621            },
2622            PlayerEvent::NeedData => self.playback_need_data(),
2623            PlayerEvent::EnoughData => self.playback_enough_data(),
2624            PlayerEvent::PositionChanged(position) => self.playback_position_changed(position),
2625            PlayerEvent::SeekData(p, ref seek_lock) => {
2626                self.fetch_request(Some(p), Some(seek_lock.clone()))
2627            },
2628            PlayerEvent::SeekDone(_) => self.playback_seek_done(),
2629            PlayerEvent::StateChanged(ref state) => self.playback_state_changed(state),
2630        }
2631    }
2632
2633    fn seekable(&self) -> TimeRangesContainer {
2634        let mut seekable = TimeRangesContainer::default();
2635        if let Some(ref player) = *self.player.borrow() {
2636            if let Ok(ranges) = player.lock().unwrap().seekable() {
2637                for range in ranges {
2638                    let _ = seekable.add(range.start, range.end);
2639                }
2640            }
2641        }
2642        seekable
2643    }
2644
2645    /// <https://html.spec.whatwg.org/multipage/#earliest-possible-position>
2646    fn earliest_possible_position(&self) -> f64 {
2647        self.seekable()
2648            .start(0)
2649            .unwrap_or_else(|_| self.current_playback_position.get())
2650    }
2651
2652    fn render_controls(&self, can_gc: CanGc) {
2653        if self.upcast::<Element>().is_shadow_host() {
2654            // Bail out if we are already showing the controls.
2655            return;
2656        }
2657
2658        // FIXME(stevennovaryo): Recheck styling of media element to avoid
2659        //                       reparsing styles.
2660        let shadow_root = self
2661            .upcast::<Element>()
2662            .attach_ua_shadow_root(false, can_gc);
2663        let document = self.owner_document();
2664        let script = Element::create(
2665            QualName::new(None, ns!(html), local_name!("script")),
2666            None,
2667            &document,
2668            ElementCreator::ScriptCreated,
2669            CustomElementCreationMode::Asynchronous,
2670            None,
2671            can_gc,
2672        );
2673        // This is our hacky way to temporarily work around the lack of a privileged
2674        // JS context.
2675        // The media controls UI accesses the document.servoGetMediaControls(id) API
2676        // to get a reference to the media controls ShadowRoot.
2677        // `id` needs to match the internally generated UUID assigned to a media element.
2678        let id = Uuid::new_v4().to_string();
2679        document.register_media_controls(&id, &shadow_root);
2680        let media_controls_script = MEDIA_CONTROL_JS.replace("@@@id@@@", &id);
2681        *self.media_controls_id.borrow_mut() = Some(id);
2682        script
2683            .upcast::<Node>()
2684            .set_text_content_for_element(Some(DOMString::from(media_controls_script)), can_gc);
2685        if let Err(e) = shadow_root
2686            .upcast::<Node>()
2687            .AppendChild(script.upcast::<Node>(), can_gc)
2688        {
2689            warn!("Could not render media controls {:?}", e);
2690            return;
2691        }
2692
2693        let style = Element::create(
2694            QualName::new(None, ns!(html), local_name!("style")),
2695            None,
2696            &document,
2697            ElementCreator::ScriptCreated,
2698            CustomElementCreationMode::Asynchronous,
2699            None,
2700            can_gc,
2701        );
2702
2703        style
2704            .upcast::<Node>()
2705            .set_text_content_for_element(Some(DOMString::from(MEDIA_CONTROL_CSS)), can_gc);
2706
2707        if let Err(e) = shadow_root
2708            .upcast::<Node>()
2709            .AppendChild(style.upcast::<Node>(), can_gc)
2710        {
2711            warn!("Could not render media controls {:?}", e);
2712        }
2713
2714        self.upcast::<Node>().dirty(NodeDamage::Other);
2715    }
2716
2717    fn remove_controls(&self) {
2718        if let Some(id) = self.media_controls_id.borrow_mut().take() {
2719            self.owner_document().unregister_media_controls(&id);
2720        }
2721    }
2722
2723    /// Gets the video frame at the current playback position.
2724    pub(crate) fn get_current_frame(&self) -> Option<VideoFrame> {
2725        self.video_renderer
2726            .lock()
2727            .unwrap()
2728            .current_frame_holder
2729            .as_ref()
2730            .map(|holder| holder.get_frame())
2731    }
2732
2733    /// Gets the current frame of the video element to present, if any.
2734    /// <https://html.spec.whatwg.org/multipage/#the-video-element:the-video-element-7>
2735    pub(crate) fn get_current_frame_to_present(&self) -> Option<MediaFrame> {
2736        let (current_frame, poster_frame) = {
2737            let renderer = self.video_renderer.lock().unwrap();
2738            (renderer.current_frame, renderer.poster_frame)
2739        };
2740
2741        // If the show poster flag is set (or there is no current video frame to
2742        // present) AND there is a poster frame, present that.
2743        if (self.show_poster.get() || current_frame.is_none()) && poster_frame.is_some() {
2744            return poster_frame;
2745        }
2746
2747        current_frame
2748    }
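    // Illustrative sketch (hypothetical pure function mirroring the branches above): frame
    // selection reduces to a small function over the show-poster flag and the two frames.
    //
    //     fn select_frame<T>(show_poster: bool, current: Option<T>, poster: Option<T>) -> Option<T> {
    //         if (show_poster || current.is_none()) && poster.is_some() {
    //             return poster; // poster wins while the show-poster flag is set
    //         }
    //         current            // otherwise present the latest decoded frame, if any
    //     }
    //
    //     assert_eq!(select_frame(true, Some(1), Some(2)), Some(2));
    //     assert_eq!(select_frame(false, Some(1), Some(2)), Some(1));
    //     assert_eq!(select_frame(false, None, None), None::<i32>);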
2749
2750    fn handle_resize(&self, width: Option<u32>, height: Option<u32>) {
2751        if let Some(video_elem) = self.downcast::<HTMLVideoElement>() {
2752            video_elem.resize(width, height);
2753            self.upcast::<Node>().dirty(NodeDamage::Other);
2754        }
2755    }
2756
2757    /// By default the audio is rendered through the audio sink automatically
2758    /// selected by the servo-media Player instance. However, in some cases, like
2759    /// the WebAudio MediaElementAudioSourceNode, we need to set a custom audio
2760    /// renderer.
2761    pub(crate) fn set_audio_renderer(
2762        &self,
2763        audio_renderer: Arc<Mutex<dyn AudioRenderer>>,
2764        can_gc: CanGc,
2765    ) {
2766        *self.audio_renderer.borrow_mut() = Some(audio_renderer);
2767        if let Some(ref player) = *self.player.borrow() {
2768            if let Err(e) = player.lock().unwrap().stop() {
2769                error!("Could not stop player {:?}", e);
2770            }
2771            self.media_element_load_algorithm(can_gc);
2772        }
2773    }
2774
2775    fn send_media_session_event(&self, event: MediaSessionEvent) {
2776        let global = self.global();
2777        let media_session = global.as_window().Navigator().MediaSession();
2778
2779        media_session.register_media_instance(self);
2780
2781        media_session.send_event(event);
2782    }
2783
2784    pub(crate) fn set_duration(&self, duration: f64) {
2785        self.duration.set(duration);
2786    }
2787
2788    pub(crate) fn reset(&self) {
2789        if let Some(ref player) = *self.player.borrow() {
2790            if let Err(e) = player.lock().unwrap().stop() {
2791                error!("Could not stop player {:?}", e);
2792            }
2793        }
2794    }
2795
2796    /// <https://html.spec.whatwg.org/multipage/#concept-media-load-resource>
2797    pub(crate) fn origin_is_clean(&self) -> bool {
2798        // Step 5.local (media provider object).
2799        if self.src_object.borrow().is_some() {
2800            // The resource described by the current media resource, if any,
2801            // contains the media data. It is CORS-same-origin.
2802            return true;
2803        }
2804
2805        // Step 5.remote (URL record).
2806        if self.resource_url.borrow().is_some() {
2807            // Update the media data with the contents
2808            // of response's unsafe response obtained in this fashion.
2809            // Response can be CORS-same-origin or CORS-cross-origin;
2810            if let Some(ref current_fetch_context) = *self.current_fetch_context.borrow() {
2811                return current_fetch_context.origin_is_clean();
2812            }
2813        }
2814
2815        true
2816    }
2817}
2818
2819impl HTMLMediaElementMethods<crate::DomTypeHolder> for HTMLMediaElement {
2820    /// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
2821    fn NetworkState(&self) -> u16 {
2822        self.network_state.get() as u16
2823    }
2824
2825    /// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
2826    fn ReadyState(&self) -> u16 {
2827        self.ready_state.get() as u16
2828    }
2829
2830    // https://html.spec.whatwg.org/multipage/#dom-media-autoplay
2831    make_bool_getter!(Autoplay, "autoplay");
2832    // https://html.spec.whatwg.org/multipage/#dom-media-autoplay
2833    make_bool_setter!(SetAutoplay, "autoplay");
2834
2835    // https://html.spec.whatwg.org/multipage/#attr-media-loop
2836    make_bool_getter!(Loop, "loop");
2837    // https://html.spec.whatwg.org/multipage/#attr-media-loop
2838    make_bool_setter!(SetLoop, "loop");
2839
2840    // https://html.spec.whatwg.org/multipage/#dom-media-defaultmuted
2841    make_bool_getter!(DefaultMuted, "muted");
2842    // https://html.spec.whatwg.org/multipage/#dom-media-defaultmuted
2843    make_bool_setter!(SetDefaultMuted, "muted");
2844
2845    // https://html.spec.whatwg.org/multipage/#dom-media-controls
2846    make_bool_getter!(Controls, "controls");
2847    // https://html.spec.whatwg.org/multipage/#dom-media-controls
2848    make_bool_setter!(SetControls, "controls");
2849
2850    // https://html.spec.whatwg.org/multipage/#dom-media-src
2851    make_url_getter!(Src, "src");
2852
2853    // https://html.spec.whatwg.org/multipage/#dom-media-src
2854    make_url_setter!(SetSrc, "src");
2855
2856    /// <https://html.spec.whatwg.org/multipage/#dom-media-crossOrigin>
2857    fn GetCrossOrigin(&self) -> Option<DOMString> {
2858        reflect_cross_origin_attribute(self.upcast::<Element>())
2859    }
2860    /// <https://html.spec.whatwg.org/multipage/#dom-media-crossOrigin>
2861    fn SetCrossOrigin(&self, value: Option<DOMString>, can_gc: CanGc) {
2862        set_cross_origin_attribute(self.upcast::<Element>(), value, can_gc);
2863    }
2864
2865    /// <https://html.spec.whatwg.org/multipage/#dom-media-muted>
2866    fn Muted(&self) -> bool {
2867        self.muted.get()
2868    }
2869
2870    /// <https://html.spec.whatwg.org/multipage/#dom-media-muted>
2871    fn SetMuted(&self, value: bool) {
2872        if self.muted.get() == value {
2873            return;
2874        }
2875
2876        if let Some(ref player) = *self.player.borrow() {
2877            let _ = player.lock().unwrap().set_mute(value);
2878        }
2879
2880        self.muted.set(value);
2881        self.owner_global()
2882            .task_manager()
2883            .media_element_task_source()
2884            .queue_simple_event(self.upcast(), atom!("volumechange"));
2885        if !self.is_allowed_to_play() {
2886            self.internal_pause_steps();
2887        }
2888    }
2889
2890    /// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
2891    fn GetSrcObject(&self) -> Option<MediaStreamOrBlob> {
2892        (*self.src_object.borrow())
2893            .as_ref()
2894            .map(|src_object| match src_object {
2895                SrcObject::Blob(blob) => MediaStreamOrBlob::Blob(DomRoot::from_ref(blob)),
2896                SrcObject::MediaStream(stream) => {
2897                    MediaStreamOrBlob::MediaStream(DomRoot::from_ref(stream))
2898                },
2899            })
2900    }
2901
2902    /// <https://html.spec.whatwg.org/multipage/#dom-media-srcobject>
2903    fn SetSrcObject(&self, value: Option<MediaStreamOrBlob>, can_gc: CanGc) {
2904        *self.src_object.borrow_mut() = value.map(|value| value.into());
2905        self.media_element_load_algorithm(can_gc);
2906    }
2907
2908    // https://html.spec.whatwg.org/multipage/#attr-media-preload
2909    // Missing/Invalid values are user-agent defined.
2910    make_enumerated_getter!(
2911        Preload,
2912        "preload",
2913        "none" | "metadata" | "auto",
2914        missing => "auto",
2915        invalid => "auto"
2916    );
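    // Hedged illustration (standalone sketch, not the macro's actual expansion): the mapping
    // above means both a missing `preload` attribute and an unrecognized keyword reflect as
    // "auto". Ignoring ASCII case-insensitivity for brevity:
    //
    //     fn reflect_preload(attr: Option<&str>) -> &'static str {
    //         match attr {
    //             Some("none") => "none",
    //             Some("metadata") => "metadata",
    //             Some("auto") => "auto",
    //             Some(_) => "auto", // invalid value default
    //             None => "auto",    // missing value default
    //         }
    //     }
    //
    //     assert_eq!(reflect_preload(None), "auto");
    //     assert_eq!(reflect_preload(Some("bogus")), "auto");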
2917
2918    // https://html.spec.whatwg.org/multipage/#attr-media-preload
2919    make_setter!(SetPreload, "preload");
2920
2921    /// <https://html.spec.whatwg.org/multipage/#dom-media-currentsrc>
2922    fn CurrentSrc(&self) -> USVString {
2923        USVString(self.current_src.borrow().clone())
2924    }
2925
2926    /// <https://html.spec.whatwg.org/multipage/#dom-media-load>
2927    fn Load(&self, can_gc: CanGc) {
2928        self.media_element_load_algorithm(can_gc);
2929    }
2930
2931    /// <https://html.spec.whatwg.org/multipage/#dom-navigator-canplaytype>
2932    fn CanPlayType(&self, type_: DOMString) -> CanPlayTypeResult {
2933        match ServoMedia::get().can_play_type(&type_.str()) {
2934            SupportsMediaType::No => CanPlayTypeResult::_empty,
2935            SupportsMediaType::Maybe => CanPlayTypeResult::Maybe,
2936            SupportsMediaType::Probably => CanPlayTypeResult::Probably,
2937        }
2938    }
2939
2940    /// <https://html.spec.whatwg.org/multipage/#dom-media-error>
2941    fn GetError(&self) -> Option<DomRoot<MediaError>> {
2942        self.error.get()
2943    }
2944
2945    /// <https://html.spec.whatwg.org/multipage/#dom-media-play>
2946    fn Play(&self, comp: InRealm, can_gc: CanGc) -> Rc<Promise> {
2947        let promise = Promise::new_in_current_realm(comp, can_gc);
2948
2949        // TODO Step 1. If the media element is not allowed to play, then return a promise rejected
2950        // with a "NotAllowedError" DOMException.
2951
2952        // Step 2. If the media element's error attribute is not null and its code is
2953        // MEDIA_ERR_SRC_NOT_SUPPORTED, then return a promise rejected with a "NotSupportedError"
2954        // DOMException.
2955        if self
2956            .error
2957            .get()
2958            .is_some_and(|e| e.Code() == MEDIA_ERR_SRC_NOT_SUPPORTED)
2959        {
2960            promise.reject_error(Error::NotSupported, can_gc);
2961            return promise;
2962        }
2963
2964        // Step 3. Let promise be a new promise and append promise to the list of pending play
2965        // promises.
2966        self.push_pending_play_promise(&promise);
2967
2968        // Step 4. Run the internal play steps for the media element.
2969        self.internal_play_steps(can_gc);
2970
2971        // Step 5. Return promise.
2972        promise
2973    }
2974
2975    /// <https://html.spec.whatwg.org/multipage/#dom-media-pause>
2976    fn Pause(&self, can_gc: CanGc) {
2977        // Step 1. If the media element's networkState attribute has the value NETWORK_EMPTY, invoke
2978        // the media element's resource selection algorithm.
2979        if self.network_state.get() == NetworkState::Empty {
2980            self.invoke_resource_selection_algorithm(can_gc);
2981        }
2982
2983        // Step 2. Run the internal pause steps for the media element.
2984        self.internal_pause_steps();
2985    }
2986
2987    /// <https://html.spec.whatwg.org/multipage/#dom-media-paused>
2988    fn Paused(&self) -> bool {
2989        self.paused.get()
2990    }
2991
2992    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
2993    fn GetDefaultPlaybackRate(&self) -> Fallible<Finite<f64>> {
2994        Ok(Finite::wrap(self.defaultPlaybackRate.get()))
2995    }
2996
2997    /// <https://html.spec.whatwg.org/multipage/#dom-media-defaultplaybackrate>
2998    fn SetDefaultPlaybackRate(&self, value: Finite<f64>) -> ErrorResult {
2999        let min_allowed = -64.0;
3000        let max_allowed = 64.0;
3001        if *value < min_allowed || *value > max_allowed {
3002            return Err(Error::NotSupported);
3003        }
3004
3005        if *value != self.defaultPlaybackRate.get() {
3006            self.defaultPlaybackRate.set(*value);
3007            self.queue_media_element_task_to_fire_event(atom!("ratechange"));
3008        }
3009
3010        Ok(())
3011    }
3012
3013    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
3014    fn GetPlaybackRate(&self) -> Fallible<Finite<f64>> {
3015        Ok(Finite::wrap(self.playbackRate.get()))
3016    }
3017
3018    /// <https://html.spec.whatwg.org/multipage/#dom-media-playbackrate>
3019    fn SetPlaybackRate(&self, value: Finite<f64>) -> ErrorResult {
3020        let min_allowed = -64.0;
3021        let max_allowed = 64.0;
3022        if *value < min_allowed || *value > max_allowed {
3023            return Err(Error::NotSupported);
3024        }
3025
3026        if *value != self.playbackRate.get() {
3027            self.playbackRate.set(*value);
3028            self.queue_media_element_task_to_fire_event(atom!("ratechange"));
3029            if self.is_potentially_playing() {
3030                if let Some(ref player) = *self.player.borrow() {
3031                    if let Err(e) = player.lock().unwrap().set_rate(*value) {
3032                        warn!("Could not set the playback rate {:?}", e);
3033                    }
3034                }
3035            }
3036        }
3037
3038        Ok(())
3039    }
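    // Hedged sketch (hypothetical helper, not code used by the setters above): both
    // SetDefaultPlaybackRate and SetPlaybackRate accept any rate within the inclusive
    // [-64.0, 64.0] range and report NotSupportedError otherwise.
    //
    //     fn rate_is_supported(rate: f64) -> bool {
    //         (-64.0..=64.0).contains(&rate)
    //     }
    //
    //     assert!(rate_is_supported(1.0));     // normal playback
    //     assert!(rate_is_supported(-2.0));    // reverse rates are within the allowed range
    //     assert!(!rate_is_supported(128.0));  // out of range → Err(Error::NotSupported)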
3040
3041    /// <https://html.spec.whatwg.org/multipage/#dom-media-duration>
3042    fn Duration(&self) -> f64 {
3043        self.duration.get()
3044    }
3045
3046    /// <https://html.spec.whatwg.org/multipage/#dom-media-currenttime>
3047    fn CurrentTime(&self) -> Finite<f64> {
3048        Finite::wrap(if self.default_playback_start_position.get() != 0. {
3049            self.default_playback_start_position.get()
3050        } else if self.seeking.get() {
3051            // Per the specification, whenever the user agent provides a stable state, the
3052            // official playback position must be set to the current playback position, and
3053            // the `await a stable state` step of `seek` (step 13) is reached on receiving the
3054            // `seek completion` signal from the media engine, so the current playback position
3055            // is returned until the official playback position is updated in `seek_end`.
3056            // <https://html.spec.whatwg.org/multipage/#playing-the-media-resource:official-playback-position-2>
3057            // <https://html.spec.whatwg.org/multipage/#dom-media-seek>
3058            // Note that other browsers do something similar (by checking the `seeking` value or by
3059            // making no distinction between the `current` and `official` playback positions).
3060            self.current_playback_position.get()
3061        } else {
3062            self.official_playback_position.get()
3063        })
3064    }
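    // Illustrative sketch (hypothetical pure function mirroring the branches above): the
    // reported currentTime is chosen from the three stored positions in priority order.
    //
    //     fn reported_time(default_start: f64, seeking: bool, current: f64, official: f64) -> f64 {
    //         if default_start != 0.0 {
    //             default_start // a pending default playback start position wins
    //         } else if seeking {
    //             current       // while seeking, report the current playback position
    //         } else {
    //             official      // otherwise report the official playback position
    //         }
    //     }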
3065
3066    /// <https://html.spec.whatwg.org/multipage/#dom-media-currenttime>
3067    fn SetCurrentTime(&self, time: Finite<f64>) {
3068        if self.ready_state.get() == ReadyState::HaveNothing {
3069            self.default_playback_start_position.set(*time);
3070        } else {
3071            self.official_playback_position.set(*time);
3072            self.seek(*time, /* approximate_for_speed */ false);
3073        }
3074    }
3075
3076    /// <https://html.spec.whatwg.org/multipage/#dom-media-seeking>
3077    fn Seeking(&self) -> bool {
3078        self.seeking.get()
3079    }
3080
3081    /// <https://html.spec.whatwg.org/multipage/#dom-media-ended>
3082    fn Ended(&self) -> bool {
3083        self.ended_playback() && self.direction_of_playback() == PlaybackDirection::Forwards
3084    }
3085
3086    /// <https://html.spec.whatwg.org/multipage/#dom-media-fastseek>
3087    fn FastSeek(&self, time: Finite<f64>) {
3088        self.seek(*time, /* approximate_for_speed */ true);
3089    }
3090
3091    /// <https://html.spec.whatwg.org/multipage/#dom-media-played>
3092    fn Played(&self, can_gc: CanGc) -> DomRoot<TimeRanges> {
3093        TimeRanges::new(
3094            self.global().as_window(),
3095            self.played.borrow().clone(),
3096            can_gc,
3097        )
3098    }
3099
3100    /// <https://html.spec.whatwg.org/multipage/#dom-media-seekable>
3101    fn Seekable(&self, can_gc: CanGc) -> DomRoot<TimeRanges> {
3102        TimeRanges::new(self.global().as_window(), self.seekable(), can_gc)
3103    }
3104
3105    /// <https://html.spec.whatwg.org/multipage/#dom-media-buffered>
3106    fn Buffered(&self, can_gc: CanGc) -> DomRoot<TimeRanges> {
3107        let mut buffered = TimeRangesContainer::default();
3108        if let Some(ref player) = *self.player.borrow() {
3109            if let Ok(ranges) = player.lock().unwrap().buffered() {
3110                for range in ranges {
3111                    let _ = buffered.add(range.start, range.end);
3112                }
3113            }
3114        }
3115        TimeRanges::new(self.global().as_window(), buffered, can_gc)
3116    }
3117
3118    /// <https://html.spec.whatwg.org/multipage/#dom-media-audiotracks>
3119    fn AudioTracks(&self, can_gc: CanGc) -> DomRoot<AudioTrackList> {
3120        let window = self.owner_window();
3121        self.audio_tracks_list
3122            .or_init(|| AudioTrackList::new(&window, &[], Some(self), can_gc))
3123    }
3124
3125    /// <https://html.spec.whatwg.org/multipage/#dom-media-videotracks>
3126    fn VideoTracks(&self, can_gc: CanGc) -> DomRoot<VideoTrackList> {
3127        let window = self.owner_window();
3128        self.video_tracks_list
3129            .or_init(|| VideoTrackList::new(&window, &[], Some(self), can_gc))
3130    }
3131
3132    /// <https://html.spec.whatwg.org/multipage/#dom-media-texttracks>
3133    fn TextTracks(&self, can_gc: CanGc) -> DomRoot<TextTrackList> {
3134        let window = self.owner_window();
3135        self.text_tracks_list
3136            .or_init(|| TextTrackList::new(&window, &[], can_gc))
3137    }
3138
3139    /// <https://html.spec.whatwg.org/multipage/#dom-media-addtexttrack>
3140    fn AddTextTrack(
3141        &self,
3142        kind: TextTrackKind,
3143        label: DOMString,
3144        language: DOMString,
3145        can_gc: CanGc,
3146    ) -> DomRoot<TextTrack> {
3147        let window = self.owner_window();
3148        // Step 1 & 2
3149        // FIXME(#22314, dlrobertson) set the ready state to Loaded
3150        let track = TextTrack::new(
3151            &window,
3152            "".into(),
3153            kind,
3154            label,
3155            language,
3156            TextTrackMode::Hidden,
3157            None,
3158            can_gc,
3159        );
3160        // Step 3 & 4
3161        self.TextTracks(can_gc).add(&track);
3162        // Step 5
3163        DomRoot::from_ref(&track)
3164    }
3165
3166    /// <https://html.spec.whatwg.org/multipage/#dom-media-volume>
3167    fn GetVolume(&self) -> Fallible<Finite<f64>> {
3168        Ok(Finite::wrap(self.volume.get()))
3169    }
3170
3171    /// <https://html.spec.whatwg.org/multipage/#dom-media-volume>
3172    fn SetVolume(&self, value: Finite<f64>) -> ErrorResult {
3173        let minimum_volume = 0.0;
3174        let maximum_volume = 1.0;
3175        if *value < minimum_volume || *value > maximum_volume {
3176            return Err(Error::IndexSize(None));
3177        }
3178
3179        if *value != self.volume.get() {
3180            self.volume.set(*value);
3181            if let Some(player) = self.player.borrow().as_ref() {
3182                let _ = player.lock().unwrap().set_volume(*value);
3183            }
3184            self.owner_global()
3185                .task_manager()
3186                .media_element_task_source()
3187                .queue_simple_event(self.upcast(), atom!("volumechange"));
3188            if !self.is_allowed_to_play() {
3189                self.internal_pause_steps();
3190            }
3191        }
3192
3193        Ok(())
3194    }
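    // Hedged sketch (hypothetical helper): the volume validation above accepts the inclusive
    // [0.0, 1.0] range and reports anything else as an IndexSizeError.
    //
    //     fn volume_is_valid(volume: f64) -> bool {
    //         (0.0..=1.0).contains(&volume)
    //     }
    //
    //     assert!(volume_is_valid(0.5));
    //     assert!(!volume_is_valid(1.5)); // → Err(Error::IndexSize(..))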
3195}
3196
3197impl VirtualMethods for HTMLMediaElement {
3198    fn super_type(&self) -> Option<&dyn VirtualMethods> {
3199        Some(self.upcast::<HTMLElement>() as &dyn VirtualMethods)
3200    }
3201
3202    fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation, can_gc: CanGc) {
3203        self.super_type()
3204            .unwrap()
3205            .attribute_mutated(attr, mutation, can_gc);
3206
3207        match *attr.local_name() {
3208            local_name!("muted") => {
3209                if let AttributeMutation::Set(
3210                    _,
3211                    AttributeMutationReason::ByCloning | AttributeMutationReason::ByParser,
3212                ) = mutation
3213                {
3214                    self.SetMuted(true);
3215                }
3216            },
3217            local_name!("src") => {
3218                // <https://html.spec.whatwg.org/multipage/#location-of-the-media-resource>
3219                // If a src attribute of a media element is set or changed, the user agent must invoke
3220                // the media element's media element load algorithm (Removing the src attribute does
3221                // not do this, even if there are source elements present).
3222                if !mutation.is_removal() {
3223                    self.media_element_load_algorithm(can_gc);
3224                }
3225            },
3226            local_name!("controls") => {
3227                if mutation.new_value(attr).is_some() {
3228                    self.render_controls(can_gc);
3229                } else {
3230                    self.remove_controls();
3231                }
3232            },
3233            _ => (),
3234        };
3235    }
3236
3237    /// <https://html.spec.whatwg.org/multipage/#playing-the-media-resource:remove-an-element-from-a-document>
3238    fn unbind_from_tree(&self, context: &UnbindContext, can_gc: CanGc) {
3239        self.super_type().unwrap().unbind_from_tree(context, can_gc);
3240
3241        self.remove_controls();
3242
3243        if context.tree_connected {
3244            let task = MediaElementMicrotask::PauseIfNotInDocument {
3245                elem: DomRoot::from_ref(self),
3246            };
3247            ScriptThread::await_stable_state(Microtask::MediaElement(task));
3248        }
3249    }
3250
3251    fn adopting_steps(&self, old_doc: &Document, can_gc: CanGc) {
3252        self.super_type().unwrap().adopting_steps(old_doc, can_gc);
3253
3254        // Note that the media controls id should be adopted across documents so that the
3255        // "privileged" document.servoGetMediaControls(id) API keeps access to the whitelist
3256        // of media controls identifiers.
3257        if let Some(id) = &*self.media_controls_id.borrow() {
3258            let Some(shadow_root) = self.upcast::<Element>().shadow_root() else {
3259                error!("Missing media controls shadow root");
3260                return;
3261            };
3262
3263            old_doc.unregister_media_controls(id);
3264            self.owner_document()
3265                .register_media_controls(id, &shadow_root);
3266        }
3267    }
3268}
3269
3270#[derive(JSTraceable, MallocSizeOf)]
3271pub(crate) enum MediaElementMicrotask {
3272    ResourceSelection {
3273        elem: DomRoot<HTMLMediaElement>,
3274        generation_id: u32,
3275        #[no_trace]
3276        base_url: ServoUrl,
3277    },
3278    PauseIfNotInDocument {
3279        elem: DomRoot<HTMLMediaElement>,
3280    },
3281    Seeked {
3282        elem: DomRoot<HTMLMediaElement>,
3283        generation_id: u32,
3284    },
3285    SelectNextSourceChild {
3286        elem: DomRoot<HTMLMediaElement>,
3287        generation_id: u32,
3288    },
3289    SelectNextSourceChildAfterWait {
3290        elem: DomRoot<HTMLMediaElement>,
3291        generation_id: u32,
3292    },
3293}
3294
3295impl MicrotaskRunnable for MediaElementMicrotask {
3296    fn handler(&self, can_gc: CanGc) {
3297        match self {
3298            &MediaElementMicrotask::ResourceSelection {
3299                ref elem,
3300                generation_id,
3301                ref base_url,
3302            } => {
3303                if generation_id == elem.generation_id.get() {
3304                    elem.resource_selection_algorithm_sync(base_url.clone(), can_gc);
3305                }
3306            },
3307            MediaElementMicrotask::PauseIfNotInDocument { elem } => {
3308                if !elem.upcast::<Node>().is_connected() {
3309                    elem.internal_pause_steps();
3310                }
3311            },
3312            &MediaElementMicrotask::Seeked {
3313                ref elem,
3314                generation_id,
3315            } => {
3316                if generation_id == elem.generation_id.get() {
3317                    elem.seek_end();
3318                }
3319            },
3320            &MediaElementMicrotask::SelectNextSourceChild {
3321                ref elem,
3322                generation_id,
3323            } => {
3324                if generation_id == elem.generation_id.get() {
3325                    elem.select_next_source_child(can_gc);
3326                }
3327            },
3328            &MediaElementMicrotask::SelectNextSourceChildAfterWait {
3329                ref elem,
3330                generation_id,
3331            } => {
3332                if generation_id == elem.generation_id.get() {
3333                    elem.select_next_source_child_after_wait(can_gc);
3334                }
3335            },
3336        }
3337    }
3338
3339    fn enter_realm(&self) -> JSAutoRealm {
3340        match self {
3341            &MediaElementMicrotask::ResourceSelection { ref elem, .. } |
3342            &MediaElementMicrotask::PauseIfNotInDocument { ref elem } |
3343            &MediaElementMicrotask::Seeked { ref elem, .. } |
3344            &MediaElementMicrotask::SelectNextSourceChild { ref elem, .. } |
3345            &MediaElementMicrotask::SelectNextSourceChildAfterWait { ref elem, .. } => {
3346                enter_realm(&**elem)
3347            },
3348        }
3349    }
3350}
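// Hedged sketch (standalone illustration; `QueuedTask` is hypothetical): most of the
// microtasks above capture the element's generation_id when they are queued and re-check it
// in `handler`, so work queued before the element moved on to a new generation (e.g. a new
// load) is silently dropped.
//
//     struct QueuedTask {
//         generation_when_queued: u32,
//     }
//
//     fn should_run(task: &QueuedTask, element_generation: u32) -> bool {
//         task.generation_when_queued == element_generation
//     }
//
//     assert!(should_run(&QueuedTask { generation_when_queued: 3 }, 3));
//     assert!(!should_run(&QueuedTask { generation_when_queued: 3 }, 4)); // stale: skipped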
3351
3352enum Resource {
3353    Object,
3354    Url(ServoUrl),
3355}
3356
3357#[derive(Debug, MallocSizeOf, PartialEq)]
3358enum DataBuffer {
3359    Payload(Vec<u8>),
3360    EndOfStream,
3361}
3362
3363#[derive(MallocSizeOf)]
3364struct BufferedDataSource {
3365    /// During initial setup and seeking (including clearing the buffer queue
3366    /// and resetting the end-of-stream state), the data source should be locked and
3367    /// any request for processing should be ignored until the media player informs us
3368    /// via the NeedData event that it is ready to accept incoming data.
3369    locked: Cell<bool>,
3370    /// Temporary storage for incoming data.
3371    buffers: VecDeque<DataBuffer>,
3372}
3373
3374impl BufferedDataSource {
3375    fn new() -> BufferedDataSource {
3376        BufferedDataSource {
3377            locked: Cell::new(true),
3378            buffers: VecDeque::default(),
3379        }
3380    }
3381
3382    fn set_locked(&self, locked: bool) {
3383        self.locked.set(locked)
3384    }
3385
3386    fn add_buffer_to_queue(&mut self, buffer: DataBuffer) {
3387        debug_assert_ne!(
3388            self.buffers.back(),
3389            Some(&DataBuffer::EndOfStream),
3390            "The media backend does not expect any further data after end of stream"
3391        );
3392
3393        self.buffers.push_back(buffer);
3394    }
3395
3396    fn process_into_player_from_queue(
3397        &mut self,
3398        player: &Arc<Mutex<dyn Player>>,
3399    ) -> Result<(), PlayerError> {
3400        // Return early while the data source is locked; any processing request is ignored.
3401        if self.locked.get() {
3402            return Ok(());
3403        }
3404
3405        while let Some(buffer) = self.buffers.pop_front() {
3406            match buffer {
3407                DataBuffer::Payload(payload) => {
3408                    if let Err(e) = player.lock().unwrap().push_data(payload) {
3409                        warn!("Could not push input data to player {:?}", e);
3410                        return Err(e);
3411                    }
3412                },
3413                DataBuffer::EndOfStream => {
3414                    if let Err(e) = player.lock().unwrap().end_of_stream() {
3415                        warn!("Could not signal EOS to player {:?}", e);
3416                        return Err(e);
3417                    }
3418                },
3419            }
3420        }
3421
3422        Ok(())
3423    }
3424
3425    fn reset(&mut self) {
3426        self.locked.set(true);
3427        self.buffers.clear();
3428    }
3429}
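// Illustrative usage sketch (assuming the types above; `player` stands for an existing
// Arc<Mutex<dyn Player>> handle): the source starts locked, queues whatever the fetch
// listener pushes, and only drains into the player after a NeedData event unlocks it.
//
//     let mut source = BufferedDataSource::new();                        // locked by default
//     source.add_buffer_to_queue(DataBuffer::Payload(vec![0u8; 4096]));
//     let _ = source.process_into_player_from_queue(&player);            // no-op while locked
//     source.set_locked(false);                                          // NeedData received
//     source.add_buffer_to_queue(DataBuffer::EndOfStream);
//     let _ = source.process_into_player_from_queue(&player);            // pushes data, then EOS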
3430
3431/// Indicates the reason why a fetch request was cancelled.
3432#[derive(Debug, MallocSizeOf, PartialEq)]
3433enum CancelReason {
3434    /// We were asked to stop pushing data to the player.
3435    Backoff,
3436    /// An error occurred while fetching the media data.
3437    Error,
3438    /// The fetching process was aborted by the user.
3439    Abort,
3440}
3441
3442#[derive(MallocSizeOf)]
3443pub(crate) struct HTMLMediaElementFetchContext {
3444    /// The fetch request id.
3445    request_id: RequestId,
3446    /// Some if the request has been cancelled.
3447    cancel_reason: Option<CancelReason>,
3448    /// Indicates whether the fetched stream is seekable.
3449    is_seekable: bool,
3450    /// Indicates whether the fetched stream is origin clean.
3451    origin_clean: bool,
3452    /// The buffered data source to be processed by the media backend.
3453    data_source: RefCell<BufferedDataSource>,
3454    /// Fetch canceller. Allows cancelling the current fetch request by
3455    /// manually calling its .cancel() method or automatically on Drop.
3456    fetch_canceller: FetchCanceller,
3457}
3458
3459impl HTMLMediaElementFetchContext {
3460    fn new(
3461        request_id: RequestId,
3462        core_resource_thread: CoreResourceThread,
3463    ) -> HTMLMediaElementFetchContext {
3464        HTMLMediaElementFetchContext {
3465            request_id,
3466            cancel_reason: None,
3467            is_seekable: false,
3468            origin_clean: true,
3469            data_source: RefCell::new(BufferedDataSource::new()),
3470            fetch_canceller: FetchCanceller::new(request_id, core_resource_thread.clone()),
3471        }
3472    }
3473
3474    fn request_id(&self) -> RequestId {
3475        self.request_id
3476    }
3477
3478    fn is_seekable(&self) -> bool {
3479        self.is_seekable
3480    }
3481
3482    fn set_seekable(&mut self, seekable: bool) {
3483        self.is_seekable = seekable;
3484    }
3485
3486    fn origin_is_clean(&self) -> bool {
3487        self.origin_clean
3488    }
3489
3490    fn set_origin_clean(&mut self, origin_clean: bool) {
3491        self.origin_clean = origin_clean;
3492    }
3493
3494    fn data_source(&self) -> &RefCell<BufferedDataSource> {
3495        &self.data_source
3496    }
3497
3498    fn cancel(&mut self, reason: CancelReason) {
3499        if self.cancel_reason.is_some() {
3500            return;
3501        }
3502        self.cancel_reason = Some(reason);
3503        self.data_source.borrow_mut().reset();
3504        self.fetch_canceller.cancel();
3505    }
3506
3507    fn cancel_reason(&self) -> &Option<CancelReason> {
3508        &self.cancel_reason
3509    }
3510}
3511
3512struct HTMLMediaElementFetchListener {
3513    /// The element that initiated the request.
3514    element: Trusted<HTMLMediaElement>,
3515    /// The generation of the media element when this fetch started.
3516    generation_id: u32,
3517    /// The fetch request id.
3518    request_id: RequestId,
3519    /// The earliest time at which the next progress event may be fired.
3520    next_progress_event: Instant,
3521    /// Url for the resource.
3522    url: ServoUrl,
3523    /// Expected content length of the media asset being fetched or played.
3524    expected_content_length: Option<u64>,
3525    /// Actual content length of the media asset fetched so far.
3526    fetched_content_length: u64,
3527    /// Number of response body bytes to discard for the ongoing request
3528    /// when range requests are not supported. Seek requests set it to the
3529    /// required offset (in bytes).
3530    content_length_to_discard: u64,
3531}
3532
3533impl FetchResponseListener for HTMLMediaElementFetchListener {
3534    fn process_request_body(&mut self, _: RequestId) {}
3535
3536    fn process_request_eof(&mut self, _: RequestId) {}
3537
3538    fn process_response(&mut self, _: RequestId, metadata: Result<FetchMetadata, NetworkError>) {
3539        let element = self.element.root();
3540
3541        let (metadata, origin_clean) = match metadata {
3542            Ok(fetch_metadata) => match fetch_metadata {
3543                FetchMetadata::Unfiltered(metadata) => (Some(metadata), true),
3544                FetchMetadata::Filtered { filtered, unsafe_ } => (
3545                    Some(unsafe_),
3546                    matches!(
3547                        filtered,
3548                        FilteredMetadata::Basic(_) | FilteredMetadata::Cors(_)
3549                    ),
3550                ),
3551            },
3552            Err(_) => (None, true),
3553        };
3554
3555        let (status_is_success, is_seekable) =
3556            metadata.as_ref().map_or((false, false), |metadata| {
3557                let status = &metadata.status;
3558                (status.is_success(), *status == StatusCode::PARTIAL_CONTENT)
3559            });
3560
3561        // <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
3562        if !status_is_success {
3563            if element.ready_state.get() == ReadyState::HaveNothing {
3564                // => "If the media data cannot be fetched at all, due to network errors..."
3565                element.media_data_processing_failure_steps();
3566            } else {
3567                // => "If the connection is interrupted after some media data has been received..."
3568                element.media_data_processing_fatal_steps(MEDIA_ERR_NETWORK, CanGc::note());
3569            }
3570            return;
3571        }
3572
3573        if let Some(ref mut current_fetch_context) = *element.current_fetch_context.borrow_mut() {
3574            current_fetch_context.set_seekable(is_seekable);
3575            current_fetch_context.set_origin_clean(origin_clean);
3576        }
3577
3578        if let Some(metadata) = metadata.as_ref() {
3579            if let Some(headers) = metadata.headers.as_ref() {
3580                // For range requests we get the size of the media asset from the Content-Range
3581                // header. Otherwise, we get it from the Content-Length header.
3582                let content_length =
3583                    if let Some(content_range) = headers.typed_get::<ContentRange>() {
3584                        content_range.bytes_len()
3585                    } else {
3586                        headers
3587                            .typed_get::<ContentLength>()
3588                            .map(|content_length| content_length.0)
3589                    };
3590
3591                // We only set the expected input size if it changes.
3592                if content_length != self.expected_content_length {
3593                    if let Some(content_length) = content_length {
3594                        self.expected_content_length = Some(content_length);
3595                    }
3596                }
3597            }
3598        }
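        // Hedged example (header values below are made up): for a 206 Partial Content response
        // the total asset size is read from Content-Range, otherwise from Content-Length.
        //
        //     let mut headers = HeaderMap::new();
        //     headers.insert(
        //         header::CONTENT_RANGE,
        //         HeaderValue::from_static("bytes 1000-1999/8000"),
        //     );
        //     let total = headers.typed_get::<ContentRange>().and_then(|r| r.bytes_len());
        //     assert_eq!(total, Some(8000));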
3599
3600        // Explicitly initialize the media player input size for a live/seekable source.
3601        if let Some(expected_content_length) = self.expected_content_length {
3602            if let Err(e) = element
3603                .player
3604                .borrow()
3605                .as_ref()
3606                .unwrap()
3607                .lock()
3608                .unwrap()
3609                .set_input_size(expected_content_length)
3610            {
3611                warn!("Could not set player input size {:?}", e);
3612            }
3613        }
3614    }
3615
3616    fn process_response_chunk(&mut self, _: RequestId, chunk: Vec<u8>) {
3617        let element = self.element.root();
3618
3619        self.fetched_content_length += chunk.len() as u64;
3620
3621        // If the fetch was previously cancelled to back off, skip processing the payload.
3622        if let Some(ref mut current_fetch_context) = *element.current_fetch_context.borrow_mut() {
3623            if let Some(CancelReason::Backoff) = current_fetch_context.cancel_reason() {
3624                return;
3625            }
3626
3627            // Discard the chunk (or part of it) if the fetch context doesn't support range requests.
3628            let payload = if !current_fetch_context.is_seekable() &&
3629                self.content_length_to_discard != 0
3630            {
3631                if chunk.len() as u64 > self.content_length_to_discard {
3632                    let shrink_chunk = chunk[self.content_length_to_discard as usize..].to_vec();
3633                    self.content_length_to_discard = 0;
3634                    shrink_chunk
3635                } else {
3636                    // Completely discard this response chunk.
3637                    self.content_length_to_discard -= chunk.len() as u64;
3638                    return;
3639                }
3640            } else {
3641                chunk
3642            };
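            // Worked example of the discard arithmetic above (illustrative numbers): with a
            // non-seekable response and content_length_to_discard = 1000,
            //
            //     // chunk #1: 400 bytes → not larger than 1000, drop it entirely, 600 left to discard
            //     // chunk #2: 800 bytes → larger than 600, keep chunk[600..] (200 bytes), counter = 0
            //     // chunk #3 onwards    → forwarded to the data source unchanged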
3643
3644            if let Err(e) = {
3645                let mut data_source = current_fetch_context.data_source().borrow_mut();
3646                data_source.add_buffer_to_queue(DataBuffer::Payload(payload));
3647                data_source
3648                    .process_into_player_from_queue(element.player.borrow().as_ref().unwrap())
3649            } {
3650                // If we are pushing too much data and we know that we can
3651                // restart the download later from where we left, we cancel
3652                // the current request. Otherwise, we continue the request
3653                // assuming that we may drop some frames.
3654                if e == PlayerError::EnoughData {
3655                    current_fetch_context.cancel(CancelReason::Backoff);
3656                }
3657                return;
3658            }
3659        }
3660
3661        // https://html.spec.whatwg.org/multipage/#concept-media-load-resource step 4,
3662        // => "If mode is remote" step 2
3663        if Instant::now() > self.next_progress_event {
3664            element
3665                .owner_global()
3666                .task_manager()
3667                .media_element_task_source()
3668                .queue_simple_event(element.upcast(), atom!("progress"));
3669            self.next_progress_event = Instant::now() + Duration::from_millis(350);
3670        }
3671    }
3672
3673    fn process_response_eof(self, _: RequestId, status: Result<ResourceFetchTiming, NetworkError>) {
3674        let element = self.element.root();
3675
3676        // <https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list>
3677        if status.is_ok() && self.fetched_content_length != 0 {
3678            // => "Once the entire media resource has been fetched..."
3679
3680            // There are no more chunks of the response body forthcoming, so we can
3681            // go ahead and notify the media backend not to expect any further data.
3682            if let Some(ref mut current_fetch_context) = *element.current_fetch_context.borrow_mut()
3683            {
3684                // On the initial state change READY -> PAUSED, the media player performs a seek
3685                // to the initial position via an event with a seek segment (TIME format), while
3686                // the media stack operates in BYTES format, and configuring the segment start and
3687                // stop positions without the total size of the stream is not possible. As a
3688                // fallback, the media player performs the seek in BYTES format and initiates a
3689                // seek request via the "seek-data" callback with the required offset.
3690                if self.expected_content_length.is_none() {
3691                    if let Err(e) = element
3692                        .player
3693                        .borrow()
3694                        .as_ref()
3695                        .unwrap()
3696                        .lock()
3697                        .unwrap()
3698                        .set_input_size(self.fetched_content_length)
3699                    {
3700                        warn!("Could not set player input size {:?}", e);
3701                    }
3702                }
3703
3704                let mut data_source = current_fetch_context.data_source().borrow_mut();
3705
3706                data_source.add_buffer_to_queue(DataBuffer::EndOfStream);
3707                let _ = data_source
3708                    .process_into_player_from_queue(element.player.borrow().as_ref().unwrap());
3709            }
3710
3711            // Step 1. Fire an event named progress at the media element.
3712            element
3713                .upcast::<EventTarget>()
3714                .fire_event(atom!("progress"), CanGc::note());
3715
3716            // Step 2. Set the networkState to NETWORK_IDLE and fire an event named suspend at the
3717            // media element.
3718            element.network_state.set(NetworkState::Idle);
3719
3720            element
3721                .upcast::<EventTarget>()
3722                .fire_event(atom!("suspend"), CanGc::note());
3723        } else if status.is_err() && element.ready_state.get() != ReadyState::HaveNothing {
3724            // => "If the connection is interrupted after some media data has been received..."
3725            element.media_data_processing_fatal_steps(MEDIA_ERR_NETWORK, CanGc::note());
3726        } else {
3727            // => "If the media data can be fetched but is found by inspection to be in an
3728            // unsupported format, or can otherwise not be rendered at all"
3729            element.media_data_processing_failure_steps();
3730        }
3731
3732        if let Ok(response) = status {
3733            network_listener::submit_timing(&self, &response, CanGc::note());
3734        }
3735    }
3736
3737    fn process_csp_violations(&mut self, _request_id: RequestId, violations: Vec<Violation>) {
3738        let global = &self.resource_timing_global();
3739        global.report_csp_violations(violations, None, None);
3740    }
3741
3742    fn should_invoke(&self) -> bool {
3743        let element = self.element.root();
3744
3745        if element.generation_id.get() != self.generation_id || element.player.borrow().is_none() {
3746            return false;
3747        }
3748
3749        let Some(ref current_fetch_context) = *element.current_fetch_context.borrow() else {
3750            return false;
3751        };
3752
3753        // Ignore this listener if a newer fetch request has been triggered.
3754        if current_fetch_context.request_id() != self.request_id {
3755            return false;
3756        }
3757
3758        // Ignore this listener if the current fetch request was cancelled due to a network or
3759        // decoding error, or was aborted by the user.
3760        if let Some(cancel_reason) = current_fetch_context.cancel_reason() {
3761            if matches!(*cancel_reason, CancelReason::Error | CancelReason::Abort) {
3762                return false;
3763            }
3764        }
3765
3766        true
3767    }
3768}
3769
3770impl ResourceTimingListener for HTMLMediaElementFetchListener {
3771    fn resource_timing_information(&self) -> (InitiatorType, ServoUrl) {
3772        let initiator_type = InitiatorType::LocalName(
3773            self.element
3774                .root()
3775                .upcast::<Element>()
3776                .local_name()
3777                .to_string(),
3778        );
3779        (initiator_type, self.url.clone())
3780    }
3781
3782    fn resource_timing_global(&self) -> DomRoot<GlobalScope> {
3783        self.element.root().owner_document().global()
3784    }
3785}
3786
3787impl HTMLMediaElementFetchListener {
3788    fn new(element: &HTMLMediaElement, request_id: RequestId, url: ServoUrl, offset: u64) -> Self {
3789        Self {
3790            element: Trusted::new(element),
3791            generation_id: element.generation_id.get(),
3792            request_id,
3793            next_progress_event: Instant::now() + Duration::from_millis(350),
3794            url,
3795            expected_content_length: None,
3796            fetched_content_length: 0,
3797            content_length_to_discard: offset,
3798        }
3799    }
3800}