Trait servo_media::Backend

pub trait Backend: Send + Sync {
// Required methods
fn create_player(
&self,
id: &ClientContextId,
stream_type: StreamType,
sender: IpcSender<PlayerEvent>,
video_renderer: Option<Arc<Mutex<dyn VideoFrameRenderer>>>,
audio_renderer: Option<Arc<Mutex<dyn AudioRenderer>>>,
gl_context: Box<dyn PlayerGLContext>,
) -> Arc<Mutex<dyn Player>>;
fn create_audiostream(&self) -> MediaStreamId;
fn create_videostream(&self) -> MediaStreamId;
fn create_stream_output(&self) -> Box<dyn MediaOutput>;
fn create_stream_and_socket(
&self,
ty: MediaStreamType,
) -> (Box<dyn MediaSocket>, MediaStreamId);
fn create_audioinput_stream(
&self,
set: MediaTrackConstraintSet,
) -> Option<MediaStreamId>;
fn create_videoinput_stream(
&self,
set: MediaTrackConstraintSet,
) -> Option<MediaStreamId>;
fn create_audio_context(
&self,
id: &ClientContextId,
options: AudioContextOptions,
) -> Result<Arc<Mutex<AudioContext>>, AudioSinkError>;
fn create_webrtc(
&self,
signaller: Box<dyn WebRtcSignaller>,
) -> WebRtcController;
fn can_play_type(&self, media_type: &str) -> SupportsMediaType;
fn get_device_monitor(&self) -> Box<dyn MediaDeviceMonitor>;
// Provided methods
fn set_capture_mocking(&self, _mock: bool) { ... }
fn mute(&self, _id: &ClientContextId, _val: bool) { ... }
fn suspend(&self, _id: &ClientContextId) { ... }
fn resume(&self, _id: &ClientContextId) { ... }
}
Required Methods§
fn create_player( &self, id: &ClientContextId, stream_type: StreamType, sender: IpcSender<PlayerEvent>, video_renderer: Option<Arc<Mutex<dyn VideoFrameRenderer>>>, audio_renderer: Option<Arc<Mutex<dyn AudioRenderer>>>, gl_context: Box<dyn PlayerGLContext>, ) -> Arc<Mutex<dyn Player>>
fn create_audiostream(&self) -> MediaStreamId
fn create_videostream(&self) -> MediaStreamId
fn create_stream_output(&self) -> Box<dyn MediaOutput>
fn create_stream_and_socket( &self, ty: MediaStreamType, ) -> (Box<dyn MediaSocket>, MediaStreamId)
fn create_audioinput_stream( &self, set: MediaTrackConstraintSet, ) -> Option<MediaStreamId>
fn create_videoinput_stream( &self, set: MediaTrackConstraintSet, ) -> Option<MediaStreamId>
fn create_audio_context( &self, id: &ClientContextId, options: AudioContextOptions, ) -> Result<Arc<Mutex<AudioContext>>, AudioSinkError>
fn create_webrtc(&self, signaller: Box<dyn WebRtcSignaller>) -> WebRtcController
fn can_play_type(&self, media_type: &str) -> SupportsMediaType
fn get_device_monitor(&self) -> Box<dyn MediaDeviceMonitor>
Provided Methods§
fn set_capture_mocking(&self, _mock: bool)
fn mute(&self, _id: &ClientContextId, _val: bool)
Allow muting/unmuting the media instances associated with the given client context identifier. Backend implementations are responsible for maintaining a mapping between client contexts and the media instances created for these contexts. The client context identifier is currently an abstraction of Servo’s PipelineId.
fn suspend(&self, _id: &ClientContextId)
Allow suspending the activity of all media instances associated with the given client context identifier. Note that suspending does not involve releasing any resources, so media playback can be restarted. Backend implementations are responsible for maintaining a mapping between client contexts and the media instances created for these contexts. The client context identifier is currently an abstraction of Servo’s PipelineId.
fn resume(&self, _id: &ClientContextId)
Allow resuming the activity of all the media instances associated with the given client context identifier. Backend implementations are responsible for maintaining a mapping between client contexts and the media instances created for these contexts. The client context identifier is currently an abstraction of Servo’s PipelineId.