use std::ptr::NonNull;
use std::sync::{Arc, Mutex};

use arrayvec::ArrayVec;
use base::Epoch;
use compositing_traits::{
    CrossProcessCompositorApi, ExternalImageSource, SerializableImageData,
    WebRenderExternalImageApi,
};
use euclid::default::Size2D;
use ipc_channel::ipc::IpcSender;
use log::warn;
use pixels::{SharedSnapshot, Snapshot, SnapshotAlphaMode, SnapshotPixelFormat};
use rustc_hash::FxHashMap;
use webgpu_traits::{
    ContextConfiguration, PRESENTATION_BUFFER_COUNT, PendingTexture, WebGPUContextId, WebGPUMsg,
};
use webrender_api::units::DeviceIntSize;
use webrender_api::{
    ExternalImageData, ExternalImageId, ExternalImageType, ImageDescriptor, ImageDescriptorFlags,
    ImageFormat, ImageKey,
};
use wgpu_core::device::HostMap;
use wgpu_core::global::Global;
use wgpu_core::id::{
    self, BufferId, CommandBufferId, CommandEncoderId, DeviceId, QueueId, TextureId,
};
use wgpu_core::resource::{
    BufferAccessError, BufferDescriptor, BufferMapOperation, CreateBufferError,
};
use wgpu_types::{
    BufferUsages, COPY_BYTES_PER_ROW_ALIGNMENT, CommandBufferDescriptor, CommandEncoderDescriptor,
    Extent3d, Origin3d, TexelCopyBufferInfo, TexelCopyBufferLayout, TexelCopyTextureInfo,
    TextureAspect,
};

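/// Shared map from a WebGPU context id to its presentation state, used by both the
/// WGPU thread and the WebRender external image handler.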
pub type WebGpuExternalImageMap = Arc<Mutex<FxHashMap<WebGPUContextId, ContextData>>>;

const fn image_data(context_id: WebGPUContextId) -> ExternalImageData {
    ExternalImageData {
        id: ExternalImageId(context_id.0),
        channel_index: 0,
        image_type: ExternalImageType::Buffer,
        normalized_uvs: false,
    }
}

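/// Identity (device and queue) and size of a wgpu buffer used as a readback target.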
#[derive(Clone, Copy, Debug)]
struct Buffer {
    device_id: DeviceId,
    queue_id: QueueId,
    size: u64,
}

impl Buffer {
    fn has_compatible_config(&self, config: &ContextConfiguration) -> bool {
        config.device_id == self.device_id && self.size == config.buffer_size()
    }
}

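/// A staging buffer that is currently mapped for reading, together with the layout
/// information needed to interpret its contents as an image.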
#[derive(Debug)]
struct MappedBuffer {
    buffer: Buffer,
    data: NonNull<u8>,
    len: u64,
    image_size: Size2D<u32>,
    image_format: ImageFormat,
    is_opaque: bool,
}

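// The mapped-range pointer held by `MappedBuffer` is only ever read, and only while
// the backing buffer remains mapped, so it is asserted to be safe to move across
// threads.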
unsafe impl Send for MappedBuffer {}

impl MappedBuffer {
    const fn slice(&'_ self) -> &'_ [u8] {
        unsafe { std::slice::from_raw_parts(self.data.as_ptr(), self.len as usize) }
    }

    fn stride(&self) -> u32 {
        (self.image_size.width * self.image_format.bytes_per_pixel() as u32)
            .next_multiple_of(COPY_BYTES_PER_ROW_ALIGNMENT)
    }
}

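/// The lifecycle of a staging buffer: not yet created on any device (`Unassigned`),
/// created and ready to receive a copy (`Available`), waiting for an asynchronous
/// map to complete (`Mapping`), or mapped and readable (`Mapped`).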
#[derive(Debug)]
enum StagingBufferState {
    Unassigned,
    Available(Buffer),
    Mapping(Buffer),
    Mapped(MappedBuffer),
}

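/// A wgpu buffer used to read rendered canvas textures back to the CPU, tracked
/// together with its current state.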
#[derive(Debug)]
struct StagingBuffer {
    global: Arc<Global>,
    buffer_id: BufferId,
    state: StagingBufferState,
}

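// `StagingBuffer` is shared across threads behind an `Arc` (see
// `PresentationStagingBuffer`); the mapped contents are only read through shared
// references, so shared access is asserted to be safe.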
unsafe impl Sync for StagingBuffer {}

impl StagingBuffer {
    fn new(global: Arc<Global>, buffer_id: BufferId) -> Self {
        Self {
            global,
            buffer_id,
            state: StagingBufferState::Unassigned,
        }
    }

    const fn is_mapped(&self) -> bool {
        matches!(self.state, StagingBufferState::Mapped(..))
    }

    fn is_available_and_has_compatible_config(&self, config: &ContextConfiguration) -> bool {
        let StagingBufferState::Available(buffer) = &self.state else {
            return false;
        };
        buffer.has_compatible_config(config)
    }

    const fn needs_assignment(&self) -> bool {
        matches!(
            self.state,
            StagingBufferState::Unassigned | StagingBufferState::Available(_)
        )
    }

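    /// Ensure the underlying wgpu buffer exists and matches `config`: a compatible
    /// buffer is unmapped and reused, an incompatible one is dropped and recreated.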
    fn ensure_available(&mut self, config: &ContextConfiguration) -> Result<(), CreateBufferError> {
        let recreate = match &self.state {
            StagingBufferState::Unassigned => true,
            StagingBufferState::Available(buffer) |
            StagingBufferState::Mapping(buffer) |
            StagingBufferState::Mapped(MappedBuffer { buffer, .. }) => {
                if buffer.has_compatible_config(config) {
                    let _ = self.global.buffer_unmap(self.buffer_id);
                    false
                } else {
                    self.global.buffer_drop(self.buffer_id);
                    true
                }
            },
        };
        if recreate {
            let buffer_size = config.buffer_size();
            let (_, error) = self.global.device_create_buffer(
                config.device_id,
                &BufferDescriptor {
                    label: None,
                    size: buffer_size,
                    usage: BufferUsages::MAP_READ | BufferUsages::COPY_DST,
                    mapped_at_creation: false,
                },
                Some(self.buffer_id),
            );
            if let Some(error) = error {
                return Err(error);
            };
            self.state = StagingBufferState::Available(Buffer {
                device_id: config.device_id,
                queue_id: config.queue_id,
                size: buffer_size,
            });
        }
        Ok(())
    }

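    /// Record and finish a command buffer that copies `texture_id` into this staging
    /// buffer, making the buffer available for `config` first.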
    fn prepare_load_texture_command_buffer(
        &mut self,
        texture_id: TextureId,
        encoder_id: CommandEncoderId,
        config: &ContextConfiguration,
    ) -> Result<CommandBufferId, Box<dyn std::error::Error>> {
        self.ensure_available(config)?;
        let StagingBufferState::Available(buffer) = &self.state else {
            unreachable!("Should be made available by `ensure_available`")
        };
        let device_id = buffer.device_id;
        let command_descriptor = CommandEncoderDescriptor { label: None };
        let (encoder_id, error) = self.global.device_create_command_encoder(
            device_id,
            &command_descriptor,
            Some(encoder_id),
        );
        if let Some(error) = error {
            return Err(error.into());
        };
        let buffer_info = TexelCopyBufferInfo {
            buffer: self.buffer_id,
            layout: TexelCopyBufferLayout {
                offset: 0,
                bytes_per_row: Some(config.stride()),
                rows_per_image: None,
            },
        };
        let texture_info = TexelCopyTextureInfo {
            texture: texture_id,
            mip_level: 0,
            origin: Origin3d::ZERO,
            aspect: TextureAspect::All,
        };
        let copy_size = Extent3d {
            width: config.size.width,
            height: config.size.height,
            depth_or_array_layers: 1,
        };
        self.global.command_encoder_copy_texture_to_buffer(
            encoder_id,
            &texture_info,
            &buffer_info,
            &copy_size,
        )?;
        let (command_buffer_id, error) = self
            .global
            .command_encoder_finish(encoder_id, &CommandBufferDescriptor::default());
        if let Some(error) = error {
            return Err(error.into());
        };
        Ok(command_buffer_id)
    }

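    /// Unmap the wgpu buffer if it is mapped (or being mapped) and return it to the
    /// `Available` state.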
    fn unmap(&mut self) {
        match self.state {
            StagingBufferState::Unassigned | StagingBufferState::Available(_) => {},
            StagingBufferState::Mapping(buffer) |
            StagingBufferState::Mapped(MappedBuffer { buffer, .. }) => {
                let _ = self.global.buffer_unmap(self.buffer_id);
                self.state = StagingBufferState::Available(buffer)
            },
        }
    }

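    /// Build a `Snapshot` from the mapped contents, stripping the per-row padding
    /// required by `COPY_BYTES_PER_ROW_ALIGNMENT`. Returns `None` if the buffer is
    /// not currently mapped.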
    fn snapshot(&self) -> Option<Snapshot> {
        let StagingBufferState::Mapped(mapped) = &self.state else {
            return None;
        };
        let format = match mapped.image_format {
            ImageFormat::RGBA8 => SnapshotPixelFormat::RGBA,
            ImageFormat::BGRA8 => SnapshotPixelFormat::BGRA,
            _ => unreachable!("GPUCanvasContext does not support other formats per spec"),
        };
        let alpha_mode = if mapped.is_opaque {
            SnapshotAlphaMode::AsOpaque {
                premultiplied: false,
            }
        } else {
            SnapshotAlphaMode::Transparent {
                premultiplied: true,
            }
        };
        let padded_byte_width = mapped.stride();
        let data = mapped.slice();
        let bytes_per_pixel = mapped.image_format.bytes_per_pixel() as usize;
        let mut result_unpadded =
            Vec::<u8>::with_capacity(mapped.image_size.area() as usize * bytes_per_pixel);
        for row in 0..mapped.image_size.height {
            let start = (row * padded_byte_width).try_into().ok()?;
            result_unpadded
                .extend(&data[start..start + mapped.image_size.width as usize * bytes_per_pixel]);
        }
        let mut snapshot =
            Snapshot::from_vec(mapped.image_size, format, alpha_mode, result_unpadded);
        if mapped.is_opaque {
            snapshot.transform(SnapshotAlphaMode::Opaque, snapshot.format())
        }
        Some(snapshot)
    }
}

impl Drop for StagingBuffer {
    fn drop(&mut self) {
        match self.state {
            StagingBufferState::Unassigned => {},
            StagingBufferState::Available(_) |
            StagingBufferState::Mapping(_) |
            StagingBufferState::Mapped(_) => {
                self.global.buffer_drop(self.buffer_id);
            },
        }
    }
}

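/// WebRender external image handler for WebGPU canvases: `lock` exposes the raw
/// bytes of the currently presented staging buffer, `unlock` releases them.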
pub struct WebGpuExternalImages {
    pub image_map: WebGpuExternalImageMap,
    pub locked_ids: FxHashMap<WebGPUContextId, PresentationStagingBuffer>,
}

impl WebGpuExternalImages {
    pub fn new(image_map: WebGpuExternalImageMap) -> Self {
        Self {
            image_map,
            locked_ids: Default::default(),
        }
    }
}

impl WebRenderExternalImageApi for WebGpuExternalImages {
    fn lock(&mut self, id: u64) -> (ExternalImageSource<'_>, Size2D<i32>) {
        let id = WebGPUContextId(id);
        let presentation = {
            let mut webgpu_contexts = self.image_map.lock().unwrap();
            webgpu_contexts
                .get_mut(&id)
                .and_then(|context_data| context_data.presentation.clone())
        };
        let Some(presentation) = presentation else {
            return (ExternalImageSource::Invalid, Size2D::zero());
        };
        self.locked_ids.insert(id, presentation);
        let presentation = self.locked_ids.get(&id).unwrap();
        let StagingBufferState::Mapped(mapped_buffer) = &presentation.staging_buffer.state else {
            unreachable!("Presentation staging buffer should be mapped")
        };
        let size = mapped_buffer.image_size;
        (
            ExternalImageSource::RawData(mapped_buffer.slice()),
            size.cast().cast_unit(),
        )
    }

    fn unlock(&mut self, id: u64) {
        let id = WebGPUContextId(id);
        let Some(presentation) = self.locked_ids.remove(&id) else {
            return;
        };
        let mut webgpu_contexts = self.image_map.lock().unwrap();
        if let Some(context_data) = webgpu_contexts.get_mut(&id) {
            presentation.maybe_destroy(context_data);
        } else {
            drop(presentation);
        }
    }
}

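/// A mapped staging buffer that is currently presented to WebRender, tagged with
/// the epoch of the frame it holds and shared via `Arc`.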
#[derive(Clone)]
pub struct PresentationStagingBuffer {
    epoch: Epoch,
    staging_buffer: Arc<StagingBuffer>,
}

impl PresentationStagingBuffer {
    fn new(epoch: Epoch, staging_buffer: StagingBuffer) -> Self {
        Self {
            epoch,
            staging_buffer: Arc::new(staging_buffer),
        }
    }

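    /// If this was the last reference to the staging buffer, unmap it and return it
    /// to the context's pool of inactive buffers.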
    fn maybe_destroy(self, context_data: &mut ContextData) {
        if let Some(mut staging_buffer) = Arc::into_inner(self.staging_buffer) {
            staging_buffer.unmap();
            context_data.return_staging_buffer(staging_buffer);
        }
    }
}

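/// Presentation state of a single WebGPU canvas context: its WebRender image key,
/// size, pool of inactive staging buffers, and the most recently presented frame.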
pub struct ContextData {
    image_key: Option<ImageKey>,
    size: DeviceIntSize,
    inactive_staging_buffers: ArrayVec<StagingBuffer, PRESENTATION_BUFFER_COUNT>,
    presentation: Option<PresentationStagingBuffer>,
    next_epoch: Epoch,
}

impl ContextData {
    fn new(
        global: &Arc<Global>,
        buffer_ids: ArrayVec<id::BufferId, PRESENTATION_BUFFER_COUNT>,
        size: DeviceIntSize,
    ) -> Self {
        Self {
            image_key: None,
            size,
            inactive_staging_buffers: buffer_ids
                .iter()
                .map(|buffer_id| StagingBuffer::new(global.clone(), *buffer_id))
                .collect(),
            presentation: None,
            next_epoch: Epoch(1),
        }
    }

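    /// Take an inactive staging buffer that can serve `config`, preferring an already
    /// compatible one, then one that needs (re)assignment, and otherwise the first
    /// inactive buffer. Returns `None` if no buffer can be made available.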
    fn get_or_make_available_buffer(
        &'_ mut self,
        config: &ContextConfiguration,
    ) -> Option<StagingBuffer> {
        self.inactive_staging_buffers
            .iter()
            .position(|staging_buffer| {
                staging_buffer.is_available_and_has_compatible_config(config)
            })
            .or_else(|| {
                self.inactive_staging_buffers
                    .iter()
                    .position(|staging_buffer| staging_buffer.needs_assignment())
            })
            .or_else(|| {
                if self.inactive_staging_buffers.is_empty() {
                    None
                } else {
                    Some(0)
                }
            })
            .and_then(|index| {
                let mut staging_buffer = self.inactive_staging_buffers.remove(index);
                if staging_buffer.ensure_available(config).is_ok() {
                    Some(staging_buffer)
                } else {
                    self.inactive_staging_buffers.push(staging_buffer);
                    None
                }
            })
    }

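    /// Hand the context's buffer ids back to script for freeing and delete its
    /// WebRender image, if any.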
    fn destroy(
        mut self,
        script_sender: &IpcSender<WebGPUMsg>,
        compositor_api: &CrossProcessCompositorApi,
    ) {
        for staging_buffer in self.inactive_staging_buffers {
            if let Err(error) = script_sender.send(WebGPUMsg::FreeBuffer(staging_buffer.buffer_id))
            {
                warn!(
                    "Unable to send FreeBuffer({:?}) ({error})",
                    staging_buffer.buffer_id
                );
            };
        }
        if let Some(image_key) = self.image_key.take() {
            compositor_api.delete_image(image_key);
        }
    }

    fn next_epoch(&mut self) -> Epoch {
        let epoch = self.next_epoch;
        self.next_epoch.next();
        epoch
    }

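    /// Install `presentation` as the current frame if its epoch is at least as new as
    /// the one currently presented; whichever presentation is displaced (or rejected)
    /// is released via `maybe_destroy`.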
    fn replace_presentation(&mut self, presentation: PresentationStagingBuffer) {
        let stale_presentation = if presentation.epoch >=
            self.presentation
                .as_ref()
                .map(|p| p.epoch)
                .unwrap_or_default()
        {
            self.presentation.replace(presentation)
        } else {
            Some(presentation)
        };
        if let Some(stale_presentation) = stale_presentation {
            stale_presentation.maybe_destroy(self);
        }
    }

    fn clear_presentation(&mut self) {
        if let Some(stale_presentation) = self.presentation.take() {
            stale_presentation.maybe_destroy(self);
        }
    }

    fn return_staging_buffer(&mut self, staging_buffer: StagingBuffer) {
        self.inactive_staging_buffers.push(staging_buffer)
    }
}

impl crate::WGPU {
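    /// Register presentation state for a newly created canvas context, backed by the
    /// preallocated `buffer_ids`.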
    pub(crate) fn create_context(
        &self,
        context_id: WebGPUContextId,
        size: DeviceIntSize,
        buffer_ids: ArrayVec<id::BufferId, PRESENTATION_BUFFER_COUNT>,
    ) {
        let context_data = ContextData::new(&self.global, buffer_ids, size);
        assert!(
            self.wgpu_image_map
                .lock()
                .unwrap()
                .insert(context_id, context_data)
                .is_none(),
            "Context should be created only once!"
        );
    }

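    /// Associate a WebRender image key with the context, registering the external
    /// image with the compositor and deleting any previously registered key.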
    pub(crate) fn set_image_key(&self, context_id: WebGPUContextId, image_key: ImageKey) {
        let mut webgpu_contexts = self.wgpu_image_map.lock().unwrap();
        let context_data = webgpu_contexts.get_mut(&context_id).unwrap();

        if let Some(old_image_key) = context_data.image_key.replace(image_key) {
            self.compositor_api.delete_image(old_image_key);
        }

        self.compositor_api.add_image(
            image_key,
            ImageDescriptor {
                format: ImageFormat::BGRA8,
                size: context_data.size,
                stride: None,
                offset: 0,
                flags: ImageDescriptorFlags::empty(),
            },
            SerializableImageData::External(image_data(context_id)),
        );
    }

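    /// Send a snapshot of the context's contents to `sender`: if a texture is pending,
    /// download and map it first; otherwise snapshot the currently presented buffer or
    /// reply with an empty snapshot.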
    pub(crate) fn get_image(
        &self,
        context_id: WebGPUContextId,
        pending_texture: Option<PendingTexture>,
        sender: IpcSender<SharedSnapshot>,
    ) {
        let mut webgpu_contexts = self.wgpu_image_map.lock().unwrap();
        let context_data = webgpu_contexts.get_mut(&context_id).unwrap();
        if let Some(PendingTexture {
            texture_id,
            encoder_id,
            configuration,
        }) = pending_texture
        {
            let Some(staging_buffer) = context_data.get_or_make_available_buffer(&configuration)
            else {
                warn!("Failure obtaining available staging buffer");
                sender
                    .send(SharedSnapshot::cleared(configuration.size))
                    .unwrap();
                return;
            };

            let epoch = context_data.next_epoch();
            let wgpu_image_map = self.wgpu_image_map.clone();
            let sender = sender.clone();
            drop(webgpu_contexts);
            self.texture_download(
                texture_id,
                encoder_id,
                staging_buffer,
                configuration,
                move |staging_buffer| {
                    let mut webgpu_contexts = wgpu_image_map.lock().unwrap();
                    let context_data = webgpu_contexts.get_mut(&context_id).unwrap();
                    sender
                        .send(
                            staging_buffer
                                .snapshot()
                                .as_ref()
                                .map(Snapshot::to_shared)
                                .unwrap_or_else(|| SharedSnapshot::cleared(configuration.size)),
                        )
                        .unwrap();
                    if staging_buffer.is_mapped() {
                        context_data.replace_presentation(PresentationStagingBuffer::new(
                            epoch,
                            staging_buffer,
                        ));
                    } else {
                        context_data.return_staging_buffer(staging_buffer);
                    }
                },
            );
        } else {
            sender
                .send(
                    context_data
                        .presentation
                        .as_ref()
                        .and_then(|presentation_staging_buffer| {
                            presentation_staging_buffer.staging_buffer.snapshot()
                        })
                        .unwrap_or_else(Snapshot::empty)
                        .to_shared(),
                )
                .unwrap();
        }
    }

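    /// Present the context for `canvas_epoch`: if a texture is pending, download it
    /// into a staging buffer and update the WebRender image once mapped; otherwise
    /// clear the current presentation and update the image immediately.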
    pub(crate) fn present(
        &self,
        context_id: WebGPUContextId,
        pending_texture: Option<PendingTexture>,
        size: Size2D<u32>,
        canvas_epoch: Epoch,
    ) {
        let mut webgpu_contexts = self.wgpu_image_map.lock().unwrap();
        let context_data = webgpu_contexts.get_mut(&context_id).unwrap();

        let Some(image_key) = context_data.image_key else {
            return;
        };

        let Some(PendingTexture {
            texture_id,
            encoder_id,
            configuration,
        }) = pending_texture
        else {
            context_data.clear_presentation();
            self.compositor_api.update_image(
                image_key,
                ImageDescriptor {
                    format: ImageFormat::BGRA8,
                    size: size.cast_unit().cast(),
                    stride: None,
                    offset: 0,
                    flags: ImageDescriptorFlags::empty(),
                },
                SerializableImageData::External(image_data(context_id)),
                Some(canvas_epoch),
            );
            return;
        };
        let Some(staging_buffer) = context_data.get_or_make_available_buffer(&configuration) else {
            warn!("Failure obtaining available staging buffer");
            context_data.clear_presentation();
            self.compositor_api.update_image(
                image_key,
                configuration.into(),
                SerializableImageData::External(image_data(context_id)),
                Some(canvas_epoch),
            );
            return;
        };
        let epoch = context_data.next_epoch();
        let wgpu_image_map = self.wgpu_image_map.clone();
        let compositor_api = self.compositor_api.clone();
        drop(webgpu_contexts);
        self.texture_download(
            texture_id,
            encoder_id,
            staging_buffer,
            configuration,
            move |staging_buffer| {
                let mut webgpu_contexts = wgpu_image_map.lock().unwrap();
                let context_data = webgpu_contexts.get_mut(&context_id).unwrap();
                if staging_buffer.is_mapped() {
                    context_data.replace_presentation(PresentationStagingBuffer::new(
                        epoch,
                        staging_buffer,
                    ));
                } else {
                    context_data.return_staging_buffer(staging_buffer);
                    context_data.clear_presentation();
                }
                compositor_api.update_image(
                    image_key,
                    configuration.into(),
                    SerializableImageData::External(image_data(context_id)),
                    Some(canvas_epoch),
                );
            },
        );
    }

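    /// Copy `texture_id` into `staging_buffer`, submit the copy, and asynchronously map
    /// the buffer for reading; `callback` receives the buffer once the map attempt
    /// completes, or immediately if recording or submission fails.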
    fn texture_download(
        &self,
        texture_id: TextureId,
        encoder_id: CommandEncoderId,
        mut staging_buffer: StagingBuffer,
        config: ContextConfiguration,
        callback: impl FnOnce(StagingBuffer) + Send + 'static,
    ) {
        let Ok(command_buffer_id) =
            staging_buffer.prepare_load_texture_command_buffer(texture_id, encoder_id, &config)
        else {
            return callback(staging_buffer);
        };
        let StagingBufferState::Available(buffer) = &staging_buffer.state else {
            unreachable!("`prepare_load_texture_command_buffer` should make buffer available")
        };
        let buffer_id = staging_buffer.buffer_id;
        let buffer_size = buffer.size;
        {
            let _guard = self.poller.lock();
            let result = self
                .global
                .queue_submit(buffer.queue_id, &[command_buffer_id]);
            if result.is_err() {
                return callback(staging_buffer);
            }
        }
        staging_buffer.state = match staging_buffer.state {
            StagingBufferState::Available(buffer) => StagingBufferState::Mapping(buffer),
            _ => unreachable!("`prepare_load_texture_command_buffer` should make buffer available"),
        };
        let map_callback = {
            let token = self.poller.token();
            Box::new(move |result: Result<(), BufferAccessError>| {
                drop(token);
                staging_buffer.state = match staging_buffer.state {
                    StagingBufferState::Mapping(buffer) => {
                        if let Ok((data, len)) = result.and_then(|_| {
                            staging_buffer.global.buffer_get_mapped_range(
                                staging_buffer.buffer_id,
                                0,
                                Some(buffer.size),
                            )
                        }) {
                            StagingBufferState::Mapped(MappedBuffer {
                                buffer,
                                data,
                                len,
                                image_size: config.size,
                                image_format: config.format,
                                is_opaque: config.is_opaque,
                            })
                        } else {
                            StagingBufferState::Available(buffer)
                        }
                    },
                    _ => {
                        unreachable!("Mapping buffer should have StagingBufferState::Mapping state")
                    },
                };
                callback(staging_buffer);
            })
        };
        let map_op = BufferMapOperation {
            host: HostMap::Read,
            callback: Some(map_callback),
        };
        let _ = self
            .global
            .buffer_map_async(buffer_id, 0, Some(buffer_size), map_op);
        self.poller.wake();
    }

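    /// Remove the context's presentation state, freeing its staging buffers and
    /// WebRender image.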
    pub(crate) fn destroy_context(&mut self, context_id: WebGPUContextId) {
        self.wgpu_image_map
            .lock()
            .unwrap()
            .remove(&context_id)
            .unwrap()
            .destroy(&self.script_sender, &self.compositor_api);
    }
}