webgpu/canvas_context.rs

/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/. */

//! Main process implementation of [GPUCanvasContext](https://www.w3.org/TR/webgpu/#canvas-context)

use std::ptr::NonNull;
use std::sync::{Arc, Mutex};

use arrayvec::ArrayVec;
use base::Epoch;
use compositing_traits::{
    CrossProcessCompositorApi, ExternalImageSource, SerializableImageData,
    WebrenderExternalImageApi,
};
use euclid::default::Size2D;
use ipc_channel::ipc::IpcSender;
use log::warn;
use pixels::{IpcSnapshot, Snapshot, SnapshotAlphaMode, SnapshotPixelFormat};
use rustc_hash::FxHashMap;
use webgpu_traits::{
    ContextConfiguration, PRESENTATION_BUFFER_COUNT, PendingTexture, WebGPUContextId, WebGPUMsg,
};
use webrender_api::units::DeviceIntSize;
use webrender_api::{
    ExternalImageData, ExternalImageId, ExternalImageType, ImageDescriptor, ImageDescriptorFlags,
    ImageFormat, ImageKey,
};
use wgpu_core::device::HostMap;
use wgpu_core::global::Global;
use wgpu_core::id::{
    self, BufferId, CommandBufferId, CommandEncoderId, DeviceId, QueueId, TextureId,
};
use wgpu_core::resource::{
    BufferAccessError, BufferDescriptor, BufferMapOperation, CreateBufferError,
};
use wgpu_types::{
    BufferUsages, COPY_BYTES_PER_ROW_ALIGNMENT, CommandBufferDescriptor, CommandEncoderDescriptor,
    Extent3d, Origin3d, TexelCopyBufferInfo, TexelCopyBufferLayout, TexelCopyTextureInfo,
    TextureAspect,
};

pub type WGPUImageMap = Arc<Mutex<FxHashMap<WebGPUContextId, ContextData>>>;

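/// Builds the [`ExternalImageData`] descriptor for the WebRender external image
/// backing this context, keyed by the context id.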
const fn image_data(context_id: WebGPUContextId) -> ExternalImageData {
    ExternalImageData {
        id: ExternalImageId(context_id.0),
        channel_index: 0,
        image_type: ExternalImageType::Buffer,
        normalized_uvs: false,
    }
}

/// An allocated buffer on the GPU device.
#[derive(Clone, Copy, Debug)]
struct Buffer {
    device_id: DeviceId,
    queue_id: QueueId,
    size: u64,
}

impl Buffer {
    /// Returns true if the buffer is compatible with the provided configuration.
    fn has_compatible_config(&self, config: &ContextConfiguration) -> bool {
        config.device_id == self.device_id && self.size == config.buffer_size()
    }
}

/// A mapped GPUBuffer.
#[derive(Debug)]
struct MappedBuffer {
    buffer: Buffer,
    data: NonNull<u8>,
    len: u64,
    image_size: Size2D<u32>,
    image_format: ImageFormat,
    is_opaque: bool,
}

// A mapped buffer can be shared between threads safely (it's read-only).
unsafe impl Send for MappedBuffer {}

impl MappedBuffer {
    /// Returns the mapped bytes as a slice.
    const fn slice(&'_ self) -> &'_ [u8] {
        // Safety: Pointer is from wgpu, and we only use it here
        unsafe { std::slice::from_raw_parts(self.data.as_ptr(), self.len as usize) }
    }

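    /// The stride (in bytes) of one row of the mapped image data. Texture-to-buffer
    /// copies pad each row up to `COPY_BYTES_PER_ROW_ALIGNMENT`, so the mapped data
    /// uses this padded stride rather than `width * bytes_per_pixel`.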
    fn stride(&self) -> u32 {
        (self.image_size.width * self.image_format.bytes_per_pixel() as u32)
            .next_multiple_of(COPY_BYTES_PER_ROW_ALIGNMENT)
    }
}

#[derive(Debug)]
enum StagingBufferState {
    /// The initial state: the buffer has not been created yet; only an
    /// id has been reserved for it.
    Unassigned,
    /// The buffer is allocated on the WGPU device and is ready to be used.
    Available(Buffer),
    /// `mapAsync` is currently running on the buffer.
    Mapping(Buffer),
    /// The buffer is currently mapped.
    Mapped(MappedBuffer),
}

/// A staging buffer used for texture-to-buffer-to-CPU copy operations.
#[derive(Debug)]
struct StagingBuffer {
    global: Arc<Global>,
    buffer_id: BufferId,
    state: StagingBufferState,
}

// A [`StagingBuffer`] is only used for reading (never for writing),
// so it is safe to share between threads.
unsafe impl Sync for StagingBuffer {}

impl StagingBuffer {
    fn new(global: Arc<Global>, buffer_id: BufferId) -> Self {
        Self {
            global,
            buffer_id,
            state: StagingBufferState::Unassigned,
        }
    }

    /// Returns true if the buffer is currently mapped.
    const fn is_mapped(&self) -> bool {
        matches!(self.state, StagingBufferState::Mapped(..))
    }

    /// Returns true if the buffer can be used directly with the provided config
    /// without any additional work.
    fn is_available_and_has_compatible_config(&self, config: &ContextConfiguration) -> bool {
        let StagingBufferState::Available(buffer) = &self.state else {
            return false;
        };
        buffer.has_compatible_config(config)
    }

    /// Returns true if the buffer is neither mapped nor in the process of being mapped.
    const fn needs_assignment(&self) -> bool {
        matches!(
            self.state,
            StagingBufferState::Unassigned | StagingBufferState::Available(_)
        )
    }

    /// Makes the buffer available, unmapping it if its configuration is still
    /// compatible, or destroying and recreating it if it is not.
    fn ensure_available(&mut self, config: &ContextConfiguration) -> Result<(), CreateBufferError> {
        let recreate = match &self.state {
            StagingBufferState::Unassigned => true,
            StagingBufferState::Available(buffer) |
            StagingBufferState::Mapping(buffer) |
            StagingBufferState::Mapped(MappedBuffer { buffer, .. }) => {
                if buffer.has_compatible_config(config) {
                    // The existing allocation can be reused; just make sure it is unmapped.
                    let _ = self.global.buffer_unmap(self.buffer_id);
                    false
                } else {
                    // Incompatible configuration: drop the buffer and recreate it below.
                    self.global.buffer_drop(self.buffer_id);
                    true
                }
            },
        };
        if recreate {
            let buffer_size = config.buffer_size();
            let (_, error) = self.global.device_create_buffer(
                config.device_id,
                &BufferDescriptor {
                    label: None,
                    size: buffer_size,
                    usage: BufferUsages::MAP_READ | BufferUsages::COPY_DST,
                    mapped_at_creation: false,
                },
                Some(self.buffer_id),
            );
            if let Some(error) = error {
                return Err(error);
            };
            self.state = StagingBufferState::Available(Buffer {
                device_id: config.device_id,
                queue_id: config.queue_id,
                size: buffer_size,
            });
        }
        Ok(())
    }

    /// Makes the buffer available and prepares a command encoder
    /// that will copy the texture into this staging buffer.
    ///
    /// The caller must submit the returned command buffer to the queue.
    fn prepare_load_texture_command_buffer(
        &mut self,
        texture_id: TextureId,
        encoder_id: CommandEncoderId,
        config: &ContextConfiguration,
    ) -> Result<CommandBufferId, Box<dyn std::error::Error>> {
        self.ensure_available(config)?;
        let StagingBufferState::Available(buffer) = &self.state else {
            unreachable!("Should be made available by `ensure_available`")
        };
        let device_id = buffer.device_id;
        let command_descriptor = CommandEncoderDescriptor { label: None };
        let (encoder_id, error) = self.global.device_create_command_encoder(
            device_id,
            &command_descriptor,
            Some(encoder_id),
        );
        if let Some(error) = error {
            return Err(error.into());
        };
        let buffer_info = TexelCopyBufferInfo {
            buffer: self.buffer_id,
            layout: TexelCopyBufferLayout {
                offset: 0,
                bytes_per_row: Some(config.stride()),
                rows_per_image: None,
            },
        };
        let texture_info = TexelCopyTextureInfo {
            texture: texture_id,
            mip_level: 0,
            origin: Origin3d::ZERO,
            aspect: TextureAspect::All,
        };
        let copy_size = Extent3d {
            width: config.size.width,
            height: config.size.height,
            depth_or_array_layers: 1,
        };
        self.global.command_encoder_copy_texture_to_buffer(
            encoder_id,
            &texture_info,
            &buffer_info,
            &copy_size,
        )?;
        let (command_buffer_id, error) = self
            .global
            .command_encoder_finish(encoder_id, &CommandBufferDescriptor::default());
        if let Some(error) = error {
            return Err(error.into());
        };
        Ok(command_buffer_id)
    }

    /// Unmaps the buffer or cancels a mapping operation if one is in progress.
    fn unmap(&mut self) {
        match self.state {
            StagingBufferState::Unassigned | StagingBufferState::Available(_) => {},
            StagingBufferState::Mapping(buffer) |
            StagingBufferState::Mapped(MappedBuffer { buffer, .. }) => {
                let _ = self.global.buffer_unmap(self.buffer_id);
                self.state = StagingBufferState::Available(buffer)
            },
        }
    }

    /// Obtains a snapshot from this buffer if it is mapped, or returns `None` if it is not.
    fn snapshot(&self) -> Option<Snapshot> {
        let StagingBufferState::Mapped(mapped) = &self.state else {
            return None;
        };
        let format = match mapped.image_format {
            ImageFormat::RGBA8 => SnapshotPixelFormat::RGBA,
            ImageFormat::BGRA8 => SnapshotPixelFormat::BGRA,
            _ => unreachable!("GPUCanvasContext does not support other formats per spec"),
        };
        let alpha_mode = if mapped.is_opaque {
            SnapshotAlphaMode::AsOpaque {
                premultiplied: false,
            }
        } else {
            SnapshotAlphaMode::Transparent {
                premultiplied: true,
            }
        };
        let padded_byte_width = mapped.stride();
        let data = mapped.slice();
        let bytes_per_pixel = mapped.image_format.bytes_per_pixel() as usize;
        let mut result_unpadded =
            Vec::<u8>::with_capacity(mapped.image_size.area() as usize * bytes_per_pixel);
        // Strip the per-row padding added to satisfy the copy alignment requirement.
        for row in 0..mapped.image_size.height {
            let start = (row * padded_byte_width).try_into().ok()?;
            result_unpadded
                .extend(&data[start..start + mapped.image_size.width as usize * bytes_per_pixel]);
        }
        let mut snapshot =
            Snapshot::from_vec(mapped.image_size, format, alpha_mode, result_unpadded);
        if mapped.is_opaque {
            snapshot.transform(SnapshotAlphaMode::Opaque, snapshot.format())
        }
        Some(snapshot)
    }
}

impl Drop for StagingBuffer {
    fn drop(&mut self) {
        match self.state {
            StagingBufferState::Unassigned => {},
            StagingBufferState::Available(_) |
            StagingBufferState::Mapping(_) |
            StagingBufferState::Mapped(_) => {
                self.global.buffer_drop(self.buffer_id);
            },
        }
    }
}

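/// Implementation of WebRender's [`WebrenderExternalImageApi`] for WebGPU canvases:
/// WebRender locks a context's currently presented staging buffer to read its pixels
/// and unlocks it when it is done compositing.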
#[derive(Default)]
pub struct WGPUExternalImages {
    pub images: WGPUImageMap,
    pub locked_ids: FxHashMap<WebGPUContextId, PresentationStagingBuffer>,
}

impl WebrenderExternalImageApi for WGPUExternalImages {
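    /// Called when WebRender needs the pixel data for this context. Returns the
    /// currently presented mapped staging buffer, keeping it locked until `unlock`.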
    fn lock(&mut self, id: u64) -> (ExternalImageSource<'_>, Size2D<i32>) {
        let id = WebGPUContextId(id);
        let presentation = {
            let mut webgpu_contexts = self.images.lock().unwrap();
            webgpu_contexts
                .get_mut(&id)
                .and_then(|context_data| context_data.presentation.clone())
        };
        let Some(presentation) = presentation else {
            return (ExternalImageSource::Invalid, Size2D::zero());
        };
        self.locked_ids.insert(id, presentation);
        let presentation = self.locked_ids.get(&id).unwrap();
        let StagingBufferState::Mapped(mapped_buffer) = &presentation.staging_buffer.state else {
            unreachable!("Presentation staging buffer should be mapped")
        };
        let size = mapped_buffer.image_size;
        (
            ExternalImageSource::RawData(mapped_buffer.slice()),
            size.cast().cast_unit(),
        )
    }

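    /// Called when WebRender is done with the pixels. Dropping our reference here
    /// allows the staging buffer to be returned to the context's pool.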
    fn unlock(&mut self, id: u64) {
        let id = WebGPUContextId(id);
        let Some(presentation) = self.locked_ids.remove(&id) else {
            return;
        };
        let mut webgpu_contexts = self.images.lock().unwrap();
        if let Some(context_data) = webgpu_contexts.get_mut(&id) {
            // This returns the staging buffer to the pool if a newer presentation has replaced it.
            presentation.maybe_destroy(context_data);
        } else {
            // This will not free this buffer id in script,
            // but that's okay because we still have many free ids.
            drop(presentation);
        }
    }
}

/// The staging buffer currently used for presenting a given epoch.
///
/// Users should call [`ContextData::replace_presentation`] when done.
#[derive(Clone)]
pub struct PresentationStagingBuffer {
    epoch: Epoch,
    staging_buffer: Arc<StagingBuffer>,
}

impl PresentationStagingBuffer {
    fn new(epoch: Epoch, staging_buffer: StagingBuffer) -> Self {
        Self {
            epoch,
            staging_buffer: Arc::new(staging_buffer),
        }
    }

    /// If the internal staging buffer is not shared,
    /// unmap it and call [`ContextData::return_staging_buffer`] with it.
    fn maybe_destroy(self, context_data: &mut ContextData) {
        if let Some(mut staging_buffer) = Arc::into_inner(self.staging_buffer) {
            staging_buffer.unmap();
            context_data.return_staging_buffer(staging_buffer);
        }
    }
}

/// The embedder-process-side representation of the `GPUCanvasContext` in script.
pub struct ContextData {
    /// The [`ImageKey`] of the WebRender image associated with this context.
    image_key: ImageKey,
    /// Staging buffers that are not actively used.
    ///
    /// Staging buffers here are either [`StagingBufferState::Unassigned`] or [`StagingBufferState::Available`].
    /// They are removed from here while they are being mapped or are already mapped.
    inactive_staging_buffers: ArrayVec<StagingBuffer, PRESENTATION_BUFFER_COUNT>,
    /// The [`PresentationStagingBuffer`] of the most recent presentation. This will
    /// be `None` directly after initialization, as clearing is handled completely in
    /// the `ScriptThread`.
    presentation: Option<PresentationStagingBuffer>,
    /// The next epoch to be used.
    next_epoch: Epoch,
}

impl ContextData {
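    /// Creates a new [`ContextData`] for the given WebRender image, turning the
    /// preallocated buffer ids into unassigned staging buffers.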
    fn new(
        image_key: ImageKey,
        global: &Arc<Global>,
        buffer_ids: ArrayVec<id::BufferId, PRESENTATION_BUFFER_COUNT>,
    ) -> Self {
        Self {
            image_key,
            inactive_staging_buffers: buffer_ids
                .iter()
                .map(|buffer_id| StagingBuffer::new(global.clone(), *buffer_id))
                .collect(),
            presentation: None,
            next_epoch: Epoch(1),
        }
    }

    /// Returns `None` if there are no inactive staging buffers or if making one available fails.
    fn get_or_make_available_buffer(
        &'_ mut self,
        config: &ContextConfiguration,
    ) -> Option<StagingBuffer> {
        self.inactive_staging_buffers
            .iter()
            // Prefer a buffer that is already available with a compatible configuration.
            .position(|staging_buffer| {
                staging_buffer.is_available_and_has_compatible_config(config)
            })
            // Fall back to the first inactive staging buffer that needs assignment.
            .or_else(|| {
                self.inactive_staging_buffers
                    .iter()
                    .position(|staging_buffer| staging_buffer.needs_assignment())
            })
            // Or just use the first one.
            .or_else(|| {
                if self.inactive_staging_buffers.is_empty() {
                    None
                } else {
                    Some(0)
                }
            })
            .and_then(|index| {
                let mut staging_buffer = self.inactive_staging_buffers.remove(index);
                if staging_buffer.ensure_available(config).is_ok() {
                    Some(staging_buffer)
                } else {
                    // If we fail to make it available, return it to the list of inactive staging buffers.
                    self.inactive_staging_buffers.push(staging_buffer);
                    None
                }
            })
    }

    /// Destroys the context that this [`ContextData`] represents,
    /// freeing all of its buffers and deleting the associated WebRender image.
    fn destroy(
        self,
        script_sender: &IpcSender<WebGPUMsg>,
        compositor_api: &CrossProcessCompositorApi,
    ) {
        // This frees the id in the `ScriptThread`.
        for staging_buffer in self.inactive_staging_buffers {
            if let Err(error) = script_sender.send(WebGPUMsg::FreeBuffer(staging_buffer.buffer_id))
            {
                warn!(
                    "Unable to send FreeBuffer({:?}) ({error})",
                    staging_buffer.buffer_id
                );
            };
        }
        compositor_api.delete_image(self.image_key);
    }

    /// Returns the next [`Epoch`] to use and advances the internal counter.
    fn next_epoch(&mut self) -> Epoch {
        let epoch = self.next_epoch;
        self.next_epoch.next();
        epoch
    }

    /// If the given [`PresentationStagingBuffer`] is for a newer presentation, replaces the
    /// existing one. The stale one is passed to [`PresentationStagingBuffer::maybe_destroy`],
    /// which returns its staging buffer to the pool once it is no longer shared.
    fn replace_presentation(&mut self, presentation: PresentationStagingBuffer) {
        let stale_presentation = if presentation.epoch >=
            self.presentation
                .as_ref()
                .map(|p| p.epoch)
                .unwrap_or(Epoch(0))
        {
            self.presentation.replace(presentation)
        } else {
            Some(presentation)
        };
        if let Some(stale_presentation) = stale_presentation {
            stale_presentation.maybe_destroy(self);
        }
    }

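    /// Drops the current presentation, if any, returning its staging buffer to the
    /// pool once it is no longer shared.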
    fn clear_presentation(&mut self) {
        if let Some(stale_presentation) = self.presentation.take() {
            stale_presentation.maybe_destroy(self);
        }
    }

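    /// Returns a staging buffer to the pool of inactive staging buffers.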
    fn return_staging_buffer(&mut self, staging_buffer: StagingBuffer) {
        self.inactive_staging_buffers.push(staging_buffer)
    }
}

impl crate::WGPU {
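    /// Creates the main-process state for a new `GPUCanvasContext`: registers the
    /// WebRender external image for it and records the preallocated staging buffer ids.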
    pub(crate) fn create_context(
        &self,
        context_id: WebGPUContextId,
        image_key: ImageKey,
        size: DeviceIntSize,
        buffer_ids: ArrayVec<id::BufferId, PRESENTATION_BUFFER_COUNT>,
    ) {
        let context_data = ContextData::new(image_key, &self.global, buffer_ids);
        self.compositor_api.add_image(
            image_key,
            ImageDescriptor {
                format: ImageFormat::BGRA8,
                size,
                stride: None,
                offset: 0,
                flags: ImageDescriptorFlags::empty(),
            },
            SerializableImageData::External(image_data(context_id)),
        );
        assert!(
            self.wgpu_image_map
                .lock()
                .unwrap()
                .insert(context_id, context_data)
                .is_none(),
            "Context should be created only once!"
        );
    }

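    /// Sends a [`Snapshot`] of the context's contents back over `sender`. If there is a
    /// pending texture, it is downloaded (and becomes the new presentation) first;
    /// otherwise the current presentation, or an empty snapshot, is used.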
    pub(crate) fn get_image(
        &self,
        context_id: WebGPUContextId,
        pending_texture: Option<PendingTexture>,
        sender: IpcSender<IpcSnapshot>,
    ) {
        let mut webgpu_contexts = self.wgpu_image_map.lock().unwrap();
        let context_data = webgpu_contexts.get_mut(&context_id).unwrap();
        if let Some(PendingTexture {
            texture_id,
            encoder_id,
            configuration,
        }) = pending_texture
        {
            let Some(staging_buffer) = context_data.get_or_make_available_buffer(&configuration)
            else {
                warn!("Failure obtaining available staging buffer");
                sender
                    .send(Snapshot::cleared(configuration.size).as_ipc())
                    .unwrap();
                return;
            };

            let epoch = context_data.next_epoch();
            let wgpu_image_map = self.wgpu_image_map.clone();
            let sender = sender.clone();
            drop(webgpu_contexts);
            self.texture_download(
                texture_id,
                encoder_id,
                staging_buffer,
                configuration,
                move |staging_buffer| {
                    let mut webgpu_contexts = wgpu_image_map.lock().unwrap();
                    let context_data = webgpu_contexts.get_mut(&context_id).unwrap();
                    sender
                        .send(
                            staging_buffer
                                .snapshot()
                                .unwrap_or_else(|| Snapshot::cleared(configuration.size))
                                .as_ipc(),
                        )
                        .unwrap();
                    if staging_buffer.is_mapped() {
                        context_data.replace_presentation(PresentationStagingBuffer::new(
                            epoch,
                            staging_buffer,
                        ));
                    } else {
                        // Mapping failed: return the buffer to the pool.
                        context_data.return_staging_buffer(staging_buffer);
                    }
                },
            );
        } else {
            sender
                .send(
                    context_data
                        .presentation
                        .as_ref()
                        .and_then(|presentation_staging_buffer| {
                            presentation_staging_buffer.staging_buffer.snapshot()
                        })
                        .unwrap_or_else(Snapshot::empty)
                        .as_ipc(),
                )
                .unwrap();
        }
    }

    /// Reads the texture into a staging buffer, maps it to CPU memory, and updates the
    /// image in WebRender when complete.
    pub(crate) fn present(
        &self,
        context_id: WebGPUContextId,
        pending_texture: Option<PendingTexture>,
        size: Size2D<u32>,
        canvas_epoch: Epoch,
    ) {
        let mut webgpu_contexts = self.wgpu_image_map.lock().unwrap();
        let context_data = webgpu_contexts.get_mut(&context_id).unwrap();
        let image_key = context_data.image_key;
        let Some(PendingTexture {
            texture_id,
            encoder_id,
            configuration,
        }) = pending_texture
        else {
            // No pending texture: clear the presentation and update the
            // WebRender image descriptor to the new size.
            context_data.clear_presentation();
            self.compositor_api.update_image(
                image_key,
                ImageDescriptor {
                    format: ImageFormat::BGRA8,
                    size: size.cast_unit().cast(),
                    stride: None,
                    offset: 0,
                    flags: ImageDescriptorFlags::empty(),
                },
                SerializableImageData::External(image_data(context_id)),
                Some(canvas_epoch),
            );
            return;
        };
        let Some(staging_buffer) = context_data.get_or_make_available_buffer(&configuration) else {
            warn!("Failure obtaining available staging buffer");
            context_data.clear_presentation();
            self.compositor_api.update_image(
                image_key,
                configuration.into(),
                SerializableImageData::External(image_data(context_id)),
                Some(canvas_epoch),
            );
            return;
        };
        let epoch = context_data.next_epoch();
        let wgpu_image_map = self.wgpu_image_map.clone();
        let compositor_api = self.compositor_api.clone();
        drop(webgpu_contexts);
        self.texture_download(
            texture_id,
            encoder_id,
            staging_buffer,
            configuration,
            move |staging_buffer| {
                let mut webgpu_contexts = wgpu_image_map.lock().unwrap();
                let context_data = webgpu_contexts.get_mut(&context_id).unwrap();
                if staging_buffer.is_mapped() {
                    context_data.replace_presentation(PresentationStagingBuffer::new(
                        epoch,
                        staging_buffer,
                    ));
                } else {
                    // Mapping failed: return the buffer and drop the stale presentation.
                    context_data.return_staging_buffer(staging_buffer);
                    context_data.clear_presentation();
                }
                // Update the image in WebRender.
                compositor_api.update_image(
                    image_key,
                    configuration.into(),
                    SerializableImageData::External(image_data(context_id)),
                    Some(canvas_epoch),
                );
            },
        );
    }

    /// Copies data from the provided texture, using `encoder_id`, into the provided
    /// [`StagingBuffer`] and asynchronously maps it.
    ///
    /// `callback` is guaranteed to be called, and receives the [`StagingBuffer`] in the
    /// [`StagingBufferState::Mapped`] state on success or [`StagingBufferState::Available`]
    /// state on failure.
    fn texture_download(
        &self,
        texture_id: TextureId,
        encoder_id: CommandEncoderId,
        mut staging_buffer: StagingBuffer,
        config: ContextConfiguration,
        callback: impl FnOnce(StagingBuffer) + Send + 'static,
    ) {
        let Ok(command_buffer_id) =
            staging_buffer.prepare_load_texture_command_buffer(texture_id, encoder_id, &config)
        else {
            return callback(staging_buffer);
        };
        let StagingBufferState::Available(buffer) = &staging_buffer.state else {
            unreachable!("`prepare_load_texture_command_buffer` should make buffer available")
        };
        let buffer_id = staging_buffer.buffer_id;
        let buffer_size = buffer.size;
        {
            let _guard = self.poller.lock();
            let result = self
                .global
                .queue_submit(buffer.queue_id, &[command_buffer_id]);
            if result.is_err() {
                return callback(staging_buffer);
            }
        }
        staging_buffer.state = match staging_buffer.state {
            StagingBufferState::Available(buffer) => StagingBufferState::Mapping(buffer),
            _ => unreachable!("`prepare_load_texture_command_buffer` should make buffer available"),
        };
        let map_callback = {
            let token = self.poller.token();
            Box::new(move |result: Result<(), BufferAccessError>| {
                drop(token);
                staging_buffer.state = match staging_buffer.state {
                    StagingBufferState::Mapping(buffer) => {
                        if let Ok((data, len)) = result.and_then(|_| {
                            staging_buffer.global.buffer_get_mapped_range(
                                staging_buffer.buffer_id,
                                0,
                                Some(buffer.size),
                            )
                        }) {
                            StagingBufferState::Mapped(MappedBuffer {
                                buffer,
                                data,
                                len,
                                image_size: config.size,
                                image_format: config.format,
                                is_opaque: config.is_opaque,
                            })
                        } else {
                            StagingBufferState::Available(buffer)
                        }
                    },
                    _ => {
                        unreachable!("Mapping buffer should have StagingBufferState::Mapping state")
                    },
                };
                callback(staging_buffer);
            })
        };
        let map_op = BufferMapOperation {
            host: HostMap::Read,
            callback: Some(map_callback),
        };
        // error is handled by map_callback
        let _ = self
            .global
            .buffer_map_async(buffer_id, 0, Some(buffer_size), map_op);
        self.poller.wake();
    }

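    /// Removes the context and destroys its [`ContextData`], freeing its staging
    /// buffers and deleting the associated WebRender image.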
    pub(crate) fn destroy_context(&mut self, context_id: WebGPUContextId) {
        self.wgpu_image_map
            .lock()
            .unwrap()
            .remove(&context_id)
            .unwrap()
            .destroy(&self.script_sender, &self.compositor_api);
    }
}