wgpu_hal/vulkan/device.rs

use alloc::{
    borrow::{Cow, ToOwned as _},
    collections::BTreeMap,
    ffi::CString,
    sync::Arc,
    vec::Vec,
};
use core::{
    ffi::CStr,
    mem::{self, MaybeUninit},
    num::NonZeroU32,
    ptr,
};

use arrayvec::ArrayVec;
use ash::{ext, khr, vk};
use hashbrown::hash_map::Entry;
use parking_lot::Mutex;

use super::{conv, RawTlasInstance};
use crate::TlasInstance;

impl super::DeviceShared {
    /// Set the name of `object` to `name`.
    ///
    /// If `name` contains an interior null byte, then the name set will be truncated to that byte.
    ///
    /// # Safety
    ///
    /// It must be valid to set `object`'s debug name
    pub(super) unsafe fn set_object_name(&self, object: impl vk::Handle, name: &str) {
        let Some(extension) = self.extension_fns.debug_utils.as_ref() else {
            return;
        };

        // Keep variables outside the if-else block to ensure they do not
        // go out of scope while we hold a pointer to them
        let mut buffer: [u8; 64] = [0u8; 64];
        let buffer_vec: Vec<u8>;

        // Append a null terminator to the string
        let name_bytes = if name.len() < buffer.len() {
            // Common case, string is very small. Allocate a copy on the stack.
            buffer[..name.len()].copy_from_slice(name.as_bytes());
            // Add null terminator
            buffer[name.len()] = 0;
            &buffer[..name.len() + 1]
        } else {
            // Less common case, the string is large.
            // This requires a heap allocation.
            buffer_vec = name
                .as_bytes()
                .iter()
                .cloned()
                .chain(core::iter::once(0))
                .collect();
            &buffer_vec
        };

        let name = CStr::from_bytes_until_nul(name_bytes).expect("We have added a null byte");

        let _result = unsafe {
            extension.set_debug_utils_object_name(
                &vk::DebugUtilsObjectNameInfoEXT::default()
                    .object_handle(object)
                    .object_name(name),
            )
        };
    }

    pub fn make_render_pass(
        &self,
        key: super::RenderPassKey,
    ) -> Result<vk::RenderPass, crate::DeviceError> {
        Ok(match self.render_passes.lock().entry(key) {
            Entry::Occupied(e) => *e.get(),
            Entry::Vacant(e) => {
                let super::RenderPassKey {
                    ref colors,
                    ref depth_stencil,
                    sample_count,
                    multiview,
                } = *e.key();

                let mut vk_attachments = Vec::new();
                let mut color_refs = Vec::with_capacity(colors.len());
                let mut resolve_refs = Vec::with_capacity(color_refs.capacity());
                let mut ds_ref = None;
                let samples = vk::SampleCountFlags::from_raw(sample_count);
                let unused = vk::AttachmentReference {
                    attachment: vk::ATTACHMENT_UNUSED,
                    layout: vk::ImageLayout::UNDEFINED,
                };
                for cat in colors.iter() {
                    let (color_ref, resolve_ref) =
                        if let Some(super::ColorAttachmentKey { base, resolve }) = cat {
                            let super::AttachmentKey {
                                format,
                                layout,
                                ops,
                            } = *base;

                            let color_ref = vk::AttachmentReference {
                                attachment: vk_attachments.len() as u32,
                                layout,
                            };
                            vk_attachments.push({
                                let (load_op, store_op) = conv::map_attachment_ops(ops);
                                vk::AttachmentDescription::default()
                                    .format(format)
                                    .samples(samples)
                                    .load_op(load_op)
                                    .store_op(store_op)
                                    .initial_layout(layout)
                                    .final_layout(layout)
                            });
                            let resolve_ref = if let Some(rat) = resolve {
                                let super::AttachmentKey {
                                    format,
                                    layout,
                                    ops,
                                } = *rat;

                                let (load_op, store_op) = conv::map_attachment_ops(ops);
                                let vk_attachment = vk::AttachmentDescription::default()
                                    .format(format)
                                    .samples(vk::SampleCountFlags::TYPE_1)
                                    .load_op(load_op)
                                    .store_op(store_op)
                                    .initial_layout(layout)
                                    .final_layout(layout);
                                vk_attachments.push(vk_attachment);

                                vk::AttachmentReference {
                                    attachment: vk_attachments.len() as u32 - 1,
                                    layout,
                                }
                            } else {
                                unused
                            };

                            (color_ref, resolve_ref)
                        } else {
                            (unused, unused)
                        };

                    color_refs.push(color_ref);
                    resolve_refs.push(resolve_ref);
                }

                if let Some(ds) = depth_stencil {
                    let super::DepthStencilAttachmentKey {
                        ref base,
                        stencil_ops,
                    } = *ds;

                    let super::AttachmentKey {
                        format,
                        layout,
                        ops,
                    } = *base;

                    ds_ref = Some(vk::AttachmentReference {
                        attachment: vk_attachments.len() as u32,
                        layout,
                    });
                    let (load_op, store_op) = conv::map_attachment_ops(ops);
                    let (stencil_load_op, stencil_store_op) = conv::map_attachment_ops(stencil_ops);
                    let vk_attachment = vk::AttachmentDescription::default()
                        .format(format)
                        .samples(samples)
                        .load_op(load_op)
                        .store_op(store_op)
                        .stencil_load_op(stencil_load_op)
                        .stencil_store_op(stencil_store_op)
                        .initial_layout(layout)
                        .final_layout(layout);
                    vk_attachments.push(vk_attachment);
                }

                let vk_subpasses = [{
                    let mut vk_subpass = vk::SubpassDescription::default()
                        .pipeline_bind_point(vk::PipelineBindPoint::GRAPHICS)
                        .color_attachments(&color_refs)
                        .resolve_attachments(&resolve_refs);

                    if self
                        .workarounds
                        .contains(super::Workarounds::EMPTY_RESOLVE_ATTACHMENT_LISTS)
                        && resolve_refs.is_empty()
                    {
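                        // Some drivers mishandle a non-null resolve attachment pointer
                        // paired with a length of zero, so pass a true null pointer instead.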
                        vk_subpass.p_resolve_attachments = ptr::null();
                    }

                    if let Some(ref reference) = ds_ref {
                        vk_subpass = vk_subpass.depth_stencil_attachment(reference)
                    }
                    vk_subpass
                }];

                let mut vk_info = vk::RenderPassCreateInfo::default()
                    .attachments(&vk_attachments)
                    .subpasses(&vk_subpasses);

                let mut multiview_info;
                let mask;
                if let Some(multiview) = multiview {
                    // Sanity checks, better to panic here than cause a driver crash
                    assert!(multiview.get() <= 8);
                    assert!(multiview.get() > 1);

                    // Right now we enable all bits on the view masks and correlation masks.
                    // This means we're rendering to all views in the subpass, and that all views
                    // can be rendered concurrently.
                    mask = [(1 << multiview.get()) - 1];

                    // On Vulkan 1.1 or later, this is an alias for core functionality
                    multiview_info = vk::RenderPassMultiviewCreateInfoKHR::default()
                        .view_masks(&mask)
                        .correlation_masks(&mask);
                    vk_info = vk_info.push_next(&mut multiview_info);
                }

                let raw = unsafe {
                    self.raw
                        .create_render_pass(&vk_info, None)
                        .map_err(super::map_host_device_oom_err)?
                };

                *e.insert(raw)
            }
        })
    }

    fn make_memory_ranges<'a, I: 'a + Iterator<Item = crate::MemoryRange>>(
        &self,
        buffer: &'a super::Buffer,
        ranges: I,
    ) -> Option<impl 'a + Iterator<Item = vk::MappedMemoryRange>> {
        let block = buffer.block.as_ref()?.lock();
        let mask = self.private_caps.non_coherent_map_mask;
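        // Ranges passed to vkFlushMappedMemoryRanges / vkInvalidateMappedMemoryRanges must be
        // aligned to `nonCoherentAtomSize`: round the offset down and the size up to that boundary.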
        Some(ranges.map(move |range| {
            vk::MappedMemoryRange::default()
                .memory(*block.memory())
                .offset((block.offset() + range.start) & !mask)
                .size((range.end - range.start + mask) & !mask)
        }))
    }
}

impl gpu_alloc::MemoryDevice<vk::DeviceMemory> for super::DeviceShared {
    unsafe fn allocate_memory(
        &self,
        size: u64,
        memory_type: u32,
        flags: gpu_alloc::AllocationFlags,
    ) -> Result<vk::DeviceMemory, gpu_alloc::OutOfMemory> {
        let mut info = vk::MemoryAllocateInfo::default()
            .allocation_size(size)
            .memory_type_index(memory_type);

        let mut info_flags;

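        // A device-addressable allocation was requested; buffers bound to it may be
        // queried for a GPU address, which requires the DEVICE_ADDRESS allocate flag.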
        if flags.contains(gpu_alloc::AllocationFlags::DEVICE_ADDRESS) {
            info_flags = vk::MemoryAllocateFlagsInfo::default()
                .flags(vk::MemoryAllocateFlags::DEVICE_ADDRESS);
            info = info.push_next(&mut info_flags);
        }

        match unsafe { self.raw.allocate_memory(&info, None) } {
            Ok(memory) => {
                self.memory_allocations_counter.add(1);
                Ok(memory)
            }
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_alloc::OutOfMemory::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
                Err(gpu_alloc::OutOfMemory::OutOfHostMemory)
            }
            // We don't use VK_KHR_external_memory
            // VK_ERROR_INVALID_EXTERNAL_HANDLE
            // We don't use VK_KHR_buffer_device_address
            // VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR
            Err(err) => handle_unexpected(err),
        }
    }

    unsafe fn deallocate_memory(&self, memory: vk::DeviceMemory) {
        self.memory_allocations_counter.sub(1);

        unsafe { self.raw.free_memory(memory, None) };
    }

    unsafe fn map_memory(
        &self,
        memory: &mut vk::DeviceMemory,
        offset: u64,
        size: u64,
    ) -> Result<ptr::NonNull<u8>, gpu_alloc::DeviceMapError> {
        match unsafe {
            self.raw
                .map_memory(*memory, offset, size, vk::MemoryMapFlags::empty())
        } {
            Ok(ptr) => Ok(ptr::NonNull::new(ptr.cast::<u8>())
                .expect("Pointer to memory mapping must not be null")),
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_alloc::DeviceMapError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
                Err(gpu_alloc::DeviceMapError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_MEMORY_MAP_FAILED) => Err(gpu_alloc::DeviceMapError::MapFailed),
            Err(err) => handle_unexpected(err),
        }
    }

    unsafe fn unmap_memory(&self, memory: &mut vk::DeviceMemory) {
        unsafe { self.raw.unmap_memory(*memory) };
    }

    unsafe fn invalidate_memory_ranges(
        &self,
        _ranges: &[gpu_alloc::MappedMemoryRange<'_, vk::DeviceMemory>],
    ) -> Result<(), gpu_alloc::OutOfMemory> {
        // should never be called
        unimplemented!()
    }

    unsafe fn flush_memory_ranges(
        &self,
        _ranges: &[gpu_alloc::MappedMemoryRange<'_, vk::DeviceMemory>],
    ) -> Result<(), gpu_alloc::OutOfMemory> {
        // should never be called
        unimplemented!()
    }
}

impl
    gpu_descriptor::DescriptorDevice<vk::DescriptorSetLayout, vk::DescriptorPool, vk::DescriptorSet>
    for super::DeviceShared
{
    unsafe fn create_descriptor_pool(
        &self,
        descriptor_count: &gpu_descriptor::DescriptorTotalCount,
        max_sets: u32,
        flags: gpu_descriptor::DescriptorPoolCreateFlags,
    ) -> Result<vk::DescriptorPool, gpu_descriptor::CreatePoolError> {
        //Note: ignoring other types, since they can't appear here
        let unfiltered_counts = [
            (vk::DescriptorType::SAMPLER, descriptor_count.sampler),
            (
                vk::DescriptorType::SAMPLED_IMAGE,
                descriptor_count.sampled_image,
            ),
            (
                vk::DescriptorType::STORAGE_IMAGE,
                descriptor_count.storage_image,
            ),
            (
                vk::DescriptorType::UNIFORM_BUFFER,
                descriptor_count.uniform_buffer,
            ),
            (
                vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC,
                descriptor_count.uniform_buffer_dynamic,
            ),
            (
                vk::DescriptorType::STORAGE_BUFFER,
                descriptor_count.storage_buffer,
            ),
            (
                vk::DescriptorType::STORAGE_BUFFER_DYNAMIC,
                descriptor_count.storage_buffer_dynamic,
            ),
            (
                vk::DescriptorType::ACCELERATION_STRUCTURE_KHR,
                descriptor_count.acceleration_structure,
            ),
        ];

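        // Vulkan does not allow pool sizes with a zero descriptor count, so drop the unused types.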
        let filtered_counts = unfiltered_counts
            .iter()
            .cloned()
            .filter(|&(_, count)| count != 0)
            .map(|(ty, count)| vk::DescriptorPoolSize {
                ty,
                descriptor_count: count,
            })
            .collect::<ArrayVec<_, 8>>();

        let mut vk_flags =
            if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND) {
                vk::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND
            } else {
                vk::DescriptorPoolCreateFlags::empty()
            };
        if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET) {
            vk_flags |= vk::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET;
        }
        let vk_info = vk::DescriptorPoolCreateInfo::default()
            .max_sets(max_sets)
            .flags(vk_flags)
            .pool_sizes(&filtered_counts);

        match unsafe { self.raw.create_descriptor_pool(&vk_info, None) } {
            Ok(pool) => Ok(pool),
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
                Err(gpu_descriptor::CreatePoolError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_descriptor::CreatePoolError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_FRAGMENTATION) => {
                Err(gpu_descriptor::CreatePoolError::Fragmentation)
            }
            Err(err) => handle_unexpected(err),
        }
    }

    unsafe fn destroy_descriptor_pool(&self, pool: vk::DescriptorPool) {
        unsafe { self.raw.destroy_descriptor_pool(pool, None) }
    }

    unsafe fn alloc_descriptor_sets<'a>(
        &self,
        pool: &mut vk::DescriptorPool,
        layouts: impl ExactSizeIterator<Item = &'a vk::DescriptorSetLayout>,
        sets: &mut impl Extend<vk::DescriptorSet>,
    ) -> Result<(), gpu_descriptor::DeviceAllocationError> {
        let result = unsafe {
            self.raw.allocate_descriptor_sets(
                &vk::DescriptorSetAllocateInfo::default()
                    .descriptor_pool(*pool)
                    .set_layouts(
                        &smallvec::SmallVec::<[vk::DescriptorSetLayout; 32]>::from_iter(
                            layouts.cloned(),
                        ),
                    ),
            )
        };

        match result {
            Ok(vk_sets) => {
                sets.extend(vk_sets);
                Ok(())
            }
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY)
            | Err(vk::Result::ERROR_OUT_OF_POOL_MEMORY) => {
                Err(gpu_descriptor::DeviceAllocationError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_descriptor::DeviceAllocationError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_FRAGMENTED_POOL) => {
                Err(gpu_descriptor::DeviceAllocationError::FragmentedPool)
            }
            Err(err) => handle_unexpected(err),
        }
    }

    unsafe fn dealloc_descriptor_sets<'a>(
        &self,
        pool: &mut vk::DescriptorPool,
        sets: impl Iterator<Item = vk::DescriptorSet>,
    ) {
        let result = unsafe {
            self.raw.free_descriptor_sets(
                *pool,
                &smallvec::SmallVec::<[vk::DescriptorSet; 32]>::from_iter(sets),
            )
        };
        match result {
            Ok(()) => {}
            Err(err) => handle_unexpected(err),
        }
    }
}

struct CompiledStage {
    create_info: vk::PipelineShaderStageCreateInfo<'static>,
    _entry_point: CString,
    temp_raw_module: Option<vk::ShaderModule>,
}

impl super::Device {
    pub(super) unsafe fn create_swapchain(
        &self,
        surface: &super::Surface,
        config: &crate::SurfaceConfiguration,
        provided_old_swapchain: Option<super::Swapchain>,
    ) -> Result<super::Swapchain, crate::SurfaceError> {
        profiling::scope!("Device::create_swapchain");
        let functor = khr::swapchain::Device::new(&surface.instance.raw, &self.shared.raw);

        let old_swapchain = match provided_old_swapchain {
            Some(osc) => osc.raw,
            None => vk::SwapchainKHR::null(),
        };

        let color_space = if config.format == wgt::TextureFormat::Rgba16Float {
            // Enable wide color gamut mode
            // Vulkan swapchain for Android only supports DISPLAY_P3_NONLINEAR_EXT and EXTENDED_SRGB_LINEAR_EXT
            vk::ColorSpaceKHR::EXTENDED_SRGB_LINEAR_EXT
        } else {
            vk::ColorSpaceKHR::SRGB_NONLINEAR
        };

        let original_format = self.shared.private_caps.map_texture_format(config.format);
        let mut raw_flags = vk::SwapchainCreateFlagsKHR::empty();
        let mut raw_view_formats: Vec<vk::Format> = vec![];
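        // Viewing the swapchain images with other formats requires the MUTABLE_FORMAT
        // swapchain flag (VK_KHR_swapchain_mutable_format), and the format list handed to
        // the driver must also include the swapchain's own format.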
        if !config.view_formats.is_empty() {
            raw_flags |= vk::SwapchainCreateFlagsKHR::MUTABLE_FORMAT;
            raw_view_formats = config
                .view_formats
                .iter()
                .map(|f| self.shared.private_caps.map_texture_format(*f))
                .collect();
            raw_view_formats.push(original_format);
        }

        let mut info = vk::SwapchainCreateInfoKHR::default()
            .flags(raw_flags)
            .surface(surface.raw)
            .min_image_count(config.maximum_frame_latency + 1) // TODO: https://github.com/gfx-rs/wgpu/issues/2869
            .image_format(original_format)
            .image_color_space(color_space)
            .image_extent(vk::Extent2D {
                width: config.extent.width,
                height: config.extent.height,
            })
            .image_array_layers(config.extent.depth_or_array_layers)
            .image_usage(conv::map_texture_usage(config.usage))
            .image_sharing_mode(vk::SharingMode::EXCLUSIVE)
            .pre_transform(vk::SurfaceTransformFlagsKHR::IDENTITY)
            .composite_alpha(conv::map_composite_alpha_mode(config.composite_alpha_mode))
            .present_mode(conv::map_present_mode(config.present_mode))
            .clipped(true)
            .old_swapchain(old_swapchain);

        let mut format_list_info = vk::ImageFormatListCreateInfo::default();
        if !raw_view_formats.is_empty() {
            format_list_info = format_list_info.view_formats(&raw_view_formats);
            info = info.push_next(&mut format_list_info);
        }

        let result = {
            profiling::scope!("vkCreateSwapchainKHR");
            unsafe { functor.create_swapchain(&info, None) }
        };

        // Destroy the old swapchain now, before we potentially bail out with an error.
        if old_swapchain != vk::SwapchainKHR::null() {
            unsafe { functor.destroy_swapchain(old_swapchain, None) }
        }

        let raw = match result {
            Ok(swapchain) => swapchain,
            Err(error) => {
                return Err(match error {
                    vk::Result::ERROR_SURFACE_LOST_KHR
                    | vk::Result::ERROR_INITIALIZATION_FAILED => crate::SurfaceError::Lost,
                    vk::Result::ERROR_NATIVE_WINDOW_IN_USE_KHR => {
                        crate::SurfaceError::Other("Native window is in use")
                    }
                    // We don't use VK_EXT_image_compression_control
                    // VK_ERROR_COMPRESSION_EXHAUSTED_EXT
                    other => super::map_host_device_oom_and_lost_err(other).into(),
                });
            }
        };

        let images =
            unsafe { functor.get_swapchain_images(raw) }.map_err(super::map_host_device_oom_err)?;

        let fence = unsafe {
            self.shared
                .raw
                .create_fence(&vk::FenceCreateInfo::default(), None)
                .map_err(super::map_host_device_oom_err)?
        };

        // NOTE: It's important that we define the same number of acquire/present semaphores
        // as we will need to index into them with the image index.
        let acquire_semaphores = (0..=images.len())
            .map(|i| {
                super::SwapchainAcquireSemaphore::new(&self.shared, i)
                    .map(Mutex::new)
                    .map(Arc::new)
            })
            .collect::<Result<Vec<_>, _>>()?;

        let present_semaphores = (0..=images.len())
            .map(|i| Arc::new(Mutex::new(super::SwapchainPresentSemaphores::new(i))))
            .collect::<Vec<_>>();

        Ok(super::Swapchain {
            raw,
            functor,
            device: Arc::clone(&self.shared),
            images,
            fence,
            config: config.clone(),
            acquire_semaphores,
            next_acquire_index: 0,
            present_semaphores,
            next_present_time: None,
        })
    }

    /// # Safety
    ///
    /// - `vk_image` must be created respecting `desc`
    /// - If `drop_callback` is [`None`], wgpu-hal will take ownership of `vk_image`. If
    ///   `drop_callback` is [`Some`], `vk_image` must be valid until the callback is called.
    /// - If the `ImageCreateFlags` does not contain `MUTABLE_FORMAT`, the `view_formats` of `desc` must be empty.
    pub unsafe fn texture_from_raw(
        &self,
        vk_image: vk::Image,
        desc: &crate::TextureDescriptor,
        drop_callback: Option<crate::DropCallback>,
    ) -> super::Texture {
        let mut raw_flags = vk::ImageCreateFlags::empty();
        let mut view_formats = vec![];
        for tf in desc.view_formats.iter() {
            if *tf == desc.format {
                continue;
            }
            view_formats.push(*tf);
        }
        if !view_formats.is_empty() {
            raw_flags |=
                vk::ImageCreateFlags::MUTABLE_FORMAT | vk::ImageCreateFlags::EXTENDED_USAGE;
            view_formats.push(desc.format)
        }
        if desc.format.is_multi_planar_format() {
            raw_flags |= vk::ImageCreateFlags::MUTABLE_FORMAT;
        }

        let identity = self.shared.texture_identity_factory.next();

        let drop_guard = crate::DropGuard::from_option(drop_callback);

        super::Texture {
            raw: vk_image,
            drop_guard,
            external_memory: None,
            block: None,
            format: desc.format,
            copy_size: desc.copy_extent(),
            identity,
        }
    }

    #[cfg(windows)]
    fn find_memory_type_index(
        &self,
        type_bits_req: u32,
        flags_req: vk::MemoryPropertyFlags,
    ) -> Option<usize> {
        let mem_properties = unsafe {
            self.shared
                .instance
                .raw
                .get_physical_device_memory_properties(self.shared.physical_device)
        };

        // https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMemoryProperties.html
        for (i, mem_ty) in mem_properties.memory_types_as_slice().iter().enumerate() {
            let types_bits = 1 << i;
            let is_required_memory_type = type_bits_req & types_bits != 0;
            let has_required_properties = mem_ty.property_flags & flags_req == flags_req;
            if is_required_memory_type && has_required_properties {
                return Some(i);
            }
        }

        None
    }

    fn create_image_without_memory(
        &self,
        desc: &crate::TextureDescriptor,
        external_memory_image_create_info: Option<&mut vk::ExternalMemoryImageCreateInfo>,
    ) -> Result<ImageWithoutMemory, crate::DeviceError> {
        let copy_size = desc.copy_extent();

        let mut raw_flags = vk::ImageCreateFlags::empty();
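        // Rendering to slices of a 3D texture is done through 2D-array views of it,
        // which requires the image to be created 2D-array compatible.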
        if desc.dimension == wgt::TextureDimension::D3
            && desc.usage.contains(wgt::TextureUses::COLOR_TARGET)
        {
            raw_flags |= vk::ImageCreateFlags::TYPE_2D_ARRAY_COMPATIBLE;
        }
        if desc.is_cube_compatible() {
            raw_flags |= vk::ImageCreateFlags::CUBE_COMPATIBLE;
        }

        let original_format = self.shared.private_caps.map_texture_format(desc.format);
        let mut vk_view_formats = vec![];
        if !desc.view_formats.is_empty() {
            raw_flags |= vk::ImageCreateFlags::MUTABLE_FORMAT;

            if self.shared.private_caps.image_format_list {
                vk_view_formats = desc
                    .view_formats
                    .iter()
                    .map(|f| self.shared.private_caps.map_texture_format(*f))
                    .collect();
                vk_view_formats.push(original_format)
            }
        }
        if desc.format.is_multi_planar_format() {
            raw_flags |= vk::ImageCreateFlags::MUTABLE_FORMAT;
        }

        let mut vk_info = vk::ImageCreateInfo::default()
            .flags(raw_flags)
            .image_type(conv::map_texture_dimension(desc.dimension))
            .format(original_format)
            .extent(conv::map_copy_extent(&copy_size))
            .mip_levels(desc.mip_level_count)
            .array_layers(desc.array_layer_count())
            .samples(vk::SampleCountFlags::from_raw(desc.sample_count))
            .tiling(vk::ImageTiling::OPTIMAL)
            .usage(conv::map_texture_usage(desc.usage))
            .sharing_mode(vk::SharingMode::EXCLUSIVE)
            .initial_layout(vk::ImageLayout::UNDEFINED);

        let mut format_list_info = vk::ImageFormatListCreateInfo::default();
        if !vk_view_formats.is_empty() {
            format_list_info = format_list_info.view_formats(&vk_view_formats);
            vk_info = vk_info.push_next(&mut format_list_info);
        }

        if let Some(ext_info) = external_memory_image_create_info {
            vk_info = vk_info.push_next(ext_info);
        }

        let raw = unsafe { self.shared.raw.create_image(&vk_info, None) }.map_err(map_err)?;
        fn map_err(err: vk::Result) -> crate::DeviceError {
            // We don't use VK_EXT_image_compression_control
            // VK_ERROR_COMPRESSION_EXHAUSTED_EXT
            super::map_host_device_oom_and_ioca_err(err)
        }
        let req = unsafe { self.shared.raw.get_image_memory_requirements(raw) };

        Ok(ImageWithoutMemory {
            raw,
            requirements: req,
            copy_size,
        })
    }

    /// # Safety
    ///
    /// - Vulkan (with VK_KHR_external_memory_win32)
    /// - The `d3d11_shared_handle` must be valid and respecting `desc`
    /// - `VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT` flag is used because we need to hold a reference to the handle
    #[cfg(windows)]
    pub unsafe fn texture_from_d3d11_shared_handle(
        &self,
        d3d11_shared_handle: windows::Win32::Foundation::HANDLE,
        desc: &crate::TextureDescriptor,
    ) -> Result<super::Texture, crate::DeviceError> {
        if !self
            .shared
            .features
            .contains(wgt::Features::VULKAN_EXTERNAL_MEMORY_WIN32)
        {
            log::error!("Vulkan driver does not support VK_KHR_external_memory_win32");
            return Err(crate::DeviceError::Unexpected);
        }

        let mut external_memory_image_info = vk::ExternalMemoryImageCreateInfo::default()
            .handle_types(vk::ExternalMemoryHandleTypeFlags::D3D11_TEXTURE);

        let image =
            self.create_image_without_memory(desc, Some(&mut external_memory_image_info))?;

        // Some external memory types require dedicated allocation
        // https://docs.vulkan.org/guide/latest/extensions/external.html#_importing_memory
        let mut dedicated_allocate_info =
            vk::MemoryDedicatedAllocateInfo::default().image(image.raw);

        let mut import_memory_info = vk::ImportMemoryWin32HandleInfoKHR::default()
            .handle_type(vk::ExternalMemoryHandleTypeFlags::D3D11_TEXTURE)
            .handle(d3d11_shared_handle.0 as _);
        // TODO: We should use `push_next` instead, but currently ash does not provide this method for the `ImportMemoryWin32HandleInfoKHR` type.
        #[allow(clippy::unnecessary_mut_passed)]
        {
            import_memory_info.p_next = <*const _>::cast(&mut dedicated_allocate_info);
        }

        let mem_type_index = self
            .find_memory_type_index(
                image.requirements.memory_type_bits,
                vk::MemoryPropertyFlags::DEVICE_LOCAL,
            )
            .ok_or(crate::DeviceError::Unexpected)?;

        let memory_allocate_info = vk::MemoryAllocateInfo::default()
            .allocation_size(image.requirements.size)
            .memory_type_index(mem_type_index as _)
            .push_next(&mut import_memory_info);
        let memory = unsafe { self.shared.raw.allocate_memory(&memory_allocate_info, None) }
            .map_err(super::map_host_device_oom_err)?;

        unsafe { self.shared.raw.bind_image_memory(image.raw, memory, 0) }
            .map_err(super::map_host_device_oom_err)?;

        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(image.raw, label) };
        }

        let identity = self.shared.texture_identity_factory.next();

        self.counters.textures.add(1);

        Ok(super::Texture {
            raw: image.raw,
            drop_guard: None,
            external_memory: Some(memory),
            block: None,
            format: desc.format,
            copy_size: image.copy_size,
            identity,
        })
    }

    fn create_shader_module_impl(
        &self,
        spv: &[u32],
    ) -> Result<vk::ShaderModule, crate::DeviceError> {
        let vk_info = vk::ShaderModuleCreateInfo::default()
            .flags(vk::ShaderModuleCreateFlags::empty())
            .code(spv);

        let raw = unsafe {
            profiling::scope!("vkCreateShaderModule");
            self.shared
                .raw
                .create_shader_module(&vk_info, None)
                .map_err(map_err)?
        };
        fn map_err(err: vk::Result) -> crate::DeviceError {
            // We don't use VK_NV_glsl_shader
            // VK_ERROR_INVALID_SHADER_NV
            super::map_host_device_oom_err(err)
        }
        Ok(raw)
    }

    fn compile_stage(
        &self,
        stage: &crate::ProgrammableStage<super::ShaderModule>,
        naga_stage: naga::ShaderStage,
        binding_map: &naga::back::spv::BindingMap,
    ) -> Result<CompiledStage, crate::PipelineError> {
        let stage_flags = crate::auxil::map_naga_stage(naga_stage);
        let vk_module = match *stage.module {
            super::ShaderModule::Raw(raw) => raw,
            super::ShaderModule::Intermediate {
                ref naga_shader,
                runtime_checks,
            } => {
                let pipeline_options = naga::back::spv::PipelineOptions {
                    entry_point: stage.entry_point.to_owned(),
                    shader_stage: naga_stage,
                };
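                // Only clone the base SPIR-V backend options when this stage actually
                // needs to deviate from them.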
                let needs_temp_options = !runtime_checks.bounds_checks
                    || !runtime_checks.force_loop_bounding
                    || !binding_map.is_empty()
                    || naga_shader.debug_source.is_some()
                    || !stage.zero_initialize_workgroup_memory;
                let mut temp_options;
                let options = if needs_temp_options {
                    temp_options = self.naga_options.clone();
                    if !runtime_checks.bounds_checks {
                        temp_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
                            index: naga::proc::BoundsCheckPolicy::Unchecked,
                            buffer: naga::proc::BoundsCheckPolicy::Unchecked,
                            image_load: naga::proc::BoundsCheckPolicy::Unchecked,
                            binding_array: naga::proc::BoundsCheckPolicy::Unchecked,
                        };
                    }
                    if !runtime_checks.force_loop_bounding {
                        temp_options.force_loop_bounding = false;
                    }
                    if !binding_map.is_empty() {
                        temp_options.binding_map = binding_map.clone();
                    }

                    if let Some(ref debug) = naga_shader.debug_source {
                        temp_options.debug_info = Some(naga::back::spv::DebugInfo {
                            source_code: &debug.source_code,
                            file_name: debug.file_name.as_ref().into(),
                            language: naga::back::spv::SourceLanguage::WGSL,
                        })
                    }
                    if !stage.zero_initialize_workgroup_memory {
                        temp_options.zero_initialize_workgroup_memory =
                            naga::back::spv::ZeroInitializeWorkgroupMemoryMode::None;
                    }

                    &temp_options
                } else {
                    &self.naga_options
                };

                let (module, info) = naga::back::pipeline_constants::process_overrides(
                    &naga_shader.module,
                    &naga_shader.info,
                    Some((naga_stage, stage.entry_point)),
                    stage.constants,
                )
                .map_err(|e| {
                    crate::PipelineError::PipelineConstants(stage_flags, format!("{e}"))
                })?;

                let spv = {
                    profiling::scope!("naga::spv::write_vec");
                    naga::back::spv::write_vec(&module, &info, options, Some(&pipeline_options))
                }
                .map_err(|e| crate::PipelineError::Linkage(stage_flags, format!("{e}")))?;
                self.create_shader_module_impl(&spv)?
            }
        };

        let mut flags = vk::PipelineShaderStageCreateFlags::empty();
        if self.shared.features.contains(wgt::Features::SUBGROUP) {
            flags |= vk::PipelineShaderStageCreateFlags::ALLOW_VARYING_SUBGROUP_SIZE
        }

        let entry_point = CString::new(stage.entry_point).unwrap();
        let mut create_info = vk::PipelineShaderStageCreateInfo::default()
            .flags(flags)
            .stage(conv::map_shader_stage(stage_flags))
            .module(vk_module);

        // Circumvent struct lifetime check because of a self-reference inside CompiledStage
        create_info.p_name = entry_point.as_ptr();

        Ok(CompiledStage {
            create_info,
            _entry_point: entry_point,
            temp_raw_module: match *stage.module {
                super::ShaderModule::Raw(_) => None,
                super::ShaderModule::Intermediate { .. } => Some(vk_module),
            },
        })
    }

    /// Returns the queue family index of the device's internal queue.
    ///
    /// This is useful for constructing memory barriers needed for queue family ownership transfer when
    /// external memory is involved (from/to `VK_QUEUE_FAMILY_EXTERNAL_KHR` and `VK_QUEUE_FAMILY_FOREIGN_EXT`
    /// for example).
    pub fn queue_family_index(&self) -> u32 {
        self.shared.family_index
    }

    pub fn queue_index(&self) -> u32 {
        self.shared.queue_index
    }

    pub fn raw_device(&self) -> &ash::Device {
        &self.shared.raw
    }

    pub fn raw_physical_device(&self) -> vk::PhysicalDevice {
        self.shared.physical_device
    }

    pub fn raw_queue(&self) -> vk::Queue {
        self.shared.raw_queue
    }

    pub fn enabled_device_extensions(&self) -> &[&'static CStr] {
        &self.shared.enabled_extensions
    }

    pub fn shared_instance(&self) -> &super::InstanceShared {
        &self.shared.instance
    }

    fn error_if_would_oom_on_resource_allocation(
        &self,
        needs_host_access: bool,
        size: u64,
    ) -> Result<(), crate::DeviceError> {
        let Some(threshold) = self
            .shared
            .instance
            .memory_budget_thresholds
            .for_resource_creation
        else {
            return Ok(());
        };

        if !self
            .shared
            .enabled_extensions
            .contains(&ext::memory_budget::NAME)
        {
            return Ok(());
        }

        let get_physical_device_properties = self
            .shared
            .instance
            .get_physical_device_properties
            .as_ref()
            .unwrap();

        let mut memory_budget_properties = vk::PhysicalDeviceMemoryBudgetPropertiesEXT::default();

        let mut memory_properties =
            vk::PhysicalDeviceMemoryProperties2::default().push_next(&mut memory_budget_properties);

        unsafe {
            get_physical_device_properties.get_physical_device_memory_properties2(
                self.shared.physical_device,
                &mut memory_properties,
            );
        }

        let mut host_visible_heaps = [false; vk::MAX_MEMORY_HEAPS];
        let mut device_local_heaps = [false; vk::MAX_MEMORY_HEAPS];

        let memory_properties = memory_properties.memory_properties;

        for i in 0..memory_properties.memory_type_count {
            let memory_type = memory_properties.memory_types[i as usize];
            let flags = memory_type.property_flags;

            if flags.intersects(
                vk::MemoryPropertyFlags::LAZILY_ALLOCATED | vk::MemoryPropertyFlags::PROTECTED,
            ) {
                continue; // not used by gpu-alloc
            }

            if flags.contains(vk::MemoryPropertyFlags::HOST_VISIBLE) {
                host_visible_heaps[memory_type.heap_index as usize] = true;
            }

            if flags.contains(vk::MemoryPropertyFlags::DEVICE_LOCAL) {
                device_local_heaps[memory_type.heap_index as usize] = true;
            }
        }

        let heaps = if needs_host_access {
            host_visible_heaps
        } else {
            device_local_heaps
        };

        // NOTE: We might end up checking multiple heaps since gpu-alloc doesn't have a way
        // for us to query the heap the resource will end up on. But this is unlikely,
        // there is usually only one heap on integrated GPUs and two on dedicated GPUs.

        for (i, check) in heaps.iter().enumerate() {
            if !check {
                continue;
            }

            let heap_usage = memory_budget_properties.heap_usage[i];
            let heap_budget = memory_budget_properties.heap_budget[i];

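            // `threshold` is a percentage; refuse the allocation if it would push this
            // heap's usage past that fraction of the driver-reported budget.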
1069            if heap_usage + size >= heap_budget / 100 * threshold as u64 {
1070                return Err(crate::DeviceError::OutOfMemory);
1071            }
1072        }
1073
1074        Ok(())
1075    }
1076}
1077
1078impl crate::Device for super::Device {
1079    type A = super::Api;
1080
1081    unsafe fn create_buffer(
1082        &self,
1083        desc: &crate::BufferDescriptor,
1084    ) -> Result<super::Buffer, crate::DeviceError> {
1085        let vk_info = vk::BufferCreateInfo::default()
1086            .size(desc.size)
1087            .usage(conv::map_buffer_usage(desc.usage))
1088            .sharing_mode(vk::SharingMode::EXCLUSIVE);
1089
1090        let raw = unsafe {
1091            self.shared
1092                .raw
1093                .create_buffer(&vk_info, None)
1094                .map_err(super::map_host_device_oom_and_ioca_err)?
1095        };
1096        let req = unsafe { self.shared.raw.get_buffer_memory_requirements(raw) };
1097
1098        let mut alloc_usage = if desc
1099            .usage
1100            .intersects(wgt::BufferUses::MAP_READ | wgt::BufferUses::MAP_WRITE)
1101        {
1102            let mut flags = gpu_alloc::UsageFlags::HOST_ACCESS;
1103            //TODO: find a way to use `crate::MemoryFlags::PREFER_COHERENT`
1104            flags.set(
1105                gpu_alloc::UsageFlags::DOWNLOAD,
1106                desc.usage.contains(wgt::BufferUses::MAP_READ),
1107            );
1108            flags.set(
1109                gpu_alloc::UsageFlags::UPLOAD,
1110                desc.usage.contains(wgt::BufferUses::MAP_WRITE),
1111            );
1112            flags
1113        } else {
1114            gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS
1115        };
1116        alloc_usage.set(
1117            gpu_alloc::UsageFlags::TRANSIENT,
1118            desc.memory_flags.contains(crate::MemoryFlags::TRANSIENT),
1119        );
1120
1121        let needs_host_access = alloc_usage.contains(gpu_alloc::UsageFlags::HOST_ACCESS);
1122
1123        self.error_if_would_oom_on_resource_allocation(needs_host_access, req.size)
1124            .inspect_err(|_| {
1125                unsafe { self.shared.raw.destroy_buffer(raw, None) };
1126            })?;
1127
1128        let alignment_mask = req.alignment - 1;
1129
1130        let block = unsafe {
1131            self.mem_allocator.lock().alloc(
1132                &*self.shared,
1133                gpu_alloc::Request {
1134                    size: req.size,
1135                    align_mask: alignment_mask,
1136                    usage: alloc_usage,
1137                    memory_types: req.memory_type_bits & self.valid_ash_memory_types,
1138                },
1139            )
1140        }
1141        .inspect_err(|_| {
1142            unsafe { self.shared.raw.destroy_buffer(raw, None) };
1143        })?;
1144
1145        unsafe {
1146            self.shared
1147                .raw
1148                .bind_buffer_memory(raw, *block.memory(), block.offset())
1149        }
1150        .map_err(super::map_host_device_oom_and_ioca_err)
1151        .inspect_err(|_| {
1152            unsafe { self.shared.raw.destroy_buffer(raw, None) };
1153        })?;
1154
1155        if let Some(label) = desc.label {
1156            unsafe { self.shared.set_object_name(raw, label) };
1157        }
1158
1159        self.counters.buffer_memory.add(block.size() as isize);
1160        self.counters.buffers.add(1);
1161
1162        Ok(super::Buffer {
1163            raw,
1164            block: Some(Mutex::new(super::BufferMemoryBacking::Managed(block))),
1165        })
1166    }
1167    unsafe fn destroy_buffer(&self, buffer: super::Buffer) {
1168        unsafe { self.shared.raw.destroy_buffer(buffer.raw, None) };
1169        if let Some(block) = buffer.block {
1170            let block = block.into_inner();
1171            self.counters.buffer_memory.sub(block.size() as isize);
1172            match block {
1173                super::BufferMemoryBacking::Managed(block) => unsafe {
1174                    self.mem_allocator.lock().dealloc(&*self.shared, block)
1175                },
1176                super::BufferMemoryBacking::VulkanMemory { memory, .. } => unsafe {
1177                    self.shared.raw.free_memory(memory, None);
1178                },
1179            }
1180        }
1181
1182        self.counters.buffers.sub(1);
1183    }
1184
1185    unsafe fn add_raw_buffer(&self, _buffer: &super::Buffer) {
1186        self.counters.buffers.add(1);
1187    }
1188
1189    unsafe fn map_buffer(
1190        &self,
1191        buffer: &super::Buffer,
1192        range: crate::MemoryRange,
1193    ) -> Result<crate::BufferMapping, crate::DeviceError> {
1194        if let Some(ref block) = buffer.block {
1195            let size = range.end - range.start;
1196            let mut block = block.lock();
1197            if let super::BufferMemoryBacking::Managed(ref mut block) = *block {
1198                let ptr = unsafe { block.map(&*self.shared, range.start, size as usize)? };
1199                let is_coherent = block
1200                    .props()
1201                    .contains(gpu_alloc::MemoryPropertyFlags::HOST_COHERENT);
1202                Ok(crate::BufferMapping { ptr, is_coherent })
1203            } else {
1204                crate::hal_usage_error("tried to map externally created buffer")
1205            }
1206        } else {
1207            crate::hal_usage_error("tried to map external buffer")
1208        }
1209    }
1210    unsafe fn unmap_buffer(&self, buffer: &super::Buffer) {
1211        if let Some(ref block) = buffer.block {
1212            match &mut *block.lock() {
1213                super::BufferMemoryBacking::Managed(block) => unsafe { block.unmap(&*self.shared) },
1214                super::BufferMemoryBacking::VulkanMemory { .. } => {
1215                    crate::hal_usage_error("tried to unmap externally created buffer")
1216                }
1217            };
1218        } else {
1219            crate::hal_usage_error("tried to unmap external buffer")
1220        }
1221    }
1222
1223    unsafe fn flush_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
1224    where
1225        I: Iterator<Item = crate::MemoryRange>,
1226    {
1227        if let Some(vk_ranges) = self.shared.make_memory_ranges(buffer, ranges) {
1228            unsafe {
1229                self.shared
1230                    .raw
1231                    .flush_mapped_memory_ranges(
1232                        &smallvec::SmallVec::<[vk::MappedMemoryRange; 32]>::from_iter(vk_ranges),
1233                    )
1234            }
1235            .unwrap();
1236        }
1237    }
1238    unsafe fn invalidate_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
1239    where
1240        I: Iterator<Item = crate::MemoryRange>,
1241    {
1242        if let Some(vk_ranges) = self.shared.make_memory_ranges(buffer, ranges) {
1243            unsafe {
1244                self.shared
1245                    .raw
1246                    .invalidate_mapped_memory_ranges(&smallvec::SmallVec::<
1247                        [vk::MappedMemoryRange; 32],
1248                    >::from_iter(vk_ranges))
1249            }
1250            .unwrap();
1251        }
1252    }
1253
1254    unsafe fn create_texture(
1255        &self,
1256        desc: &crate::TextureDescriptor,
1257    ) -> Result<super::Texture, crate::DeviceError> {
1258        let image = self.create_image_without_memory(desc, None)?;
1259
1260        self.error_if_would_oom_on_resource_allocation(false, image.requirements.size)
1261            .inspect_err(|_| {
1262                unsafe { self.shared.raw.destroy_image(image.raw, None) };
1263            })?;
1264
1265        let block = unsafe {
1266            self.mem_allocator.lock().alloc(
1267                &*self.shared,
1268                gpu_alloc::Request {
1269                    size: image.requirements.size,
1270                    align_mask: image.requirements.alignment - 1,
1271                    usage: gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS,
1272                    memory_types: image.requirements.memory_type_bits & self.valid_ash_memory_types,
1273                },
1274            )
1275        }
1276        .inspect_err(|_| {
1277            unsafe { self.shared.raw.destroy_image(image.raw, None) };
1278        })?;
1279
1280        self.counters.texture_memory.add(block.size() as isize);
1281
1282        unsafe {
1283            self.shared
1284                .raw
1285                .bind_image_memory(image.raw, *block.memory(), block.offset())
1286        }
1287        .map_err(super::map_host_device_oom_err)
1288        .inspect_err(|_| {
1289            unsafe { self.shared.raw.destroy_image(image.raw, None) };
1290        })?;
1291
1292        if let Some(label) = desc.label {
1293            unsafe { self.shared.set_object_name(image.raw, label) };
1294        }
1295
1296        let identity = self.shared.texture_identity_factory.next();
1297
1298        self.counters.textures.add(1);
1299
1300        Ok(super::Texture {
1301            raw: image.raw,
1302            drop_guard: None,
1303            external_memory: None,
1304            block: Some(block),
1305            format: desc.format,
1306            copy_size: image.copy_size,
1307            identity,
1308        })
1309    }
1310    unsafe fn destroy_texture(&self, texture: super::Texture) {
1311        if texture.drop_guard.is_none() {
1312            unsafe { self.shared.raw.destroy_image(texture.raw, None) };
1313        }
1314        if let Some(memory) = texture.external_memory {
1315            unsafe { self.shared.raw.free_memory(memory, None) };
1316        }
1317        if let Some(block) = texture.block {
1318            self.counters.texture_memory.sub(block.size() as isize);
1319
1320            unsafe { self.mem_allocator.lock().dealloc(&*self.shared, block) };
1321        }
1322
1323        self.counters.textures.sub(1);
1324    }
1325
1326    unsafe fn add_raw_texture(&self, _texture: &super::Texture) {
1327        self.counters.textures.add(1);
1328    }
1329
1330    unsafe fn create_texture_view(
1331        &self,
1332        texture: &super::Texture,
1333        desc: &crate::TextureViewDescriptor,
1334    ) -> Result<super::TextureView, crate::DeviceError> {
1335        let subresource_range = conv::map_subresource_range(&desc.range, texture.format);
1336        let raw_format = self.shared.private_caps.map_texture_format(desc.format);
1337        let mut vk_info = vk::ImageViewCreateInfo::default()
1338            .flags(vk::ImageViewCreateFlags::empty())
1339            .image(texture.raw)
1340            .view_type(conv::map_view_dimension(desc.dimension))
1341            .format(raw_format)
1342            .subresource_range(subresource_range);
1343        let layers =
1344            NonZeroU32::new(subresource_range.layer_count).expect("Unexpected zero layer count");
1345
1346        let mut image_view_info;
1347        if self.shared.private_caps.image_view_usage && !desc.usage.is_empty() {
1348            image_view_info =
1349                vk::ImageViewUsageCreateInfo::default().usage(conv::map_texture_usage(desc.usage));
1350            vk_info = vk_info.push_next(&mut image_view_info);
1351        }
1352
1353        let raw = unsafe { self.shared.raw.create_image_view(&vk_info, None) }
1354            .map_err(super::map_host_device_oom_and_ioca_err)?;
1355
1356        if let Some(label) = desc.label {
1357            unsafe { self.shared.set_object_name(raw, label) };
1358        }
1359
1360        let identity = self.shared.texture_view_identity_factory.next();
1361
1362        self.counters.texture_views.add(1);
1363
1364        Ok(super::TextureView {
1365            raw_texture: texture.raw,
1366            raw,
1367            layers,
1368            format: desc.format,
1369            raw_format,
1370            base_mip_level: desc.range.base_mip_level,
1371            dimension: desc.dimension,
1372            texture_identity: texture.identity,
1373            view_identity: identity,
1374        })
1375    }
1376    unsafe fn destroy_texture_view(&self, view: super::TextureView) {
1377        unsafe { self.shared.raw.destroy_image_view(view.raw, None) };
1378
1379        self.counters.texture_views.sub(1);
1380    }
1381
1382    unsafe fn create_sampler(
1383        &self,
1384        desc: &crate::SamplerDescriptor,
1385    ) -> Result<super::Sampler, crate::DeviceError> {
1386        let mut create_info = vk::SamplerCreateInfo::default()
1387            .flags(vk::SamplerCreateFlags::empty())
1388            .mag_filter(conv::map_filter_mode(desc.mag_filter))
1389            .min_filter(conv::map_filter_mode(desc.min_filter))
1390            .mipmap_mode(conv::map_mip_filter_mode(desc.mipmap_filter))
1391            .address_mode_u(conv::map_address_mode(desc.address_modes[0]))
1392            .address_mode_v(conv::map_address_mode(desc.address_modes[1]))
1393            .address_mode_w(conv::map_address_mode(desc.address_modes[2]))
1394            .min_lod(desc.lod_clamp.start)
1395            .max_lod(desc.lod_clamp.end);
1396
1397        if let Some(fun) = desc.compare {
1398            create_info = create_info
1399                .compare_enable(true)
1400                .compare_op(conv::map_comparison(fun));
1401        }
1402
1403        if desc.anisotropy_clamp != 1 {
1404            // We only enable anisotropy if it is supported, and the wgpu-hal interface guarantees
1405            // that the clamp is in the range [1, 16], which is always supported when anisotropy is.
1406            create_info = create_info
1407                .anisotropy_enable(true)
1408                .max_anisotropy(desc.anisotropy_clamp as f32);
1409        }
1410
1411        if let Some(color) = desc.border_color {
1412            create_info = create_info.border_color(conv::map_border_color(color));
1413        }
1414
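        // Samplers are deduplicated through a device-wide cache keyed on the create info,
        // which helps stay within VkPhysicalDeviceLimits::maxSamplerAllocationCount when
        // many logically identical samplers are requested.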
1415        let mut sampler_cache_guard = self.shared.sampler_cache.lock();
1416
1417        let raw = sampler_cache_guard.create_sampler(&self.shared.raw, create_info)?;
1418
1419        // Note: Cached samplers will just continually overwrite the label
1420        //
1421        // https://github.com/gfx-rs/wgpu/issues/6867
1422        if let Some(label) = desc.label {
1423            // SAFETY: we are holding a lock on the sampler cache,
1424            // so we can only be setting the name from one thread.
1425            unsafe { self.shared.set_object_name(raw, label) };
1426        }
1427
1428        drop(sampler_cache_guard);
1429
1430        self.counters.samplers.add(1);
1431
1432        Ok(super::Sampler { raw, create_info })
1433    }
1434    unsafe fn destroy_sampler(&self, sampler: super::Sampler) {
1435        self.shared.sampler_cache.lock().destroy_sampler(
1436            &self.shared.raw,
1437            sampler.create_info,
1438            sampler.raw,
1439        );
1440
1441        self.counters.samplers.sub(1);
1442    }
1443
1444    unsafe fn create_command_encoder(
1445        &self,
1446        desc: &crate::CommandEncoderDescriptor<super::Queue>,
1447    ) -> Result<super::CommandEncoder, crate::DeviceError> {
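        // Each command encoder owns its own VkCommandPool; TRANSIENT hints to the driver
        // that command buffers allocated from it will be short-lived.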
1448        let vk_info = vk::CommandPoolCreateInfo::default()
1449            .queue_family_index(desc.queue.family_index)
1450            .flags(vk::CommandPoolCreateFlags::TRANSIENT);
1451
1452        let raw = unsafe {
1453            self.shared
1454                .raw
1455                .create_command_pool(&vk_info, None)
1456                .map_err(super::map_host_device_oom_err)?
1457        };
1458
1459        self.counters.command_encoders.add(1);
1460
1461        Ok(super::CommandEncoder {
1462            raw,
1463            device: Arc::clone(&self.shared),
1464            active: vk::CommandBuffer::null(),
1465            bind_point: vk::PipelineBindPoint::default(),
1466            temp: super::Temp::default(),
1467            free: Vec::new(),
1468            discarded: Vec::new(),
1469            rpass_debug_marker_active: false,
1470            end_of_pass_timer_query: None,
1471            framebuffers: Default::default(),
1472            temp_texture_views: Default::default(),
1473            counters: Arc::clone(&self.counters),
1474        })
1475    }
1476
1477    unsafe fn create_bind_group_layout(
1478        &self,
1479        desc: &crate::BindGroupLayoutDescriptor,
1480    ) -> Result<super::BindGroupLayout, crate::DeviceError> {
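        // `desc_count` tallies how many descriptors of each Vulkan type the layout needs so
        // the gpu-descriptor allocator can size its pools, while `types` is a lookup table
        // indexed by binding number; gaps are filled with an (INPUT_ATTACHMENT, 0) placeholder
        // and skipped later when descriptor sets are written.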
1481        let mut desc_count = gpu_descriptor::DescriptorTotalCount::default();
1482        let mut types = Vec::new();
1483        for entry in desc.entries {
1484            let count = entry.count.map_or(1, |c| c.get());
1485            if entry.binding as usize >= types.len() {
1486                types.resize(
1487                    entry.binding as usize + 1,
1488                    (vk::DescriptorType::INPUT_ATTACHMENT, 0),
1489                );
1490            }
1491            types[entry.binding as usize] = (
1492                conv::map_binding_type(entry.ty),
1493                entry.count.map_or(1, |c| c.get()),
1494            );
1495
1496            match entry.ty {
1497                wgt::BindingType::Buffer {
1498                    ty,
1499                    has_dynamic_offset,
1500                    ..
1501                } => match ty {
1502                    wgt::BufferBindingType::Uniform => {
1503                        if has_dynamic_offset {
1504                            desc_count.uniform_buffer_dynamic += count;
1505                        } else {
1506                            desc_count.uniform_buffer += count;
1507                        }
1508                    }
1509                    wgt::BufferBindingType::Storage { .. } => {
1510                        if has_dynamic_offset {
1511                            desc_count.storage_buffer_dynamic += count;
1512                        } else {
1513                            desc_count.storage_buffer += count;
1514                        }
1515                    }
1516                },
1517                wgt::BindingType::Sampler { .. } => {
1518                    desc_count.sampler += count;
1519                }
1520                wgt::BindingType::Texture { .. } => {
1521                    desc_count.sampled_image += count;
1522                }
1523                wgt::BindingType::StorageTexture { .. } => {
1524                    desc_count.storage_image += count;
1525                }
1526                wgt::BindingType::AccelerationStructure { .. } => {
1527                    desc_count.acceleration_structure += count;
1528                }
1529                wgt::BindingType::ExternalTexture => unimplemented!(),
1530            }
1531        }
1532
1533        // Note: not bothering with an on-stack array here as this is a low-frequency path
1534        let vk_bindings = desc
1535            .entries
1536            .iter()
1537            .map(|entry| vk::DescriptorSetLayoutBinding {
1538                binding: entry.binding,
1539                descriptor_type: types[entry.binding as usize].0,
1540                descriptor_count: types[entry.binding as usize].1,
1541                stage_flags: conv::map_shader_stage(entry.visibility),
1542                p_immutable_samplers: ptr::null(),
1543                _marker: Default::default(),
1544            })
1545            .collect::<Vec<_>>();
1546
1547        let binding_arrays: Vec<_> = desc
1548            .entries
1549            .iter()
1550            .enumerate()
1551            .filter_map(|(idx, entry)| entry.count.map(|count| (idx as u32, count)))
1552            .collect();
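        // Entries with an explicit `count` are binding arrays; if any are present, the layout
        // (and the pools its sets are allocated from) must be created in update-after-bind mode.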
1553
1554        let vk_info = vk::DescriptorSetLayoutCreateInfo::default()
1555            .bindings(&vk_bindings)
1556            .flags(if !binding_arrays.is_empty() {
1557                vk::DescriptorSetLayoutCreateFlags::UPDATE_AFTER_BIND_POOL
1558            } else {
1559                vk::DescriptorSetLayoutCreateFlags::empty()
1560            });
1561
1562        let partially_bound = desc
1563            .flags
1564            .contains(crate::BindGroupLayoutFlags::PARTIALLY_BOUND);
1565
1566        let binding_flag_vec = desc
1567            .entries
1568            .iter()
1569            .map(|entry| {
1570                let mut flags = vk::DescriptorBindingFlags::empty();
1571
1572                if partially_bound && entry.count.is_some() {
1573                    flags |= vk::DescriptorBindingFlags::PARTIALLY_BOUND;
1574                }
1575
1576                if entry.count.is_some() {
1577                    flags |= vk::DescriptorBindingFlags::UPDATE_AFTER_BIND;
1578                }
1579
1580                flags
1581            })
1582            .collect::<Vec<_>>();
1583
1584        let mut binding_flag_info = vk::DescriptorSetLayoutBindingFlagsCreateInfo::default()
1585            .binding_flags(&binding_flag_vec);
1586
1587        let vk_info = vk_info.push_next(&mut binding_flag_info);
1588
1589        let raw = unsafe {
1590            self.shared
1591                .raw
1592                .create_descriptor_set_layout(&vk_info, None)
1593                .map_err(super::map_host_device_oom_err)?
1594        };
1595
1596        if let Some(label) = desc.label {
1597            unsafe { self.shared.set_object_name(raw, label) };
1598        }
1599
1600        self.counters.bind_group_layouts.add(1);
1601
1602        Ok(super::BindGroupLayout {
1603            raw,
1604            desc_count,
1605            types: types.into_boxed_slice(),
1606            binding_arrays,
1607        })
1608    }
1609    unsafe fn destroy_bind_group_layout(&self, bg_layout: super::BindGroupLayout) {
1610        unsafe {
1611            self.shared
1612                .raw
1613                .destroy_descriptor_set_layout(bg_layout.raw, None)
1614        };
1615
1616        self.counters.bind_group_layouts.sub(1);
1617    }
1618
1619    unsafe fn create_pipeline_layout(
1620        &self,
1621        desc: &crate::PipelineLayoutDescriptor<super::BindGroupLayout>,
1622    ) -> Result<super::PipelineLayout, crate::DeviceError> {
1623        // Note: not bothering with an on-stack array here as this is a low-frequency path
1624        let vk_set_layouts = desc
1625            .bind_group_layouts
1626            .iter()
1627            .map(|bgl| bgl.raw)
1628            .collect::<Vec<_>>();
1629        let vk_push_constant_ranges = desc
1630            .push_constant_ranges
1631            .iter()
1632            .map(|pcr| vk::PushConstantRange {
1633                stage_flags: conv::map_shader_stage(pcr.stages),
1634                offset: pcr.range.start,
1635                size: pcr.range.end - pcr.range.start,
1636            })
1637            .collect::<Vec<_>>();
1638
1639        let vk_info = vk::PipelineLayoutCreateInfo::default()
1640            .flags(vk::PipelineLayoutCreateFlags::empty())
1641            .set_layouts(&vk_set_layouts)
1642            .push_constant_ranges(&vk_push_constant_ranges);
1643
1644        let raw = {
1645            profiling::scope!("vkCreatePipelineLayout");
1646            unsafe {
1647                self.shared
1648                    .raw
1649                    .create_pipeline_layout(&vk_info, None)
1650                    .map_err(super::map_host_device_oom_err)?
1651            }
1652        };
1653
1654        if let Some(label) = desc.label {
1655            unsafe { self.shared.set_object_name(raw, label) };
1656        }
1657
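        // Record the size of each binding array per (group, binding) so that naga's SPIR-V
        // backend can declare them as fixed-size arrays when shader stages are later compiled
        // against this layout.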
1658        let mut binding_arrays = BTreeMap::new();
1659        for (group, &layout) in desc.bind_group_layouts.iter().enumerate() {
1660            for &(binding, binding_array_size) in &layout.binding_arrays {
1661                binding_arrays.insert(
1662                    naga::ResourceBinding {
1663                        group: group as u32,
1664                        binding,
1665                    },
1666                    naga::back::spv::BindingInfo {
1667                        binding_array_size: Some(binding_array_size.get()),
1668                    },
1669                );
1670            }
1671        }
1672
1673        self.counters.pipeline_layouts.add(1);
1674
1675        Ok(super::PipelineLayout {
1676            raw,
1677            binding_arrays,
1678        })
1679    }
1680    unsafe fn destroy_pipeline_layout(&self, pipeline_layout: super::PipelineLayout) {
1681        unsafe {
1682            self.shared
1683                .raw
1684                .destroy_pipeline_layout(pipeline_layout.raw, None)
1685        };
1686
1687        self.counters.pipeline_layouts.sub(1);
1688    }
1689
1690    unsafe fn create_bind_group(
1691        &self,
1692        desc: &crate::BindGroupDescriptor<
1693            super::BindGroupLayout,
1694            super::Buffer,
1695            super::Sampler,
1696            super::TextureView,
1697            super::AccelerationStructure,
1698        >,
1699    ) -> Result<super::BindGroup, crate::DeviceError> {
1700        let contains_binding_arrays = !desc.layout.binding_arrays.is_empty();
1701
1702        let desc_set_layout_flags = if contains_binding_arrays {
1703            gpu_descriptor::DescriptorSetLayoutCreateFlags::UPDATE_AFTER_BIND
1704        } else {
1705            gpu_descriptor::DescriptorSetLayoutCreateFlags::empty()
1706        };
1707
1708        let mut vk_sets = unsafe {
1709            self.desc_allocator.lock().allocate(
1710                &*self.shared,
1711                &desc.layout.raw,
1712                desc_set_layout_flags,
1713                &desc.layout.desc_count,
1714                1,
1715            )?
1716        };
1717
1718        let set = vk_sets.pop().unwrap();
1719        if let Some(label) = desc.label {
1720            unsafe { self.shared.set_object_name(*set.raw(), label) };
1721        }
1722
1723        /// Helper for splitting off and initializing a given number of elements from a pre-allocated
1724        /// stack of uninitialized memory, based on items returned from an [`ExactSizeIterator`].
1725        /// Typically created from a [`MaybeUninit`] slice (see [`Vec::spare_capacity_mut()`]).
1726        /// The updated [`ExtendStack`] of remaining uninitialized elements is returned, safely
1727        /// representing that the initialized and remaining elements are two independent mutable
1728        /// borrows.
1729        struct ExtendStack<'a, T> {
1730            remainder: &'a mut [MaybeUninit<T>],
1731        }
1732
1733        impl<'a, T> ExtendStack<'a, T> {
1734            fn from_vec_capacity(vec: &'a mut Vec<T>) -> Self {
1735                Self {
1736                    remainder: vec.spare_capacity_mut(),
1737                }
1738            }
1739
1740            fn extend_one(self, value: T) -> (Self, &'a mut T) {
1741                let (to_init, remainder) = self.remainder.split_first_mut().unwrap();
1742                let init = to_init.write(value);
1743                (Self { remainder }, init)
1744            }
1745
1746            fn extend(
1747                self,
1748                iter: impl IntoIterator<Item = T> + ExactSizeIterator,
1749            ) -> (Self, &'a mut [T]) {
1750                let (to_init, remainder) = self.remainder.split_at_mut(iter.len());
1751
1752                for (value, to_init) in iter.into_iter().zip(to_init.iter_mut()) {
1753                    to_init.write(value);
1754                }
1755
1756                // We can't use the safe (yet unstable) MaybeUninit::write_slice() here because we only have an iterator to write from.
1757
1758                let init = {
1759                    // SAFETY: The loop above has initialized exactly as many items as to_init is
1760                    // long, so it is safe to cast away the MaybeUninit<T> wrapper into T.
1761
1762                    // Additional safety docs from unstable slice_assume_init_mut
1763                    // SAFETY: similar to safety notes for `slice_get_ref`, but we have a
1764                    // mutable reference which is also guaranteed to be valid for writes.
1765                    unsafe { mem::transmute::<&mut [MaybeUninit<T>], &mut [T]>(to_init) }
1766                };
1767                (Self { remainder }, init)
1768            }
1769        }
1770
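        // Illustrative use of the helper above (a sketch, not additional code in this
        // function): reserve the full capacity up front, then carve initialized sub-slices
        // out of the Vec's spare capacity without reallocating, so each `&mut [T]` handed to
        // a `vk::WriteDescriptorSet` stays valid until `update_descriptor_sets` is called:
        //
        //     let mut infos = Vec::with_capacity(total);
        //     let stack = ExtendStack::from_vec_capacity(&mut infos);
        //     let (stack, slice_a) = stack.extend(iter_a); // first borrow of the capacity
        //     let (_stack, slice_b) = stack.extend(iter_b); // independent second borrow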
1771        let mut writes = Vec::with_capacity(desc.entries.len());
1772        let mut buffer_infos = Vec::with_capacity(desc.buffers.len());
1773        let mut buffer_infos = ExtendStack::from_vec_capacity(&mut buffer_infos);
1774        let mut image_infos = Vec::with_capacity(desc.samplers.len() + desc.textures.len());
1775        let mut image_infos = ExtendStack::from_vec_capacity(&mut image_infos);
1776        // TODO: This length could be reduced to just the number of top-level acceleration
1777        // structure bindings, where multiple consecutive TLAS bindings that are set via
1778        // one `WriteDescriptorSet` count towards one "info" struct, not the total number of
1779        // acceleration structure bindings to write:
1780        let mut acceleration_structure_infos =
1781            Vec::with_capacity(desc.acceleration_structures.len());
1782        let mut acceleration_structure_infos =
1783            ExtendStack::from_vec_capacity(&mut acceleration_structure_infos);
1784        let mut raw_acceleration_structures =
1785            Vec::with_capacity(desc.acceleration_structures.len());
1786        let mut raw_acceleration_structures =
1787            ExtendStack::from_vec_capacity(&mut raw_acceleration_structures);
1788        for entry in desc.entries {
1789            let (ty, size) = desc.layout.types[entry.binding as usize];
1790            if size == 0 {
1791                continue; // empty slot
1792            }
1793            let mut write = vk::WriteDescriptorSet::default()
1794                .dst_set(*set.raw())
1795                .dst_binding(entry.binding)
1796                .descriptor_type(ty);
1797
1798            write = match ty {
1799                vk::DescriptorType::SAMPLER => {
1800                    let start = entry.resource_index;
1801                    let end = start + entry.count;
1802                    let local_image_infos;
1803                    (image_infos, local_image_infos) =
1804                        image_infos.extend(desc.samplers[start as usize..end as usize].iter().map(
1805                            |sampler| vk::DescriptorImageInfo::default().sampler(sampler.raw),
1806                        ));
1807                    write.image_info(local_image_infos)
1808                }
1809                vk::DescriptorType::SAMPLED_IMAGE | vk::DescriptorType::STORAGE_IMAGE => {
1810                    let start = entry.resource_index;
1811                    let end = start + entry.count;
1812                    let local_image_infos;
1813                    (image_infos, local_image_infos) =
1814                        image_infos.extend(desc.textures[start as usize..end as usize].iter().map(
1815                            |binding| {
1816                                let layout =
1817                                    conv::derive_image_layout(binding.usage, binding.view.format);
1818                                vk::DescriptorImageInfo::default()
1819                                    .image_view(binding.view.raw)
1820                                    .image_layout(layout)
1821                            },
1822                        ));
1823                    write.image_info(local_image_infos)
1824                }
1825                vk::DescriptorType::UNIFORM_BUFFER
1826                | vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC
1827                | vk::DescriptorType::STORAGE_BUFFER
1828                | vk::DescriptorType::STORAGE_BUFFER_DYNAMIC => {
1829                    let start = entry.resource_index;
1830                    let end = start + entry.count;
1831                    let local_buffer_infos;
1832                    (buffer_infos, local_buffer_infos) =
1833                        buffer_infos.extend(desc.buffers[start as usize..end as usize].iter().map(
1834                            |binding| {
1835                                vk::DescriptorBufferInfo::default()
1836                                    .buffer(binding.buffer.raw)
1837                                    .offset(binding.offset)
1838                                    .range(
1839                                        binding.size.map_or(vk::WHOLE_SIZE, wgt::BufferSize::get),
1840                                    )
1841                            },
1842                        ));
1843                    write.buffer_info(local_buffer_infos)
1844                }
1845                vk::DescriptorType::ACCELERATION_STRUCTURE_KHR => {
1846                    let start = entry.resource_index;
1847                    let end = start + entry.count;
1848
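                    // Acceleration structures are not passed through the image/buffer info
                    // arrays; they go in a pNext VkWriteDescriptorSetAccelerationStructureKHR,
                    // so `descriptor_count` must be set explicitly on the write below.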
1849                    let local_raw_acceleration_structures;
1850                    (
1851                        raw_acceleration_structures,
1852                        local_raw_acceleration_structures,
1853                    ) = raw_acceleration_structures.extend(
1854                        desc.acceleration_structures[start as usize..end as usize]
1855                            .iter()
1856                            .map(|acceleration_structure| acceleration_structure.raw),
1857                    );
1858
1859                    let local_acceleration_structure_infos;
1860                    (
1861                        acceleration_structure_infos,
1862                        local_acceleration_structure_infos,
1863                    ) = acceleration_structure_infos.extend_one(
1864                        vk::WriteDescriptorSetAccelerationStructureKHR::default()
1865                            .acceleration_structures(local_raw_acceleration_structures),
1866                    );
1867
1868                    write
1869                        .descriptor_count(entry.count)
1870                        .push_next(local_acceleration_structure_infos)
1871                }
1872                _ => unreachable!(),
1873            };
1874
1875            writes.push(write);
1876        }
1877
1878        unsafe { self.shared.raw.update_descriptor_sets(&writes, &[]) };
1879
1880        self.counters.bind_groups.add(1);
1881
1882        Ok(super::BindGroup { set })
1883    }
1884
1885    unsafe fn destroy_bind_group(&self, group: super::BindGroup) {
1886        unsafe {
1887            self.desc_allocator
1888                .lock()
1889                .free(&*self.shared, Some(group.set))
1890        };
1891
1892        self.counters.bind_groups.sub(1);
1893    }
1894
1895    unsafe fn create_shader_module(
1896        &self,
1897        desc: &crate::ShaderModuleDescriptor,
1898        shader: crate::ShaderInput,
1899    ) -> Result<super::ShaderModule, crate::ShaderError> {
1900        let spv = match shader {
1901            crate::ShaderInput::Naga(naga_shader) => {
1902                if self
1903                    .shared
1904                    .workarounds
1905                    .contains(super::Workarounds::SEPARATE_ENTRY_POINTS)
1906                    || !naga_shader.module.overrides.is_empty()
1907                {
1908                    return Ok(super::ShaderModule::Intermediate {
1909                        naga_shader,
1910                        runtime_checks: desc.runtime_checks,
1911                    });
1912                }
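                // Reaching this point means the module can be compiled to SPIR-V immediately;
                // the `Intermediate` path above defers compilation to pipeline creation, when
                // the selected entry point and any override values are known.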
1913                let mut naga_options = self.naga_options.clone();
1914                naga_options.debug_info =
1915                    naga_shader
1916                        .debug_source
1917                        .as_ref()
1918                        .map(|d| naga::back::spv::DebugInfo {
1919                            source_code: d.source_code.as_ref(),
1920                            file_name: d.file_name.as_ref().into(),
1921                            language: naga::back::spv::SourceLanguage::WGSL,
1922                        });
1923                if !desc.runtime_checks.bounds_checks {
1924                    naga_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
1925                        index: naga::proc::BoundsCheckPolicy::Unchecked,
1926                        buffer: naga::proc::BoundsCheckPolicy::Unchecked,
1927                        image_load: naga::proc::BoundsCheckPolicy::Unchecked,
1928                        binding_array: naga::proc::BoundsCheckPolicy::Unchecked,
1929                    };
1930                }
1931                Cow::Owned(
1932                    naga::back::spv::write_vec(
1933                        &naga_shader.module,
1934                        &naga_shader.info,
1935                        &naga_options,
1936                        None,
1937                    )
1938                    .map_err(|e| crate::ShaderError::Compilation(format!("{e}")))?,
1939                )
1940            }
1941            crate::ShaderInput::Msl { .. } => {
1942                panic!("MSL_SHADER_PASSTHROUGH is not enabled for this backend")
1943            }
1944            crate::ShaderInput::Dxil { .. } | crate::ShaderInput::Hlsl { .. } => {
1945                panic!("`Features::HLSL_DXIL_SHADER_PASSTHROUGH` is not enabled")
1946            }
1947            crate::ShaderInput::SpirV(spv) => Cow::Borrowed(spv),
1948        };
1949
1950        let raw = self.create_shader_module_impl(&spv)?;
1951
1952        if let Some(label) = desc.label {
1953            unsafe { self.shared.set_object_name(raw, label) };
1954        }
1955
1956        self.counters.shader_modules.add(1);
1957
1958        Ok(super::ShaderModule::Raw(raw))
1959    }
1960
1961    unsafe fn destroy_shader_module(&self, module: super::ShaderModule) {
1962        match module {
1963            super::ShaderModule::Raw(raw) => {
1964                unsafe { self.shared.raw.destroy_shader_module(raw, None) };
1965            }
1966            super::ShaderModule::Intermediate { .. } => {}
1967        }
1968
1969        self.counters.shader_modules.sub(1);
1970    }
1971
1972    unsafe fn create_render_pipeline(
1973        &self,
1974        desc: &crate::RenderPipelineDescriptor<
1975            super::PipelineLayout,
1976            super::ShaderModule,
1977            super::PipelineCache,
1978        >,
1979    ) -> Result<super::RenderPipeline, crate::PipelineError> {
1980        let dynamic_states = [
1981            vk::DynamicState::VIEWPORT,
1982            vk::DynamicState::SCISSOR,
1983            vk::DynamicState::BLEND_CONSTANTS,
1984            vk::DynamicState::STENCIL_REFERENCE,
1985        ];
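        // Viewport, scissor, blend constants and stencil reference are always provided at
        // encode time, so they are declared dynamic rather than baked into the pipeline.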
1986        let mut compatible_rp_key = super::RenderPassKey {
1987            sample_count: desc.multisample.count,
1988            multiview: desc.multiview,
1989            ..Default::default()
1990        };
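        // The pipeline only needs a render pass *compatible* with the one used at draw time
        // (matching attachment formats and sample counts), so a cached pass obtained from
        // `make_render_pass` with this key is sufficient.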
1991        let mut stages = ArrayVec::<_, { crate::MAX_CONCURRENT_SHADER_STAGES }>::new();
1992        let mut vertex_buffers = Vec::with_capacity(desc.vertex_buffers.len());
1993        let mut vertex_attributes = Vec::new();
1994
1995        for (i, vb) in desc.vertex_buffers.iter().enumerate() {
1996            vertex_buffers.push(vk::VertexInputBindingDescription {
1997                binding: i as u32,
1998                stride: vb.array_stride as u32,
1999                input_rate: match vb.step_mode {
2000                    wgt::VertexStepMode::Vertex => vk::VertexInputRate::VERTEX,
2001                    wgt::VertexStepMode::Instance => vk::VertexInputRate::INSTANCE,
2002                },
2003            });
2004            for at in vb.attributes {
2005                vertex_attributes.push(vk::VertexInputAttributeDescription {
2006                    location: at.shader_location,
2007                    binding: i as u32,
2008                    format: conv::map_vertex_format(at.format),
2009                    offset: at.offset as u32,
2010                });
2011            }
2012        }
2013
2014        let vk_vertex_input = vk::PipelineVertexInputStateCreateInfo::default()
2015            .vertex_binding_descriptions(&vertex_buffers)
2016            .vertex_attribute_descriptions(&vertex_attributes);
2017
2018        let vk_input_assembly = vk::PipelineInputAssemblyStateCreateInfo::default()
2019            .topology(conv::map_topology(desc.primitive.topology))
2020            .primitive_restart_enable(desc.primitive.strip_index_format.is_some());
2021
2022        let compiled_vs = self.compile_stage(
2023            &desc.vertex_stage,
2024            naga::ShaderStage::Vertex,
2025            &desc.layout.binding_arrays,
2026        )?;
2027        stages.push(compiled_vs.create_info);
2028        let compiled_fs = match desc.fragment_stage {
2029            Some(ref stage) => {
2030                let compiled = self.compile_stage(
2031                    stage,
2032                    naga::ShaderStage::Fragment,
2033                    &desc.layout.binding_arrays,
2034                )?;
2035                stages.push(compiled.create_info);
2036                Some(compiled)
2037            }
2038            None => None,
2039        };
2040
2041        let mut vk_rasterization = vk::PipelineRasterizationStateCreateInfo::default()
2042            .polygon_mode(conv::map_polygon_mode(desc.primitive.polygon_mode))
2043            .front_face(conv::map_front_face(desc.primitive.front_face))
2044            .line_width(1.0)
2045            .depth_clamp_enable(desc.primitive.unclipped_depth);
2046        if let Some(face) = desc.primitive.cull_mode {
2047            vk_rasterization = vk_rasterization.cull_mode(conv::map_cull_face(face))
2048        }
2049        let mut vk_rasterization_conservative_state =
2050            vk::PipelineRasterizationConservativeStateCreateInfoEXT::default()
2051                .conservative_rasterization_mode(
2052                    vk::ConservativeRasterizationModeEXT::OVERESTIMATE,
2053                );
2054        if desc.primitive.conservative {
2055            vk_rasterization = vk_rasterization.push_next(&mut vk_rasterization_conservative_state);
2056        }
2057
2058        let mut vk_depth_stencil = vk::PipelineDepthStencilStateCreateInfo::default();
2059        if let Some(ref ds) = desc.depth_stencil {
2060            let vk_format = self.shared.private_caps.map_texture_format(ds.format);
2061            let vk_layout = if ds.is_read_only(desc.primitive.cull_mode) {
2062                vk::ImageLayout::DEPTH_STENCIL_READ_ONLY_OPTIMAL
2063            } else {
2064                vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL
2065            };
2066            compatible_rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
2067                base: super::AttachmentKey::compatible(vk_format, vk_layout),
2068                stencil_ops: crate::AttachmentOps::all(),
2069            });
2070
2071            if ds.is_depth_enabled() {
2072                vk_depth_stencil = vk_depth_stencil
2073                    .depth_test_enable(true)
2074                    .depth_write_enable(ds.depth_write_enabled)
2075                    .depth_compare_op(conv::map_comparison(ds.depth_compare));
2076            }
2077            if ds.stencil.is_enabled() {
2078                let s = &ds.stencil;
2079                let front = conv::map_stencil_face(&s.front, s.read_mask, s.write_mask);
2080                let back = conv::map_stencil_face(&s.back, s.read_mask, s.write_mask);
2081                vk_depth_stencil = vk_depth_stencil
2082                    .stencil_test_enable(true)
2083                    .front(front)
2084                    .back(back);
2085            }
2086
2087            if ds.bias.is_enabled() {
2088                vk_rasterization = vk_rasterization
2089                    .depth_bias_enable(true)
2090                    .depth_bias_constant_factor(ds.bias.constant as f32)
2091                    .depth_bias_clamp(ds.bias.clamp)
2092                    .depth_bias_slope_factor(ds.bias.slope_scale);
2093            }
2094        }
2095
2096        let vk_viewport = vk::PipelineViewportStateCreateInfo::default()
2097            .flags(vk::PipelineViewportStateCreateFlags::empty())
2098            .scissor_count(1)
2099            .viewport_count(1);
2100
2101        let vk_sample_mask = [
2102            desc.multisample.mask as u32,
2103            (desc.multisample.mask >> 32) as u32,
2104        ];
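        // Vulkan takes the sample mask as an array of 32-bit words, so the 64-bit wgpu mask
        // is split into its low and high halves.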
2105        let vk_multisample = vk::PipelineMultisampleStateCreateInfo::default()
2106            .rasterization_samples(vk::SampleCountFlags::from_raw(desc.multisample.count))
2107            .alpha_to_coverage_enable(desc.multisample.alpha_to_coverage_enabled)
2108            .sample_mask(&vk_sample_mask);
2109
2110        let mut vk_attachments = Vec::with_capacity(desc.color_targets.len());
2111        for cat in desc.color_targets {
2112            let (key, attachment) = if let Some(cat) = cat.as_ref() {
2113                let mut vk_attachment = vk::PipelineColorBlendAttachmentState::default()
2114                    .color_write_mask(vk::ColorComponentFlags::from_raw(cat.write_mask.bits()));
2115                if let Some(ref blend) = cat.blend {
2116                    let (color_op, color_src, color_dst) = conv::map_blend_component(&blend.color);
2117                    let (alpha_op, alpha_src, alpha_dst) = conv::map_blend_component(&blend.alpha);
2118                    vk_attachment = vk_attachment
2119                        .blend_enable(true)
2120                        .color_blend_op(color_op)
2121                        .src_color_blend_factor(color_src)
2122                        .dst_color_blend_factor(color_dst)
2123                        .alpha_blend_op(alpha_op)
2124                        .src_alpha_blend_factor(alpha_src)
2125                        .dst_alpha_blend_factor(alpha_dst);
2126                }
2127
2128                let vk_format = self.shared.private_caps.map_texture_format(cat.format);
2129                (
2130                    Some(super::ColorAttachmentKey {
2131                        base: super::AttachmentKey::compatible(
2132                            vk_format,
2133                            vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,
2134                        ),
2135                        resolve: None,
2136                    }),
2137                    vk_attachment,
2138                )
2139            } else {
2140                (None, vk::PipelineColorBlendAttachmentState::default())
2141            };
2142
2143            compatible_rp_key.colors.push(key);
2144            vk_attachments.push(attachment);
2145        }
2146
2147        let vk_color_blend =
2148            vk::PipelineColorBlendStateCreateInfo::default().attachments(&vk_attachments);
2149
2150        let vk_dynamic_state =
2151            vk::PipelineDynamicStateCreateInfo::default().dynamic_states(&dynamic_states);
2152
2153        let raw_pass = self.shared.make_render_pass(compatible_rp_key)?;
2154
2155        let vk_infos = [{
2156            vk::GraphicsPipelineCreateInfo::default()
2157                .layout(desc.layout.raw)
2158                .stages(&stages)
2159                .vertex_input_state(&vk_vertex_input)
2160                .input_assembly_state(&vk_input_assembly)
2161                .rasterization_state(&vk_rasterization)
2162                .viewport_state(&vk_viewport)
2163                .multisample_state(&vk_multisample)
2164                .depth_stencil_state(&vk_depth_stencil)
2165                .color_blend_state(&vk_color_blend)
2166                .dynamic_state(&vk_dynamic_state)
2167                .render_pass(raw_pass)
2168        }];
2169
2170        let pipeline_cache = desc
2171            .cache
2172            .map(|it| it.raw)
2173            .unwrap_or(vk::PipelineCache::null());
2174
2175        let mut raw_vec = {
2176            profiling::scope!("vkCreateGraphicsPipelines");
2177            unsafe {
2178                self.shared
2179                    .raw
2180                    .create_graphics_pipelines(pipeline_cache, &vk_infos, None)
2181                    .map_err(|(_, e)| super::map_pipeline_err(e))
2182            }?
2183        };
2184
2185        let raw = raw_vec.pop().unwrap();
2186        if let Some(label) = desc.label {
2187            unsafe { self.shared.set_object_name(raw, label) };
2188        }
2189
2190        if let Some(raw_module) = compiled_vs.temp_raw_module {
2191            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2192        }
2193        if let Some(CompiledStage {
2194            temp_raw_module: Some(raw_module),
2195            ..
2196        }) = compiled_fs
2197        {
2198            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2199        }
2200
2201        self.counters.render_pipelines.add(1);
2202
2203        Ok(super::RenderPipeline { raw })
2204    }
2205    unsafe fn create_mesh_pipeline(
2206        &self,
2207        desc: &crate::MeshPipelineDescriptor<
2208            <Self::A as crate::Api>::PipelineLayout,
2209            <Self::A as crate::Api>::ShaderModule,
2210            <Self::A as crate::Api>::PipelineCache,
2211        >,
2212    ) -> Result<<Self::A as crate::Api>::RenderPipeline, crate::PipelineError> {
2213        let dynamic_states = [
2214            vk::DynamicState::VIEWPORT,
2215            vk::DynamicState::SCISSOR,
2216            vk::DynamicState::BLEND_CONSTANTS,
2217            vk::DynamicState::STENCIL_REFERENCE,
2218        ];
2219        let mut compatible_rp_key = super::RenderPassKey {
2220            sample_count: desc.multisample.count,
2221            multiview: desc.multiview,
2222            ..Default::default()
2223        };
2224        let mut stages = ArrayVec::<_, { crate::MAX_CONCURRENT_SHADER_STAGES }>::new();
2225
2226        let vk_input_assembly = vk::PipelineInputAssemblyStateCreateInfo::default()
2227            .topology(conv::map_topology(desc.primitive.topology))
2228            .primitive_restart_enable(desc.primitive.strip_index_format.is_some());
2229
2230        let compiled_ts = match desc.task_stage {
2231            Some(ref stage) => {
2232                let mut compiled = self.compile_stage(
2233                    stage,
2234                    naga::ShaderStage::Task,
2235                    &desc.layout.binding_arrays,
2236                )?;
2237                compiled.create_info.stage = vk::ShaderStageFlags::TASK_EXT;
2238                stages.push(compiled.create_info);
2239                Some(compiled)
2240            }
2241            None => None,
2242        };
2243
2244        let mut compiled_ms = self.compile_stage(
2245            &desc.mesh_stage,
2246            naga::ShaderStage::Mesh,
2247            &desc.layout.binding_arrays,
2248        )?;
2249        compiled_ms.create_info.stage = vk::ShaderStageFlags::MESH_EXT;
2250        stages.push(compiled_ms.create_info);
2251        let compiled_fs = match desc.fragment_stage {
2252            Some(ref stage) => {
2253                let compiled = self.compile_stage(
2254                    stage,
2255                    naga::ShaderStage::Fragment,
2256                    &desc.layout.binding_arrays,
2257                )?;
2258                stages.push(compiled.create_info);
2259                Some(compiled)
2260            }
2261            None => None,
2262        };
2263
2264        let mut vk_rasterization = vk::PipelineRasterizationStateCreateInfo::default()
2265            .polygon_mode(conv::map_polygon_mode(desc.primitive.polygon_mode))
2266            .front_face(conv::map_front_face(desc.primitive.front_face))
2267            .line_width(1.0)
2268            .depth_clamp_enable(desc.primitive.unclipped_depth);
2269        if let Some(face) = desc.primitive.cull_mode {
2270            vk_rasterization = vk_rasterization.cull_mode(conv::map_cull_face(face))
2271        }
2272        let mut vk_rasterization_conservative_state =
2273            vk::PipelineRasterizationConservativeStateCreateInfoEXT::default()
2274                .conservative_rasterization_mode(
2275                    vk::ConservativeRasterizationModeEXT::OVERESTIMATE,
2276                );
2277        if desc.primitive.conservative {
2278            vk_rasterization = vk_rasterization.push_next(&mut vk_rasterization_conservative_state);
2279        }
2280
2281        let mut vk_depth_stencil = vk::PipelineDepthStencilStateCreateInfo::default();
2282        if let Some(ref ds) = desc.depth_stencil {
2283            let vk_format = self.shared.private_caps.map_texture_format(ds.format);
2284            let vk_layout = if ds.is_read_only(desc.primitive.cull_mode) {
2285                vk::ImageLayout::DEPTH_STENCIL_READ_ONLY_OPTIMAL
2286            } else {
2287                vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL
2288            };
2289            compatible_rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
2290                base: super::AttachmentKey::compatible(vk_format, vk_layout),
2291                stencil_ops: crate::AttachmentOps::all(),
2292            });
2293
2294            if ds.is_depth_enabled() {
2295                vk_depth_stencil = vk_depth_stencil
2296                    .depth_test_enable(true)
2297                    .depth_write_enable(ds.depth_write_enabled)
2298                    .depth_compare_op(conv::map_comparison(ds.depth_compare));
2299            }
2300            if ds.stencil.is_enabled() {
2301                let s = &ds.stencil;
2302                let front = conv::map_stencil_face(&s.front, s.read_mask, s.write_mask);
2303                let back = conv::map_stencil_face(&s.back, s.read_mask, s.write_mask);
2304                vk_depth_stencil = vk_depth_stencil
2305                    .stencil_test_enable(true)
2306                    .front(front)
2307                    .back(back);
2308            }
2309
2310            if ds.bias.is_enabled() {
2311                vk_rasterization = vk_rasterization
2312                    .depth_bias_enable(true)
2313                    .depth_bias_constant_factor(ds.bias.constant as f32)
2314                    .depth_bias_clamp(ds.bias.clamp)
2315                    .depth_bias_slope_factor(ds.bias.slope_scale);
2316            }
2317        }
2318
2319        let vk_viewport = vk::PipelineViewportStateCreateInfo::default()
2320            .flags(vk::PipelineViewportStateCreateFlags::empty())
2321            .scissor_count(1)
2322            .viewport_count(1);
2323
2324        let vk_sample_mask = [
2325            desc.multisample.mask as u32,
2326            (desc.multisample.mask >> 32) as u32,
2327        ];
2328        let vk_multisample = vk::PipelineMultisampleStateCreateInfo::default()
2329            .rasterization_samples(vk::SampleCountFlags::from_raw(desc.multisample.count))
2330            .alpha_to_coverage_enable(desc.multisample.alpha_to_coverage_enabled)
2331            .sample_mask(&vk_sample_mask);
2332
2333        let mut vk_attachments = Vec::with_capacity(desc.color_targets.len());
2334        for cat in desc.color_targets {
2335            let (key, attachment) = if let Some(cat) = cat.as_ref() {
2336                let mut vk_attachment = vk::PipelineColorBlendAttachmentState::default()
2337                    .color_write_mask(vk::ColorComponentFlags::from_raw(cat.write_mask.bits()));
2338                if let Some(ref blend) = cat.blend {
2339                    let (color_op, color_src, color_dst) = conv::map_blend_component(&blend.color);
2340                    let (alpha_op, alpha_src, alpha_dst) = conv::map_blend_component(&blend.alpha);
2341                    vk_attachment = vk_attachment
2342                        .blend_enable(true)
2343                        .color_blend_op(color_op)
2344                        .src_color_blend_factor(color_src)
2345                        .dst_color_blend_factor(color_dst)
2346                        .alpha_blend_op(alpha_op)
2347                        .src_alpha_blend_factor(alpha_src)
2348                        .dst_alpha_blend_factor(alpha_dst);
2349                }
2350
2351                let vk_format = self.shared.private_caps.map_texture_format(cat.format);
2352                (
2353                    Some(super::ColorAttachmentKey {
2354                        base: super::AttachmentKey::compatible(
2355                            vk_format,
2356                            vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,
2357                        ),
2358                        resolve: None,
2359                    }),
2360                    vk_attachment,
2361                )
2362            } else {
2363                (None, vk::PipelineColorBlendAttachmentState::default())
2364            };
2365
2366            compatible_rp_key.colors.push(key);
2367            vk_attachments.push(attachment);
2368        }
2369
2370        let vk_color_blend =
2371            vk::PipelineColorBlendStateCreateInfo::default().attachments(&vk_attachments);
2372
2373        let vk_dynamic_state =
2374            vk::PipelineDynamicStateCreateInfo::default().dynamic_states(&dynamic_states);
2375
2376        let raw_pass = self.shared.make_render_pass(compatible_rp_key)?;
2377
2378        let vk_infos = [{
2379            vk::GraphicsPipelineCreateInfo::default()
2380                .layout(desc.layout.raw)
2381                .stages(&stages)
2382                .input_assembly_state(&vk_input_assembly)
2383                .rasterization_state(&vk_rasterization)
2384                .viewport_state(&vk_viewport)
2385                .multisample_state(&vk_multisample)
2386                .depth_stencil_state(&vk_depth_stencil)
2387                .color_blend_state(&vk_color_blend)
2388                .dynamic_state(&vk_dynamic_state)
2389                .render_pass(raw_pass)
2390        }];
2391
2392        let pipeline_cache = desc
2393            .cache
2394            .map(|it| it.raw)
2395            .unwrap_or(vk::PipelineCache::null());
2396
2397        let mut raw_vec = {
2398            profiling::scope!("vkCreateGraphicsPipelines");
2399            unsafe {
2400                self.shared
2401                    .raw
2402                    .create_graphics_pipelines(pipeline_cache, &vk_infos, None)
2403                    .map_err(|(_, e)| super::map_pipeline_err(e))
2404            }?
2405        };
2406
2407        let raw = raw_vec.pop().unwrap();
2408        if let Some(label) = desc.label {
2409            unsafe { self.shared.set_object_name(raw, label) };
2410        }
2411        // NOTE: this could leak shaders in case of an error.
2412        if let Some(CompiledStage {
2413            temp_raw_module: Some(raw_module),
2414            ..
2415        }) = compiled_ts
2416        {
2417            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2418        }
2419        if let Some(raw_module) = compiled_ms.temp_raw_module {
2420            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2421        }
2422        if let Some(CompiledStage {
2423            temp_raw_module: Some(raw_module),
2424            ..
2425        }) = compiled_fs
2426        {
2427            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2428        }
2429
2430        self.counters.render_pipelines.add(1);
2431
2432        Ok(super::RenderPipeline { raw })
2433    }
2434
2435    unsafe fn destroy_render_pipeline(&self, pipeline: super::RenderPipeline) {
2436        unsafe { self.shared.raw.destroy_pipeline(pipeline.raw, None) };
2437
2438        self.counters.render_pipelines.sub(1);
2439    }
2440
2441    unsafe fn create_compute_pipeline(
2442        &self,
2443        desc: &crate::ComputePipelineDescriptor<
2444            super::PipelineLayout,
2445            super::ShaderModule,
2446            super::PipelineCache,
2447        >,
2448    ) -> Result<super::ComputePipeline, crate::PipelineError> {
2449        let compiled = self.compile_stage(
2450            &desc.stage,
2451            naga::ShaderStage::Compute,
2452            &desc.layout.binding_arrays,
2453        )?;
2454
2455        let vk_infos = [{
2456            vk::ComputePipelineCreateInfo::default()
2457                .layout(desc.layout.raw)
2458                .stage(compiled.create_info)
2459        }];
2460
2461        let pipeline_cache = desc
2462            .cache
2463            .map(|it| it.raw)
2464            .unwrap_or(vk::PipelineCache::null());
2465
2466        let mut raw_vec = {
2467            profiling::scope!("vkCreateComputePipelines");
2468            unsafe {
2469                self.shared
2470                    .raw
2471                    .create_compute_pipelines(pipeline_cache, &vk_infos, None)
2472                    .map_err(|(_, e)| super::map_pipeline_err(e))
2473            }?
2474        };
2475
2476        let raw = raw_vec.pop().unwrap();
2477        if let Some(label) = desc.label {
2478            unsafe { self.shared.set_object_name(raw, label) };
2479        }
2480
2481        if let Some(raw_module) = compiled.temp_raw_module {
2482            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2483        }
2484
2485        self.counters.compute_pipelines.add(1);
2486
2487        Ok(super::ComputePipeline { raw })
2488    }
2489
2490    unsafe fn destroy_compute_pipeline(&self, pipeline: super::ComputePipeline) {
2491        unsafe { self.shared.raw.destroy_pipeline(pipeline.raw, None) };
2492
2493        self.counters.compute_pipelines.sub(1);
2494    }
2495
2496    unsafe fn create_pipeline_cache(
2497        &self,
2498        desc: &crate::PipelineCacheDescriptor<'_>,
2499    ) -> Result<super::PipelineCache, crate::PipelineCacheError> {
2500        let mut info = vk::PipelineCacheCreateInfo::default();
2501        if let Some(data) = desc.data {
2502            info = info.initial_data(data)
2503        }
2504        profiling::scope!("vkCreatePipelineCache");
2505        let raw = unsafe { self.shared.raw.create_pipeline_cache(&info, None) }
2506            .map_err(super::map_host_device_oom_err)?;
2507
2508        Ok(super::PipelineCache { raw })
2509    }
2510    fn pipeline_cache_validation_key(&self) -> Option<[u8; 16]> {
2511        Some(self.shared.pipeline_cache_validation_key)
2512    }
2513    unsafe fn destroy_pipeline_cache(&self, cache: super::PipelineCache) {
2514        unsafe { self.shared.raw.destroy_pipeline_cache(cache.raw, None) }
2515    }
2516    unsafe fn create_query_set(
2517        &self,
2518        desc: &wgt::QuerySetDescriptor<crate::Label>,
2519    ) -> Result<super::QuerySet, crate::DeviceError> {
2520        // Assume each query is 256 bytes.
2521        // On an AMD W6800 with driver version 32.0.12030.9, occlusion queries take 256 bytes.
2522        self.error_if_would_oom_on_resource_allocation(true, desc.count as u64 * 256)?;
2523
2524        let (vk_type, pipeline_statistics) = match desc.ty {
2525            wgt::QueryType::Occlusion => (
2526                vk::QueryType::OCCLUSION,
2527                vk::QueryPipelineStatisticFlags::empty(),
2528            ),
2529            wgt::QueryType::PipelineStatistics(statistics) => (
2530                vk::QueryType::PIPELINE_STATISTICS,
2531                conv::map_pipeline_statistics(statistics),
2532            ),
2533            wgt::QueryType::Timestamp => (
2534                vk::QueryType::TIMESTAMP,
2535                vk::QueryPipelineStatisticFlags::empty(),
2536            ),
2537        };
2538
2539        let vk_info = vk::QueryPoolCreateInfo::default()
2540            .query_type(vk_type)
2541            .query_count(desc.count)
2542            .pipeline_statistics(pipeline_statistics);
2543
2544        let raw = unsafe { self.shared.raw.create_query_pool(&vk_info, None) }
2545            .map_err(super::map_host_device_oom_err)?;
2546        if let Some(label) = desc.label {
2547            unsafe { self.shared.set_object_name(raw, label) };
2548        }
2549
2550        self.counters.query_sets.add(1);
2551
2552        Ok(super::QuerySet { raw })
2553    }
2554
2555    unsafe fn destroy_query_set(&self, set: super::QuerySet) {
2556        unsafe { self.shared.raw.destroy_query_pool(set.raw, None) };
2557
2558        self.counters.query_sets.sub(1);
2559    }
2560
2561    unsafe fn create_fence(&self) -> Result<super::Fence, crate::DeviceError> {
2562        self.counters.fences.add(1);
2563
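        // Prefer a timeline semaphore, which represents the fence value as a single
        // monotonically increasing counter; otherwise fall back to a pool of binary VkFences,
        // one per signaled value.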
2564        Ok(if self.shared.private_caps.timeline_semaphores {
2565            let mut sem_type_info =
2566                vk::SemaphoreTypeCreateInfo::default().semaphore_type(vk::SemaphoreType::TIMELINE);
2567            let vk_info = vk::SemaphoreCreateInfo::default().push_next(&mut sem_type_info);
2568            let raw = unsafe { self.shared.raw.create_semaphore(&vk_info, None) }
2569                .map_err(super::map_host_device_oom_err)?;
2570
2571            super::Fence::TimelineSemaphore(raw)
2572        } else {
2573            super::Fence::FencePool {
2574                last_completed: 0,
2575                active: Vec::new(),
2576                free: Vec::new(),
2577            }
2578        })
2579    }
2580    unsafe fn destroy_fence(&self, fence: super::Fence) {
2581        match fence {
2582            super::Fence::TimelineSemaphore(raw) => {
2583                unsafe { self.shared.raw.destroy_semaphore(raw, None) };
2584            }
2585            super::Fence::FencePool {
2586                active,
2587                free,
2588                last_completed: _,
2589            } => {
2590                for (_, raw) in active {
2591                    unsafe { self.shared.raw.destroy_fence(raw, None) };
2592                }
2593                for raw in free {
2594                    unsafe { self.shared.raw.destroy_fence(raw, None) };
2595                }
2596            }
2597        }
2598
2599        self.counters.fences.sub(1);
2600    }
2601    unsafe fn get_fence_value(
2602        &self,
2603        fence: &super::Fence,
2604    ) -> Result<crate::FenceValue, crate::DeviceError> {
2605        fence.get_latest(
2606            &self.shared.raw,
2607            self.shared.extension_fns.timeline_semaphore.as_ref(),
2608        )
2609    }
2610    unsafe fn wait(
2611        &self,
2612        fence: &super::Fence,
2613        wait_value: crate::FenceValue,
2614        timeout_ms: u32,
2615    ) -> Result<bool, crate::DeviceError> {
2616        let timeout_ns = timeout_ms as u64 * super::MILLIS_TO_NANOS;
2617        self.shared.wait_for_fence(fence, wait_value, timeout_ns)
2618    }
2619
2620    unsafe fn start_graphics_debugger_capture(&self) -> bool {
2621        #[cfg(feature = "renderdoc")]
2622        {
2623            // RenderDoc requires us to give it the pointer that vkInstance _points to_.
2624            let raw_vk_instance =
2625                vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
2626            let raw_vk_instance_dispatch_table = unsafe { *raw_vk_instance };
2627            unsafe {
2628                self.render_doc
2629                    .start_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
2630            }
2631        }
2632        #[cfg(not(feature = "renderdoc"))]
2633        false
2634    }
2635    unsafe fn stop_graphics_debugger_capture(&self) {
2636        #[cfg(feature = "renderdoc")]
2637        {
2638            // RenderDoc requires us to give it the pointer that vkInstance _points to_.
2639            let raw_vk_instance =
2640                vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
2641            let raw_vk_instance_dispatch_table = unsafe { *raw_vk_instance };
2642
2643            unsafe {
2644                self.render_doc
2645                    .end_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
2646            }
2647        }
2648    }
2649
2650    unsafe fn pipeline_cache_get_data(&self, cache: &super::PipelineCache) -> Option<Vec<u8>> {
2651        let data = unsafe { self.raw_device().get_pipeline_cache_data(cache.raw) };
2652        data.ok()
2653    }
2654
2655    unsafe fn get_acceleration_structure_build_sizes<'a>(
2656        &self,
2657        desc: &crate::GetAccelerationStructureBuildSizesDescriptor<'a, super::Buffer>,
2658    ) -> crate::AccelerationStructureBuildSizes {
2659        const CAPACITY: usize = 8;
2660
2661        let ray_tracing_functions = self
2662            .shared
2663            .extension_fns
2664            .ray_tracing
2665            .as_ref()
2666            .expect("Feature `RAY_TRACING` not enabled");
2667
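        // Only the geometry descriptions and maximum primitive counts matter for a
        // build-sizes query; buffer addresses are ignored by the driver, except that
        // `transform_data` is checked for null (see the spec quote further down).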
2668        let (geometries, primitive_counts) = match *desc.entries {
2669            crate::AccelerationStructureEntries::Instances(ref instances) => {
2670                let instance_data = vk::AccelerationStructureGeometryInstancesDataKHR::default();
2671
2672                let geometry = vk::AccelerationStructureGeometryKHR::default()
2673                    .geometry_type(vk::GeometryTypeKHR::INSTANCES)
2674                    .geometry(vk::AccelerationStructureGeometryDataKHR {
2675                        instances: instance_data,
2676                    });
2677
2678                (
2679                    smallvec::smallvec![geometry],
2680                    smallvec::smallvec![instances.count],
2681                )
2682            }
2683            crate::AccelerationStructureEntries::Triangles(ref in_geometries) => {
2684                let mut primitive_counts =
2685                    smallvec::SmallVec::<[u32; CAPACITY]>::with_capacity(in_geometries.len());
2686                let mut geometries = smallvec::SmallVec::<
2687                    [vk::AccelerationStructureGeometryKHR; CAPACITY],
2688                >::with_capacity(in_geometries.len());
2689
2690                for triangles in in_geometries {
2691                    let mut triangle_data =
2692                        vk::AccelerationStructureGeometryTrianglesDataKHR::default()
2693                            .index_type(vk::IndexType::NONE_KHR)
2694                            .vertex_format(conv::map_vertex_format(triangles.vertex_format))
2695                            .max_vertex(triangles.vertex_count)
2696                            .vertex_stride(triangles.vertex_stride)
2697                            // The Vulkan spec suggests we could pass a non-zero invalid address here if fetching
2698                            // the real address has significant overhead, but we pass the real one to be on the
2699                            // safe side for now.
2700                            // from https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureBuildSizesKHR.html
2701                            // > The srcAccelerationStructure, dstAccelerationStructure, and mode members
2702                            // > of pBuildInfo are ignored. Any VkDeviceOrHostAddressKHR or VkDeviceOrHostAddressConstKHR
2703                            // > members of pBuildInfo are ignored by this command, except that the hostAddress
2704                            // > member of VkAccelerationStructureGeometryTrianglesDataKHR::transformData will
2705                            // > be examined to check if it is NULL.
2706                            .transform_data(vk::DeviceOrHostAddressConstKHR {
2707                                device_address: if desc
2708                                    .flags
2709                                    .contains(wgt::AccelerationStructureFlags::USE_TRANSFORM)
2710                                {
2711                                    unsafe {
2712                                        ray_tracing_functions
2713                                            .buffer_device_address
2714                                            .get_buffer_device_address(
2715                                                &vk::BufferDeviceAddressInfo::default().buffer(
2716                                                    triangles
2717                                                        .transform
2718                                                        .as_ref()
2719                                                        .unwrap()
2720                                                        .buffer
2721                                                        .raw,
2722                                                ),
2723                                            )
2724                                    }
2725                                } else {
2726                                    0
2727                                },
2728                            });
2729
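                    // One triangle per three indices when indexed; otherwise the caller's
                    // vertex count is used directly as the primitive count.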
2730                    let primitive_count = if let Some(ref indices) = triangles.indices {
2731                        triangle_data =
2732                            triangle_data.index_type(conv::map_index_format(indices.format));
2733                        indices.count / 3
2734                    } else {
2735                        triangles.vertex_count
2736                    };
2737
2738                    let geometry = vk::AccelerationStructureGeometryKHR::default()
2739                        .geometry_type(vk::GeometryTypeKHR::TRIANGLES)
2740                        .geometry(vk::AccelerationStructureGeometryDataKHR {
2741                            triangles: triangle_data,
2742                        })
2743                        .flags(conv::map_acceleration_structure_geometry_flags(
2744                            triangles.flags,
2745                        ));
2746
2747                    geometries.push(geometry);
2748                    primitive_counts.push(primitive_count);
2749                }
2750                (geometries, primitive_counts)
2751            }
2752            crate::AccelerationStructureEntries::AABBs(ref in_geometries) => {
2753                let mut primitive_counts =
2754                    smallvec::SmallVec::<[u32; CAPACITY]>::with_capacity(in_geometries.len());
2755                let mut geometries = smallvec::SmallVec::<
2756                    [vk::AccelerationStructureGeometryKHR; CAPACITY],
2757                >::with_capacity(in_geometries.len());
2758                for aabb in in_geometries {
2759                    let aabbs_data = vk::AccelerationStructureGeometryAabbsDataKHR::default()
2760                        .stride(aabb.stride);
2761
2762                    let geometry = vk::AccelerationStructureGeometryKHR::default()
2763                        .geometry_type(vk::GeometryTypeKHR::AABBS)
2764                        .geometry(vk::AccelerationStructureGeometryDataKHR { aabbs: aabbs_data })
2765                        .flags(conv::map_acceleration_structure_geometry_flags(aabb.flags));
2766
2767                    geometries.push(geometry);
2768                    primitive_counts.push(aabb.count);
2769                }
2770                (geometries, primitive_counts)
2771            }
2772        };
2773
2774        let ty = match *desc.entries {
2775            crate::AccelerationStructureEntries::Instances(_) => {
2776                vk::AccelerationStructureTypeKHR::TOP_LEVEL
2777            }
2778            _ => vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL,
2779        };
2780
2781        let geometry_info = vk::AccelerationStructureBuildGeometryInfoKHR::default()
2782            .ty(ty)
2783            .flags(conv::map_acceleration_structure_flags(desc.flags))
2784            .geometries(&geometries);
2785
2786        let mut raw = Default::default();
2787        unsafe {
2788            ray_tracing_functions
2789                .acceleration_structure
2790                .get_acceleration_structure_build_sizes(
2791                    vk::AccelerationStructureBuildTypeKHR::DEVICE,
2792                    &geometry_info,
2793                    &primitive_counts,
2794                    &mut raw,
2795                )
2796        }
2797
2798        crate::AccelerationStructureBuildSizes {
2799            acceleration_structure_size: raw.acceleration_structure_size,
2800            update_scratch_size: raw.update_scratch_size,
2801            build_scratch_size: raw.build_scratch_size,
2802        }
2803    }
2804
2805    unsafe fn get_acceleration_structure_device_address(
2806        &self,
2807        acceleration_structure: &super::AccelerationStructure,
2808    ) -> wgt::BufferAddress {
2809        let ray_tracing_functions = self
2810            .shared
2811            .extension_fns
2812            .ray_tracing
2813            .as_ref()
2814            .expect("Feature `RAY_TRACING` not enabled");
2815
2816        unsafe {
2817            ray_tracing_functions
2818                .acceleration_structure
2819                .get_acceleration_structure_device_address(
2820                    &vk::AccelerationStructureDeviceAddressInfoKHR::default()
2821                        .acceleration_structure(acceleration_structure.raw),
2822                )
2823        }
2824    }
2825
2826    unsafe fn create_acceleration_structure(
2827        &self,
2828        desc: &crate::AccelerationStructureDescriptor,
2829    ) -> Result<super::AccelerationStructure, crate::DeviceError> {
2830        let ray_tracing_functions = self
2831            .shared
2832            .extension_fns
2833            .ray_tracing
2834            .as_ref()
2835            .expect("Feature `RAY_TRACING` not enabled");
2836
2837        let vk_buffer_info = vk::BufferCreateInfo::default()
2838            .size(desc.size)
2839            .usage(
2840                vk::BufferUsageFlags::ACCELERATION_STRUCTURE_STORAGE_KHR
2841                    | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS,
2842            )
2843            .sharing_mode(vk::SharingMode::EXCLUSIVE);
2844
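        // The acceleration structure is backed by a plain buffer allocated and bound here;
        // every fallible step below cleans up the resources created so far, so a partial
        // failure does not leak the allocation.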
2845        unsafe {
2846            let raw_buffer = self
2847                .shared
2848                .raw
2849                .create_buffer(&vk_buffer_info, None)
2850                .map_err(super::map_host_device_oom_and_ioca_err)?;
2851            let req = self.shared.raw.get_buffer_memory_requirements(raw_buffer);
2852
2853            self.error_if_would_oom_on_resource_allocation(false, req.size)
2854                .inspect_err(|_| {
2855                    self.shared.raw.destroy_buffer(raw_buffer, None);
2856                })?;
2857
2858            let block = self
2859                .mem_allocator
2860                .lock()
2861                .alloc(
2862                    &*self.shared,
2863                    gpu_alloc::Request {
2864                        size: req.size,
2865                        align_mask: req.alignment - 1,
2866                        usage: gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS,
2867                        memory_types: req.memory_type_bits & self.valid_ash_memory_types,
2868                    },
2869                )
2870                .inspect_err(|_| {
2871                    self.shared.raw.destroy_buffer(raw_buffer, None);
2872                })?;
2873
2874            self.shared
2875                .raw
2876                .bind_buffer_memory(raw_buffer, *block.memory(), block.offset())
2877                .map_err(super::map_host_device_oom_and_ioca_err)
2878                .inspect_err(|_| {
2879                    self.shared.raw.destroy_buffer(raw_buffer, None);
2880                })?;
2881
2882            if let Some(label) = desc.label {
2883                self.shared.set_object_name(raw_buffer, label);
2884            }
2885
2886            let vk_info = vk::AccelerationStructureCreateInfoKHR::default()
2887                .buffer(raw_buffer)
2888                .offset(0)
2889                .size(desc.size)
2890                .ty(conv::map_acceleration_structure_format(desc.format));
2891
2892            let raw_acceleration_structure = ray_tracing_functions
2893                .acceleration_structure
2894                .create_acceleration_structure(&vk_info, None)
2895                .map_err(super::map_host_oom_and_ioca_err)
2896                .inspect_err(|_| {
2897                    self.shared.raw.destroy_buffer(raw_buffer, None);
2898                })?;
2899
2900            if let Some(label) = desc.label {
2901                self.shared
2902                    .set_object_name(raw_acceleration_structure, label);
2903            }
2904
2905            let pool = if desc.allow_compaction {
2906                let vk_info = vk::QueryPoolCreateInfo::default()
2907                    .query_type(vk::QueryType::ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR)
2908                    .query_count(1);
2909
2910                let raw = self
2911                    .shared
2912                    .raw
2913                    .create_query_pool(&vk_info, None)
2914                    .map_err(super::map_host_device_oom_err)
2915                    .inspect_err(|_| {
2916                        ray_tracing_functions
2917                            .acceleration_structure
2918                            .destroy_acceleration_structure(raw_acceleration_structure, None);
2919                        self.shared.raw.destroy_buffer(raw_buffer, None);
2920                    })?;
2921                Some(raw)
2922            } else {
2923                None
2924            };
2925
2926            Ok(super::AccelerationStructure {
2927                raw: raw_acceleration_structure,
2928                buffer: raw_buffer,
2929                block: Mutex::new(block),
2930                compacted_size_query: pool,
2931            })
2932        }
2933    }
2934
2935    unsafe fn destroy_acceleration_structure(
2936        &self,
2937        acceleration_structure: super::AccelerationStructure,
2938    ) {
2939        let ray_tracing_functions = self
2940            .shared
2941            .extension_fns
2942            .ray_tracing
2943            .as_ref()
2944            .expect("Feature `RAY_TRACING` not enabled");
2945
2946        unsafe {
2947            ray_tracing_functions
2948                .acceleration_structure
2949                .destroy_acceleration_structure(acceleration_structure.raw, None);
2950            self.shared
2951                .raw
2952                .destroy_buffer(acceleration_structure.buffer, None);
2953            self.mem_allocator
2954                .lock()
2955                .dealloc(&*self.shared, acceleration_structure.block.into_inner());
2956            if let Some(query) = acceleration_structure.compacted_size_query {
2957                self.shared.raw.destroy_query_pool(query, None)
2958            }
2959        }
2960    }
2961
2962    fn get_internal_counters(&self) -> wgt::HalCounters {
2963        self.counters
2964            .memory_allocations
2965            .set(self.shared.memory_allocations_counter.read());
2966
2967        self.counters.as_ref().clone()
2968    }
2969
2970    fn tlas_instance_to_bytes(&self, instance: TlasInstance) -> Vec<u8> {
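        // Matches the layout of VkAccelerationStructureInstanceKHR: the 24-bit instance
        // custom index and the 8-bit visibility mask share one u32, so `custom_data` is
        // masked to 24 bits and `mask` is shifted into the top byte.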
2971        const MAX_U24: u32 = (1u32 << 24u32) - 1u32;
2972        let temp = RawTlasInstance {
2973            transform: instance.transform,
2974            custom_data_and_mask: (instance.custom_data & MAX_U24)
2975                | (u32::from(instance.mask) << 24),
2976            shader_binding_table_record_offset_and_flags: 0,
2977            acceleration_structure_reference: instance.blas_address,
2978        };
2979        bytemuck::bytes_of(&temp).to_vec()
2980    }
2981
2982    fn check_if_oom(&self) -> Result<(), crate::DeviceError> {
2983        let Some(threshold) = self
2984            .shared
2985            .instance
2986            .memory_budget_thresholds
2987            .for_device_loss
2988        else {
2989            return Ok(());
2990        };
2991
2992        if !self
2993            .shared
2994            .enabled_extensions
2995            .contains(&ext::memory_budget::NAME)
2996        {
2997            return Ok(());
2998        }
2999
3000        let get_physical_device_properties = self
3001            .shared
3002            .instance
3003            .get_physical_device_properties
3004            .as_ref()
3005            .unwrap();
3006
3007        let mut memory_budget_properties = vk::PhysicalDeviceMemoryBudgetPropertiesEXT::default();
3008
3009        let mut memory_properties =
3010            vk::PhysicalDeviceMemoryProperties2::default().push_next(&mut memory_budget_properties);
3011
3012        unsafe {
3013            get_physical_device_properties.get_physical_device_memory_properties2(
3014                self.shared.physical_device,
3015                &mut memory_properties,
3016            );
3017        }
3018
3019        let memory_properties = memory_properties.memory_properties;
3020
3021        for i in 0..memory_properties.memory_heap_count {
3022            let heap_usage = memory_budget_properties.heap_usage[i as usize];
3023            let heap_budget = memory_budget_properties.heap_budget[i as usize];
3024
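            // `threshold` is a percentage of the heap budget reported by
            // VK_EXT_memory_budget; once usage reaches that share of the budget we report
            // out-of-memory rather than risk a device loss.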
3025            if heap_usage >= heap_budget / 100 * threshold as u64 {
3026                return Err(crate::DeviceError::OutOfMemory);
3027            }
3028        }
3029
3030        Ok(())
3031    }
3032}
3033
3034impl super::DeviceShared {
3035    pub(super) fn new_binary_semaphore(
3036        &self,
3037        name: &str,
3038    ) -> Result<vk::Semaphore, crate::DeviceError> {
3039        unsafe {
3040            let semaphore = self
3041                .raw
3042                .create_semaphore(&vk::SemaphoreCreateInfo::default(), None)
3043                .map_err(super::map_host_device_oom_err)?;
3044
3045            self.set_object_name(semaphore, name);
3046
3047            Ok(semaphore)
3048        }
3049    }
3050
3051    pub(super) fn wait_for_fence(
3052        &self,
3053        fence: &super::Fence,
3054        wait_value: crate::FenceValue,
3055        timeout_ns: u64,
3056    ) -> Result<bool, crate::DeviceError> {
3057        profiling::scope!("Device::wait");
3058        match *fence {
3059            super::Fence::TimelineSemaphore(raw) => {
3060                let semaphores = [raw];
3061                let values = [wait_value];
3062                let vk_info = vk::SemaphoreWaitInfo::default()
3063                    .semaphores(&semaphores)
3064                    .values(&values);
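                // Timeline semaphores come either from the VK_KHR_timeline_semaphore
                // extension or from core Vulkan 1.2 (the `Promoted` case), so dispatch to
                // the matching entry points.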
3065                let result = match self.extension_fns.timeline_semaphore {
3066                    Some(super::ExtensionFn::Extension(ref ext)) => unsafe {
3067                        ext.wait_semaphores(&vk_info, timeout_ns)
3068                    },
3069                    Some(super::ExtensionFn::Promoted) => unsafe {
3070                        self.raw.wait_semaphores(&vk_info, timeout_ns)
3071                    },
3072                    None => unreachable!(),
3073                };
3074                match result {
3075                    Ok(()) => Ok(true),
3076                    Err(vk::Result::TIMEOUT) => Ok(false),
3077                    Err(other) => Err(super::map_host_device_oom_and_lost_err(other)),
3078                }
3079            }
3080            super::Fence::FencePool {
3081                last_completed,
3082                ref active,
3083                free: _,
3084            } => {
3085                if wait_value <= last_completed {
3086                    Ok(true)
3087                } else {
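                    // Pool fences are assumed to signal in submission order, so waiting on
                    // any active fence whose value is at least `wait_value` is enough to
                    // know that `wait_value` has been reached.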
3088                    match active.iter().find(|&&(value, _)| value >= wait_value) {
3089                        Some(&(_, raw)) => {
3090                            match unsafe { self.raw.wait_for_fences(&[raw], true, timeout_ns) } {
3091                                Ok(()) => Ok(true),
3092                                Err(vk::Result::TIMEOUT) => Ok(false),
3093                                Err(other) => Err(super::map_host_device_oom_and_lost_err(other)),
3094                            }
3095                        }
3096                        None => {
3097                            crate::hal_usage_error(format!(
3098                                "no signals reached value {wait_value}"
3099                            ));
3100                        }
3101                    }
3102                }
3103            }
3104        }
3105    }
3106}
3107
3108impl From<gpu_alloc::AllocationError> for crate::DeviceError {
3109    fn from(error: gpu_alloc::AllocationError) -> Self {
3110        use gpu_alloc::AllocationError as Ae;
3111        match error {
3112            Ae::OutOfDeviceMemory | Ae::OutOfHostMemory | Ae::TooManyObjects => Self::OutOfMemory,
3113            Ae::NoCompatibleMemoryTypes => crate::hal_usage_error(error),
3114        }
3115    }
3116}
3117impl From<gpu_alloc::MapError> for crate::DeviceError {
3118    fn from(error: gpu_alloc::MapError) -> Self {
3119        use gpu_alloc::MapError as Me;
3120        match error {
3121            Me::OutOfDeviceMemory | Me::OutOfHostMemory | Me::MapFailed => Self::OutOfMemory,
3122            Me::NonHostVisible | Me::AlreadyMapped => crate::hal_usage_error(error),
3123        }
3124    }
3125}
3126impl From<gpu_descriptor::AllocationError> for crate::DeviceError {
3127    fn from(error: gpu_descriptor::AllocationError) -> Self {
3128        use gpu_descriptor::AllocationError as Ae;
3129        match error {
3130            Ae::OutOfDeviceMemory | Ae::OutOfHostMemory | Ae::Fragmentation => Self::OutOfMemory,
3131        }
3132    }
3133}
3134
3135/// We usually map unexpected Vulkan errors to the [`crate::DeviceError::Unexpected`]
3136/// variant to be more robust even in cases where the driver is not
3137/// complying with the spec.
3138///
3139/// However, we implement a few trait methods that don't have an equivalent
3140/// error variant. In those cases we use this function.
3141fn handle_unexpected(err: vk::Result) -> ! {
3142    panic!("Unexpected Vulkan error: `{err}`")
3143}
3144
3145struct ImageWithoutMemory {
3146    raw: vk::Image,
3147    requirements: vk::MemoryRequirements,
3148    copy_size: crate::CopyExtent,
3149}