// wgpu_core/command/memory_init.rs

use alloc::{
    sync::Arc,
    vec::{Drain, Vec},
};
use core::ops::Range;

use hashbrown::hash_map::Entry;

use crate::{
    device::Device,
    init_tracker::*,
    resource::{DestroyedResourceError, ParentDevice, Texture, Trackable},
    snatch::SnatchGuard,
    track::{DeviceTracker, TextureTracker},
    FastHashMap,
};

use super::{clear::clear_texture, BakedCommands, ClearError};

/// Identifies a single surface (one mip level of one array layer) of a texture
/// whose contents were discarded while recording the current command buffer.
#[derive(Clone)]
pub(crate) struct TextureSurfaceDiscard {
    pub texture: Arc<Texture>,
    pub mip_level: u32,
    pub layer: u32,
}
28
/// Discarded surfaces that a caller must clear immediately — see
/// [`CommandBufferTextureMemoryActions::register_init_action`].
pub(crate) type SurfacesInDiscardState = Vec<TextureSurfaceDiscard>;
30
/// Texture memory-initialization bookkeeping gathered while a command buffer
/// is recorded.
#[derive(Default)]
pub(crate) struct CommandBufferTextureMemoryActions {
    // Init-tracker actions queued up during recording; drained when the
    // command buffer is baked (see `BakedCommands::initialize_texture_memory`).
    init_actions: Vec<TextureInitTrackerAction>,
    // Surfaces discarded during recording that have not since been
    // re-initialized by a later registered action.
    discards: Vec<TextureSurfaceDiscard>,
}
41
impl CommandBufferTextureMemoryActions {
    /// Drains all queued init-tracker actions, leaving the queue empty.
    pub(crate) fn drain_init_actions(&mut self) -> Drain<TextureInitTrackerAction> {
        self.init_actions.drain(..)
    }

    /// Records that the given texture surface was discarded by this command
    /// buffer.
    pub(crate) fn discard(&mut self, discard: TextureSurfaceDiscard) {
        self.discards.push(discard);
    }

    /// Registers a texture init action and reconciles it with previously
    /// recorded discards.
    ///
    /// Returns the discarded surfaces overlapping this action that the caller
    /// must clear *before* the action's command executes (non-empty only when
    /// the action needs initialized memory).
    #[must_use]
    pub(crate) fn register_init_action(
        &mut self,
        action: &TextureInitTrackerAction,
    ) -> SurfacesInDiscardState {
        let mut immediately_necessary_clears = SurfacesInDiscardState::new();

        // Queue only the part of the action the texture's init tracker still
        // considers relevant (i.e. not already known to be initialized).
        self.init_actions.extend(
            action
                .texture
                .initialization_status
                .read()
                .check_action(action),
        );

        // Remove every recorded discard that overlaps this action's range:
        // either the caller must clear it right away (the action needs
        // initialized memory) or the action itself implicitly initializes it.
        // Borrowed separately so the `retain` closure can push to it.
        let init_actions = &mut self.init_actions;
        self.discards.retain(|discarded_surface| {
            if discarded_surface.texture.is_equal(&action.texture)
                && action.range.layer_range.contains(&discarded_surface.layer)
                && action
                    .range
                    .mip_range
                    .contains(&discarded_surface.mip_level)
            {
                if let MemoryInitKind::NeedsInitializedMemory = action.kind {
                    // Caller must clear this surface before the action runs.
                    immediately_necessary_clears.push(discarded_surface.clone());

                    // After that clear the surface counts as initialized, so
                    // also queue an implicit-init action for it.
                    init_actions.push(TextureInitTrackerAction {
                        texture: discarded_surface.texture.clone(),
                        range: TextureInitRange {
                            mip_range: discarded_surface.mip_level
                                ..(discarded_surface.mip_level + 1),
                            layer_range: discarded_surface.layer..(discarded_surface.layer + 1),
                        },
                        kind: MemoryInitKind::ImplicitlyInitialized,
                    });
                }
                false // overlapping discard is handled either way; drop it
            } else {
                true
            }
        });

        immediately_necessary_clears
    }

    /// Shorthand for registering an `ImplicitlyInitialized` action on `range`
    /// of `texture`. Implicit initialization never requires immediate clears,
    /// which the assert enforces.
    pub(crate) fn register_implicit_init(
        &mut self,
        texture: &Arc<Texture>,
        range: TextureInitRange,
    ) {
        let must_be_empty = self.register_init_action(&TextureInitTrackerAction {
            texture: texture.clone(),
            range,
            kind: MemoryInitKind::ImplicitlyInitialized,
        });
        assert!(must_be_empty.is_empty());
    }
}
128
129pub(crate) fn fixup_discarded_surfaces<InitIter: Iterator<Item = TextureSurfaceDiscard>>(
134 inits: InitIter,
135 encoder: &mut dyn hal::DynCommandEncoder,
136 texture_tracker: &mut TextureTracker,
137 device: &Device,
138 snatch_guard: &SnatchGuard<'_>,
139) {
140 for init in inits {
141 clear_texture(
142 &init.texture,
143 TextureInitRange {
144 mip_range: init.mip_level..(init.mip_level + 1),
145 layer_range: init.layer..(init.layer + 1),
146 },
147 encoder,
148 texture_tracker,
149 &device.alignments,
150 device.zero_buffer.as_ref(),
151 snatch_guard,
152 )
153 .unwrap();
154 }
155}
156
impl BakedCommands {
    /// Records clears, at the point this encoder currently sits, for every
    /// buffer range that some action requires to be initialized but whose
    /// init tracker still marks as uninitialized.
    ///
    /// Returns [`DestroyedResourceError`] if any such buffer was destroyed.
    pub(crate) fn initialize_buffer_memory(
        &mut self,
        device_tracker: &mut DeviceTracker,
        snatch_guard: &SnatchGuard<'_>,
    ) -> Result<(), DestroyedResourceError> {
        profiling::scope!("initialize_buffer_memory");

        // Gather, per buffer (keyed by tracker index), the ranges that still
        // need clearing.
        let mut uninitialized_ranges_per_buffer = FastHashMap::default();
        for buffer_use in self.buffer_memory_init_actions.drain(..) {
            let mut initialization_status = buffer_use.buffer.initialization_status.write();

            // Round the range end up to COPY_BUFFER_ALIGNMENT so the clear
            // operates on aligned offsets (asserted below).
            let end_remainder = buffer_use.range.end % wgt::COPY_BUFFER_ALIGNMENT;
            let end = if end_remainder == 0 {
                buffer_use.range.end
            } else {
                buffer_use.range.end + wgt::COPY_BUFFER_ALIGNMENT - end_remainder
            };
            // Mark the range initialized in the tracker; the drain yields the
            // sub-ranges that were previously uninitialized.
            let uninitialized_ranges = initialization_status.drain(buffer_use.range.start..end);

            match buffer_use.kind {
                // The command writes the memory itself; updating the tracker
                // (above) is all that's needed.
                MemoryInitKind::ImplicitlyInitialized => {}
                MemoryInitKind::NeedsInitializedMemory => {
                    match uninitialized_ranges_per_buffer.entry(buffer_use.buffer.tracker_index()) {
                        Entry::Vacant(e) => {
                            e.insert((
                                buffer_use.buffer.clone(),
                                uninitialized_ranges.collect::<Vec<Range<wgt::BufferAddress>>>(),
                            ));
                        }
                        Entry::Occupied(mut e) => {
                            e.get_mut().1.extend(uninitialized_ranges);
                        }
                    }
                }
            }
        }

        for (buffer, mut ranges) in uninitialized_ranges_per_buffer.into_values() {
            // Merge adjacent ranges to minimize the number of clear commands.
            // Sort first, then walk back-to-front so `swap_remove` never moves
            // an element we haven't visited yet.
            ranges.sort_by_key(|r| r.start);
            for i in (1..ranges.len()).rev() {
                // The init tracker must not produce overlapping ranges.
                assert!(ranges[i - 1].end <= ranges[i].start);
                if ranges[i].start == ranges[i - 1].end {
                    ranges[i - 1].end = ranges[i].end;
                    ranges.swap_remove(i);
                }
            }

            // Transition the buffer for use as a copy destination before the
            // clears are recorded.
            let transition = device_tracker
                .buffers
                .set_single(&buffer, wgt::BufferUses::COPY_DST);

            let raw_buf = buffer.try_raw(snatch_guard)?;

            unsafe {
                self.encoder.raw.transition_buffers(
                    transition
                        .map(|pending| pending.into_hal(&buffer, snatch_guard))
                        .as_slice(),
                );
            }

            for range in ranges.iter() {
                // Start alignment comes from the recorded actions; end
                // alignment from the rounding above.
                assert!(
                    range.start % wgt::COPY_BUFFER_ALIGNMENT == 0,
                    "Buffer {:?} has an uninitialized range with a start \
                     not aligned to 4 (start was {})",
                    raw_buf,
                    range.start
                );
                assert!(
                    range.end % wgt::COPY_BUFFER_ALIGNMENT == 0,
                    "Buffer {:?} has an uninitialized range with an end \
                     not aligned to 4 (end was {})",
                    raw_buf,
                    range.end
                );

                unsafe {
                    self.encoder.raw.clear_buffer(raw_buf, range.clone());
                }
            }
        }
        Ok(())
    }

    /// Records clears for every texture range that some action requires to be
    /// initialized but whose init tracker still marks as uninitialized, then
    /// applies the recorded surface discards to the trackers.
    ///
    /// Returns [`DestroyedResourceError`] if a needed texture was destroyed;
    /// panics on any other clear failure (an internal bug).
    pub(crate) fn initialize_texture_memory(
        &mut self,
        device_tracker: &mut DeviceTracker,
        device: &Device,
        snatch_guard: &SnatchGuard<'_>,
    ) -> Result<(), DestroyedResourceError> {
        profiling::scope!("initialize_texture_memory");

        // Scratch list, reused (drained) across actions to avoid reallocating.
        let mut ranges: Vec<TextureInitRange> = Vec::new();
        for texture_use in self.texture_memory_actions.drain_init_actions() {
            let mut initialization_status = texture_use.texture.initialization_status.write();
            let use_range = texture_use.range;
            // Per-mip layer trackers covering the action's mip range.
            let affected_mip_trackers = initialization_status
                .mips
                .iter_mut()
                .enumerate()
                .skip(use_range.mip_range.start as usize)
                .take((use_range.mip_range.end - use_range.mip_range.start) as usize);

            match texture_use.kind {
                // Command writes the data itself — just update the trackers.
                MemoryInitKind::ImplicitlyInitialized => {
                    for (_, mip_tracker) in affected_mip_trackers {
                        mip_tracker.drain(use_range.layer_range.clone());
                    }
                }
                // Collect the previously-uninitialized layer ranges per mip;
                // each needs a clear before the command runs.
                MemoryInitKind::NeedsInitializedMemory => {
                    for (mip_level, mip_tracker) in affected_mip_trackers {
                        for layer_range in mip_tracker.drain(use_range.layer_range.clone()) {
                            ranges.push(TextureInitRange {
                                mip_range: (mip_level as u32)..(mip_level as u32 + 1),
                                layer_range,
                            });
                        }
                    }
                }
            }

            for range in ranges.drain(..) {
                let clear_result = clear_texture(
                    &texture_use.texture,
                    range,
                    self.encoder.raw.as_mut(),
                    &mut device_tracker.textures,
                    &device.alignments,
                    device.zero_buffer.as_ref(),
                    snatch_guard,
                );

                // A destroyed texture is a recoverable caller error.
                if let Err(ClearError::DestroyedResource(e)) = clear_result {
                    return Err(e);
                }

                // Any other clear error here is an internal bug.
                if let Err(error) = clear_result {
                    panic!("{error}");
                }
            }
        }

        // Only now apply the discards that survived recording: doing it
        // earlier would let the init actions above resurrect them.
        for surface_discard in self.texture_memory_actions.discards.iter() {
            surface_discard
                .texture
                .initialization_status
                .write()
                .discard(surface_discard.mip_level, surface_discard.layer);
        }

        Ok(())
    }
}