use std::{collections::hash_map::Entry, ops::Range, sync::Arc, vec::Drain};
use crate::{
device::Device,
init_tracker::*,
resource::{DestroyedResourceError, ParentDevice, Texture, Trackable},
snatch::SnatchGuard,
track::{DeviceTracker, TextureTracker},
FastHashMap,
};
use super::{clear::clear_texture, BakedCommands, ClearError};
/// Surface that was discarded by `StoreOp::Discard` of a preceding render pass.
/// Any read access to this surface needs to be preceded by a texture initialization.
#[derive(Clone)]
pub(crate) struct TextureSurfaceDiscard {
pub texture: Arc<Texture>,
pub mip_level: u32,
pub layer: u32,
}
pub(crate) type SurfacesInDiscardState = Vec<TextureSurfaceDiscard>;
#[derive(Default)]
pub(crate) struct CommandBufferTextureMemoryActions {
    /// The tracker actions that need to be executed before the command
    /// buffer itself is executed.
init_actions: Vec<TextureInitTrackerAction>,
    /// All the discards that haven't been followed by a re-init within the
    /// command buffer, i.e. everything in this list resets the texture init
    /// state *after* the command buffer's execution.
discards: Vec<TextureSurfaceDiscard>,
}
impl CommandBufferTextureMemoryActions {
pub(crate) fn drain_init_actions(&mut self) -> Drain<TextureInitTrackerAction> {
self.init_actions.drain(..)
}
pub(crate) fn discard(&mut self, discard: TextureSurfaceDiscard) {
self.discards.push(discard);
}
    // Registers a TextureInitTrackerAction.
    // Returns previously discarded surfaces that need to be initialized *immediately* now.
    // Only returns a non-empty list if the action is MemoryInitKind::NeedsInitializedMemory.
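    //
    // A hedged usage sketch (the caller-side bindings here are hypothetical,
    // not a literal call site from this crate):
    //
    //     let clears = actions.register_init_action(&TextureInitTrackerAction {
    //         texture: texture.clone(),
    //         range: TextureInitRange { mip_range: 0..1, layer_range: 0..1 },
    //         kind: MemoryInitKind::NeedsInitializedMemory,
    //     });
    //     // `clears` now lists formerly discarded surfaces that must be
    //     // initialized before this access, e.g. via fixup_discarded_surfaces.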
#[must_use]
pub(crate) fn register_init_action(
&mut self,
action: &TextureInitTrackerAction,
) -> SurfacesInDiscardState {
let mut immediately_necessary_clears = SurfacesInDiscardState::new();
        // Note that within a command buffer we may stack arbitrary memory init
        // actions on the same texture. Since we react to them in sequence,
        // they are going to be dropped again at queue submit.
        //
        // We wouldn't need to add MemoryInitKind::NeedsInitializedMemory to
        // init_actions if a surface is part of the discard list. But that
        // would mean splitting up the action, which costs more than it would
        // save here.
self.init_actions.extend(
action
.texture
.initialization_status
.read()
.check_action(action),
);
        // We expect very few discarded surfaces at any point in time, which
        // is why a simple linear search is likely best. (I.e. most of the
        // time self.discards is empty!)
let init_actions = &mut self.init_actions;
self.discards.retain(|discarded_surface| {
if discarded_surface.texture.is_equal(&action.texture)
&& action.range.layer_range.contains(&discarded_surface.layer)
&& action
.range
.mip_range
.contains(&discarded_surface.mip_level)
{
if let MemoryInitKind::NeedsInitializedMemory = action.kind {
immediately_necessary_clears.push(discarded_surface.clone());
                    // Mark surface as implicitly initialized (this is relevant
                    // because it might have been uninitialized prior to
                    // discarding).
init_actions.push(TextureInitTrackerAction {
texture: discarded_surface.texture.clone(),
range: TextureInitRange {
mip_range: discarded_surface.mip_level
..(discarded_surface.mip_level + 1),
layer_range: discarded_surface.layer..(discarded_surface.layer + 1),
},
kind: MemoryInitKind::ImplicitlyInitialized,
});
}
false
} else {
true
}
});
immediately_necessary_clears
}
// Shortcut for register_init_action when it is known that the action is an
// implicit init, not requiring any immediate resource init.
pub(crate) fn register_implicit_init(
&mut self,
texture: &Arc<Texture>,
range: TextureInitRange,
) {
let must_be_empty = self.register_init_action(&TextureInitTrackerAction {
texture: texture.clone(),
range,
kind: MemoryInitKind::ImplicitlyInitialized,
});
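        // An implicit init never carries MemoryInitKind::NeedsInitializedMemory,
        // so register_init_action cannot have returned any immediate clears.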
assert!(must_be_empty.is_empty());
}
}
// Utility function that takes discarded surfaces from (several calls to)
// register_init_action and initializes them on the spot.
//
// Takes care of barriers as well!
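//
// A minimal pairing sketch (all caller-side bindings are assumed to be in
// scope; this is not a literal call site from this crate):
//
//     let discards = texture_memory_actions.register_init_action(&action);
//     fixup_discarded_surfaces(
//         discards.into_iter(),
//         encoder,
//         &mut trackers.textures,
//         device,
//         &snatch_guard,
//     );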
pub(crate) fn fixup_discarded_surfaces<InitIter: Iterator<Item = TextureSurfaceDiscard>>(
inits: InitIter,
encoder: &mut dyn hal::DynCommandEncoder,
texture_tracker: &mut TextureTracker,
device: &Device,
snatch_guard: &SnatchGuard<'_>,
) {
for init in inits {
clear_texture(
&init.texture,
TextureInitRange {
mip_range: init.mip_level..(init.mip_level + 1),
layer_range: init.layer..(init.layer + 1),
},
encoder,
texture_tracker,
&device.alignments,
device.zero_buffer.as_ref(),
snatch_guard,
)
.unwrap();
}
}
impl BakedCommands {
    // Inserts all buffer initializations that are going to be needed for
    // executing the commands, and updates resource init states accordingly.
pub(crate) fn initialize_buffer_memory(
&mut self,
device_tracker: &mut DeviceTracker,
snatch_guard: &SnatchGuard<'_>,
) -> Result<(), DestroyedResourceError> {
profiling::scope!("initialize_buffer_memory");
        // Gather init ranges for each buffer so we can collapse them.
        // It is not possible to do this at an earlier point, since previously
        // executed command buffers change the resource init state.
let mut uninitialized_ranges_per_buffer = FastHashMap::default();
for buffer_use in self.buffer_memory_init_actions.drain(..) {
let mut initialization_status = buffer_use.buffer.initialization_status.write();
            // Align the end of the range up to COPY_BUFFER_ALIGNMENT (4 bytes).
let end_remainder = buffer_use.range.end % wgt::COPY_BUFFER_ALIGNMENT;
let end = if end_remainder == 0 {
buffer_use.range.end
} else {
buffer_use.range.end + wgt::COPY_BUFFER_ALIGNMENT - end_remainder
};
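            // For example, with COPY_BUFFER_ALIGNMENT == 4: an end of 6 has a
            // remainder of 2 and rounds up to 6 + 4 - 2 = 8, while an already
            // aligned end of 8 is kept as-is.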
let uninitialized_ranges = initialization_status.drain(buffer_use.range.start..end);
match buffer_use.kind {
MemoryInitKind::ImplicitlyInitialized => {}
MemoryInitKind::NeedsInitializedMemory => {
match uninitialized_ranges_per_buffer.entry(buffer_use.buffer.tracker_index()) {
Entry::Vacant(e) => {
e.insert((
buffer_use.buffer.clone(),
uninitialized_ranges.collect::<Vec<Range<wgt::BufferAddress>>>(),
));
}
Entry::Occupied(mut e) => {
e.get_mut().1.extend(uninitialized_ranges);
}
}
}
}
}
for (buffer, mut ranges) in uninitialized_ranges_per_buffer.into_values() {
// Collapse touching ranges.
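            // For example, [0..4, 4..8, 12..16] collapses to [0..8, 12..16].
            // Only *touching* ranges merge; the tracker guarantees they never
            // overlap (asserted below).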
ranges.sort_by_key(|r| r.start);
for i in (1..ranges.len()).rev() {
// The memory init tracker made sure of this!
assert!(ranges[i - 1].end <= ranges[i].start);
if ranges[i].start == ranges[i - 1].end {
ranges[i - 1].end = ranges[i].end;
ranges.swap_remove(i); // Ordering not important at this point
}
}
            // Don't do use_replace since the buffer may no longer have a
            // ref_count.
            //
            // However, we *know* that it is currently in use, so the tracker
            // must already know about it.
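            //
            // clear_buffer is a transfer-style write, so the buffer is
            // transitioned to COPY_DST before the clears are recorded.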
let transition = device_tracker
.buffers
.set_single(&buffer, hal::BufferUses::COPY_DST);
let raw_buf = buffer.try_raw(snatch_guard)?;
unsafe {
self.encoder.raw.transition_buffers(
transition
.map(|pending| pending.into_hal(&buffer, snatch_guard))
.as_slice(),
);
}
for range in ranges.iter() {
assert!(
range.start % wgt::COPY_BUFFER_ALIGNMENT == 0,
"Buffer {:?} has an uninitialized range with a start \
not aligned to 4 (start was {})",
raw_buf,
range.start
);
assert!(
range.end % wgt::COPY_BUFFER_ALIGNMENT == 0,
"Buffer {:?} has an uninitialized range with an end \
not aligned to 4 (end was {})",
raw_buf,
range.end
);
unsafe {
self.encoder.raw.clear_buffer(raw_buf, range.clone());
}
}
}
Ok(())
}
    // Inserts all texture initializations that are going to be needed for
    // executing the commands, and updates resource init states accordingly.
    // Any textures that are left discarded by this command buffer will be
    // marked as uninitialized.
pub(crate) fn initialize_texture_memory(
&mut self,
device_tracker: &mut DeviceTracker,
device: &Device,
snatch_guard: &SnatchGuard<'_>,
) -> Result<(), DestroyedResourceError> {
profiling::scope!("initialize_texture_memory");
let mut ranges: Vec<TextureInitRange> = Vec::new();
for texture_use in self.texture_memory_actions.drain_init_actions() {
let mut initialization_status = texture_use.texture.initialization_status.write();
let use_range = texture_use.range;
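            // Pair each mip level in `use_range.mip_range` with its layer init
            // tracker: `enumerate` yields the absolute mip level index, which
            // `skip`/`take` then narrow down to the affected mips.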
let affected_mip_trackers = initialization_status
.mips
.iter_mut()
.enumerate()
.skip(use_range.mip_range.start as usize)
.take((use_range.mip_range.end - use_range.mip_range.start) as usize);
match texture_use.kind {
MemoryInitKind::ImplicitlyInitialized => {
for (_, mip_tracker) in affected_mip_trackers {
mip_tracker.drain(use_range.layer_range.clone());
}
}
MemoryInitKind::NeedsInitializedMemory => {
for (mip_level, mip_tracker) in affected_mip_trackers {
for layer_range in mip_tracker.drain(use_range.layer_range.clone()) {
ranges.push(TextureInitRange {
mip_range: (mip_level as u32)..(mip_level as u32 + 1),
layer_range,
});
}
}
}
}
// TODO: Could we attempt some range collapsing here?
for range in ranges.drain(..) {
let clear_result = clear_texture(
&texture_use.texture,
range,
self.encoder.raw.as_mut(),
&mut device_tracker.textures,
&device.alignments,
device.zero_buffer.as_ref(),
snatch_guard,
);
                // A texture can be destroyed between command recording and
                // now; this is out of our control, so we have to handle it
                // gracefully.
if let Err(ClearError::DestroyedResource(e)) = clear_result {
return Err(e);
}
// Other errors are unexpected.
if let Err(error) = clear_result {
panic!("{error}");
}
}
}
        // Now that all buffers/textures have the proper init state as of the
        // command buffer's start, we discard the init states of any textures
        // that it leaves discarded after its execution.
for surface_discard in self.texture_memory_actions.discards.iter() {
surface_discard
.texture
.initialization_status
.write()
.discard(surface_discard.mip_level, surface_discard.layer);
}
Ok(())
}
}