1/* This Source Code Form is subject to the terms of the Mozilla Public
2 * License, v. 2.0. If a copy of the MPL was not distributed with this
3 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
4
5use api::{ColorF, DebugFlags, ExternalScrollId, FontRenderMode, ImageKey, MinimapData, PremultipliedColorF};
6use api::units::*;
7use plane_split::BspSplitter;
8use crate::batch::{BatchBuilder, AlphaBatchBuilder, AlphaBatchContainer};
9use crate::clip::{ClipStore, ClipTree};
10use crate::command_buffer::{PrimitiveCommand, CommandBufferList, CommandBufferIndex};
11use crate::{debug_colors, ChunkPool};
12use crate::spatial_node::SpatialNodeType;
13use crate::spatial_tree::{SpatialTree, SpatialNodeIndex};
14use crate::composite::{CompositorKind, CompositeState, CompositeStatePreallocator};
15use crate::debug_item::DebugItem;
16use crate::gpu_cache::{GpuCache, GpuCacheHandle};
17use crate::gpu_types::{PrimitiveHeaders, TransformPalette, ZBufferIdGenerator};
18use crate::gpu_types::{QuadSegment, TransformData};
19use crate::internal_types::{FastHashMap, PlaneSplitter, FrameId, FrameStamp};
20use crate::picture::{DirtyRegion, SliceId, TileCacheInstance};
21use crate::picture::{SurfaceInfo, SurfaceIndex, ResolvedSurfaceTexture};
22use crate::picture::{SubpixelMode, RasterConfig, PictureCompositeMode};
23use crate::prepare::prepare_picture;
24use crate::prim_store::{PictureIndex, PrimitiveScratchBuffer};
25use crate::prim_store::{DeferredResolve, PrimitiveInstance};
26use crate::profiler::{self, TransactionProfile};
27use crate::render_backend::{DataStores, ScratchBuffer};
28use crate::renderer::{GpuBufferF, GpuBufferBuilderF, GpuBufferI, GpuBufferBuilderI, GpuBufferBuilder};
29use crate::render_target::{PictureCacheTarget, PictureCacheTargetKind};
30use crate::render_target::{RenderTargetContext, RenderTargetKind, RenderTarget};
31use crate::render_task_graph::{Pass, RenderTaskGraph, RenderTaskId, SubPassSurface};
32use crate::render_task_graph::{RenderPass, RenderTaskGraphBuilder};
33use crate::render_task::{RenderTaskKind, StaticRenderTaskSurface};
34use crate::resource_cache::ResourceCache;
35use crate::scene::{BuiltScene, SceneProperties};
36use crate::space::SpaceMapper;
37use crate::segment::SegmentBuilder;
38use crate::surface::SurfaceBuilder;
39use std::sync::Arc;
40use std::{f32, mem};
41use crate::util::{MaxRect, VecHelper, Preallocator};
42use crate::visibility::{update_prim_visibility, FrameVisibilityState, FrameVisibilityContext};
43use crate::internal_types::{FrameVec, FrameMemory};
44
45#[derive(Clone, Copy, Debug)]
46#[cfg_attr(feature = "capture", derive(Serialize))]
47#[cfg_attr(feature = "replay", derive(Deserialize))]
48pub struct FrameBuilderConfig {
49    pub default_font_render_mode: FontRenderMode,
50    pub dual_source_blending_is_supported: bool,
51    /// True if we're running tests (i.e. via wrench).
52    pub testing: bool,
53    pub gpu_supports_fast_clears: bool,
54    pub gpu_supports_advanced_blend: bool,
55    pub advanced_blend_is_coherent: bool,
56    pub gpu_supports_render_target_partial_update: bool,
57    /// Whether ImageBufferKind::TextureExternal images must first be copied
58    /// to a regular texture before rendering.
59    pub external_images_require_copy: bool,
60    pub batch_lookback_count: usize,
61    pub background_color: Option<ColorF>,
62    pub compositor_kind: CompositorKind,
63    pub tile_size_override: Option<DeviceIntSize>,
64    pub max_surface_override: Option<usize>,
65    pub max_depth_ids: i32,
66    pub max_target_size: i32,
67    pub force_invalidation: bool,
68    pub is_software: bool,
69    pub low_quality_pinch_zoom: bool,
70    pub max_shared_surface_size: i32,
71    pub enable_dithering: bool,
72    pub precise_linear_gradients: bool,
73    pub precise_radial_gradients: bool,
74    pub precise_conic_gradients: bool,
75}
76
77/// A set of common / global resources that are retained between
78/// new display lists, such that any GPU cache handles can be
79/// persisted even when a new display list arrives.
80#[cfg_attr(feature = "capture", derive(Serialize))]
81pub struct FrameGlobalResources {
82    /// The image shader block for the most common / default
83    /// set of image parameters (color white, stretch == rect.size).
84    pub default_image_handle: GpuCacheHandle,
85
    /// A GPU cache handle for drawing cut-out rectangle primitives.
87    /// This is used to 'cut out' overlay tiles where a compositor
88    /// surface exists.
89    pub default_black_rect_handle: GpuCacheHandle,
90}
91
92impl FrameGlobalResources {
93    pub fn empty() -> Self {
94        FrameGlobalResources {
95            default_image_handle: GpuCacheHandle::new(),
96            default_black_rect_handle: GpuCacheHandle::new(),
97        }
98    }
99
100    pub fn update(
101        &mut self,
102        gpu_cache: &mut GpuCache,
103    ) {
104        if let Some(mut request) = gpu_cache.request(&mut self.default_image_handle) {
105            request.push(PremultipliedColorF::WHITE);
106            request.push(PremultipliedColorF::WHITE);
107            request.push([
108                -1.0,       // -ve means use prim rect for stretch size
109                0.0,
110                0.0,
111                0.0,
112            ]);
113        }
114
115        if let Some(mut request) = gpu_cache.request(&mut self.default_black_rect_handle) {
116            request.push(PremultipliedColorF::BLACK);
117        }
118    }
119}
120
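/// Scratch buffers used while building a frame. These are retained between
/// frames (and cleared in `begin_frame`) to reduce per-frame allocations.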
121pub struct FrameScratchBuffer {
122    dirty_region_stack: Vec<DirtyRegion>,
123    surface_stack: Vec<(PictureIndex, SurfaceIndex)>,
124}
125
126impl Default for FrameScratchBuffer {
127    fn default() -> Self {
128        FrameScratchBuffer {
129            dirty_region_stack: Vec::new(),
130            surface_stack: Vec::new(),
131        }
132    }
133}
134
135impl FrameScratchBuffer {
136    pub fn begin_frame(&mut self) {
137        self.dirty_region_stack.clear();
138        self.surface_stack.clear();
139    }
140}
141
142/// Produces the frames that are sent to the renderer.
143#[cfg_attr(feature = "capture", derive(Serialize))]
144pub struct FrameBuilder {
145    pub globals: FrameGlobalResources,
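    /// Records the primitive header counts from the previous frame so that the
    /// header vectors can be pre-allocated for the next one.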
146    #[cfg_attr(feature = "capture", serde(skip))]
147    prim_headers_prealloc: Preallocator,
148    #[cfg_attr(feature = "capture", serde(skip))]
149    composite_state_prealloc: CompositeStatePreallocator,
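    /// Plane splitters for 3D rendering contexts, retained from frame to
    /// frame to reduce per-frame allocations.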
150    #[cfg_attr(feature = "capture", serde(skip))]
151    plane_splitters: Vec<PlaneSplitter>,
152}
153
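/// Immutable, read-only context that is shared while building a frame.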
154pub struct FrameBuildingContext<'a> {
155    pub global_device_pixel_scale: DevicePixelScale,
156    pub scene_properties: &'a SceneProperties,
157    pub global_screen_world_rect: WorldRect,
158    pub spatial_tree: &'a SpatialTree,
159    pub max_local_clip: LayoutRect,
160    pub debug_flags: DebugFlags,
161    pub fb_config: &'a FrameBuilderConfig,
162    pub root_spatial_node_index: SpatialNodeIndex,
163}
164
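/// Mutable state that is updated as primitives are prepared and surfaces are
/// built during frame building.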
165pub struct FrameBuildingState<'a> {
166    pub rg_builder: &'a mut RenderTaskGraphBuilder,
167    pub clip_store: &'a mut ClipStore,
168    pub resource_cache: &'a mut ResourceCache,
169    pub gpu_cache: &'a mut GpuCache,
170    pub transforms: &'a mut TransformPalette,
171    pub segment_builder: SegmentBuilder,
172    pub surfaces: &'a mut Vec<SurfaceInfo>,
173    pub dirty_region_stack: Vec<DirtyRegion>,
174    pub composite_state: &'a mut CompositeState,
175    pub num_visible_primitives: u32,
176    pub plane_splitters: &'a mut [PlaneSplitter],
177    pub surface_builder: SurfaceBuilder,
178    pub cmd_buffers: &'a mut CommandBufferList,
179    pub clip_tree: &'a ClipTree,
180    pub frame_gpu_data: &'a mut GpuBufferBuilder,
    /// When using a render task to produce pixels that are associated with
    /// an image key (for example snapshotted pictures), inserting the image
    /// key / task id association in this hashmap allows the image item to
    /// register a dependency on the render task. This ensures that the
    /// render task is produced before the image that displays it when both
    /// happen in the same frame.
    /// This mechanism relies on the item producing the render task being
    /// traversed before the image that displays it (in other words, the
    /// picture must appear before the image in the display list).
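    ///
    /// As an illustrative sketch (hypothetical variable names, not the exact
    /// call site in this file):
    ///
    /// ```ignore
    /// // After creating the render task that draws a snapshotted picture,
    /// // associate it with the image key that will display its output.
    /// frame_state.image_dependencies.insert(snapshot_image_key, snapshot_task_id);
    /// ```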
190    pub image_dependencies: FastHashMap<ImageKey, RenderTaskId>,
191    pub visited_pictures: &'a mut [bool],
192}
193
194impl<'a> FrameBuildingState<'a> {
195    /// Retrieve the current dirty region during primitive traversal.
196    pub fn current_dirty_region(&self) -> &DirtyRegion {
197        self.dirty_region_stack.last().unwrap()
198    }
199
200    /// Push a new dirty region for child primitives to cull / clip against.
201    pub fn push_dirty_region(&mut self, region: DirtyRegion) {
202        self.dirty_region_stack.push(region);
203    }
204
205    /// Pop the top dirty region from the stack.
206    pub fn pop_dirty_region(&mut self) {
207        self.dirty_region_stack.pop().unwrap();
208    }
209
210    /// Push a primitive command to a set of command buffers
211    pub fn push_prim(
212        &mut self,
213        cmd: &PrimitiveCommand,
214        spatial_node_index: SpatialNodeIndex,
215        targets: &[CommandBufferIndex],
216    ) {
217        for cmd_buffer_index in targets {
218            let cmd_buffer = self.cmd_buffers.get_mut(*cmd_buffer_index);
219            cmd_buffer.add_prim(cmd, spatial_node_index);
220        }
221    }
222
223    /// Push a command to a set of command buffers
224    pub fn push_cmd(
225        &mut self,
226        cmd: &PrimitiveCommand,
227        targets: &[CommandBufferIndex],
228    ) {
229        for cmd_buffer_index in targets {
230            let cmd_buffer = self.cmd_buffers.get_mut(*cmd_buffer_index);
231            cmd_buffer.add_cmd(cmd);
232        }
233    }
234
235    /// Set the active list of segments in a set of command buffers
236    pub fn set_segments(
237        &mut self,
238        segments: &[QuadSegment],
239        targets: &[CommandBufferIndex],
240    ) {
241        for cmd_buffer_index in targets {
242            let cmd_buffer = self.cmd_buffers.get_mut(*cmd_buffer_index);
243            cmd_buffer.set_segments(segments);
244        }
245    }
246}
247
248/// Immutable context of a picture when processing children.
249#[derive(Debug)]
250pub struct PictureContext {
251    pub pic_index: PictureIndex,
252    pub surface_spatial_node_index: SpatialNodeIndex,
253    pub raster_spatial_node_index: SpatialNodeIndex,
254    pub visibility_spatial_node_index: SpatialNodeIndex,
255    /// The surface that this picture will render on.
256    pub surface_index: SurfaceIndex,
257    pub dirty_region_count: usize,
258    pub subpixel_mode: SubpixelMode,
259}
260
261/// Mutable state of a picture that gets modified when
262/// the children are processed.
263pub struct PictureState {
264    pub map_local_to_pic: SpaceMapper<LayoutPixel, PicturePixel>,
265    pub map_pic_to_vis: SpaceMapper<PicturePixel, VisPixel>,
266}
267
268impl FrameBuilder {
269    pub fn new() -> Self {
270        FrameBuilder {
271            globals: FrameGlobalResources::empty(),
272            prim_headers_prealloc: Preallocator::new(0),
273            composite_state_prealloc: CompositeStatePreallocator::default(),
274            plane_splitters: Vec::new(),
275        }
276    }
277
278    /// Compute the contribution (bounding rectangles, and resources) of layers and their
279    /// primitives in screen space.
280    fn build_layer_screen_rects_and_cull_layers(
281        &mut self,
282        scene: &mut BuiltScene,
283        present: bool,
284        global_screen_world_rect: WorldRect,
285        resource_cache: &mut ResourceCache,
286        gpu_cache: &mut GpuCache,
287        rg_builder: &mut RenderTaskGraphBuilder,
288        global_device_pixel_scale: DevicePixelScale,
289        scene_properties: &SceneProperties,
290        transform_palette: &mut TransformPalette,
291        data_stores: &mut DataStores,
292        scratch: &mut ScratchBuffer,
293        debug_flags: DebugFlags,
294        composite_state: &mut CompositeState,
295        tile_caches: &mut FastHashMap<SliceId, Box<TileCacheInstance>>,
296        spatial_tree: &SpatialTree,
297        cmd_buffers: &mut CommandBufferList,
298        frame_gpu_data: &mut GpuBufferBuilder,
299        frame_memory: &FrameMemory,
300        profile: &mut TransactionProfile,
301    ) {
302        profile_scope!("build_layer_screen_rects_and_cull_layers");
303
304        let render_picture_cache_slices = present;
305
306        let root_spatial_node_index = spatial_tree.root_reference_frame_index();
307
308        const MAX_CLIP_COORD: f32 = 1.0e9;
309
310        // Reset all plane splitters. These are retained from frame to frame to reduce
311        // per-frame allocations
312        self.plane_splitters.resize_with(scene.num_plane_splitters, BspSplitter::new);
313        for splitter in &mut self.plane_splitters {
314            splitter.reset();
315        }
316
317        let frame_context = FrameBuildingContext {
318            global_device_pixel_scale,
319            scene_properties,
320            global_screen_world_rect,
321            spatial_tree,
322            max_local_clip: LayoutRect {
323                min: LayoutPoint::new(-MAX_CLIP_COORD, -MAX_CLIP_COORD),
324                max: LayoutPoint::new(MAX_CLIP_COORD, MAX_CLIP_COORD),
325            },
326            debug_flags,
327            fb_config: &scene.config,
328            root_spatial_node_index,
329        };
330
331        scene.picture_graph.build_update_passes(
332            &mut scene.prim_store.pictures,
333            &frame_context,
334        );
335
336        scene.picture_graph.assign_surfaces(
337            &mut scene.prim_store.pictures,
338            &mut scene.surfaces,
339            tile_caches,
340            &frame_context,
341        );
342
343        // Add a "fake" surface that we will use as parent for
344        // snapshotted pictures.
345        let root_spatial_node = frame_context.spatial_tree.root_reference_frame_index();
346        let snapshot_surface = SurfaceIndex(scene.surfaces.len());
347        scene.surfaces.push(SurfaceInfo::new(
348            root_spatial_node,
349            root_spatial_node,
350            WorldRect::max_rect(),
351            &frame_context.spatial_tree,
352            euclid::Scale::new(1.0),
353            (1.0, 1.0),
354            (1.0, 1.0),
355            false,
356            false,
357        ));
358
359        scene.picture_graph.propagate_bounding_rects(
360            &mut scene.prim_store.pictures,
361            &mut scene.surfaces,
362            &frame_context,
363        );
364
365        // In order to handle picture snapshots consistently we need
366        // the visibility and prepare passes to visit them first before
367        // traversing the scene. This ensures that out-of-view snapshots
368        // are rendered and that snapshots are consistently produced
369        // relative to the root spatial node.
370        // However it means that the visibility and prepare passes may
371        // visit some pictures multiple times, so we keep track of visited
372        // pictures during each traversal to avoid that.
373        let n_pics = scene.prim_store.pictures.len();
374        let mut visited_pictures = frame_memory.new_vec_with_capacity(n_pics);
375        for _ in 0..n_pics {
376            visited_pictures.push(false);
377        }
378
379        {
380            profile_scope!("UpdateVisibility");
381            profile_marker!("UpdateVisibility");
382            profile.start_time(profiler::FRAME_VISIBILITY_TIME);
383
384            let visibility_context = FrameVisibilityContext {
385                global_device_pixel_scale,
386                spatial_tree,
387                global_screen_world_rect,
388                debug_flags,
389                scene_properties,
390                config: scene.config,
391                root_spatial_node_index,
392            };
393
394            for pic_index in scene.snapshot_pictures.iter() {
395                let mut visibility_state = FrameVisibilityState {
396                    clip_store: &mut scene.clip_store,
397                    resource_cache,
398                    gpu_cache,
399                    data_stores,
400                    clip_tree: &mut scene.clip_tree,
401                    composite_state,
402                    rg_builder,
403                    prim_instances: &mut scene.prim_instances,
404                    surfaces: &mut scene.surfaces,
405                    surface_stack: scratch.frame.surface_stack.take(),
406                    profile,
407                    scratch,
408                    visited_pictures: &mut visited_pictures,
409                };
410
411                let world_culling_rect = WorldRect::max_rect();
412
                // For now, snapshots are updated every frame. For the
                // pictures displaying the snapshot via images to pick up
                // the changes, we have to make sure that the image's
                // generation counter is incremented early in the frame,
                // before the main visibility pass visits the image items.
418                let pic = &scene.prim_store.pictures[pic_index.0];
419                let snapshot = pic.snapshot
420                    .unwrap();
421                let key = snapshot.key.as_image();
422                visibility_state.resource_cache
423                    .increment_image_generation(key);
424
425                if let Some(node) = pic.clip_root {
426                    visibility_state.clip_tree.push_clip_root_node(node);
427                }
428                update_prim_visibility(
429                    *pic_index,
430                    None,
431                    &world_culling_rect,
432                    &scene.prim_store,
433                    true,
434                    &visibility_context,
435                    &mut visibility_state,
436                    &mut None,
437                );
438                if scene.prim_store.pictures[pic_index.0].clip_root.is_some() {
439                    visibility_state.clip_tree.pop_clip_root();
440                }
441            }
442
443            for pic_index in scene.tile_cache_pictures.iter().rev() {
444                if !render_picture_cache_slices {
445                    break;
446                }
447                let pic = &mut scene.prim_store.pictures[pic_index.0];
448
449                match pic.raster_config {
450                    Some(RasterConfig { surface_index, composite_mode: PictureCompositeMode::TileCache { slice_id }, .. }) => {
451                        let tile_cache = tile_caches
452                            .get_mut(&slice_id)
453                            .expect("bug: non-existent tile cache");
454
455                        let mut visibility_state = FrameVisibilityState {
456                            clip_store: &mut scene.clip_store,
457                            resource_cache,
458                            gpu_cache,
459                            data_stores,
460                            clip_tree: &mut scene.clip_tree,
461                            composite_state,
462                            rg_builder,
463                            prim_instances: &mut scene.prim_instances,
464                            surfaces: &mut scene.surfaces,
465                            surface_stack: scratch.frame.surface_stack.take(),
466                            profile,
467                            scratch,
468                            visited_pictures: &mut visited_pictures,
469                        };
470
471                        // If we have a tile cache for this picture, see if any of the
472                        // relative transforms have changed, which means we need to
473                        // re-map the dependencies of any child primitives.
474                        let world_culling_rect = tile_cache.pre_update(
475                            surface_index,
476                            &visibility_context,
477                            &mut visibility_state,
478                        );
479
480                        // Push a new surface, supplying the list of clips that should be
481                        // ignored, since they are handled by clipping when drawing this surface.
482                        visibility_state.push_surface(
483                            *pic_index,
484                            surface_index,
485                        );
486                        visibility_state.clip_tree.push_clip_root_node(tile_cache.shared_clip_node_id);
487
488                        update_prim_visibility(
489                            *pic_index,
490                            None,
491                            &world_culling_rect,
492                            &scene.prim_store,
493                            true,
494                            &visibility_context,
495                            &mut visibility_state,
496                            &mut Some(tile_cache),
497                        );
498
499                        // Build the dirty region(s) for this tile cache.
500                        tile_cache.post_update(
501                            &visibility_context,
502                            &mut visibility_state.composite_state,
503                            &mut visibility_state.resource_cache,
504                        );
505
506                        visibility_state.clip_tree.pop_clip_root();
507                        visibility_state.pop_surface();
508                        visibility_state.scratch.frame.surface_stack = visibility_state.surface_stack.take();
509                    }
510                    _ => {
511                        panic!("bug: not a tile cache");
512                    }
513                }
514            }
515
516            profile.end_time(profiler::FRAME_VISIBILITY_TIME);
517        }
518
519        profile.start_time(profiler::FRAME_PREPARE_TIME);
520
521        // Reset the visited pictures for the prepare pass.
522        visited_pictures.clear();
523        for _ in 0..n_pics {
524            visited_pictures.push(false);
525        }
526        let mut frame_state = FrameBuildingState {
527            rg_builder,
528            clip_store: &mut scene.clip_store,
529            resource_cache,
530            gpu_cache,
531            transforms: transform_palette,
532            segment_builder: SegmentBuilder::new(),
533            surfaces: &mut scene.surfaces,
534            dirty_region_stack: scratch.frame.dirty_region_stack.take(),
535            composite_state,
536            num_visible_primitives: 0,
537            plane_splitters: &mut self.plane_splitters,
538            surface_builder: SurfaceBuilder::new(),
539            cmd_buffers,
540            clip_tree: &mut scene.clip_tree,
541            frame_gpu_data,
542            image_dependencies: FastHashMap::default(),
543            visited_pictures: &mut visited_pictures,
544        };
545
546
547        if !scene.snapshot_pictures.is_empty() {
548            // Push a default dirty region which does not cull any
549            // primitive.
550            let mut default_dirty_region = DirtyRegion::new(
551                root_spatial_node_index,
552                root_spatial_node_index,
553            );
554            default_dirty_region.add_dirty_region(
555                PictureRect::max_rect(),
556                frame_context.spatial_tree,
557            );
558            frame_state.push_dirty_region(default_dirty_region);
559
560            frame_state.surface_builder.push_surface(
561                snapshot_surface,
562                false,
563                PictureRect::max_rect(),
564                None,
565                frame_state.surfaces,
566                frame_state.rg_builder,
567            );
568        }
569
570        for pic_index in &scene.snapshot_pictures {
571
572            prepare_picture(
573                *pic_index,
574                &mut scene.prim_store,
575                Some(snapshot_surface),
576                SubpixelMode::Allow,
577                &frame_context,
578                &mut frame_state,
579                data_stores,
580                &mut scratch.primitive,
581                tile_caches,
582                &mut scene.prim_instances
583            );
584        }
585
586        if !scene.snapshot_pictures.is_empty() {
587            frame_state.surface_builder.pop_empty_surface();
588            frame_state.pop_dirty_region();
589        }
590
591        // Push a default dirty region which culls primitives
592        // against the screen world rect, in absence of any
593        // other dirty regions.
594        let mut default_dirty_region = DirtyRegion::new(
595            root_spatial_node_index,
596            root_spatial_node_index,
597        );
598        default_dirty_region.add_dirty_region(
599            frame_context.global_screen_world_rect.cast_unit(),
600            frame_context.spatial_tree,
601        );
602        frame_state.push_dirty_region(default_dirty_region);
603
604        for pic_index in &scene.tile_cache_pictures {
605            if !render_picture_cache_slices {
606                break;
607            }
608
609            prepare_picture(
610                *pic_index,
611                &mut scene.prim_store,
612                None,
613                SubpixelMode::Allow,
614                &frame_context,
615                &mut frame_state,
616                data_stores,
617                &mut scratch.primitive,
618                tile_caches,
619                &mut scene.prim_instances
620            );
621        }
622
623        frame_state.pop_dirty_region();
624        frame_state.surface_builder.finalize();
625        profile.end_time(profiler::FRAME_PREPARE_TIME);
626        profile.set(profiler::VISIBLE_PRIMITIVES, frame_state.num_visible_primitives);
627
628        scratch.frame.dirty_region_stack = frame_state.dirty_region_stack.take();
629
630        {
631            profile_marker!("BlockOnResources");
632
633            resource_cache.block_until_all_resources_added(
634                gpu_cache,
635                profile,
636            );
637        }
638    }
639
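    /// Build a `Frame` for the current scene that can be handed off to the
    /// renderer.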
640    pub fn build(
641        &mut self,
642        scene: &mut BuiltScene,
643        present: bool,
644        resource_cache: &mut ResourceCache,
645        gpu_cache: &mut GpuCache,
646        rg_builder: &mut RenderTaskGraphBuilder,
647        stamp: FrameStamp,
648        device_origin: DeviceIntPoint,
649        scene_properties: &SceneProperties,
650        data_stores: &mut DataStores,
651        scratch: &mut ScratchBuffer,
652        debug_flags: DebugFlags,
653        tile_caches: &mut FastHashMap<SliceId, Box<TileCacheInstance>>,
654        spatial_tree: &mut SpatialTree,
655        dirty_rects_are_valid: bool,
656        profile: &mut TransactionProfile,
657        minimap_data: FastHashMap<ExternalScrollId, MinimapData>,
658        chunk_pool: Arc<ChunkPool>,
659    ) -> Frame {
660        profile_scope!("build");
661        profile_marker!("BuildFrame");
662
663        let mut frame_memory = FrameMemory::new(chunk_pool, stamp.frame_id());
664
665        profile.set(profiler::PRIMITIVES, scene.prim_instances.len());
666        profile.set(profiler::PICTURE_CACHE_SLICES, scene.tile_cache_config.picture_cache_slice_count);
667        scratch.begin_frame();
668        gpu_cache.begin_frame(stamp);
669        resource_cache.begin_frame(stamp, gpu_cache, profile);
670
671        // TODO(gw): Follow up patches won't clear this, as they'll be assigned
672        //           statically during scene building.
673        scene.surfaces.clear();
674
675        self.globals.update(gpu_cache);
676
677        spatial_tree.update_tree(scene_properties);
678        let mut transform_palette = spatial_tree.build_transform_palette(&frame_memory);
679        scene.clip_store.begin_frame(&mut scratch.clip_store);
680
681        rg_builder.begin_frame(stamp.frame_id());
682
683        // TODO(dp): Remove me completely!!
684        let global_device_pixel_scale = DevicePixelScale::new(1.0);
685
686        let output_size = scene.output_rect.size();
687        let screen_world_rect = (scene.output_rect.to_f32() / global_device_pixel_scale).round_out();
688
689        let mut composite_state = CompositeState::new(
690            scene.config.compositor_kind,
691            scene.config.max_depth_ids,
692            dirty_rects_are_valid,
693            scene.config.low_quality_pinch_zoom,
694            &frame_memory,
695        );
696
697        self.composite_state_prealloc.preallocate(&mut composite_state);
698
699        let mut cmd_buffers = CommandBufferList::new();
700
701        // TODO(gw): Recycle backing vec buffers for gpu buffer builder between frames
702        let mut gpu_buffer_builder = GpuBufferBuilder {
703            f32: GpuBufferBuilderF::new(&frame_memory),
704            i32: GpuBufferBuilderI::new(&frame_memory),
705        };
706
707        self.build_layer_screen_rects_and_cull_layers(
708            scene,
709            present,
710            screen_world_rect,
711            resource_cache,
712            gpu_cache,
713            rg_builder,
714            global_device_pixel_scale,
715            scene_properties,
716            &mut transform_palette,
717            data_stores,
718            scratch,
719            debug_flags,
720            &mut composite_state,
721            tile_caches,
722            spatial_tree,
723            &mut cmd_buffers,
724            &mut gpu_buffer_builder,
725            &frame_memory,
726            profile,
727        );
728
729        self.render_minimap(&mut scratch.primitive, &spatial_tree, minimap_data);
730
731        profile.start_time(profiler::FRAME_BATCHING_TIME);
732
733        let mut deferred_resolves = frame_memory.new_vec();
734
735        // Finish creating the frame graph and build it.
736        let render_tasks = rg_builder.end_frame(
737            resource_cache,
738            gpu_cache,
739            &mut deferred_resolves,
740            scene.config.max_shared_surface_size,
741            &frame_memory,
742        );
743
744        let mut passes = frame_memory.new_vec();
745        let mut has_texture_cache_tasks = false;
746        let mut prim_headers = PrimitiveHeaders::new(&frame_memory);
747        self.prim_headers_prealloc.preallocate_framevec(&mut prim_headers.headers_int);
748        self.prim_headers_prealloc.preallocate_framevec(&mut prim_headers.headers_float);
749
750        {
751            profile_marker!("Batching");
752
            // Used to generate a unique z-buffer value per primitive.
754            let mut z_generator = ZBufferIdGenerator::new(scene.config.max_depth_ids);
755            let use_dual_source_blending = scene.config.dual_source_blending_is_supported;
756
757            for pass in render_tasks.passes.iter().rev() {
758                let mut ctx = RenderTargetContext {
759                    global_device_pixel_scale,
760                    prim_store: &scene.prim_store,
761                    clip_store: &scene.clip_store,
762                    resource_cache,
763                    use_dual_source_blending,
764                    use_advanced_blending: scene.config.gpu_supports_advanced_blend,
765                    break_advanced_blend_batches: !scene.config.advanced_blend_is_coherent,
766                    batch_lookback_count: scene.config.batch_lookback_count,
767                    spatial_tree,
768                    data_stores,
769                    surfaces: &scene.surfaces,
770                    scratch: &mut scratch.primitive,
771                    screen_world_rect,
772                    globals: &self.globals,
773                    tile_caches,
774                    root_spatial_node_index: spatial_tree.root_reference_frame_index(),
775                    frame_memory: &mut frame_memory,
776                };
777
778                let pass = build_render_pass(
779                    pass,
780                    output_size,
781                    &mut ctx,
782                    gpu_cache,
783                    &mut gpu_buffer_builder,
784                    &render_tasks,
785                    &scene.clip_store,
786                    &mut transform_palette,
787                    &mut prim_headers,
788                    &mut z_generator,
789                    scene.config.gpu_supports_fast_clears,
790                    &scene.prim_instances,
791                    &cmd_buffers,
792                );
793
794                has_texture_cache_tasks |= !pass.texture_cache.is_empty();
795                has_texture_cache_tasks |= !pass.picture_cache.is_empty();
796
797                passes.push(pass);
798            }
799
800            if present {
801                let mut ctx = RenderTargetContext {
802                    global_device_pixel_scale,
803                    clip_store: &scene.clip_store,
804                    prim_store: &scene.prim_store,
805                    resource_cache,
806                    use_dual_source_blending,
807                    use_advanced_blending: scene.config.gpu_supports_advanced_blend,
808                    break_advanced_blend_batches: !scene.config.advanced_blend_is_coherent,
809                    batch_lookback_count: scene.config.batch_lookback_count,
810                    spatial_tree,
811                    data_stores,
812                    surfaces: &scene.surfaces,
813                    scratch: &mut scratch.primitive,
814                    screen_world_rect,
815                    globals: &self.globals,
816                    tile_caches,
817                    root_spatial_node_index: spatial_tree.root_reference_frame_index(),
818                    frame_memory: &mut frame_memory,
819                };
820
821                self.build_composite_pass(
822                    scene,
823                    &mut ctx,
824                    gpu_cache,
825                    &mut deferred_resolves,
826                    &mut composite_state,
827                );
828            }
829        }
830
831        profile.end_time(profiler::FRAME_BATCHING_TIME);
832
833        let gpu_cache_frame_id = gpu_cache.end_frame(profile).frame_id();
834
835        resource_cache.end_frame(profile);
836
837        self.prim_headers_prealloc.record_vec(&prim_headers.headers_int);
838        self.composite_state_prealloc.record(&composite_state);
839
840        composite_state.end_frame();
841        scene.clip_store.end_frame(&mut scratch.clip_store);
842        scratch.end_frame();
843
844        let gpu_buffer_f = gpu_buffer_builder.f32.finalize(&render_tasks);
845        let gpu_buffer_i = gpu_buffer_builder.i32.finalize(&render_tasks);
846
847        Frame {
848            device_rect: DeviceIntRect::from_origin_and_size(
849                device_origin,
850                scene.output_rect.size(),
851            ),
852            present,
853            passes,
854            transform_palette: transform_palette.finish(),
855            render_tasks,
856            deferred_resolves,
857            gpu_cache_frame_id,
858            has_been_rendered: false,
859            has_texture_cache_tasks,
860            prim_headers,
861            debug_items: mem::replace(&mut scratch.primitive.debug_items, Vec::new()),
862            composite_state,
863            gpu_buffer_f,
864            gpu_buffer_i,
865            allocator_memory: frame_memory,
866        }
867    }
868
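    /// Render scrolling minimaps as debug overlay rects for each scroll frame
    /// that has associated `MinimapData`.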
869    fn render_minimap(
870        &self,
871        scratch: &mut PrimitiveScratchBuffer,
872        spatial_tree: &SpatialTree,
873        minimap_data_store: FastHashMap<ExternalScrollId, MinimapData>) {
      // TODO: Replace minimap_data_store with Option<FastHashMap>?
875      if minimap_data_store.is_empty() {
876        return
877      }
878
879      // In our main walk over the spatial tree (below), for nodes inside a
880      // subtree rooted at a root-content node, we need some information from
      // that enclosing root-content node. To collect this information, do a
882      // preliminary walk over the spatial tree now and collect the root-content
883      // info in a HashMap.
884      struct RootContentInfo {
885        transform: LayoutToWorldTransform,
886        clip: LayoutRect
887      }
888      let mut root_content_info = FastHashMap::<ExternalScrollId, RootContentInfo>::default();
889      spatial_tree.visit_nodes(|index, node| {
890        if let SpatialNodeType::ScrollFrame(ref scroll_frame_info) = node.node_type {
891          if let Some(minimap_data) = minimap_data_store.get(&scroll_frame_info.external_id) {
892            if minimap_data.is_root_content {
893              let transform = spatial_tree.get_world_viewport_transform(index).into_transform();
894              root_content_info.insert(scroll_frame_info.external_id, RootContentInfo{
895                transform,
896                clip: scroll_frame_info.viewport_rect
897              });
898            }
899          }
900        }
901      });
902
903      // This is the main walk over the spatial tree. For every scroll frame node which
904      // has minimap data, compute the rects we want to render for that minimap in world
905      // coordinates and add them to `scratch.debug_items`.
906      spatial_tree.visit_nodes(|index, node| {
907        if let SpatialNodeType::ScrollFrame(ref scroll_frame_info) = node.node_type {
908          if let Some(minimap_data) = minimap_data_store.get(&scroll_frame_info.external_id) {
909            const HORIZONTAL_PADDING: f32 = 5.0;
910            const VERTICAL_PADDING: f32 = 10.0;
911            const PAGE_BORDER_COLOR: ColorF = debug_colors::BLACK;
912            const BACKGROUND_COLOR: ColorF = ColorF { r: 0.3, g: 0.3, b: 0.3, a: 0.3};
913            const DISPLAYPORT_BACKGROUND_COLOR: ColorF = ColorF { r: 1.0, g: 1.0, b: 1.0, a: 0.4};
914            const LAYOUT_PORT_COLOR: ColorF = debug_colors::RED;
915            const VISUAL_PORT_COLOR: ColorF = debug_colors::BLUE;
916            const DISPLAYPORT_COLOR: ColorF = debug_colors::LIME;
917
918            let viewport = scroll_frame_info.viewport_rect;
919
920            // Scale the minimap to make it 100px wide (if there's space), and the full height
921            // of the scroll frame's viewport, minus some padding. Position it at the left edge
922            // of the scroll frame's viewport.
923            let scale_factor_x = 100f32.min(viewport.width() - (2.0 * HORIZONTAL_PADDING))
924                                   / minimap_data.scrollable_rect.width();
925            let scale_factor_y = (viewport.height() - (2.0 * VERTICAL_PADDING))
926                                / minimap_data.scrollable_rect.height();
927            if scale_factor_x <= 0.0 || scale_factor_y <= 0.0 {
928              return;
929            }
930            let transform = LayoutTransform::scale(scale_factor_x, scale_factor_y, 1.0)
931                .then_translate(LayoutVector3D::new(HORIZONTAL_PADDING, VERTICAL_PADDING, 0.0))
932                .then_translate(LayoutVector3D::new(viewport.min.x, viewport.min.y, 0.0));
933
            // Transforms for converting rects from this scroll frame's local coordinates to world coordinates.
935            // For scroll frames inside a root-content subtree, we apply this transform in two parts
936            // (local to root-content, and root-content to world), so that we can make additional
937            // adjustments in root-content space. For scroll frames outside of a root-content subtree,
938            // the entire world transform will be in `local_to_root_content`.
939            let world_transform = spatial_tree
940                .get_world_viewport_transform(index)
941                .into_transform();
942            let mut local_to_root_content =
943                world_transform.with_destination::<LayoutPixel>();
944            let mut root_content_to_world = LayoutToWorldTransform::default();
945            let mut root_content_clip = None;
946            if minimap_data.root_content_scroll_id != 0 {
947              if let Some(RootContentInfo{transform: root_content_transform, clip}) = root_content_info.get(&ExternalScrollId(minimap_data.root_content_scroll_id, minimap_data.root_content_pipeline_id)) {
948                // Exclude the root-content node's zoom transform from `local_to_root_content`.
949                // This ensures that the minimap remains unaffected by pinch-zooming
950                // (in essence, remaining attached to the *visual* viewport, rather than to
951                // the *layout* viewport which is what happens by default).
952                let zoom_transform = minimap_data.zoom_transform;
953                local_to_root_content = world_transform
954                  .then(&root_content_transform.inverse().unwrap())
955                  .then(&zoom_transform.inverse().unwrap());
956                root_content_to_world = root_content_transform.clone();
957                root_content_clip = Some(clip);
958              }
959            }
960
961            let mut add_rect = |rect, border, fill| -> Option<()> {
962              const STROKE_WIDTH: f32 = 2.0;
963              // Place rect in scroll frame's local coordinate space
964              let transformed_rect = transform.outer_transformed_box2d(&rect)?;
965
966              // Transform to world coordinates, using root-content coords as an intermediate step.
967              let mut root_content_rect = local_to_root_content.outer_transformed_box2d(&transformed_rect)?;
968              // In root-content coords, apply the root content node's viewport clip.
969              // This prevents subframe minimaps from leaking into the chrome area when the root
970              // scroll frame is scrolled.
971              // TODO: The minimaps of nested subframes can still leak outside of the viewports of
              // their containing subframes. We should find a proper fix for this.
973              if let Some(clip) = root_content_clip {
974                root_content_rect = root_content_rect.intersection(clip)?;
975              }
976              let world_rect = root_content_to_world.outer_transformed_box2d(&root_content_rect)?;
977
978              scratch.push_debug_rect_with_stroke_width(world_rect, border, STROKE_WIDTH);
979
980              // Add world coordinate rects to scratch.debug_items
981              if let Some(fill_color) = fill {
982                let interior_world_rect = WorldRect::new(
983                    world_rect.min + WorldVector2D::new(STROKE_WIDTH, STROKE_WIDTH),
984                    world_rect.max - WorldVector2D::new(STROKE_WIDTH, STROKE_WIDTH)
985                );
986                scratch.push_debug_rect(interior_world_rect * DevicePixelScale::new(1.0), 1, border, fill_color);
987              }
988
989              Some(())
990            };
991
992            add_rect(minimap_data.scrollable_rect, PAGE_BORDER_COLOR, Some(BACKGROUND_COLOR));
993            add_rect(minimap_data.displayport, DISPLAYPORT_COLOR, Some(DISPLAYPORT_BACKGROUND_COLOR));
994            // Only render a distinct layout viewport for the root content.
995            // For other scroll frames, the visual and layout viewports coincide.
996            if minimap_data.is_root_content {
997              add_rect(minimap_data.layout_viewport, LAYOUT_PORT_COLOR, None);
998            }
999            add_rect(minimap_data.visual_viewport, VISUAL_PORT_COLOR, None);
1000          }
1001        }
1002      });
1003    }
1004
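    /// Register each top-level tile cache with the composite state, so that
    /// the tiles can be composited into the framebuffer or handed off to an
    /// OS compositor.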
1005    fn build_composite_pass(
1006        &self,
1007        scene: &BuiltScene,
1008        ctx: &RenderTargetContext,
1009        gpu_cache: &mut GpuCache,
1010        deferred_resolves: &mut FrameVec<DeferredResolve>,
1011        composite_state: &mut CompositeState,
1012    ) {
1013        for pic_index in &scene.tile_cache_pictures {
1014            let pic = &ctx.prim_store.pictures[pic_index.0];
1015
1016            match pic.raster_config {
1017                Some(RasterConfig { composite_mode: PictureCompositeMode::TileCache { slice_id }, .. }) => {
1018                    // Tile cache instances are added to the composite config, rather than
1019                    // directly added to batches. This allows them to be drawn with various
                    // present modes during render, such as partial present.
1021                    let tile_cache = &ctx.tile_caches[&slice_id];
1022                    let map_local_to_world = SpaceMapper::new_with_target(
1023                        ctx.root_spatial_node_index,
1024                        tile_cache.spatial_node_index,
1025                        ctx.screen_world_rect,
1026                        ctx.spatial_tree,
1027                    );
1028                    let world_clip_rect = map_local_to_world
1029                        .map(&tile_cache.local_clip_rect)
1030                        .expect("bug: unable to map clip rect");
1031                    let device_clip_rect = (world_clip_rect * ctx.global_device_pixel_scale).round();
1032
1033                    composite_state.push_surface(
1034                        tile_cache,
1035                        device_clip_rect,
1036                        ctx.resource_cache,
1037                        gpu_cache,
1038                        deferred_resolves,
1039                    );
1040                }
1041                _ => {
1042                    panic!("bug: found a top-level prim that isn't a tile cache");
1043                }
1044            }
1045        }
1046    }
1047}
1048
/// Processes a pass to prepare it for rendering.
///
/// Among other things, this allocates an output region for each of the pass's
/// tasks (added via `add_render_task`) in a RenderTarget, and assigns each
/// task to that target.
1054pub fn build_render_pass(
1055    src_pass: &Pass,
1056    screen_size: DeviceIntSize,
1057    ctx: &mut RenderTargetContext,
1058    gpu_cache: &mut GpuCache,
1059    gpu_buffer_builder: &mut GpuBufferBuilder,
1060    render_tasks: &RenderTaskGraph,
1061    clip_store: &ClipStore,
1062    transforms: &mut TransformPalette,
1063    prim_headers: &mut PrimitiveHeaders,
1064    z_generator: &mut ZBufferIdGenerator,
1065    gpu_supports_fast_clears: bool,
1066    prim_instances: &[PrimitiveInstance],
1067    cmd_buffers: &CommandBufferList,
1068) -> RenderPass {
1069    profile_scope!("build_render_pass");
1070
1071    // TODO(gw): In this initial frame graph work, we try to maintain the existing
1072    //           build_render_pass code as closely as possible, to make the review
1073    //           simpler and reduce chance of regressions. However, future work should
1074    //           include refactoring this to more closely match the built frame graph.
1075    let mut pass = RenderPass::new(src_pass, ctx.frame_memory);
1076
1077    for sub_pass in &src_pass.sub_passes {
1078        match sub_pass.surface {
1079            SubPassSurface::Dynamic { target_kind, texture_id, used_rect } => {
1080                match target_kind {
1081                    RenderTargetKind::Color => {
1082                        let mut target = RenderTarget::new(
1083                            RenderTargetKind::Color,
1084                            false,
1085                            texture_id,
1086                            screen_size,
1087                            gpu_supports_fast_clears,
1088                            Some(used_rect),
1089                            &ctx.frame_memory,
1090                        );
1091
1092                        for task_id in &sub_pass.task_ids {
1093                            target.add_task(
1094                                *task_id,
1095                                ctx,
1096                                gpu_cache,
1097                                gpu_buffer_builder,
1098                                render_tasks,
1099                                clip_store,
1100                                transforms,
1101                            );
1102                        }
1103
1104                        pass.color.targets.push(target);
1105                    }
1106                    RenderTargetKind::Alpha => {
1107                        let mut target = RenderTarget::new(
1108                            RenderTargetKind::Alpha,
1109                            false,
1110                            texture_id,
1111                            screen_size,
1112                            gpu_supports_fast_clears,
1113                            Some(used_rect),
1114                            &ctx.frame_memory,
1115                        );
1116
1117                        for task_id in &sub_pass.task_ids {
1118                            target.add_task(
1119                                *task_id,
1120                                ctx,
1121                                gpu_cache,
1122                                gpu_buffer_builder,
1123                                render_tasks,
1124                                clip_store,
1125                                transforms,
1126                            );
1127                        }
1128
1129                        pass.alpha.targets.push(target);
1130                    }
1131                }
1132            }
1133            SubPassSurface::Persistent { surface: StaticRenderTaskSurface::PictureCache { ref surface, .. }, .. } => {
1134                assert_eq!(sub_pass.task_ids.len(), 1);
1135                let task_id = sub_pass.task_ids[0];
1136                let task = &render_tasks[task_id];
1137                let target_rect = task.get_target_rect();
1138
1139                match task.kind {
1140                    RenderTaskKind::Picture(ref pic_task) => {
1141                        let cmd_buffer = cmd_buffers.get(pic_task.cmd_buffer_index);
1142                        let mut dirty_rect = pic_task.scissor_rect.expect("bug: must be set for cache tasks");
1143                        let mut valid_rect = pic_task.valid_rect.expect("bug: must be set for cache tasks");
1144
                        // If we have a surface size, clip the dirty and valid rects
1146                        // to that size. This ensures that native compositors will
1147                        // pass sanity checks (Bug 1971296).
1148                        if let ResolvedSurfaceTexture::Native { size, .. } = surface {
1149                            let surface_size_rect = <DeviceIntRect>::from_size(*size);
1150                            dirty_rect = dirty_rect.intersection(&surface_size_rect).unwrap_or_default();
1151                            valid_rect = valid_rect.intersection(&surface_size_rect).unwrap_or_default();
1152                        }
1153
1154                        let batcher = AlphaBatchBuilder::new(
1155                            screen_size,
1156                            ctx.break_advanced_blend_batches,
1157                            ctx.batch_lookback_count,
1158                            task_id,
1159                            task_id.into(),
1160                            &ctx.frame_memory,
1161                        );
1162
1163                        let mut batch_builder = BatchBuilder::new(batcher);
1164
1165                        cmd_buffer.iter_prims(&mut |cmd, spatial_node_index, segments| {
1166                            batch_builder.add_prim_to_batch(
1167                                cmd,
1168                                spatial_node_index,
1169                                ctx,
1170                                gpu_cache,
1171                                render_tasks,
1172                                prim_headers,
1173                                transforms,
1174                                pic_task.raster_spatial_node_index,
1175                                pic_task.surface_spatial_node_index,
1176                                z_generator,
1177                                prim_instances,
1178                                gpu_buffer_builder,
1179                                segments,
1180                            );
1181                        });
1182
1183                        let batcher = batch_builder.finalize();
1184
1185                        let mut batch_containers = ctx.frame_memory.new_vec();
1186                        let mut alpha_batch_container = AlphaBatchContainer::new(
1187                            Some(dirty_rect),
1188                            &ctx.frame_memory
1189                        );
1190
1191                        batcher.build(
1192                            &mut batch_containers,
1193                            &mut alpha_batch_container,
1194                            target_rect,
1195                            None,
1196                        );
1197                        debug_assert!(batch_containers.is_empty());
1198
1199                        let target = PictureCacheTarget {
1200                            surface: surface.clone(),
1201                            clear_color: pic_task.clear_color,
1202                            kind: PictureCacheTargetKind::Draw {
1203                                alpha_batch_container,
1204                            },
1205                            dirty_rect,
1206                            valid_rect,
1207                        };
1208
1209                        pass.picture_cache.push(target);
1210                    }
1211                    RenderTaskKind::TileComposite(ref tile_task) => {
1212                        let mut dirty_rect = tile_task.scissor_rect;
1213                        let mut valid_rect = tile_task.valid_rect;
                        // If we have a surface size, clip the dirty and valid rects
1215                        // to that size. This ensures that native compositors will
1216                        // pass sanity checks (Bug 1971296).
1217                        if let ResolvedSurfaceTexture::Native { size, .. } = surface {
1218                            let surface_size_rect = <DeviceIntRect>::from_size(*size);
1219                            dirty_rect = dirty_rect.intersection(&surface_size_rect).unwrap_or_default();
1220                            valid_rect = valid_rect.intersection(&surface_size_rect).unwrap_or_default();
1221                        }
1222
1223                        let target = PictureCacheTarget {
1224                            surface: surface.clone(),
1225                            clear_color: Some(tile_task.clear_color),
1226                            kind: PictureCacheTargetKind::Blit {
1227                                task_id: tile_task.task_id.expect("bug: no source task_id set"),
1228                                sub_rect_offset: tile_task.sub_rect_offset,
1229                            },
1230                            dirty_rect,
1231                            valid_rect,
1232                        };
1233
1234                        pass.picture_cache.push(target);
1235                    }
1236                    _ => {
1237                        unreachable!();
1238                    }
1239                };
1240            }
1241            SubPassSurface::Persistent { surface: StaticRenderTaskSurface::TextureCache { target_kind, texture, .. } } => {
1242                let texture = pass.texture_cache
1243                    .entry(texture)
1244                    .or_insert_with(||
1245                        RenderTarget::new(
1246                            target_kind,
1247                            true,
1248                            texture,
1249                            screen_size,
1250                            gpu_supports_fast_clears,
1251                            None,
1252                            &ctx.frame_memory
1253                        )
1254                    );
1255                for task_id in &sub_pass.task_ids {
1256                    texture.add_task(
1257                        *task_id,
1258                        ctx,
1259                        gpu_cache,
1260                        gpu_buffer_builder,
1261                        render_tasks,
1262                        clip_store,
1263                        transforms,
1264                    );
1265                }
1266            }
1267            SubPassSurface::Persistent { surface: StaticRenderTaskSurface::ReadOnly { .. } } => {
1268                panic!("Should not create a render pass for read-only task locations.");
1269            }
1270        }
1271    }
1272
1273    pass.color.build(
1274        ctx,
1275        gpu_cache,
1276        render_tasks,
1277        prim_headers,
1278        transforms,
1279        z_generator,
1280        prim_instances,
1281        cmd_buffers,
1282        gpu_buffer_builder,
1283    );
1284    pass.alpha.build(
1285        ctx,
1286        gpu_cache,
1287        render_tasks,
1288        prim_headers,
1289        transforms,
1290        z_generator,
1291        prim_instances,
1292        cmd_buffers,
1293        gpu_buffer_builder,
1294    );
1295
1296    for target in &mut pass.texture_cache.values_mut() {
1297        target.build(
1298            ctx,
1299            gpu_cache,
1300            render_tasks,
1301            prim_headers,
1302            transforms,
1303            z_generator,
1304            prim_instances,
1305            cmd_buffers,
1306            gpu_buffer_builder,
1307        );
1308    }
1309
1310    pass
1311}
1312
1313/// A rendering-oriented representation of the frame built by the render backend
1314/// and presented to the renderer.
1315///
1316/// # Safety
1317///
1318/// The frame's allocator memory must be dropped after all of the frame's containers.
1319/// This is handled in the renderer and in `RenderedDocument`'s Drop implementation.
1320#[cfg_attr(feature = "capture", derive(Serialize))]
1321#[cfg_attr(feature = "replay", derive(Deserialize))]
1322pub struct Frame {
1323    /// The rectangle to show the frame in, on screen.
1324    pub device_rect: DeviceIntRect,
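    /// True if this frame is intended to be presented, rather than only
    /// updating off-screen targets.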
1325    pub present: bool,
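    /// The render passes to execute, in order.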
1326    pub passes: FrameVec<RenderPass>,
1327
1328    pub transform_palette: FrameVec<TransformData>,
1329    pub render_tasks: RenderTaskGraph,
1330    pub prim_headers: PrimitiveHeaders,
1331
1332    /// The GPU cache frame that the contents of Self depend on
1333    pub gpu_cache_frame_id: FrameId,
1334
1335    /// List of textures that we don't know about yet
1336    /// from the backend thread. The render thread
1337    /// will use a callback to resolve these and
1338    /// patch the data structures.
1339    pub deferred_resolves: FrameVec<DeferredResolve>,
1340
1341    /// True if this frame contains any render tasks
1342    /// that write to the texture cache.
1343    pub has_texture_cache_tasks: bool,
1344
1345    /// True if this frame has been drawn by the
1346    /// renderer.
1347    pub has_been_rendered: bool,
1348
1349    /// Debugging information to overlay for this frame.
1350    pub debug_items: Vec<DebugItem>,
1351
1352    /// Contains picture cache tiles, and associated information.
1353    /// Used by the renderer to composite tiles into the framebuffer,
1354    /// or hand them off to an OS compositor.
1355    pub composite_state: CompositeState,
1356
1357    /// Main GPU data buffer constructed (primarily) during the prepare
1358    /// pass for primitives that were visible and dirty.
1359    pub gpu_buffer_f: GpuBufferF,
1360    pub gpu_buffer_i: GpuBufferI,
1361
1362    /// The backing store for the frame's allocator.
1363    ///
1364    /// # Safety
1365    ///
1366    /// Must not be dropped while frame allocations are alive.
1367    ///
1368    /// Rust has deterministic drop order [1]. We rely on `allocator_memory`
1369    /// being the last member of the `Frame` struct so that it is dropped
1370    /// after the frame's containers.
1371    ///
1372    /// [1]: https://doc.rust-lang.org/reference/destructors.html
1373    pub allocator_memory: FrameMemory,
1374}
1375
1376impl Frame {
1377    // This frame must be flushed if it writes to the
1378    // texture cache, and hasn't been drawn yet.
1379    pub fn must_be_drawn(&self) -> bool {
1380        self.has_texture_cache_tasks && !self.has_been_rendered
1381    }
1382
1383    // Returns true if this frame doesn't alter what is on screen currently.
1384    pub fn is_nop(&self) -> bool {
1385        // If there are no off-screen passes, that implies that there are no
        // picture cache tiles, and no texture cache tasks being updated. If this
1387        // is the case, we can consider the frame a nop (higher level checks
1388        // test if a composite is needed due to picture cache surfaces moving
1389        // or external surfaces being updated).
1390        self.passes.is_empty()
1391    }
1392}