use super::super::shader_source::{OPTIMIZED_SHADERS, UNOPTIMIZED_SHADERS};
use api::{ImageDescriptor, ImageFormat, Parameter, BoolParameter, IntParameter, ImageRendering};
use api::{MixBlendMode, ImageBufferKind, VoidPtrToSizeFn};
use api::{CrashAnnotator, CrashAnnotation, CrashAnnotatorGuard};
use api::units::*;
use euclid::default::Transform3D;
use gleam::gl;
use crate::render_api::MemoryReport;
use crate::internal_types::{FastHashMap, RenderTargetInfo, Swizzle, SwizzleSettings};
use crate::util::round_up_to_multiple;
use crate::profiler;
use log::Level;
use smallvec::SmallVec;
use std::{
    borrow::Cow,
    cell::{Cell, RefCell},
    cmp,
    collections::hash_map::Entry,
    marker::PhantomData,
    mem,
    num::NonZeroUsize,
    os::raw::c_void,
    ops::Add,
    path::PathBuf,
    ptr,
    rc::Rc,
    slice,
    sync::Arc,
    thread,
    time::Duration,
};
use webrender_build::shader::{
    ProgramSourceDigest, ShaderKind, ShaderVersion, build_shader_main_string,
    build_shader_prefix_string, do_build_shader_string, shader_source_from_file,
};
use malloc_size_of::MallocSizeOfOps;

#[derive(Debug, Copy, Clone, PartialEq, Ord, Eq, PartialOrd)]
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
pub struct GpuFrameId(usize);

impl GpuFrameId {
    pub fn new(value: usize) -> Self {
        GpuFrameId(value)
    }
}

impl Add<usize> for GpuFrameId {
    type Output = GpuFrameId;

    fn add(self, other: usize) -> GpuFrameId {
        GpuFrameId(self.0 + other)
    }
}

pub struct TextureSlot(pub usize);

const DEFAULT_TEXTURE: TextureSlot = TextureSlot(0);

#[repr(u32)]
pub enum DepthFunction {
    Always = gl::ALWAYS,
    Less = gl::LESS,
    LessEqual = gl::LEQUAL,
}

#[repr(u32)]
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
pub enum TextureFilter {
    Nearest,
    Linear,
    Trilinear,
}

#[derive(Clone, Debug)]
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
pub struct TextureFormatPair<T> {
    pub internal: T,
    pub external: T,
}

impl<T: Copy> From<T> for TextureFormatPair<T> {
    fn from(value: T) -> Self {
        TextureFormatPair {
            internal: value,
            external: value,
        }
    }
}

#[derive(Debug)]
pub enum VertexAttributeKind {
    F32,
    U8Norm,
    U16Norm,
    I32,
    U16,
}

#[derive(Debug)]
pub struct VertexAttribute {
    pub name: &'static str,
    pub count: u32,
    pub kind: VertexAttributeKind,
}

#[derive(Debug)]
pub struct VertexDescriptor {
    pub vertex_attributes: &'static [VertexAttribute],
    pub instance_attributes: &'static [VertexAttribute],
}

enum FBOTarget {
    Read,
    Draw,
}

#[derive(Debug, Clone)]
pub enum UploadMethod {
    Immediate,
    PixelBuffer(VertexUsageHint),
}

pub unsafe trait Texel: Copy + Default {
    fn image_format() -> ImageFormat;
}

unsafe impl Texel for u8 {
    fn image_format() -> ImageFormat { ImageFormat::R8 }
}

fn depth_target_size_in_bytes(dimensions: &DeviceIntSize) -> usize {
    let pixels = dimensions.width * dimensions.height;
    (pixels as usize) * 4
}
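
// A sketch of the arithmetic above: although the render target's depth format
// is DEPTH_COMPONENT24, drivers typically store it at 32 bits per pixel, so
// the estimate uses a fixed factor of 4 rather than 3. Under that assumption,
// a 1024x1024 depth target is counted as 4 MiB.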

pub fn get_gl_target(target: ImageBufferKind) -> gl::GLuint {
    match target {
        ImageBufferKind::Texture2D => gl::TEXTURE_2D,
        ImageBufferKind::TextureRect => gl::TEXTURE_RECTANGLE,
        ImageBufferKind::TextureExternal => gl::TEXTURE_EXTERNAL_OES,
        ImageBufferKind::TextureExternalBT709 => gl::TEXTURE_EXTERNAL_OES,
    }
}

pub fn from_gl_target(target: gl::GLuint) -> ImageBufferKind {
    match target {
        gl::TEXTURE_2D => ImageBufferKind::Texture2D,
        gl::TEXTURE_RECTANGLE => ImageBufferKind::TextureRect,
        gl::TEXTURE_EXTERNAL_OES => ImageBufferKind::TextureExternal,
        _ => panic!("Unexpected target {:?}", target),
    }
}

fn supports_extension(extensions: &[String], extension: &str) -> bool {
    extensions.iter().any(|s| s == extension)
}

fn get_shader_version(gl: &dyn gl::Gl) -> ShaderVersion {
    match gl.get_type() {
        gl::GlType::Gl => ShaderVersion::Gl,
        gl::GlType::Gles => ShaderVersion::Gles,
    }
}

pub fn get_unoptimized_shader_source(shader_name: &str, base_path: Option<&PathBuf>) -> Cow<'static, str> {
    if let Some(ref base) = base_path {
        let shader_path = base.join(&format!("{}.glsl", shader_name));
        Cow::Owned(shader_source_from_file(&shader_path))
    } else {
        Cow::Borrowed(
            UNOPTIMIZED_SHADERS
                .get(shader_name)
                .expect("Shader not found")
                .source
        )
    }
}

impl VertexAttributeKind {
    fn size_in_bytes(&self) -> u32 {
        match *self {
            VertexAttributeKind::F32 => 4,
            VertexAttributeKind::U8Norm => 1,
            VertexAttributeKind::U16Norm => 2,
            VertexAttributeKind::I32 => 4,
            VertexAttributeKind::U16 => 2,
        }
    }
}

impl VertexAttribute {
    fn size_in_bytes(&self) -> u32 {
        self.count * self.kind.size_in_bytes()
    }

    fn bind_to_vao(
        &self,
        attr_index: gl::GLuint,
        divisor: gl::GLuint,
        stride: gl::GLint,
        offset: gl::GLuint,
        gl: &dyn gl::Gl,
    ) {
        gl.enable_vertex_attrib_array(attr_index);
        gl.vertex_attrib_divisor(attr_index, divisor);

        match self.kind {
            VertexAttributeKind::F32 => {
                gl.vertex_attrib_pointer(
                    attr_index,
                    self.count as gl::GLint,
                    gl::FLOAT,
                    false,
                    stride,
                    offset,
                );
            }
            VertexAttributeKind::U8Norm => {
                gl.vertex_attrib_pointer(
                    attr_index,
                    self.count as gl::GLint,
                    gl::UNSIGNED_BYTE,
                    true,
                    stride,
                    offset,
                );
            }
            VertexAttributeKind::U16Norm => {
                gl.vertex_attrib_pointer(
                    attr_index,
                    self.count as gl::GLint,
                    gl::UNSIGNED_SHORT,
                    true,
                    stride,
                    offset,
                );
            }
            VertexAttributeKind::I32 => {
                gl.vertex_attrib_i_pointer(
                    attr_index,
                    self.count as gl::GLint,
                    gl::INT,
                    stride,
                    offset,
                );
            }
            VertexAttributeKind::U16 => {
                gl.vertex_attrib_i_pointer(
                    attr_index,
                    self.count as gl::GLint,
                    gl::UNSIGNED_SHORT,
                    stride,
                    offset,
                );
            }
        }
    }
}

impl VertexDescriptor {
    fn instance_stride(&self) -> u32 {
        self.instance_attributes
            .iter()
            .map(|attr| attr.size_in_bytes())
            .sum()
    }

    fn bind_attributes(
        attributes: &[VertexAttribute],
        start_index: usize,
        divisor: u32,
        gl: &dyn gl::Gl,
        vbo: VBOId,
    ) {
        vbo.bind(gl);

        let stride: u32 = attributes
            .iter()
            .map(|attr| attr.size_in_bytes())
            .sum();

        let mut offset = 0;
        for (i, attr) in attributes.iter().enumerate() {
            let attr_index = (start_index + i) as gl::GLuint;
            attr.bind_to_vao(attr_index, divisor, stride as _, offset, gl);
            offset += attr.size_in_bytes();
        }
    }

    fn bind(&self, gl: &dyn gl::Gl, main: VBOId, instance: VBOId, instance_divisor: u32) {
        Self::bind_attributes(self.vertex_attributes, 0, 0, gl, main);

        if !self.instance_attributes.is_empty() {
            Self::bind_attributes(
                self.instance_attributes,
                self.vertex_attributes.len(),
                instance_divisor,
                gl,
                instance,
            );
        }
    }
}
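
// A minimal sketch (the attribute names are illustrative, not a descriptor
// actually used by this crate) of how a `VertexDescriptor` is declared:
// per-vertex attributes first, then per-instance attributes. Strides and
// offsets are derived from the declared attribute sizes by `bind_attributes`.
#[allow(dead_code)]
const EXAMPLE_VERTEX_DESCRIPTOR: VertexDescriptor = VertexDescriptor {
    vertex_attributes: &[
        VertexAttribute { name: "aPosition", count: 2, kind: VertexAttributeKind::F32 },
    ],
    instance_attributes: &[
        VertexAttribute { name: "aData", count: 4, kind: VertexAttributeKind::I32 },
    ],
};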

impl VBOId {
    fn bind(&self, gl: &dyn gl::Gl) {
        gl.bind_buffer(gl::ARRAY_BUFFER, self.0);
    }
}

impl IBOId {
    fn bind(&self, gl: &dyn gl::Gl) {
        gl.bind_buffer(gl::ELEMENT_ARRAY_BUFFER, self.0);
    }
}

impl FBOId {
    fn bind(&self, gl: &dyn gl::Gl, target: FBOTarget) {
        let target = match target {
            FBOTarget::Read => gl::READ_FRAMEBUFFER,
            FBOTarget::Draw => gl::DRAW_FRAMEBUFFER,
        };
        gl.bind_framebuffer(target, self.0);
    }
}

pub struct Stream<'a> {
    attributes: &'a [VertexAttribute],
    vbo: VBOId,
}

pub struct VBO<V> {
    id: gl::GLuint,
    target: gl::GLenum,
    allocated_count: usize,
    marker: PhantomData<V>,
}

impl<V> VBO<V> {
    pub fn allocated_count(&self) -> usize {
        self.allocated_count
    }

    pub fn stream_with<'a>(&self, attributes: &'a [VertexAttribute]) -> Stream<'a> {
        debug_assert_eq!(
            mem::size_of::<V>(),
            attributes.iter().map(|a| a.size_in_bytes() as usize).sum::<usize>()
        );
        Stream {
            attributes,
            vbo: VBOId(self.id),
        }
    }
}

impl<T> Drop for VBO<T> {
    fn drop(&mut self) {
        debug_assert!(thread::panicking() || self.id == 0);
    }
}

#[cfg_attr(feature = "replay", derive(Clone))]
#[derive(Debug)]
pub struct ExternalTexture {
    id: gl::GLuint,
    target: gl::GLuint,
    uv_rect: TexelRect,
    image_rendering: ImageRendering,
}

impl ExternalTexture {
    pub fn new(
        id: u32,
        target: ImageBufferKind,
        uv_rect: TexelRect,
        image_rendering: ImageRendering,
    ) -> Self {
        ExternalTexture {
            id,
            target: get_gl_target(target),
            uv_rect,
            image_rendering,
        }
    }

    #[cfg(feature = "replay")]
    pub fn internal_id(&self) -> gl::GLuint {
        self.id
    }

    pub fn get_uv_rect(&self) -> TexelRect {
        self.uv_rect
    }
}

bitflags! {
    #[derive(Default, Debug, Copy, PartialEq, Eq, Clone, PartialOrd, Ord, Hash)]
    pub struct TextureFlags: u32 {
        const IS_SHARED_TEXTURE_CACHE = 1 << 0;
    }
}

#[derive(Debug)]
pub struct Texture {
    id: gl::GLuint,
    target: gl::GLuint,
    format: ImageFormat,
    size: DeviceIntSize,
    filter: TextureFilter,
    flags: TextureFlags,
    active_swizzle: Cell<Swizzle>,
    fbo: Option<FBOId>,
    fbo_with_depth: Option<FBOId>,
    last_frame_used: GpuFrameId,
}

impl Texture {
    pub fn get_dimensions(&self) -> DeviceIntSize {
        self.size
    }

    pub fn get_format(&self) -> ImageFormat {
        self.format
    }

    pub fn get_filter(&self) -> TextureFilter {
        self.filter
    }

    pub fn get_target(&self) -> ImageBufferKind {
        from_gl_target(self.target)
    }

    pub fn supports_depth(&self) -> bool {
        self.fbo_with_depth.is_some()
    }

    pub fn last_frame_used(&self) -> GpuFrameId {
        self.last_frame_used
    }

    pub fn used_in_frame(&self, frame_id: GpuFrameId) -> bool {
        self.last_frame_used == frame_id
    }

    pub fn is_render_target(&self) -> bool {
        self.fbo.is_some()
    }

    pub fn used_recently(&self, current_frame_id: GpuFrameId, threshold: usize) -> bool {
        self.last_frame_used + threshold >= current_frame_id
    }

    pub fn flags(&self) -> &TextureFlags {
        &self.flags
    }

    pub fn flags_mut(&mut self) -> &mut TextureFlags {
        &mut self.flags
    }

    pub fn size_in_bytes(&self) -> usize {
        let bpp = self.format.bytes_per_pixel() as usize;
        let w = self.size.width as usize;
        let h = self.size.height as usize;
        bpp * w * h
    }

    #[cfg(feature = "replay")]
    pub fn into_external(mut self) -> ExternalTexture {
        let ext = ExternalTexture {
            id: self.id,
            target: self.target,
            uv_rect: TexelRect::new(
                0.0,
                0.0,
                self.size.width as f32,
                self.size.height as f32,
            ),
            image_rendering: ImageRendering::Auto,
        };
        // Zero out the ID so the Drop impl below doesn't assert: ownership of
        // the GL texture has moved to the returned ExternalTexture.
        self.id = 0;
        ext
    }
}

impl Drop for Texture {
    fn drop(&mut self) {
        debug_assert!(thread::panicking() || self.id == 0);
    }
}
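
// A worked example for `Texture::size_in_bytes`: a 2048x512 RGBA8 target is
// 4 bytes/pixel * 2048 * 512 = 4 MiB. This counts the color data only; any
// shared depth target is accounted for separately via
// `depth_target_size_in_bytes`.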

pub struct Program {
    id: gl::GLuint,
    u_transform: gl::GLint,
    u_texture_size: gl::GLint,
    source_info: ProgramSourceInfo,
    is_initialized: bool,
}

impl Program {
    pub fn is_initialized(&self) -> bool {
        self.is_initialized
    }
}

impl Drop for Program {
    fn drop(&mut self) {
        debug_assert!(
            thread::panicking() || self.id == 0,
            "renderer::deinit not called"
        );
    }
}

pub struct CustomVAO {
    id: gl::GLuint,
}

impl Drop for CustomVAO {
    fn drop(&mut self) {
        debug_assert!(
            thread::panicking() || self.id == 0,
            "renderer::deinit not called"
        );
    }
}

pub struct VAO {
    id: gl::GLuint,
    ibo_id: IBOId,
    main_vbo_id: VBOId,
    instance_vbo_id: VBOId,
    instance_stride: usize,
    instance_divisor: u32,
    owns_vertices_and_indices: bool,
}

impl Drop for VAO {
    fn drop(&mut self) {
        debug_assert!(
            thread::panicking() || self.id == 0,
            "renderer::deinit not called"
        );
    }
}

#[derive(Debug)]
pub struct PBO {
    id: gl::GLuint,
    reserved_size: usize,
}

impl PBO {
    pub fn get_reserved_size(&self) -> usize {
        self.reserved_size
    }
}

impl Drop for PBO {
    fn drop(&mut self) {
        debug_assert!(
            thread::panicking() || self.id == 0,
            "renderer::deinit not called or PBO not returned to pool"
        );
    }
}

pub struct BoundPBO<'a> {
    device: &'a mut Device,
    pub data: &'a [u8],
}

impl<'a> Drop for BoundPBO<'a> {
    fn drop(&mut self) {
        self.device.gl.unmap_buffer(gl::PIXEL_PACK_BUFFER);
        self.device.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, 0);
    }
}

#[derive(PartialEq, Eq, Hash, Debug, Copy, Clone)]
pub struct FBOId(gl::GLuint);

#[derive(PartialEq, Eq, Hash, Debug, Copy, Clone)]
pub struct RBOId(gl::GLuint);

#[derive(PartialEq, Eq, Hash, Debug, Copy, Clone)]
pub struct VBOId(gl::GLuint);

#[derive(PartialEq, Eq, Hash, Debug, Copy, Clone)]
struct IBOId(gl::GLuint);

#[derive(Clone, Debug)]
enum ProgramSourceType {
    Unoptimized,
    Optimized(ShaderVersion),
}

#[derive(Clone, Debug)]
pub struct ProgramSourceInfo {
    base_filename: &'static str,
    features: Vec<&'static str>,
    full_name_cstr: Rc<std::ffi::CString>,
    source_type: ProgramSourceType,
    digest: ProgramSourceDigest,
}

impl ProgramSourceInfo {
    fn new(
        device: &Device,
        name: &'static str,
        features: &[&'static str],
    ) -> Self {
        use std::collections::hash_map::DefaultHasher;
        use std::hash::Hasher;

        let mut hasher = DefaultHasher::new();
        let gl_version = get_shader_version(&*device.gl());

        hasher.write(device.capabilities.renderer_name.as_bytes());

        let full_name = Self::make_full_name(name, features);

        let optimized_source = if device.use_optimized_shaders {
            OPTIMIZED_SHADERS.get(&(gl_version, &full_name)).or_else(|| {
                warn!("Missing optimized shader source for {}", &full_name);
                None
            })
        } else {
            None
        };

        let source_type = match optimized_source {
            Some(source_and_digest) => {
                if cfg!(debug_assertions) {
                    let mut h = DefaultHasher::new();
                    h.write(source_and_digest.vert_source.as_bytes());
                    h.write(source_and_digest.frag_source.as_bytes());
                    let d: ProgramSourceDigest = h.into();
                    let digest = d.to_string();
                    debug_assert_eq!(digest, source_and_digest.digest);
                    hasher.write(digest.as_bytes());
                } else {
                    hasher.write(source_and_digest.digest.as_bytes());
                }

                ProgramSourceType::Optimized(gl_version)
            }
            None => {
                let override_path = device.resource_override_path.as_ref();
                let source_and_digest = UNOPTIMIZED_SHADERS.get(&name).expect("Shader not found");

                build_shader_prefix_string(
                    gl_version,
                    &features,
                    ShaderKind::Vertex,
                    &name,
                    &mut |s| hasher.write(s.as_bytes()),
                );

                if override_path.is_some() || cfg!(debug_assertions) {
                    let mut h = DefaultHasher::new();
                    build_shader_main_string(
                        &name,
                        &|f| get_unoptimized_shader_source(f, override_path),
                        &mut |s| h.write(s.as_bytes())
                    );
                    let d: ProgramSourceDigest = h.into();
                    let digest = format!("{}", d);
                    debug_assert!(override_path.is_some() || digest == source_and_digest.digest);
                    hasher.write(digest.as_bytes());
                } else {
                    hasher.write(source_and_digest.digest.as_bytes());
                }

                ProgramSourceType::Unoptimized
            }
        };

        ProgramSourceInfo {
            base_filename: name,
            features: features.to_vec(),
            full_name_cstr: Rc::new(std::ffi::CString::new(full_name).unwrap()),
            source_type,
            digest: hasher.into(),
        }
    }

    fn compute_source(&self, device: &Device, kind: ShaderKind) -> String {
        let full_name = self.full_name();
        match self.source_type {
            ProgramSourceType::Optimized(gl_version) => {
                let shader = OPTIMIZED_SHADERS
                    .get(&(gl_version, &full_name))
                    .unwrap_or_else(|| panic!("Missing optimized shader source for {}", full_name));

                match kind {
                    ShaderKind::Vertex => shader.vert_source.to_string(),
                    ShaderKind::Fragment => shader.frag_source.to_string(),
                }
            },
            ProgramSourceType::Unoptimized => {
                let mut src = String::new();
                device.build_shader_string(
                    &self.features,
                    kind,
                    self.base_filename,
                    |s| src.push_str(s),
                );
                src
            }
        }
    }

    fn make_full_name(base_filename: &'static str, features: &[&'static str]) -> String {
        if features.is_empty() {
            base_filename.to_string()
        } else {
            format!("{}_{}", base_filename, features.join("_"))
        }
    }

    fn full_name(&self) -> String {
        Self::make_full_name(self.base_filename, &self.features)
    }
}
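
// For example (shader and feature names here are illustrative),
// `make_full_name("brush_solid", &["ALPHA_PASS"])` yields
// "brush_solid_ALPHA_PASS". The full name doubles as the lookup key into
// OPTIMIZED_SHADERS and, via `full_name_cstr`, as the crash-annotation
// string recorded while the program is being compiled.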

#[cfg_attr(feature = "serialize_program", derive(Deserialize, Serialize))]
pub struct ProgramBinary {
    bytes: Vec<u8>,
    format: gl::GLenum,
    source_digest: ProgramSourceDigest,
}

impl ProgramBinary {
    fn new(bytes: Vec<u8>,
           format: gl::GLenum,
           source_digest: ProgramSourceDigest) -> Self {
        ProgramBinary {
            bytes,
            format,
            source_digest,
        }
    }

    pub fn source_digest(&self) -> &ProgramSourceDigest {
        &self.source_digest
    }
}

pub trait ProgramCacheObserver {
    fn save_shaders_to_disk(&self, entries: Vec<Arc<ProgramBinary>>);
    fn set_startup_shaders(&self, entries: Vec<Arc<ProgramBinary>>);
    fn try_load_shader_from_disk(&self, digest: &ProgramSourceDigest, program_cache: &Rc<ProgramCache>);
    fn notify_program_binary_failed(&self, program_binary: &Arc<ProgramBinary>);
}
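
// A minimal sketch of a `ProgramCacheObserver` implementation (hypothetical;
// the embedder normally supplies one that persists binaries to disk). It only
// logs, which is enough to exercise the `ProgramCache` plumbing below.
#[allow(dead_code)]
struct LoggingProgramCacheObserver;

impl ProgramCacheObserver for LoggingProgramCacheObserver {
    fn save_shaders_to_disk(&self, entries: Vec<Arc<ProgramBinary>>) {
        info!("would persist {} program binaries", entries.len());
    }
    fn set_startup_shaders(&self, entries: Vec<Arc<ProgramBinary>>) {
        info!("{} programs were linked during startup", entries.len());
    }
    fn try_load_shader_from_disk(&self, _digest: &ProgramSourceDigest, _program_cache: &Rc<ProgramCache>) {
        // A real observer would hand a binary loaded from disk back to the
        // cache here (via `load_program_binary`, behind "serialize_program").
    }
    fn notify_program_binary_failed(&self, _program_binary: &Arc<ProgramBinary>) {
        warn!("a cached program binary failed to link; it should be evicted");
    }
}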

struct ProgramCacheEntry {
    binary: Arc<ProgramBinary>,
    linked: bool,
}

pub struct ProgramCache {
    entries: RefCell<FastHashMap<ProgramSourceDigest, ProgramCacheEntry>>,

    program_cache_handler: Option<Box<dyn ProgramCacheObserver>>,

    pending_entries: RefCell<Vec<Arc<ProgramBinary>>>,
}

impl ProgramCache {
    pub fn new(program_cache_observer: Option<Box<dyn ProgramCacheObserver>>) -> Rc<Self> {
        Rc::new(
            ProgramCache {
                entries: RefCell::new(FastHashMap::default()),
                program_cache_handler: program_cache_observer,
                pending_entries: RefCell::new(Vec::default()),
            }
        )
    }

    fn update_disk_cache(&self, startup_complete: bool) {
        if let Some(ref handler) = self.program_cache_handler {
            if !self.pending_entries.borrow().is_empty() {
                let pending_entries = self.pending_entries.replace(Vec::default());
                handler.save_shaders_to_disk(pending_entries);
            }

            if startup_complete {
                let startup_shaders = self.entries.borrow().values()
                    .filter(|e| e.linked).map(|e| e.binary.clone())
                    .collect::<Vec<_>>();
                handler.set_startup_shaders(startup_shaders);
            }
        }
    }

    fn add_new_program_binary(&self, program_binary: Arc<ProgramBinary>) {
        self.pending_entries.borrow_mut().push(program_binary.clone());

        let digest = program_binary.source_digest.clone();
        let entry = ProgramCacheEntry {
            binary: program_binary,
            linked: true,
        };
        self.entries.borrow_mut().insert(digest, entry);
    }

    #[cfg(feature = "serialize_program")]
    pub fn load_program_binary(&self, program_binary: Arc<ProgramBinary>) {
        let digest = program_binary.source_digest.clone();
        let entry = ProgramCacheEntry {
            binary: program_binary,
            linked: false,
        };
        self.entries.borrow_mut().insert(digest, entry);
    }

    pub fn report_memory(&self, op: VoidPtrToSizeFn) -> usize {
        self.entries.borrow().values()
            .map(|e| unsafe { op(e.binary.bytes.as_ptr() as *const c_void) })
            .sum()
    }
}

#[derive(Debug, Copy, Clone)]
pub enum VertexUsageHint {
    Static,
    Dynamic,
    Stream,
}

impl VertexUsageHint {
    fn to_gl(&self) -> gl::GLuint {
        match *self {
            VertexUsageHint::Static => gl::STATIC_DRAW,
            VertexUsageHint::Dynamic => gl::DYNAMIC_DRAW,
            VertexUsageHint::Stream => gl::STREAM_DRAW,
        }
    }
}

#[derive(Copy, Clone, Debug)]
pub struct UniformLocation(#[allow(dead_code)] gl::GLint);

impl UniformLocation {
    pub const INVALID: Self = UniformLocation(-1);
}

#[derive(Debug)]
pub struct Capabilities {
    pub supports_multisampling: bool,
    pub supports_copy_image_sub_data: bool,
    pub supports_color_buffer_float: bool,
    pub supports_buffer_storage: bool,
    pub supports_advanced_blend_equation: bool,
    pub supports_dual_source_blending: bool,
    pub supports_khr_debug: bool,
    pub supports_texture_swizzle: bool,
    pub supports_nonzero_pbo_offsets: bool,
    pub supports_texture_usage: bool,
    pub supports_render_target_partial_update: bool,
    pub supports_shader_storage_object: bool,
    pub requires_batched_texture_uploads: Option<bool>,
    pub supports_alpha_target_clears: bool,
    pub requires_alpha_target_full_clear: bool,
    pub prefers_clear_scissor: bool,
    pub supports_render_target_invalidate: bool,
    pub supports_r8_texture_upload: bool,
    pub supports_qcom_tiled_rendering: bool,
    pub uses_native_clip_mask: bool,
    pub uses_native_antialiasing: bool,
    pub supports_image_external_essl3: bool,
    pub requires_vao_rebind_after_orphaning: bool,
    pub renderer_name: String,
}

#[derive(Clone, Debug)]
pub enum ShaderError {
    Compilation(String, String),
    Link(String, String),
}

struct SharedDepthTarget {
    rbo_id: RBOId,
    refcount: usize,
}

#[cfg(debug_assertions)]
impl Drop for SharedDepthTarget {
    fn drop(&mut self) {
        debug_assert!(thread::panicking() || self.refcount == 0);
    }
}

#[derive(PartialEq, Debug)]
enum TexStorageUsage {
    Never,
    NonBGRA8,
    Always,
}

#[derive(Copy, Clone, Debug)]
pub enum StrideAlignment {
    Bytes(NonZeroUsize),
    Pixels(NonZeroUsize),
}

impl StrideAlignment {
    pub fn num_bytes(&self, format: ImageFormat) -> NonZeroUsize {
        match *self {
            Self::Bytes(bytes) => bytes,
            Self::Pixels(pixels) => {
                assert!(format.bytes_per_pixel() > 0);
                NonZeroUsize::new(pixels.get() * format.bytes_per_pixel() as usize).unwrap()
            }
        }
    }
}
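
// A worked example of the stride rules (values are illustrative): with
// `StrideAlignment::Pixels(64)` and RGBA8 (4 bytes per pixel), `num_bytes`
// yields 256. A 300-pixel-wide RGBA8 upload has a natural stride of 1200
// bytes, which would then be rounded up to the next multiple of 256, i.e.
// 1280 bytes (see `round_up_to_multiple` in crate::util).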

const RESERVE_DEPTH_BITS: i32 = 2;

pub struct Device {
    gl: Rc<dyn gl::Gl>,

    base_gl: Option<Rc<dyn gl::Gl>>,

    bound_textures: [gl::GLuint; 16],
    bound_program: gl::GLuint,
    bound_program_name: Rc<std::ffi::CString>,
    bound_vao: gl::GLuint,
    bound_read_fbo: (FBOId, DeviceIntPoint),
    bound_draw_fbo: FBOId,
    default_read_fbo: FBOId,
    default_draw_fbo: FBOId,

    depth_available: bool,

    upload_method: UploadMethod,
    use_batched_texture_uploads: bool,
    use_draw_calls_for_texture_copy: bool,
    batched_upload_threshold: i32,

    capabilities: Capabilities,

    color_formats: TextureFormatPair<ImageFormat>,
    bgra_formats: TextureFormatPair<gl::GLuint>,
    bgra_pixel_type: gl::GLuint,
    swizzle_settings: SwizzleSettings,
    depth_format: gl::GLuint,

    depth_targets: FastHashMap<DeviceIntSize, SharedDepthTarget>,

    inside_frame: bool,
    crash_annotator: Option<Box<dyn CrashAnnotator>>,
    annotate_draw_call_crashes: bool,

    resource_override_path: Option<PathBuf>,

    use_optimized_shaders: bool,

    max_texture_size: i32,
    cached_programs: Option<Rc<ProgramCache>>,

    frame_id: GpuFrameId,

    texture_storage_usage: TexStorageUsage,

    required_pbo_stride: StrideAlignment,

    requires_null_terminated_shader_source: bool,

    requires_texture_external_unbind: bool,

    is_software_webrender: bool,

    extensions: Vec<String>,

    dump_shader_source: Option<String>,

    surface_origin_is_top_left: bool,

    #[cfg(debug_assertions)]
    shader_is_ready: bool,

    pub textures_created: u32,
    pub textures_deleted: u32,
}

#[derive(Clone, Copy, Debug)]
pub enum DrawTarget {
    Default {
        rect: FramebufferIntRect,
        total_size: FramebufferIntSize,
        surface_origin_is_top_left: bool,
    },
    Texture {
        dimensions: DeviceIntSize,
        with_depth: bool,
        fbo_id: FBOId,
        id: gl::GLuint,
        target: gl::GLuint,
    },
    External {
        fbo: FBOId,
        size: FramebufferIntSize,
    },
    NativeSurface {
        offset: DeviceIntPoint,
        external_fbo_id: u32,
        dimensions: DeviceIntSize,
    },
}

impl DrawTarget {
    pub fn new_default(size: DeviceIntSize, surface_origin_is_top_left: bool) -> Self {
        let total_size = device_size_as_framebuffer_size(size);
        DrawTarget::Default {
            rect: total_size.into(),
            total_size,
            surface_origin_is_top_left,
        }
    }

    pub fn is_default(&self) -> bool {
        match *self {
            DrawTarget::Default {..} => true,
            _ => false,
        }
    }

    pub fn from_texture(
        texture: &Texture,
        with_depth: bool,
    ) -> Self {
        let fbo_id = if with_depth {
            texture.fbo_with_depth.unwrap()
        } else {
            texture.fbo.unwrap()
        };

        DrawTarget::Texture {
            dimensions: texture.get_dimensions(),
            fbo_id,
            with_depth,
            id: texture.id,
            target: texture.target,
        }
    }

    pub fn dimensions(&self) -> DeviceIntSize {
        match *self {
            DrawTarget::Default { total_size, .. } => total_size.cast_unit(),
            DrawTarget::Texture { dimensions, .. } => dimensions,
            DrawTarget::External { size, .. } => size.cast_unit(),
            DrawTarget::NativeSurface { dimensions, .. } => dimensions,
        }
    }

    pub fn offset(&self) -> DeviceIntPoint {
        match *self {
            DrawTarget::Default { .. } |
            DrawTarget::Texture { .. } |
            DrawTarget::External { .. } => {
                DeviceIntPoint::zero()
            }
            DrawTarget::NativeSurface { offset, .. } => offset,
        }
    }

    pub fn to_framebuffer_rect(&self, device_rect: DeviceIntRect) -> FramebufferIntRect {
        let mut fb_rect = device_rect_as_framebuffer_rect(&device_rect);
        match *self {
            DrawTarget::Default { ref rect, surface_origin_is_top_left, .. } => {
                if !surface_origin_is_top_left {
                    let w = fb_rect.width();
                    let h = fb_rect.height();
                    fb_rect.min.x = fb_rect.min.x + rect.min.x;
                    fb_rect.min.y = rect.max.y - fb_rect.max.y;
                    fb_rect.max.x = fb_rect.min.x + w;
                    fb_rect.max.y = fb_rect.min.y + h;
                }
            }
            DrawTarget::Texture { .. } | DrawTarget::External { .. } | DrawTarget::NativeSurface { .. } => (),
        }
        fb_rect
    }

    pub fn surface_origin_is_top_left(&self) -> bool {
        match *self {
            DrawTarget::Default { surface_origin_is_top_left, .. } => surface_origin_is_top_left,
            DrawTarget::Texture { .. } | DrawTarget::External { .. } | DrawTarget::NativeSurface { .. } => true,
        }
    }

    pub fn build_scissor_rect(
        &self,
        scissor_rect: Option<DeviceIntRect>,
    ) -> FramebufferIntRect {
        let dimensions = self.dimensions();

        match scissor_rect {
            Some(scissor_rect) => match *self {
                DrawTarget::Default { ref rect, .. } => {
                    self.to_framebuffer_rect(scissor_rect)
                        .intersection(rect)
                        .unwrap_or_else(FramebufferIntRect::zero)
                }
                DrawTarget::NativeSurface { offset, .. } => {
                    device_rect_as_framebuffer_rect(&scissor_rect.translate(offset.to_vector()))
                }
                DrawTarget::Texture { .. } | DrawTarget::External { .. } => {
                    device_rect_as_framebuffer_rect(&scissor_rect)
                }
            }
            None => {
                FramebufferIntRect::from_size(
                    device_size_as_framebuffer_size(dimensions),
                )
            }
        }
    }
}
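
// A worked example of the y-flip in `to_framebuffer_rect`: for a default
// target of total size 1024x768 with a bottom-left GL origin
// (`surface_origin_is_top_left == false`), a device rect from (10, 20) to
// (110, 70) maps to the framebuffer rect from (10, 698) to (110, 748):
// min.y becomes 768 - 70 = 698 while the width and height are preserved.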

#[derive(Clone, Copy, Debug)]
pub enum ReadTarget {
    Default,
    Texture {
        fbo_id: FBOId,
    },
    External {
        fbo: FBOId,
    },
    NativeSurface {
        fbo_id: FBOId,
        offset: DeviceIntPoint,
    },
}

impl ReadTarget {
    pub fn from_texture(
        texture: &Texture,
    ) -> Self {
        ReadTarget::Texture {
            fbo_id: texture.fbo.unwrap(),
        }
    }

    fn offset(&self) -> DeviceIntPoint {
        match *self {
            ReadTarget::Default |
            ReadTarget::Texture { .. } |
            ReadTarget::External { .. } => {
                DeviceIntPoint::zero()
            }

            ReadTarget::NativeSurface { offset, .. } => {
                offset
            }
        }
    }
}

impl From<DrawTarget> for ReadTarget {
    fn from(t: DrawTarget) -> Self {
        match t {
            DrawTarget::Default { .. } => {
                ReadTarget::Default
            }
            DrawTarget::NativeSurface { external_fbo_id, offset, .. } => {
                ReadTarget::NativeSurface {
                    fbo_id: FBOId(external_fbo_id),
                    offset,
                }
            }
            DrawTarget::Texture { fbo_id, .. } => {
                ReadTarget::Texture { fbo_id }
            }
            DrawTarget::External { fbo, .. } => {
                ReadTarget::External { fbo }
            }
        }
    }
}

fn parse_mali_version(version_string: &str) -> Option<(u32, u32, u32)> {
    let (_prefix, version_string) = version_string.split_once("v")?;
    let (v_str, version_string) = version_string.split_once(".r")?;
    let v = v_str.parse().ok()?;

    let (r_str, version_string) = version_string.split_once("p")?;
    let r = r_str.parse().ok()?;

    let (p_str, _) = version_string.split_once("-").unwrap_or((version_string, ""));
    let p = p_str.parse().ok()?;

    Some((v, r, p))
}
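
// For example, a Mali GL_VERSION string shaped like
// "OpenGL ES 3.2 v1.r36p0-01eac0.<hash>" (illustrative) parses as
// Some((1, 36, 0)). `Device::new` compares this against (1, 36, 0) to decide
// whether render target invalidation must be disabled on Valhall drivers.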

fn is_mali_midgard(renderer_name: &str) -> bool {
    renderer_name.starts_with("Mali-T")
}

fn is_mali_bifrost(renderer_name: &str) -> bool {
    renderer_name == "Mali-G31"
        || renderer_name == "Mali-G51"
        || renderer_name == "Mali-G71"
        || renderer_name == "Mali-G52"
        || renderer_name == "Mali-G72"
        || renderer_name == "Mali-G76"
}

fn is_mali_valhall(renderer_name: &str) -> bool {
    renderer_name.starts_with("Mali-G") && !is_mali_bifrost(renderer_name)
}

impl Device {
    pub fn new(
        mut gl: Rc<dyn gl::Gl>,
        crash_annotator: Option<Box<dyn CrashAnnotator>>,
        resource_override_path: Option<PathBuf>,
        use_optimized_shaders: bool,
        upload_method: UploadMethod,
        batched_upload_threshold: i32,
        cached_programs: Option<Rc<ProgramCache>>,
        allow_texture_storage_support: bool,
        allow_texture_swizzling: bool,
        dump_shader_source: Option<String>,
        surface_origin_is_top_left: bool,
        panic_on_gl_error: bool,
    ) -> Device {
        let mut max_texture_size = [0];
        unsafe {
            gl.get_integer_v(gl::MAX_TEXTURE_SIZE, &mut max_texture_size);
        }

        let max_texture_size = max_texture_size[0].min(16384);

        let renderer_name = gl.get_string(gl::RENDERER);
        info!("Renderer: {}", renderer_name);
        let version_string = gl.get_string(gl::VERSION);
        info!("Version: {}", version_string);
        info!("Max texture size: {}", max_texture_size);

        let mut extension_count = [0];
        unsafe {
            gl.get_integer_v(gl::NUM_EXTENSIONS, &mut extension_count);
        }
        let extension_count = extension_count[0] as gl::GLuint;
        let mut extensions = Vec::new();
        for i in 0 .. extension_count {
            extensions.push(gl.get_string_i(gl::EXTENSIONS, i));
        }

        let supports_khr_debug = supports_extension(&extensions, "GL_KHR_debug")
            && !is_mali_valhall(&renderer_name);

        if panic_on_gl_error || cfg!(debug_assertions) {
            gl = gl::ErrorReactingGl::wrap(gl, move |gl, name, code| {
                if supports_khr_debug {
                    Self::log_driver_messages(gl);
                }
                error!("Caught GL error {:x} at {}", code, name);
                panic!("Caught GL error {:x} at {}", code, name);
            });
        }

        if supports_extension(&extensions, "GL_ANGLE_provoking_vertex") {
            gl.provoking_vertex_angle(gl::FIRST_VERTEX_CONVENTION);
        }

        let supports_texture_usage = supports_extension(&extensions, "GL_ANGLE_texture_usage");

        let is_emulator = renderer_name.starts_with("Android Emulator");
        let avoid_tex_image = is_emulator;
        let mut gl_version = [0; 2];
        unsafe {
            gl.get_integer_v(gl::MAJOR_VERSION, &mut gl_version[0..1]);
            gl.get_integer_v(gl::MINOR_VERSION, &mut gl_version[1..2]);
        }
        info!("GL context {:?} {}.{}", gl.get_type(), gl_version[0], gl_version[1]);

        let supports_texture_storage = allow_texture_storage_support && !cfg!(target_os = "macos") &&
            match gl.get_type() {
                gl::GlType::Gl => supports_extension(&extensions, "GL_ARB_texture_storage"),
                gl::GlType::Gles => true,
            };

        let supports_gles_bgra = supports_extension(&extensions, "GL_EXT_texture_format_BGRA8888");
        let supports_texture_storage_with_gles_bgra = supports_gles_bgra
            && supports_extension(&extensions, "GL_EXT_texture_storage")
            && !renderer_name.starts_with("Intel(R) HD Graphics for BayTrail")
            && !renderer_name.starts_with("Intel(R) HD Graphics for Atom(TM) x5/x7");

        let supports_texture_swizzle = allow_texture_swizzling &&
            match gl.get_type() {
                gl::GlType::Gl => gl_version >= [3, 3] ||
                    supports_extension(&extensions, "GL_ARB_texture_swizzle"),
                gl::GlType::Gles => true,
            };

        let (color_formats, bgra_formats, bgra_pixel_type, bgra8_sampling_swizzle, texture_storage_usage) = match gl.get_type() {
            gl::GlType::Gl if supports_texture_storage && supports_texture_swizzle => (
                TextureFormatPair::from(ImageFormat::RGBA8),
                TextureFormatPair { internal: gl::RGBA8, external: gl::RGBA },
                gl::UNSIGNED_BYTE,
                Swizzle::Bgra,
                TexStorageUsage::Always,
            ),
            gl::GlType::Gl => (
                TextureFormatPair { internal: ImageFormat::BGRA8, external: ImageFormat::BGRA8 },
                TextureFormatPair { internal: gl::RGBA, external: gl::BGRA },
                gl::UNSIGNED_INT_8_8_8_8_REV,
                Swizzle::Rgba,
                TexStorageUsage::Never,
            ),
            gl::GlType::Gles if supports_texture_storage_with_gles_bgra => (
                TextureFormatPair::from(ImageFormat::BGRA8),
                TextureFormatPair { internal: gl::BGRA8_EXT, external: gl::BGRA_EXT },
                gl::UNSIGNED_BYTE,
                Swizzle::Rgba,
                TexStorageUsage::Always,
            ),
            gl::GlType::Gles if supports_texture_swizzle => (
                TextureFormatPair::from(ImageFormat::RGBA8),
                TextureFormatPair { internal: gl::RGBA8, external: gl::RGBA },
                gl::UNSIGNED_BYTE,
                Swizzle::Bgra,
                TexStorageUsage::Always,
            ),
            gl::GlType::Gles if supports_gles_bgra && !avoid_tex_image => (
                TextureFormatPair::from(ImageFormat::BGRA8),
                TextureFormatPair::from(gl::BGRA_EXT),
                gl::UNSIGNED_BYTE,
                Swizzle::Rgba,
                TexStorageUsage::NonBGRA8,
            ),
            gl::GlType::Gles => {
                warn!("Neither BGRA nor texture swizzling is supported. Images may be rendered incorrectly.");
                (
                    TextureFormatPair::from(ImageFormat::RGBA8),
                    TextureFormatPair { internal: gl::RGBA8, external: gl::RGBA },
                    gl::UNSIGNED_BYTE,
                    Swizzle::Rgba,
                    TexStorageUsage::Always,
                )
            }
        };

        let is_software_webrender = renderer_name.starts_with("Software WebRender");
        let upload_method = if is_software_webrender {
            UploadMethod::Immediate
        } else {
            upload_method
        };
        let depth_format = gl::DEPTH_COMPONENT24;

        info!("GL texture cache {:?}, bgra {:?} swizzle {:?}, texture storage {:?}, depth {:?}",
            color_formats, bgra_formats, bgra8_sampling_swizzle, texture_storage_usage, depth_format);

        let supports_copy_image_sub_data = if renderer_name.starts_with("Mali") {
            false
        } else {
            supports_extension(&extensions, "GL_EXT_copy_image") ||
                supports_extension(&extensions, "GL_ARB_copy_image")
        };

        let is_x86_powervr_rogue_g6430 = renderer_name.starts_with("PowerVR Rogue G6430")
            && cfg!(target_arch = "x86");
        let supports_color_buffer_float = match gl.get_type() {
            gl::GlType::Gl => true,
            gl::GlType::Gles if is_x86_powervr_rogue_g6430 => false,
            gl::GlType::Gles => supports_extension(&extensions, "GL_EXT_color_buffer_float"),
        };

        let is_adreno = renderer_name.starts_with("Adreno");

        let supports_buffer_storage = if is_adreno {
            false
        } else {
            supports_extension(&extensions, "GL_EXT_buffer_storage") ||
                supports_extension(&extensions, "GL_ARB_buffer_storage")
        };

        let supports_advanced_blend_equation =
            supports_extension(&extensions, "GL_KHR_blend_equation_advanced") &&
            !is_adreno;

        let supports_dual_source_blending = match gl.get_type() {
            gl::GlType::Gl => supports_extension(&extensions, "GL_ARB_blend_func_extended") &&
                supports_extension(&extensions, "GL_ARB_explicit_attrib_location"),
            gl::GlType::Gles => supports_extension(&extensions, "GL_EXT_blend_func_extended"),
        };

        let use_optimized_shaders = use_optimized_shaders && !is_software_webrender;

        let requires_null_terminated_shader_source = is_emulator || renderer_name == "Mali-T628"
            || renderer_name == "Mali-T720" || renderer_name == "Mali-T760";

        let requires_texture_external_unbind = is_emulator;

        let is_macos = cfg!(target_os = "macos");
        let is_windows_angle = cfg!(target_os = "windows")
            && renderer_name.starts_with("ANGLE");
        let is_adreno_3xx = renderer_name.starts_with("Adreno (TM) 3");

        let required_pbo_stride = if is_adreno_3xx {
            StrideAlignment::Bytes(NonZeroUsize::new(128).unwrap())
        } else if is_adreno {
            StrideAlignment::Pixels(NonZeroUsize::new(64).unwrap())
        } else if is_macos {
            StrideAlignment::Bytes(NonZeroUsize::new(256).unwrap())
        } else if is_windows_angle {
            StrideAlignment::Bytes(NonZeroUsize::new(1).unwrap())
        } else {
            StrideAlignment::Bytes(NonZeroUsize::new(4).unwrap())
        };

        let supports_nonzero_pbo_offsets = !is_macos;

        let supports_render_target_partial_update =
            !is_mali_midgard(&renderer_name) && !is_mali_bifrost(&renderer_name);

        let supports_shader_storage_object = match gl.get_type() {
            gl::GlType::Gl => supports_extension(&extensions, "GL_ARB_shader_storage_buffer_object"),
            gl::GlType::Gles => gl_version >= [3, 1],
        };

        let uses_native_clip_mask = is_software_webrender;

        let uses_native_antialiasing = is_software_webrender;

        let mut android_mesa_version = None;
        if cfg!(target_os = "android") && renderer_name.starts_with("Mesa") {
            if let Some((_, mesa_version)) = version_string.split_once("Mesa ") {
                if let Some((major_str, _)) = mesa_version.split_once(".") {
                    if let Ok(major) = major_str.parse::<i32>() {
                        android_mesa_version = Some(major);
                    }
                }
            }
        }

        let supports_image_external_essl3 = match android_mesa_version {
            Some(major) if major < 20 => false,
            _ => supports_extension(&extensions, "GL_OES_EGL_image_external_essl3"),
        };

        let mut requires_batched_texture_uploads = None;
        if is_software_webrender {
            requires_batched_texture_uploads = Some(false);
        } else if renderer_name.starts_with("Mali-G") {
            requires_batched_texture_uploads = Some(true);
        }

        let is_adreno_510 = renderer_name.starts_with("Adreno (TM) 510");
        let supports_alpha_target_clears = !is_mali_midgard(&renderer_name) && !is_adreno_510;

        let is_adreno_4xx = renderer_name.starts_with("Adreno (TM) 4");
        let requires_alpha_target_full_clear = is_adreno_4xx;

        let prefers_clear_scissor = !cfg!(target_os = "android") || is_software_webrender;

        let mut supports_render_target_invalidate = true;

        let is_powervr_rogue = renderer_name.starts_with("PowerVR Rogue");
        if is_powervr_rogue {
            supports_render_target_invalidate = false;
        }

        if is_mali_valhall(&renderer_name) {
            match parse_mali_version(&version_string) {
                Some(version) if version >= (1, 36, 0) => supports_render_target_invalidate = false,
                _ => {}
            }
        }

        let supports_r8_texture_upload = if cfg!(target_os = "linux")
            && renderer_name.starts_with("AMD Radeon RX")
        {
            false
        } else {
            true
        };

        let supports_qcom_tiled_rendering = if is_adreno && version_string.contains("V@0490") {
            false
        } else if renderer_name == "Adreno (TM) 308" {
            false
        } else {
            supports_extension(&extensions, "GL_QCOM_tiled_rendering")
        };

        let requires_vao_rebind_after_orphaning = is_adreno_3xx;

        Device {
            gl,
            base_gl: None,
            crash_annotator,
            annotate_draw_call_crashes: false,
            resource_override_path,
            use_optimized_shaders,
            upload_method,
            use_batched_texture_uploads: requires_batched_texture_uploads.unwrap_or(false),
            use_draw_calls_for_texture_copy: false,
            batched_upload_threshold,

            inside_frame: false,

            capabilities: Capabilities {
                supports_multisampling: false,
                supports_copy_image_sub_data,
                supports_color_buffer_float,
                supports_buffer_storage,
                supports_advanced_blend_equation,
                supports_dual_source_blending,
                supports_khr_debug,
                supports_texture_swizzle,
                supports_nonzero_pbo_offsets,
                supports_texture_usage,
                supports_render_target_partial_update,
                supports_shader_storage_object,
                requires_batched_texture_uploads,
                supports_alpha_target_clears,
                requires_alpha_target_full_clear,
                prefers_clear_scissor,
                supports_render_target_invalidate,
                supports_r8_texture_upload,
                supports_qcom_tiled_rendering,
                uses_native_clip_mask,
                uses_native_antialiasing,
                supports_image_external_essl3,
                requires_vao_rebind_after_orphaning,
                renderer_name,
            },

            color_formats,
            bgra_formats,
            bgra_pixel_type,
            swizzle_settings: SwizzleSettings {
                bgra8_sampling_swizzle,
            },
            depth_format,

            depth_targets: FastHashMap::default(),

            bound_textures: [0; 16],
            bound_program: 0,
            bound_program_name: Rc::new(std::ffi::CString::new("").unwrap()),
            bound_vao: 0,
            bound_read_fbo: (FBOId(0), DeviceIntPoint::zero()),
            bound_draw_fbo: FBOId(0),
            default_read_fbo: FBOId(0),
            default_draw_fbo: FBOId(0),

            depth_available: true,

            max_texture_size,
            cached_programs,
            frame_id: GpuFrameId(0),
            extensions,
            texture_storage_usage,
            requires_null_terminated_shader_source,
            requires_texture_external_unbind,
            is_software_webrender,
            required_pbo_stride,
            dump_shader_source,
            surface_origin_is_top_left,

            #[cfg(debug_assertions)]
            shader_is_ready: false,

            textures_created: 0,
            textures_deleted: 0,
        }
    }

    pub fn gl(&self) -> &dyn gl::Gl {
        &*self.gl
    }

    pub fn rc_gl(&self) -> &Rc<dyn gl::Gl> {
        &self.gl
    }

    pub fn set_parameter(&mut self, param: &Parameter) {
        match param {
            Parameter::Bool(BoolParameter::PboUploads, enabled) => {
                if !self.is_software_webrender {
                    self.upload_method = if *enabled {
                        UploadMethod::PixelBuffer(crate::ONE_TIME_USAGE_HINT)
                    } else {
                        UploadMethod::Immediate
                    };
                }
            }
            Parameter::Bool(BoolParameter::BatchedUploads, enabled) => {
                if self.capabilities.requires_batched_texture_uploads.is_none() {
                    self.use_batched_texture_uploads = *enabled;
                }
            }
            Parameter::Bool(BoolParameter::DrawCallsForTextureCopy, enabled) => {
                self.use_draw_calls_for_texture_copy = *enabled;
            }
            Parameter::Int(IntParameter::BatchedUploadThreshold, threshold) => {
                self.batched_upload_threshold = *threshold;
            }
            _ => {}
        }
    }

    pub fn clamp_max_texture_size(&mut self, size: i32) {
        self.max_texture_size = self.max_texture_size.min(size);
    }

    pub fn max_texture_size(&self) -> i32 {
        self.max_texture_size
    }

    pub fn surface_origin_is_top_left(&self) -> bool {
        self.surface_origin_is_top_left
    }

    pub fn get_capabilities(&self) -> &Capabilities {
        &self.capabilities
    }

    pub fn preferred_color_formats(&self) -> TextureFormatPair<ImageFormat> {
        self.color_formats.clone()
    }

    pub fn swizzle_settings(&self) -> Option<SwizzleSettings> {
        if self.capabilities.supports_texture_swizzle {
            Some(self.swizzle_settings)
        } else {
            None
        }
    }

    pub fn depth_bits(&self) -> i32 {
        match self.depth_format {
            gl::DEPTH_COMPONENT16 => 16,
            gl::DEPTH_COMPONENT24 => 24,
            _ => panic!("Unknown depth format {:?}", self.depth_format),
        }
    }

    pub fn max_depth_ids(&self) -> i32 {
        1 << (self.depth_bits() - RESERVE_DEPTH_BITS)
    }

    pub fn ortho_near_plane(&self) -> f32 {
        -self.max_depth_ids() as f32
    }

    pub fn ortho_far_plane(&self) -> f32 {
        (self.max_depth_ids() - 1) as f32
    }
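
    // Worked example with the default DEPTH_COMPONENT24 format: depth_bits()
    // is 24 and RESERVE_DEPTH_BITS is 2, so max_depth_ids() = 1 << 22 =
    // 4_194_304, giving an ortho depth range of [-4_194_304.0, 4_194_303.0].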
2051
2052 pub fn required_pbo_stride(&self) -> StrideAlignment {
2053 self.required_pbo_stride
2054 }
2055
2056 pub fn upload_method(&self) -> &UploadMethod {
2057 &self.upload_method
2058 }
2059
2060 pub fn use_batched_texture_uploads(&self) -> bool {
2061 self.use_batched_texture_uploads
2062 }
2063
2064 pub fn use_draw_calls_for_texture_copy(&self) -> bool {
2065 self.use_draw_calls_for_texture_copy
2066 }
2067
2068 pub fn batched_upload_threshold(&self) -> i32 {
2069 self.batched_upload_threshold
2070 }
2071
2072 pub fn reset_state(&mut self) {
2073 for i in 0 .. self.bound_textures.len() {
2074 self.bound_textures[i] = 0;
2075 self.gl.active_texture(gl::TEXTURE0 + i as gl::GLuint);
2076 self.gl.bind_texture(gl::TEXTURE_2D, 0);
2077 }
2078
2079 self.bound_vao = 0;
2080 self.gl.bind_vertex_array(0);
2081
2082 self.bound_read_fbo = (self.default_read_fbo, DeviceIntPoint::zero());
2083 self.gl.bind_framebuffer(gl::READ_FRAMEBUFFER, self.default_read_fbo.0);
2084
2085 self.bound_draw_fbo = self.default_draw_fbo;
2086 self.gl.bind_framebuffer(gl::DRAW_FRAMEBUFFER, self.bound_draw_fbo.0);
2087 }
2088
2089 #[cfg(debug_assertions)]
2090 fn print_shader_errors(source: &str, log: &str) {
2091 if !log.starts_with("0:") && !log.starts_with("0(") {
2093 return;
2094 }
2095 let end_pos = match log[2..].chars().position(|c| !c.is_digit(10)) {
2096 Some(pos) => 2 + pos,
2097 None => return,
2098 };
2099 let base_line_number = match log[2 .. end_pos].parse::<usize>() {
2100 Ok(number) if number >= 2 => number - 2,
2101 _ => return,
2102 };
2103 for (line, prefix) in source.lines().skip(base_line_number).zip(&["|",">","|"]) {
2104 error!("{}\t{}", prefix, line);
2105 }
2106 }
2107
2108 pub fn compile_shader(
2109 &self,
2110 name: &str,
2111 shader_type: gl::GLenum,
2112 source: &String,
2113 ) -> Result<gl::GLuint, ShaderError> {
2114 debug!("compile {}", name);
2115 let id = self.gl.create_shader(shader_type);
2116
2117 let mut new_source = Cow::from(source.as_str());
2118 if self.requires_null_terminated_shader_source {
2121 new_source.to_mut().push('\0');
2122 }
2123
2124 self.gl.shader_source(id, &[new_source.as_bytes()]);
2125 self.gl.compile_shader(id);
2126 let log = self.gl.get_shader_info_log(id);
2127 let mut status = [0];
2128 unsafe {
2129 self.gl.get_shader_iv(id, gl::COMPILE_STATUS, &mut status);
2130 }
2131 if status[0] == 0 {
2132 let type_str = match shader_type {
2133 gl::VERTEX_SHADER => "vertex",
2134 gl::FRAGMENT_SHADER => "fragment",
2135 _ => panic!("Unexpected shader type {:x}", shader_type),
2136 };
2137 error!("Failed to compile {} shader: {}\n{}", type_str, name, log);
2138 #[cfg(debug_assertions)]
2139 Self::print_shader_errors(source, &log);
2140 Err(ShaderError::Compilation(name.to_string(), log))
2141 } else {
2142 if !log.is_empty() {
2143 warn!("Warnings detected on shader: {}\n{}", name, log);
2144 }
2145 Ok(id)
2146 }
2147 }
2148
2149 pub fn begin_frame(&mut self) -> GpuFrameId {
2150 debug_assert!(!self.inside_frame);
2151 self.inside_frame = true;
2152 #[cfg(debug_assertions)]
2153 {
2154 self.shader_is_ready = false;
2155 }
2156
2157 self.textures_created = 0;
2158 self.textures_deleted = 0;
2159
2160 let being_profiled = profiler::thread_is_being_profiled();
2163 let using_wrapper = self.base_gl.is_some();
2164
2165 if cfg!(any(target_arch = "arm", target_arch = "aarch64"))
2170 && cfg!(target_os = "android")
2171 && being_profiled
2172 && !using_wrapper
2173 {
2174 fn note(name: &str, duration: Duration) {
2175 profiler::add_text_marker("OpenGL Calls", name, duration);
2176 }
2177 let threshold = Duration::from_millis(1);
2178 let wrapped = gl::ProfilingGl::wrap(self.gl.clone(), threshold, note);
2179 let base = mem::replace(&mut self.gl, wrapped);
2180 self.base_gl = Some(base);
2181 } else if !being_profiled && using_wrapper {
2182 self.gl = self.base_gl.take().unwrap();
2183 }
2184
2185 let mut default_read_fbo = [0];
2187 unsafe {
2188 self.gl.get_integer_v(gl::READ_FRAMEBUFFER_BINDING, &mut default_read_fbo);
2189 }
2190 self.default_read_fbo = FBOId(default_read_fbo[0] as gl::GLuint);
2191 let mut default_draw_fbo = [0];
2192 unsafe {
2193 self.gl.get_integer_v(gl::DRAW_FRAMEBUFFER_BINDING, &mut default_draw_fbo);
2194 }
2195 self.default_draw_fbo = FBOId(default_draw_fbo[0] as gl::GLuint);
2196
2197 self.bound_program = 0;
2199 self.gl.use_program(0);
2200
2201 self.reset_state();
2203
2204 self.gl.pixel_store_i(gl::UNPACK_ALIGNMENT, 1);
2206 self.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, 0);
2207
2208 self.gl.active_texture(gl::TEXTURE0);
2210
2211 self.frame_id
2212 }
2213
2214 fn bind_texture_impl(
2215 &mut self,
2216 slot: TextureSlot,
2217 id: gl::GLuint,
2218 target: gl::GLenum,
2219 set_swizzle: Option<Swizzle>,
2220 image_rendering: Option<ImageRendering>,
2221 ) {
2222 debug_assert!(self.inside_frame);
2223
2224 if self.bound_textures[slot.0] != id || set_swizzle.is_some() || image_rendering.is_some() {
2225 self.gl.active_texture(gl::TEXTURE0 + slot.0 as gl::GLuint);
2226 if target == gl::TEXTURE_2D && self.requires_texture_external_unbind {
2229 self.gl.bind_texture(gl::TEXTURE_EXTERNAL_OES, 0);
2230 }
2231 self.gl.bind_texture(target, id);
2232 if let Some(swizzle) = set_swizzle {
2233 if self.capabilities.supports_texture_swizzle {
2234 let components = match swizzle {
2235 Swizzle::Rgba => [gl::RED, gl::GREEN, gl::BLUE, gl::ALPHA],
2236 Swizzle::Bgra => [gl::BLUE, gl::GREEN, gl::RED, gl::ALPHA],
2237 };
2238 self.gl.tex_parameter_i(target, gl::TEXTURE_SWIZZLE_R, components[0] as i32);
2239 self.gl.tex_parameter_i(target, gl::TEXTURE_SWIZZLE_G, components[1] as i32);
2240 self.gl.tex_parameter_i(target, gl::TEXTURE_SWIZZLE_B, components[2] as i32);
2241 self.gl.tex_parameter_i(target, gl::TEXTURE_SWIZZLE_A, components[3] as i32);
2242 } else {
2243 debug_assert_eq!(swizzle, Swizzle::default());
2244 }
2245 }
2246 if let Some(image_rendering) = image_rendering {
2247 let filter = match image_rendering {
2248 ImageRendering::Auto | ImageRendering::CrispEdges => gl::LINEAR,
2249 ImageRendering::Pixelated => gl::NEAREST,
2250 };
2251 self.gl.tex_parameter_i(target, gl::TEXTURE_MIN_FILTER, filter as i32);
2252 self.gl.tex_parameter_i(target, gl::TEXTURE_MAG_FILTER, filter as i32);
2253 }
2254 self.gl.active_texture(gl::TEXTURE0);
2255 self.bound_textures[slot.0] = id;
2256 }
2257 }
2258
2259 pub fn bind_texture<S>(&mut self, slot: S, texture: &Texture, swizzle: Swizzle)
2260 where
2261 S: Into<TextureSlot>,
2262 {
2263 let old_swizzle = texture.active_swizzle.replace(swizzle);
2264 let set_swizzle = if old_swizzle != swizzle {
2265 Some(swizzle)
2266 } else {
2267 None
2268 };
2269 self.bind_texture_impl(slot.into(), texture.id, texture.target, set_swizzle, None);
2270 }
2271
2272 pub fn bind_external_texture<S>(&mut self, slot: S, external_texture: &ExternalTexture)
2273 where
2274 S: Into<TextureSlot>,
2275 {
2276 self.bind_texture_impl(
2277 slot.into(),
2278 external_texture.id,
2279 external_texture.target,
2280 None,
2281 Some(external_texture.image_rendering),
2282 );
2283 }
2284
2285 pub fn bind_read_target_impl(
2286 &mut self,
2287 fbo_id: FBOId,
2288 offset: DeviceIntPoint,
2289 ) {
2290 debug_assert!(self.inside_frame);
2291
2292 if self.bound_read_fbo != (fbo_id, offset) {
2293 fbo_id.bind(self.gl(), FBOTarget::Read);
2294 }
2295
2296 self.bound_read_fbo = (fbo_id, offset);
2297 }
2298
2299 pub fn bind_read_target(&mut self, target: ReadTarget) {
2300 let fbo_id = match target {
2301 ReadTarget::Default => self.default_read_fbo,
2302 ReadTarget::Texture { fbo_id } => fbo_id,
2303 ReadTarget::External { fbo } => fbo,
2304 ReadTarget::NativeSurface { fbo_id, .. } => fbo_id,
2305 };
2306
2307 self.bind_read_target_impl(fbo_id, target.offset())
2308 }
2309
2310 fn bind_draw_target_impl(&mut self, fbo_id: FBOId) {
2311 debug_assert!(self.inside_frame);
2312
2313 if self.bound_draw_fbo != fbo_id {
2314 self.bound_draw_fbo = fbo_id;
2315 fbo_id.bind(self.gl(), FBOTarget::Draw);
2316 }
2317 }
2318
2319 pub fn reset_read_target(&mut self) {
2320 let fbo = self.default_read_fbo;
2321 self.bind_read_target_impl(fbo, DeviceIntPoint::zero());
2322 }
2323
2324
2325 pub fn reset_draw_target(&mut self) {
2326 let fbo = self.default_draw_fbo;
2327 self.bind_draw_target_impl(fbo);
2328 self.depth_available = true;
2329 }
2330
2331 pub fn bind_draw_target(
2332 &mut self,
2333 target: DrawTarget,
2334 ) {
2335 let (fbo_id, rect, depth_available) = match target {
2336 DrawTarget::Default { rect, .. } => {
2337 (self.default_draw_fbo, rect, false)
2338 }
2339 DrawTarget::Texture { dimensions, fbo_id, with_depth, .. } => {
2340 let rect = FramebufferIntRect::from_size(
2341 device_size_as_framebuffer_size(dimensions),
2342 );
2343 (fbo_id, rect, with_depth)
2344 },
2345 DrawTarget::External { fbo, size } => {
2346 (fbo, size.into(), false)
2347 }
2348 DrawTarget::NativeSurface { external_fbo_id, offset, dimensions, .. } => {
2349 (
2350 FBOId(external_fbo_id),
2351 device_rect_as_framebuffer_rect(&DeviceIntRect::from_origin_and_size(offset, dimensions)),
2352 true
2353 )
2354 }
2355 };
2356
2357 self.depth_available = depth_available;
2358 self.bind_draw_target_impl(fbo_id);
2359 self.gl.viewport(
2360 rect.min.x,
2361 rect.min.y,
2362 rect.width(),
2363 rect.height(),
2364 );
2365 }
2366
2367 pub fn create_fbo(&mut self) -> FBOId {
2370 FBOId(self.gl.gen_framebuffers(1)[0])
2371 }
2372
2373 pub fn create_fbo_for_external_texture(&mut self, texture_id: u32) -> FBOId {
2375 let fbo = self.create_fbo();
2376 fbo.bind(self.gl(), FBOTarget::Draw);
2377 self.gl.framebuffer_texture_2d(
2378 gl::DRAW_FRAMEBUFFER,
2379 gl::COLOR_ATTACHMENT0,
2380 gl::TEXTURE_2D,
2381 texture_id,
2382 0,
2383 );
2384 debug_assert_eq!(
2385 self.gl.check_frame_buffer_status(gl::DRAW_FRAMEBUFFER),
2386 gl::FRAMEBUFFER_COMPLETE,
2387 "Incomplete framebuffer",
2388 );
2389 self.bound_draw_fbo.bind(self.gl(), FBOTarget::Draw);
2390 fbo
2391 }
2392
2393 pub fn delete_fbo(&mut self, fbo: FBOId) {
2394 self.gl.delete_framebuffers(&[fbo.0]);
2395 }
2396
2397 pub fn bind_external_draw_target(&mut self, fbo_id: FBOId) {
2398 debug_assert!(self.inside_frame);
2399
2400 if self.bound_draw_fbo != fbo_id {
2401 self.bound_draw_fbo = fbo_id;
2402 fbo_id.bind(self.gl(), FBOTarget::Draw);
2403 }
2404 }
2405
    pub fn link_program(
        &mut self,
        program: &mut Program,
        descriptor: &VertexDescriptor,
    ) -> Result<(), ShaderError> {
        profile_scope!("compile shader");

        let _guard = CrashAnnotatorGuard::new(
            &self.crash_annotator,
            CrashAnnotation::CompileShader,
            &program.source_info.full_name_cstr,
        );

        assert!(!program.is_initialized());
        let mut build_program = true;
        let info = &program.source_info;

        if let Some(ref cached_programs) = self.cached_programs {
            if cached_programs.entries.borrow().get(&program.source_info.digest).is_none() {
                if let Some(ref handler) = cached_programs.program_cache_handler {
                    handler.try_load_shader_from_disk(&program.source_info.digest, cached_programs);
                    if let Some(entry) = cached_programs.entries.borrow().get(&program.source_info.digest) {
                        self.gl.program_binary(program.id, entry.binary.format, &entry.binary.bytes);
                    }
                }
            }

            if let Some(entry) = cached_programs.entries.borrow_mut().get_mut(&info.digest) {
                let mut link_status = [0];
                unsafe {
                    self.gl.get_program_iv(program.id, gl::LINK_STATUS, &mut link_status);
                }
                if link_status[0] == 0 {
                    let error_log = self.gl.get_program_info_log(program.id);
                    error!(
                        "Failed to load a program object with a program binary: {} renderer {}\n{}",
                        &info.base_filename,
                        self.capabilities.renderer_name,
                        error_log
                    );
                    if let Some(ref program_cache_handler) = cached_programs.program_cache_handler {
                        program_cache_handler.notify_program_binary_failed(&entry.binary);
                    }
                } else {
                    entry.linked = true;
                    build_program = false;
                }
            }
        }

        if build_program {
            let vs_source = info.compute_source(self, ShaderKind::Vertex);
            let vs_id = match self.compile_shader(&info.full_name(), gl::VERTEX_SHADER, &vs_source) {
                Ok(vs_id) => vs_id,
                Err(err) => return Err(err),
            };

            let fs_source = info.compute_source(self, ShaderKind::Fragment);
            let fs_id =
                match self.compile_shader(&info.full_name(), gl::FRAGMENT_SHADER, &fs_source) {
                    Ok(fs_id) => fs_id,
                    Err(err) => {
                        self.gl.delete_shader(vs_id);
                        return Err(err);
                    }
                };

            if Some(info.base_filename) == self.dump_shader_source.as_ref().map(String::as_ref) {
                let path = std::path::Path::new(info.base_filename);
                std::fs::write(path.with_extension("vert"), vs_source).unwrap();
                std::fs::write(path.with_extension("frag"), fs_source).unwrap();
            }

            self.gl.attach_shader(program.id, vs_id);
            self.gl.attach_shader(program.id, fs_id);

            for (i, attr) in descriptor
                .vertex_attributes
                .iter()
                .chain(descriptor.instance_attributes.iter())
                .enumerate()
            {
                self.gl
                    .bind_attrib_location(program.id, i as gl::GLuint, attr.name);
            }

            if self.cached_programs.is_some() {
                self.gl.program_parameter_i(program.id, gl::PROGRAM_BINARY_RETRIEVABLE_HINT, gl::TRUE as gl::GLint);
            }

            self.gl.link_program(program.id);

            self.gl.detach_shader(program.id, vs_id);
            self.gl.detach_shader(program.id, fs_id);
            self.gl.delete_shader(vs_id);
            self.gl.delete_shader(fs_id);

            let mut link_status = [0];
            unsafe {
                self.gl.get_program_iv(program.id, gl::LINK_STATUS, &mut link_status);
            }
            if link_status[0] == 0 {
                let error_log = self.gl.get_program_info_log(program.id);
                error!(
                    "Failed to link shader program: {}\n{}",
                    &info.base_filename,
                    error_log
                );
                self.gl.delete_program(program.id);
                return Err(ShaderError::Link(info.base_filename.to_owned(), error_log));
            }

            if let Some(ref cached_programs) = self.cached_programs {
                if !cached_programs.entries.borrow().contains_key(&info.digest) {
                    let (buffer, format) = self.gl.get_program_binary(program.id);
                    if buffer.len() > 0 {
                        let binary = Arc::new(ProgramBinary::new(buffer, format, info.digest.clone()));
                        cached_programs.add_new_program_binary(binary);
                    }
                }
            }
        }

        program.is_initialized = true;
        program.u_transform = self.gl.get_uniform_location(program.id, "uTransform");
        program.u_texture_size = self.gl.get_uniform_location(program.id, "uTextureSize");

        Ok(())
    }

    pub fn bind_program(&mut self, program: &Program) -> bool {
        debug_assert!(self.inside_frame);
        debug_assert!(program.is_initialized());
        if !program.is_initialized() {
            return false;
        }
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = true;
        }

        if self.bound_program != program.id {
            self.gl.use_program(program.id);
            self.bound_program = program.id;
            self.bound_program_name = program.source_info.full_name_cstr.clone();
        }
        true
    }

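    /// Allocates a new texture, clamping the requested dimensions to
    /// `max_texture_size`. Storage is allocated via `glTexStorage2D` when the
    /// device prefers it for the format (with a full mip chain for trilinear
    /// filtering), falling back to `glTexImage2D`, and FBOs are initialized
    /// up front for render targets.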
    pub fn create_texture(
        &mut self,
        target: ImageBufferKind,
        format: ImageFormat,
        mut width: i32,
        mut height: i32,
        filter: TextureFilter,
        render_target: Option<RenderTargetInfo>,
    ) -> Texture {
        debug_assert!(self.inside_frame);

        if width > self.max_texture_size || height > self.max_texture_size {
            error!("Attempting to allocate a texture of size {}x{} above the limit, trimming", width, height);
            width = width.min(self.max_texture_size);
            height = height.min(self.max_texture_size);
        }

        let mut texture = Texture {
            id: self.gl.gen_textures(1)[0],
            target: get_gl_target(target),
            size: DeviceIntSize::new(width, height),
            format,
            filter,
            active_swizzle: Cell::default(),
            fbo: None,
            fbo_with_depth: None,
            last_frame_used: self.frame_id,
            flags: TextureFlags::default(),
        };
        self.bind_texture(DEFAULT_TEXTURE, &texture, Swizzle::default());
        self.set_texture_parameters(texture.target, filter);

        if self.capabilities.supports_texture_usage && render_target.is_some() {
            self.gl.tex_parameter_i(texture.target, gl::TEXTURE_USAGE_ANGLE, gl::FRAMEBUFFER_ATTACHMENT_ANGLE as gl::GLint);
        }

        let desc = self.gl_describe_format(texture.format);

        let mipmap_levels = if texture.filter == TextureFilter::Trilinear {
            let max_dimension = cmp::max(width, height);
            (max_dimension as f64).log2() as gl::GLint + 1
        } else {
            1
        };

        self.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, 0);

        let use_texture_storage = match self.texture_storage_usage {
            TexStorageUsage::Always => true,
            TexStorageUsage::NonBGRA8 => texture.format != ImageFormat::BGRA8,
            TexStorageUsage::Never => false,
        };
        if use_texture_storage {
            self.gl.tex_storage_2d(
                texture.target,
                mipmap_levels,
                desc.internal,
                texture.size.width as gl::GLint,
                texture.size.height as gl::GLint,
            );
        } else {
            self.gl.tex_image_2d(
                texture.target,
                0,
                desc.internal as gl::GLint,
                texture.size.width as gl::GLint,
                texture.size.height as gl::GLint,
                0,
                desc.external,
                desc.pixel_type,
                None,
            );
        }

        if let Some(rt_info) = render_target {
            self.init_fbos(&mut texture, false);
            if rt_info.has_depth {
                self.init_fbos(&mut texture, true);
            }
        }

        self.textures_created += 1;

        texture
    }

    fn set_texture_parameters(&mut self, target: gl::GLuint, filter: TextureFilter) {
        let mag_filter = match filter {
            TextureFilter::Nearest => gl::NEAREST,
            TextureFilter::Linear | TextureFilter::Trilinear => gl::LINEAR,
        };

        let min_filter = match filter {
            TextureFilter::Nearest => gl::NEAREST,
            TextureFilter::Linear => gl::LINEAR,
            TextureFilter::Trilinear => gl::LINEAR_MIPMAP_LINEAR,
        };

        self.gl
            .tex_parameter_i(target, gl::TEXTURE_MAG_FILTER, mag_filter as gl::GLint);
        self.gl
            .tex_parameter_i(target, gl::TEXTURE_MIN_FILTER, min_filter as gl::GLint);

        self.gl
            .tex_parameter_i(target, gl::TEXTURE_WRAP_S, gl::CLAMP_TO_EDGE as gl::GLint);
        self.gl
            .tex_parameter_i(target, gl::TEXTURE_WRAP_T, gl::CLAMP_TO_EDGE as gl::GLint);
    }

    pub fn copy_entire_texture(
        &mut self,
        dst: &mut Texture,
        src: &Texture,
    ) {
        debug_assert!(self.inside_frame);
        debug_assert!(dst.size.width >= src.size.width);
        debug_assert!(dst.size.height >= src.size.height);

        self.copy_texture_sub_region(
            src,
            0,
            0,
            dst,
            0,
            0,
            src.size.width as _,
            src.size.height as _,
        );
    }

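    /// Copies a sub-region between two distinct textures, using
    /// `glCopyImageSubData` when supported and falling back to a framebuffer
    /// blit otherwise.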
    pub fn copy_texture_sub_region(
        &mut self,
        src_texture: &Texture,
        src_x: usize,
        src_y: usize,
        dest_texture: &Texture,
        dest_x: usize,
        dest_y: usize,
        width: usize,
        height: usize,
    ) {
        if self.capabilities.supports_copy_image_sub_data {
            assert_ne!(
                src_texture.id, dest_texture.id,
                "glCopyImageSubData's behaviour is undefined if src and dst images are identical and the rectangles overlap."
            );
            unsafe {
                self.gl.copy_image_sub_data(
                    src_texture.id,
                    src_texture.target,
                    0,
                    src_x as _,
                    src_y as _,
                    0,
                    dest_texture.id,
                    dest_texture.target,
                    0,
                    dest_x as _,
                    dest_y as _,
                    0,
                    width as _,
                    height as _,
                    1,
                );
            }
        } else {
            let src_offset = FramebufferIntPoint::new(src_x as i32, src_y as i32);
            let dest_offset = FramebufferIntPoint::new(dest_x as i32, dest_y as i32);
            let size = FramebufferIntSize::new(width as i32, height as i32);

            self.blit_render_target(
                ReadTarget::from_texture(src_texture),
                FramebufferIntRect::from_origin_and_size(src_offset, size),
                DrawTarget::from_texture(dest_texture, false),
                FramebufferIntRect::from_origin_and_size(dest_offset, size),
                TextureFilter::Nearest,
            );
        }
    }

    pub fn invalidate_render_target(&mut self, texture: &Texture) {
        if self.capabilities.supports_render_target_invalidate {
            let (fbo, attachments) = if texture.supports_depth() {
                (&texture.fbo_with_depth,
                 &[gl::COLOR_ATTACHMENT0, gl::DEPTH_ATTACHMENT] as &[gl::GLenum])
            } else {
                (&texture.fbo, &[gl::COLOR_ATTACHMENT0] as &[gl::GLenum])
            };

            if let Some(fbo_id) = fbo {
                let original_bound_fbo = self.bound_draw_fbo;
                self.bind_external_draw_target(*fbo_id);
                self.gl.invalidate_framebuffer(gl::FRAMEBUFFER, attachments);
                self.bind_external_draw_target(original_bound_fbo);
            }
        }
    }

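    /// Invalidates the depth buffer of the currently bound draw target. Note
    /// that the default framebuffer names its depth attachment `gl::DEPTH`,
    /// whereas application framebuffers use `gl::DEPTH_ATTACHMENT`.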
    pub fn invalidate_depth_target(&mut self) {
        assert!(self.depth_available);
        let attachments = if self.bound_draw_fbo == self.default_draw_fbo {
            &[gl::DEPTH] as &[gl::GLenum]
        } else {
            &[gl::DEPTH_ATTACHMENT] as &[gl::GLenum]
        };
        self.gl.invalidate_framebuffer(gl::DRAW_FRAMEBUFFER, attachments);
    }

    pub fn reuse_render_target<T: Texel>(
        &mut self,
        texture: &mut Texture,
        rt_info: RenderTargetInfo,
    ) {
        texture.last_frame_used = self.frame_id;

        if rt_info.has_depth && !texture.supports_depth() {
            self.init_fbos(texture, true);
        }
    }

    fn init_fbos(&mut self, texture: &mut Texture, with_depth: bool) {
        let (fbo, depth_rb) = if with_depth {
            let depth_target = self.acquire_depth_target(texture.get_dimensions());
            (&mut texture.fbo_with_depth, Some(depth_target))
        } else {
            (&mut texture.fbo, None)
        };

        assert!(fbo.is_none());
        let fbo_id = FBOId(*self.gl.gen_framebuffers(1).first().unwrap());
        *fbo = Some(fbo_id);

        let original_bound_fbo = self.bound_draw_fbo;

        self.bind_external_draw_target(fbo_id);

        self.gl.framebuffer_texture_2d(
            gl::DRAW_FRAMEBUFFER,
            gl::COLOR_ATTACHMENT0,
            texture.target,
            texture.id,
            0,
        );

        if let Some(depth_rb) = depth_rb {
            self.gl.framebuffer_renderbuffer(
                gl::DRAW_FRAMEBUFFER,
                gl::DEPTH_ATTACHMENT,
                gl::RENDERBUFFER,
                depth_rb.0,
            );
        }

        debug_assert_eq!(
            self.gl.check_frame_buffer_status(gl::DRAW_FRAMEBUFFER),
            gl::FRAMEBUFFER_COMPLETE,
            "Incomplete framebuffer",
        );

        self.bind_external_draw_target(original_bound_fbo);
    }

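    /// Returns a depth renderbuffer of the given dimensions, allocating one
    /// on first use. Depth targets are shared per size and reference counted,
    /// so each `acquire_depth_target` must eventually be paired with a
    /// `release_depth_target` of the same dimensions.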
    fn acquire_depth_target(&mut self, dimensions: DeviceIntSize) -> RBOId {
        let gl = &self.gl;
        let depth_format = self.depth_format;
        let target = self.depth_targets.entry(dimensions).or_insert_with(|| {
            let renderbuffer_ids = gl.gen_renderbuffers(1);
            let depth_rb = renderbuffer_ids[0];
            gl.bind_renderbuffer(gl::RENDERBUFFER, depth_rb);
            gl.renderbuffer_storage(
                gl::RENDERBUFFER,
                depth_format,
                dimensions.width as _,
                dimensions.height as _,
            );
            SharedDepthTarget {
                rbo_id: RBOId(depth_rb),
                refcount: 0,
            }
        });
        target.refcount += 1;
        target.rbo_id
    }

    fn release_depth_target(&mut self, dimensions: DeviceIntSize) {
        let mut entry = match self.depth_targets.entry(dimensions) {
            Entry::Occupied(x) => x,
            Entry::Vacant(..) => panic!("Releasing unknown depth target"),
        };
        debug_assert!(entry.get().refcount != 0);
        entry.get_mut().refcount -= 1;
        if entry.get().refcount == 0 {
            let (_, target) = entry.remove_entry();
            self.gl.delete_renderbuffers(&[target.rbo_id.0]);
        }
    }

    fn blit_render_target_impl(
        &mut self,
        src_rect: FramebufferIntRect,
        dest_rect: FramebufferIntRect,
        filter: TextureFilter,
    ) {
        debug_assert!(self.inside_frame);

        let filter = match filter {
            TextureFilter::Nearest => gl::NEAREST,
            TextureFilter::Linear | TextureFilter::Trilinear => gl::LINEAR,
        };

        let src_x0 = src_rect.min.x + self.bound_read_fbo.1.x;
        let src_y0 = src_rect.min.y + self.bound_read_fbo.1.y;

        self.gl.blit_framebuffer(
            src_x0,
            src_y0,
            src_x0 + src_rect.width(),
            src_y0 + src_rect.height(),
            dest_rect.min.x,
            dest_rect.min.y,
            dest_rect.max.x,
            dest_rect.max.y,
            gl::COLOR_BUFFER_BIT,
            filter,
        );
    }

    pub fn blit_render_target(
        &mut self,
        src_target: ReadTarget,
        src_rect: FramebufferIntRect,
        dest_target: DrawTarget,
        dest_rect: FramebufferIntRect,
        filter: TextureFilter,
    ) {
        debug_assert!(self.inside_frame);

        self.bind_read_target(src_target);

        self.bind_draw_target(dest_target);

        self.blit_render_target_impl(src_rect, dest_rect, filter);
    }

    pub fn blit_render_target_invert_y(
        &mut self,
        src_target: ReadTarget,
        src_rect: FramebufferIntRect,
        dest_target: DrawTarget,
        dest_rect: FramebufferIntRect,
    ) {
        debug_assert!(self.inside_frame);

        let mut inverted_dest_rect = dest_rect;
        inverted_dest_rect.min.y = dest_rect.max.y;
        inverted_dest_rect.max.y = dest_rect.min.y;

        self.blit_render_target(
            src_target,
            src_rect,
            dest_target,
            inverted_dest_rect,
            TextureFilter::Linear,
        );
    }

    pub fn delete_texture(&mut self, mut texture: Texture) {
        debug_assert!(self.inside_frame);
        let had_depth = texture.supports_depth();
        if let Some(fbo) = texture.fbo {
            self.gl.delete_framebuffers(&[fbo.0]);
            texture.fbo = None;
        }
        if let Some(fbo) = texture.fbo_with_depth {
            self.gl.delete_framebuffers(&[fbo.0]);
            texture.fbo_with_depth = None;
        }

        if had_depth {
            self.release_depth_target(texture.get_dimensions());
        }

        self.gl.delete_textures(&[texture.id]);

        for bound_texture in &mut self.bound_textures {
            if *bound_texture == texture.id {
                *bound_texture = 0;
            }
        }

        self.textures_deleted += 1;

        texture.id = 0;
    }

    #[cfg(feature = "replay")]
    pub fn delete_external_texture(&mut self, mut external: ExternalTexture) {
        self.gl.delete_textures(&[external.id]);
        external.id = 0;
    }

    pub fn delete_program(&mut self, mut program: Program) {
        self.gl.delete_program(program.id);
        program.id = 0;
    }

    pub fn create_program_linked(
        &mut self,
        base_filename: &'static str,
        features: &[&'static str],
        descriptor: &VertexDescriptor,
    ) -> Result<Program, ShaderError> {
        let mut program = self.create_program(base_filename, features)?;
        self.link_program(&mut program, descriptor)?;
        Ok(program)
    }

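    /// Creates a program object, pre-loading a cached program binary into it
    /// when one is available. The program is not usable until `link_program`
    /// has been called on it.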
    pub fn create_program(
        &mut self,
        base_filename: &'static str,
        features: &[&'static str],
    ) -> Result<Program, ShaderError> {
        debug_assert!(self.inside_frame);

        let source_info = ProgramSourceInfo::new(self, base_filename, features);

        let pid = self.gl.create_program();

        if let Some(ref cached_programs) = self.cached_programs {
            if let Some(entry) = cached_programs.entries.borrow().get(&source_info.digest) {
                self.gl.program_binary(pid, entry.binary.format, &entry.binary.bytes);
            }
        }

        let program = Program {
            id: pid,
            u_transform: 0,
            u_texture_size: 0,
            source_info,
            is_initialized: false,
        };

        Ok(program)
    }

    fn build_shader_string<F: FnMut(&str)>(
        &self,
        features: &[&'static str],
        kind: ShaderKind,
        base_filename: &str,
        output: F,
    ) {
        do_build_shader_string(
            get_shader_version(&*self.gl),
            features,
            kind,
            base_filename,
            &|f| get_unoptimized_shader_source(f, self.resource_override_path.as_ref()),
            output,
        )
    }

    pub fn bind_shader_samplers<S>(&mut self, program: &Program, bindings: &[(&'static str, S)])
    where
        S: Into<TextureSlot> + Copy,
    {
        assert_eq!(self.bound_program, program.id);

        for binding in bindings {
            let u_location = self.gl.get_uniform_location(program.id, binding.0);
            if u_location != -1 {
                self.bind_program(program);
                self.gl
                    .uniform_1i(u_location, binding.1.into().0 as gl::GLint);
            }
        }
    }

    pub fn get_uniform_location(&self, program: &Program, name: &str) -> UniformLocation {
        UniformLocation(self.gl.get_uniform_location(program.id, name))
    }

    pub fn set_uniforms(
        &self,
        program: &Program,
        transform: &Transform3D<f32>,
    ) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        self.gl
            .uniform_matrix_4fv(program.u_transform, false, &transform.to_array());
    }

    pub fn set_shader_texture_size(
        &self,
        program: &Program,
        texture_size: DeviceSize,
    ) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        if program.u_texture_size != -1 {
            self.gl.uniform_2f(program.u_texture_size, texture_size.width, texture_size.height);
        }
    }

    pub fn create_pbo(&mut self) -> PBO {
        let id = self.gl.gen_buffers(1)[0];
        PBO {
            id,
            reserved_size: 0,
        }
    }

    pub fn create_pbo_with_size(&mut self, size: usize) -> PBO {
        let mut pbo = self.create_pbo();

        self.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, pbo.id);
        self.gl.pixel_store_i(gl::PACK_ALIGNMENT, 1);
        self.gl.buffer_data_untyped(
            gl::PIXEL_PACK_BUFFER,
            size as _,
            ptr::null(),
            gl::STREAM_READ,
        );
        // Unbind the pack buffer that was bound above, so that later
        // glReadPixels calls into client memory are unaffected.
        self.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, 0);

        pbo.reserved_size = size;
        pbo
    }

    pub fn read_pixels_into_pbo(
        &mut self,
        read_target: ReadTarget,
        rect: DeviceIntRect,
        format: ImageFormat,
        pbo: &PBO,
    ) {
        let byte_size = rect.area() as usize * format.bytes_per_pixel() as usize;

        assert!(byte_size <= pbo.reserved_size);

        self.bind_read_target(read_target);

        self.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, pbo.id);
        self.gl.pixel_store_i(gl::PACK_ALIGNMENT, 1);

        let gl_format = self.gl_describe_format(format);

        unsafe {
            self.gl.read_pixels_into_pbo(
                rect.min.x as _,
                rect.min.y as _,
                rect.width() as _,
                rect.height() as _,
                gl_format.read,
                gl_format.pixel_type,
            );
        }

        self.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, 0);
    }

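    /// Maps `pbo` for CPU readback, via `glMapBuffer` on desktop GL and
    /// `glMapBufferRange` on GLES. Returns `None` if mapping fails; the
    /// returned `BoundPBO` borrows the device for the lifetime of the
    /// mapping.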
    pub fn map_pbo_for_readback<'a>(&'a mut self, pbo: &'a PBO) -> Option<BoundPBO<'a>> {
        self.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, pbo.id);

        let buf_ptr = match self.gl.get_type() {
            gl::GlType::Gl => {
                self.gl.map_buffer(gl::PIXEL_PACK_BUFFER, gl::READ_ONLY)
            }

            gl::GlType::Gles => {
                self.gl.map_buffer_range(
                    gl::PIXEL_PACK_BUFFER,
                    0,
                    pbo.reserved_size as _,
                    gl::MAP_READ_BIT)
            }
        };

        if buf_ptr.is_null() {
            return None;
        }

        let buffer = unsafe { slice::from_raw_parts(buf_ptr as *const u8, pbo.reserved_size) };

        Some(BoundPBO {
            device: self,
            data: buffer,
        })
    }

    pub fn delete_pbo(&mut self, mut pbo: PBO) {
        self.gl.delete_buffers(&[pbo.id]);
        pbo.id = 0;
        pbo.reserved_size = 0;
    }

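    /// Returns the total size and per-row stride, in bytes, of the staging
    /// buffer needed to upload an image of the given size and format, with
    /// the row stride rounded up to the device's required PBO stride
    /// alignment. As a hypothetical example, a 57x4 RGBA8 upload on a device
    /// whose required stride alignment is 256 bytes needs a stride of
    /// round_up_to_multiple(57 * 4, 256) = 256 bytes and 256 * 4 = 1024 bytes
    /// in total.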
    pub fn required_upload_size_and_stride(&self, size: DeviceIntSize, format: ImageFormat) -> (usize, usize) {
        assert!(size.width >= 0);
        assert!(size.height >= 0);

        let bytes_pp = format.bytes_per_pixel() as usize;
        let width_bytes = size.width as usize * bytes_pp;

        let dst_stride = round_up_to_multiple(width_bytes, self.required_pbo_stride.num_bytes(format));

        let dst_size = dst_stride * size.height as usize;

        (dst_size, dst_stride)
    }

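    /// Begins a batch of texture uploads. The returned `TextureUploader`
    /// stages pixel data into PBOs drawn from `pbo_pool` and must be flushed
    /// before it is dropped.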
    pub fn upload_texture<'a>(
        &mut self,
        pbo_pool: &'a mut UploadPBOPool,
    ) -> TextureUploader<'a> {
        debug_assert!(self.inside_frame);

        pbo_pool.begin_frame(self);

        TextureUploader {
            buffers: Vec::new(),
            pbo_pool,
        }
    }

    pub fn upload_texture_immediate<T: Texel>(
        &mut self,
        texture: &Texture,
        pixels: &[T]
    ) {
        self.bind_texture(DEFAULT_TEXTURE, texture, Swizzle::default());
        let desc = self.gl_describe_format(texture.format);
        self.gl.tex_sub_image_2d(
            texture.target,
            0,
            0,
            0,
            texture.size.width as gl::GLint,
            texture.size.height as gl::GLint,
            desc.external,
            desc.pixel_type,
            texels_to_u8_slice(pixels),
        );
    }

    pub fn read_pixels(&mut self, img_desc: &ImageDescriptor) -> Vec<u8> {
        let desc = self.gl_describe_format(img_desc.format);
        self.gl.read_pixels(
            0, 0,
            img_desc.size.width as i32,
            img_desc.size.height as i32,
            desc.read,
            desc.pixel_type,
        )
    }

    pub fn read_pixels_into(
        &mut self,
        rect: FramebufferIntRect,
        format: ImageFormat,
        output: &mut [u8],
    ) {
        let bytes_per_pixel = format.bytes_per_pixel();
        let desc = self.gl_describe_format(format);
        let size_in_bytes = (bytes_per_pixel * rect.area()) as usize;
        assert_eq!(output.len(), size_in_bytes);

        self.gl.flush();
        self.gl.read_pixels_into_buffer(
            rect.min.x as _,
            rect.min.y as _,
            rect.width() as _,
            rect.height() as _,
            desc.read,
            desc.pixel_type,
            output,
        );
    }

    pub fn get_tex_image_into(
        &mut self,
        texture: &Texture,
        format: ImageFormat,
        output: &mut [u8],
    ) {
        self.bind_texture(DEFAULT_TEXTURE, texture, Swizzle::default());
        let desc = self.gl_describe_format(format);
        self.gl.get_tex_image_into_buffer(
            texture.target,
            0,
            desc.external,
            desc.pixel_type,
            output,
        );
    }

    fn attach_read_texture_raw(&mut self, texture_id: gl::GLuint, target: gl::GLuint) {
        self.gl.framebuffer_texture_2d(
            gl::READ_FRAMEBUFFER,
            gl::COLOR_ATTACHMENT0,
            target,
            texture_id,
            0,
        )
    }

    pub fn attach_read_texture_external(
        &mut self, texture_id: gl::GLuint, target: ImageBufferKind
    ) {
        self.attach_read_texture_raw(texture_id, get_gl_target(target))
    }

    pub fn attach_read_texture(&mut self, texture: &Texture) {
        self.attach_read_texture_raw(texture.id, texture.target)
    }

    fn bind_vao_impl(&mut self, id: gl::GLuint) {
        debug_assert!(self.inside_frame);

        if self.bound_vao != id {
            self.bound_vao = id;
            self.gl.bind_vertex_array(id);
        }
    }

    pub fn bind_vao(&mut self, vao: &VAO) {
        self.bind_vao_impl(vao.id)
    }

    pub fn bind_custom_vao(&mut self, vao: &CustomVAO) {
        self.bind_vao_impl(vao.id)
    }

    fn create_vao_with_vbos(
        &mut self,
        descriptor: &VertexDescriptor,
        main_vbo_id: VBOId,
        instance_vbo_id: VBOId,
        instance_divisor: u32,
        ibo_id: IBOId,
        owns_vertices_and_indices: bool,
    ) -> VAO {
        let instance_stride = descriptor.instance_stride() as usize;
        let vao_id = self.gl.gen_vertex_arrays(1)[0];

        self.bind_vao_impl(vao_id);

        descriptor.bind(self.gl(), main_vbo_id, instance_vbo_id, instance_divisor);
        ibo_id.bind(self.gl());

        VAO {
            id: vao_id,
            ibo_id,
            main_vbo_id,
            instance_vbo_id,
            instance_stride,
            instance_divisor,
            owns_vertices_and_indices,
        }
    }

    pub fn create_custom_vao(
        &mut self,
        streams: &[Stream],
    ) -> CustomVAO {
        debug_assert!(self.inside_frame);

        let vao_id = self.gl.gen_vertex_arrays(1)[0];
        self.bind_vao_impl(vao_id);

        let mut attrib_index = 0;
        for stream in streams {
            VertexDescriptor::bind_attributes(
                stream.attributes,
                attrib_index,
                0,
                self.gl(),
                stream.vbo,
            );
            attrib_index += stream.attributes.len();
        }

        CustomVAO {
            id: vao_id,
        }
    }

    pub fn delete_custom_vao(&mut self, mut vao: CustomVAO) {
        self.gl.delete_vertex_arrays(&[vao.id]);
        vao.id = 0;
    }

    pub fn create_vbo<T>(&mut self) -> VBO<T> {
        let ids = self.gl.gen_buffers(1);
        VBO {
            id: ids[0],
            target: gl::ARRAY_BUFFER,
            allocated_count: 0,
            marker: PhantomData,
        }
    }

    pub fn delete_vbo<T>(&mut self, mut vbo: VBO<T>) {
        self.gl.delete_buffers(&[vbo.id]);
        vbo.id = 0;
    }

    pub fn create_vao(&mut self, descriptor: &VertexDescriptor, instance_divisor: u32) -> VAO {
        debug_assert!(self.inside_frame);

        let buffer_ids = self.gl.gen_buffers(3);
        let ibo_id = IBOId(buffer_ids[0]);
        let main_vbo_id = VBOId(buffer_ids[1]);
        let instance_vbo_id = VBOId(buffer_ids[2]);

        self.create_vao_with_vbos(descriptor, main_vbo_id, instance_vbo_id, instance_divisor, ibo_id, true)
    }

    pub fn delete_vao(&mut self, mut vao: VAO) {
        self.gl.delete_vertex_arrays(&[vao.id]);
        vao.id = 0;

        if vao.owns_vertices_and_indices {
            self.gl.delete_buffers(&[vao.ibo_id.0]);
            self.gl.delete_buffers(&[vao.main_vbo_id.0]);
        }

        self.gl.delete_buffers(&[vao.instance_vbo_id.0])
    }

    pub fn allocate_vbo<V>(
        &mut self,
        vbo: &mut VBO<V>,
        count: usize,
        usage_hint: VertexUsageHint,
    ) {
        debug_assert!(self.inside_frame);
        vbo.allocated_count = count;

        self.gl.bind_buffer(vbo.target, vbo.id);
        self.gl.buffer_data_untyped(
            vbo.target,
            (count * mem::size_of::<V>()) as _,
            ptr::null(),
            usage_hint.to_gl(),
        );
    }

    pub fn fill_vbo<V>(
        &mut self,
        vbo: &VBO<V>,
        data: &[V],
        offset: usize,
    ) {
        debug_assert!(self.inside_frame);
        assert!(offset + data.len() <= vbo.allocated_count);
        let stride = mem::size_of::<V>();

        self.gl.bind_buffer(vbo.target, vbo.id);
        self.gl.buffer_sub_data_untyped(
            vbo.target,
            (offset * stride) as _,
            (data.len() * stride) as _,
            data.as_ptr() as _,
        );
    }

    fn update_vbo_data<V>(
        &mut self,
        vbo: VBOId,
        vertices: &[V],
        usage_hint: VertexUsageHint,
    ) {
        debug_assert!(self.inside_frame);

        vbo.bind(self.gl());
        gl::buffer_data(self.gl(), gl::ARRAY_BUFFER, vertices, usage_hint.to_gl());
    }

    pub fn create_vao_with_new_instances(
        &mut self,
        descriptor: &VertexDescriptor,
        base_vao: &VAO,
    ) -> VAO {
        debug_assert!(self.inside_frame);

        let buffer_ids = self.gl.gen_buffers(1);
        let instance_vbo_id = VBOId(buffer_ids[0]);

        self.create_vao_with_vbos(
            descriptor,
            base_vao.main_vbo_id,
            instance_vbo_id,
            base_vao.instance_divisor,
            base_vao.ibo_id,
            false,
        )
    }

    pub fn update_vao_main_vertices<V>(
        &mut self,
        vao: &VAO,
        vertices: &[V],
        usage_hint: VertexUsageHint,
    ) {
        debug_assert_eq!(self.bound_vao, vao.id);
        self.update_vbo_data(vao.main_vbo_id, vertices, usage_hint)
    }

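    /// Uploads instance data into the VAO's instance VBO. When `repeat` is
    /// `Some(count)` each instance is written `count` times into a mapped
    /// buffer, for devices where instancing is emulated with repeated
    /// vertices; note that the duplication loop below writes four copies per
    /// instance, so it assumes a repeat count of 4 (one per quad corner).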
    pub fn update_vao_instances<V: Clone>(
        &mut self,
        vao: &VAO,
        instances: &[V],
        usage_hint: VertexUsageHint,
        repeat: Option<NonZeroUsize>,
    ) {
        debug_assert_eq!(self.bound_vao, vao.id);
        debug_assert_eq!(vao.instance_stride as usize, mem::size_of::<V>());

        match repeat {
            Some(count) => {
                let target = gl::ARRAY_BUFFER;
                self.gl.bind_buffer(target, vao.instance_vbo_id.0);
                let size = instances.len() * count.get() * mem::size_of::<V>();
                self.gl.buffer_data_untyped(
                    target,
                    size as _,
                    ptr::null(),
                    usage_hint.to_gl(),
                );

                let ptr = match self.gl.get_type() {
                    gl::GlType::Gl => {
                        self.gl.map_buffer(target, gl::WRITE_ONLY)
                    }
                    gl::GlType::Gles => {
                        self.gl.map_buffer_range(target, 0, size as _, gl::MAP_WRITE_BIT)
                    }
                };
                assert!(!ptr.is_null());

                let buffer_slice = unsafe {
                    slice::from_raw_parts_mut(ptr as *mut V, instances.len() * count.get())
                };
                for (quad, instance) in buffer_slice.chunks_mut(4).zip(instances) {
                    quad[0] = instance.clone();
                    quad[1] = instance.clone();
                    quad[2] = instance.clone();
                    quad[3] = instance.clone();
                }
                self.gl.unmap_buffer(target);
            }
            None => {
                self.update_vbo_data(vao.instance_vbo_id, instances, usage_hint);
            }
        }

        if self.capabilities.requires_vao_rebind_after_orphaning {
            self.bind_vao_impl(0);
            self.bind_vao_impl(vao.id);
        }
    }

    pub fn update_vao_indices<I>(&mut self, vao: &VAO, indices: &[I], usage_hint: VertexUsageHint) {
        debug_assert!(self.inside_frame);
        debug_assert_eq!(self.bound_vao, vao.id);

        vao.ibo_id.bind(self.gl());
        gl::buffer_data(
            self.gl(),
            gl::ELEMENT_ARRAY_BUFFER,
            indices,
            usage_hint.to_gl(),
        );
    }

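    /// Draws indexed triangles from 16-bit indices. `first_vertex` counts
    /// indices, so the byte offset passed to `glDrawElements` is
    /// `first_vertex * 2`.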
    pub fn draw_triangles_u16(&mut self, first_vertex: i32, index_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        let _guard = if self.annotate_draw_call_crashes {
            Some(CrashAnnotatorGuard::new(
                &self.crash_annotator,
                CrashAnnotation::DrawShader,
                &self.bound_program_name,
            ))
        } else {
            None
        };

        self.gl.draw_elements(
            gl::TRIANGLES,
            index_count,
            gl::UNSIGNED_SHORT,
            first_vertex as u32 * 2,
        );
    }

    pub fn draw_triangles_u32(&mut self, first_vertex: i32, index_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        let _guard = if self.annotate_draw_call_crashes {
            Some(CrashAnnotatorGuard::new(
                &self.crash_annotator,
                CrashAnnotation::DrawShader,
                &self.bound_program_name,
            ))
        } else {
            None
        };

        self.gl.draw_elements(
            gl::TRIANGLES,
            index_count,
            gl::UNSIGNED_INT,
            first_vertex as u32 * 4,
        );
    }

    pub fn draw_nonindexed_points(&mut self, first_vertex: i32, vertex_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        let _guard = if self.annotate_draw_call_crashes {
            Some(CrashAnnotatorGuard::new(
                &self.crash_annotator,
                CrashAnnotation::DrawShader,
                &self.bound_program_name,
            ))
        } else {
            None
        };

        self.gl.draw_arrays(gl::POINTS, first_vertex, vertex_count);
    }

    pub fn draw_nonindexed_lines(&mut self, first_vertex: i32, vertex_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        let _guard = if self.annotate_draw_call_crashes {
            Some(CrashAnnotatorGuard::new(
                &self.crash_annotator,
                CrashAnnotation::DrawShader,
                &self.bound_program_name,
            ))
        } else {
            None
        };

        self.gl.draw_arrays(gl::LINES, first_vertex, vertex_count);
    }

    pub fn draw_indexed_triangles(&mut self, index_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        let _guard = if self.annotate_draw_call_crashes {
            Some(CrashAnnotatorGuard::new(
                &self.crash_annotator,
                CrashAnnotation::DrawShader,
                &self.bound_program_name,
            ))
        } else {
            None
        };

        self.gl.draw_elements(
            gl::TRIANGLES,
            index_count,
            gl::UNSIGNED_SHORT,
            0,
        );
    }

    pub fn draw_indexed_triangles_instanced_u16(&mut self, index_count: i32, instance_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        let _guard = if self.annotate_draw_call_crashes {
            Some(CrashAnnotatorGuard::new(
                &self.crash_annotator,
                CrashAnnotation::DrawShader,
                &self.bound_program_name,
            ))
        } else {
            None
        };

        self.gl.draw_elements_instanced(
            gl::TRIANGLES,
            index_count,
            gl::UNSIGNED_SHORT,
            0,
            instance_count,
        );
    }

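    /// Ends the current frame: resets the draw and read targets, unbinds all
    /// textures and the current program, advances the frame id, and gives the
    /// shader cache an opportunity to flush to disk.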
    pub fn end_frame(&mut self) {
        self.reset_draw_target();
        self.reset_read_target();

        debug_assert!(self.inside_frame);
        self.inside_frame = false;

        self.gl.bind_texture(gl::TEXTURE_2D, 0);
        self.gl.use_program(0);

        for i in 0 .. self.bound_textures.len() {
            self.gl.active_texture(gl::TEXTURE0 + i as gl::GLuint);
            self.gl.bind_texture(gl::TEXTURE_2D, 0);
        }

        self.gl.active_texture(gl::TEXTURE0);

        self.frame_id.0 += 1;

        if let Some(ref cache) = self.cached_programs {
            cache.update_disk_cache(self.frame_id.0 == 10);
        }
    }

    pub fn clear_target(
        &self,
        color: Option<[f32; 4]>,
        depth: Option<f32>,
        rect: Option<FramebufferIntRect>,
    ) {
        let mut clear_bits = 0;

        if let Some(color) = color {
            self.gl.clear_color(color[0], color[1], color[2], color[3]);
            clear_bits |= gl::COLOR_BUFFER_BIT;
        }

        if let Some(depth) = depth {
            if cfg!(debug_assertions) {
                let mut mask = [0];
                unsafe {
                    self.gl.get_boolean_v(gl::DEPTH_WRITEMASK, &mut mask);
                }
                assert_ne!(mask[0], 0);
            }
            self.gl.clear_depth(depth as f64);
            clear_bits |= gl::DEPTH_BUFFER_BIT;
        }

        if clear_bits != 0 {
            match rect {
                Some(rect) => {
                    self.gl.enable(gl::SCISSOR_TEST);
                    self.gl.scissor(
                        rect.min.x,
                        rect.min.y,
                        rect.width(),
                        rect.height(),
                    );
                    self.gl.clear(clear_bits);
                    self.gl.disable(gl::SCISSOR_TEST);
                }
                None => {
                    self.gl.clear(clear_bits);
                }
            }
        }
    }

    pub fn enable_depth(&self, depth_func: DepthFunction) {
        assert!(self.depth_available, "Enabling depth test without depth target");
        self.gl.enable(gl::DEPTH_TEST);
        self.gl.depth_func(depth_func as gl::GLuint);
    }

    pub fn disable_depth(&self) {
        self.gl.disable(gl::DEPTH_TEST);
    }

    pub fn enable_depth_write(&self) {
        assert!(self.depth_available, "Enabling depth write without depth target");
        self.gl.depth_mask(true);
    }

    pub fn disable_depth_write(&self) {
        self.gl.depth_mask(false);
    }

    pub fn disable_stencil(&self) {
        self.gl.disable(gl::STENCIL_TEST);
    }

    pub fn set_scissor_rect(&self, rect: FramebufferIntRect) {
        self.gl.scissor(
            rect.min.x,
            rect.min.y,
            rect.width(),
            rect.height(),
        );
    }

    pub fn enable_scissor(&self) {
        self.gl.enable(gl::SCISSOR_TEST);
    }

    pub fn disable_scissor(&self) {
        self.gl.disable(gl::SCISSOR_TEST);
    }

    pub fn enable_color_write(&self) {
        self.gl.color_mask(true, true, true, true);
    }

    pub fn disable_color_write(&self) {
        self.gl.color_mask(false, false, false, false);
    }

    pub fn set_blend(&mut self, enable: bool) {
        if enable {
            self.gl.enable(gl::BLEND);
        } else {
            self.gl.disable(gl::BLEND);
        }
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }
    }

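    /// Sets combined (or, when the color and alpha factors differ, separate)
    /// blend factors, resetting the blend equation to `FUNC_ADD`.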
    fn set_blend_factors(
        &mut self,
        color: (gl::GLenum, gl::GLenum),
        alpha: (gl::GLenum, gl::GLenum),
    ) {
        self.gl.blend_equation(gl::FUNC_ADD);
        if color == alpha {
            self.gl.blend_func(color.0, color.1);
        } else {
            self.gl.blend_func_separate(color.0, color.1, alpha.0, alpha.1);
        }
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }
    }

    pub fn set_blend_mode_alpha(&mut self) {
        self.set_blend_factors(
            (gl::SRC_ALPHA, gl::ONE_MINUS_SRC_ALPHA),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }

    pub fn set_blend_mode_premultiplied_alpha(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }

    pub fn set_blend_mode_premultiplied_dest_out(&mut self) {
        self.set_blend_factors(
            (gl::ZERO, gl::ONE_MINUS_SRC_ALPHA),
            (gl::ZERO, gl::ONE_MINUS_SRC_ALPHA),
        );
    }

    pub fn set_blend_mode_multiply(&mut self) {
        self.set_blend_factors(
            (gl::ZERO, gl::SRC_COLOR),
            (gl::ZERO, gl::SRC_ALPHA),
        );
    }

    pub fn set_blend_mode_subpixel_pass0(&mut self) {
        self.set_blend_factors(
            (gl::ZERO, gl::ONE_MINUS_SRC_COLOR),
            (gl::ZERO, gl::ONE_MINUS_SRC_ALPHA),
        );
    }

    pub fn set_blend_mode_subpixel_pass1(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE),
            (gl::ONE, gl::ONE),
        );
    }

    pub fn set_blend_mode_subpixel_dual_source(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE_MINUS_SRC1_COLOR),
            (gl::ONE, gl::ONE_MINUS_SRC1_ALPHA),
        );
    }

    pub fn set_blend_mode_multiply_dual_source(&mut self) {
        self.set_blend_factors(
            (gl::ONE_MINUS_DST_ALPHA, gl::ONE_MINUS_SRC1_COLOR),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }

    pub fn set_blend_mode_screen(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE_MINUS_SRC_COLOR),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }

    pub fn set_blend_mode_plus_lighter(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE),
            (gl::ONE, gl::ONE),
        );
    }

    pub fn set_blend_mode_exclusion(&mut self) {
        self.set_blend_factors(
            (gl::ONE_MINUS_DST_COLOR, gl::ONE_MINUS_SRC_COLOR),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }

    pub fn set_blend_mode_show_overdraw(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }

    pub fn set_blend_mode_max(&mut self) {
        self.gl
            .blend_func_separate(gl::ONE, gl::ONE, gl::ONE, gl::ONE);
        self.gl.blend_equation_separate(gl::MAX, gl::FUNC_ADD);
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }
    }

    pub fn set_blend_mode_min(&mut self) {
        self.gl
            .blend_func_separate(gl::ONE, gl::ONE, gl::ONE, gl::ONE);
        self.gl.blend_equation_separate(gl::MIN, gl::FUNC_ADD);
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }
    }

    pub fn set_blend_mode_advanced(&mut self, mode: MixBlendMode) {
        self.gl.blend_equation(match mode {
            MixBlendMode::Normal => {
                self.gl.blend_func_separate(gl::ZERO, gl::SRC_COLOR, gl::ZERO, gl::SRC_ALPHA);
                gl::FUNC_ADD
            },
            MixBlendMode::PlusLighter => {
                return self.set_blend_mode_plus_lighter();
            },
            MixBlendMode::Multiply => gl::MULTIPLY_KHR,
            MixBlendMode::Screen => gl::SCREEN_KHR,
            MixBlendMode::Overlay => gl::OVERLAY_KHR,
            MixBlendMode::Darken => gl::DARKEN_KHR,
            MixBlendMode::Lighten => gl::LIGHTEN_KHR,
            MixBlendMode::ColorDodge => gl::COLORDODGE_KHR,
            MixBlendMode::ColorBurn => gl::COLORBURN_KHR,
            MixBlendMode::HardLight => gl::HARDLIGHT_KHR,
            MixBlendMode::SoftLight => gl::SOFTLIGHT_KHR,
            MixBlendMode::Difference => gl::DIFFERENCE_KHR,
            MixBlendMode::Exclusion => gl::EXCLUSION_KHR,
            MixBlendMode::Hue => gl::HSL_HUE_KHR,
            MixBlendMode::Saturation => gl::HSL_SATURATION_KHR,
            MixBlendMode::Color => gl::HSL_COLOR_KHR,
            MixBlendMode::Luminosity => gl::HSL_LUMINOSITY_KHR,
        });
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }
    }

    pub fn supports_extension(&self, extension: &str) -> bool {
        supports_extension(&self.extensions, extension)
    }

    pub fn echo_driver_messages(&self) {
        if self.capabilities.supports_khr_debug {
            Device::log_driver_messages(self.gl());
        }
    }

    fn log_driver_messages(gl: &dyn gl::Gl) {
        for msg in gl.get_debug_messages() {
            let level = match msg.severity {
                gl::DEBUG_SEVERITY_HIGH => Level::Error,
                gl::DEBUG_SEVERITY_MEDIUM => Level::Warn,
                gl::DEBUG_SEVERITY_LOW => Level::Info,
                gl::DEBUG_SEVERITY_NOTIFICATION => Level::Debug,
                _ => Level::Trace,
            };
            let ty = match msg.ty {
                gl::DEBUG_TYPE_ERROR => "error",
                gl::DEBUG_TYPE_DEPRECATED_BEHAVIOR => "deprecated",
                gl::DEBUG_TYPE_UNDEFINED_BEHAVIOR => "undefined",
                gl::DEBUG_TYPE_PORTABILITY => "portability",
                gl::DEBUG_TYPE_PERFORMANCE => "perf",
                gl::DEBUG_TYPE_MARKER => "marker",
                gl::DEBUG_TYPE_PUSH_GROUP => "group push",
                gl::DEBUG_TYPE_POP_GROUP => "group pop",
                gl::DEBUG_TYPE_OTHER => "other",
                _ => "?",
            };
            log!(level, "({}) {}", ty, msg.message);
        }
    }

    pub fn gl_describe_format(&self, format: ImageFormat) -> FormatDesc {
        match format {
            ImageFormat::R8 => FormatDesc {
                internal: gl::R8,
                external: gl::RED,
                read: gl::RED,
                pixel_type: gl::UNSIGNED_BYTE,
            },
            ImageFormat::R16 => FormatDesc {
                internal: gl::R16,
                external: gl::RED,
                read: gl::RED,
                pixel_type: gl::UNSIGNED_SHORT,
            },
            ImageFormat::BGRA8 => FormatDesc {
                internal: self.bgra_formats.internal,
                external: self.bgra_formats.external,
                read: gl::BGRA,
                pixel_type: self.bgra_pixel_type,
            },
            ImageFormat::RGBA8 => FormatDesc {
                internal: gl::RGBA8,
                external: gl::RGBA,
                read: gl::RGBA,
                pixel_type: gl::UNSIGNED_BYTE,
            },
            ImageFormat::RGBAF32 => FormatDesc {
                internal: gl::RGBA32F,
                external: gl::RGBA,
                read: gl::RGBA,
                pixel_type: gl::FLOAT,
            },
            ImageFormat::RGBAI32 => FormatDesc {
                internal: gl::RGBA32I,
                external: gl::RGBA_INTEGER,
                read: gl::RGBA_INTEGER,
                pixel_type: gl::INT,
            },
            ImageFormat::RG8 => FormatDesc {
                internal: gl::RG8,
                external: gl::RG,
                read: gl::RG,
                pixel_type: gl::UNSIGNED_BYTE,
            },
            ImageFormat::RG16 => FormatDesc {
                internal: gl::RG16,
                external: gl::RG,
                read: gl::RG,
                pixel_type: gl::UNSIGNED_SHORT,
            },
        }
    }

    pub fn report_memory(&self, size_op_funs: &MallocSizeOfOps, swgl: *mut c_void) -> MemoryReport {
        let mut report = MemoryReport::default();
        report.depth_target_textures += self.depth_targets_memory();

        #[cfg(feature = "sw_compositor")]
        if !swgl.is_null() {
            report.swgl += swgl::Context::from(swgl).report_memory(size_op_funs.size_of_op);
        }
        // Avoid unused-variable warnings when the sw_compositor feature is disabled.
        let _ = size_op_funs;
        let _ = swgl;
        report
    }

    pub fn depth_targets_memory(&self) -> usize {
        let mut total = 0;
        for dim in self.depth_targets.keys() {
            total += depth_target_size_in_bytes(dim);
        }

        total
    }
}

pub struct FormatDesc {
    /// Format the texel data is stored in within a texture.
    pub internal: gl::GLenum,
    /// Format the texel data is provided in when filling the texture.
    pub external: gl::GLuint,
    /// Format to read the texels back as, so that they can later be
    /// re-uploaded as `external`.
    pub read: gl::GLuint,
    /// Associated GL pixel type.
    pub pixel_type: gl::GLuint,
}

#[derive(Debug)]
struct UploadChunk<'a> {
    rect: DeviceIntRect,
    stride: Option<i32>,
    offset: usize,
    format_override: Option<ImageFormat>,
    texture: &'a Texture,
}

#[derive(Debug)]
struct PixelBuffer<'a> {
    size_used: usize,
    chunks: SmallVec<[UploadChunk<'a>; 1]>,
    inner: UploadPBO,
    mapping: &'a mut [mem::MaybeUninit<u8>],
}

impl<'a> PixelBuffer<'a> {
    fn new(
        pbo: UploadPBO,
    ) -> Self {
        let mapping = unsafe {
            slice::from_raw_parts_mut(pbo.mapping.get_ptr().as_ptr(), pbo.pbo.reserved_size)
        };
        Self {
            size_used: 0,
            chunks: SmallVec::new(),
            inner: pbo,
            mapping,
        }
    }

    fn flush_chunks(&mut self, device: &mut Device) {
        for chunk in self.chunks.drain(..) {
            TextureUploader::update_impl(device, chunk);
        }
    }
}

impl<'a> Drop for PixelBuffer<'a> {
    fn drop(&mut self) {
        assert_eq!(self.chunks.len(), 0, "PixelBuffer must be flushed before dropping.");
    }
}

#[derive(Debug)]
enum PBOMapping {
    Unmapped,
    Transient(ptr::NonNull<mem::MaybeUninit<u8>>),
    Persistent(ptr::NonNull<mem::MaybeUninit<u8>>),
}

impl PBOMapping {
    fn get_ptr(&self) -> ptr::NonNull<mem::MaybeUninit<u8>> {
        match self {
            PBOMapping::Unmapped => unreachable!("Cannot get pointer to unmapped PBO."),
            PBOMapping::Transient(ptr) => *ptr,
            PBOMapping::Persistent(ptr) => *ptr,
        }
    }
}

#[derive(Debug)]
struct UploadPBO {
    pbo: PBO,
    mapping: PBOMapping,
    can_recycle: bool,
}

impl UploadPBO {
    fn empty() -> Self {
        Self {
            pbo: PBO {
                id: 0,
                reserved_size: 0,
            },
            mapping: PBOMapping::Unmapped,
            can_recycle: false,
        }
    }
}

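/// A pool of PBOs used to stage texture uploads. Recyclable buffers are
/// parked behind a GL fence when returned and handed out again only once
/// that fence has signalled, so the CPU never overwrites data the GPU may
/// still be reading.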
pub struct UploadPBOPool {
    /// Usage hint to provide to the driver for optimizations.
    usage_hint: VertexUsageHint,
    /// The preferred size, in bytes, of the buffers to allocate.
    default_size: usize,
    /// Buffers that are ready to be re-used.
    available_buffers: Vec<UploadPBO>,
    /// Buffers that have been returned this frame and are waiting on a fence.
    returned_buffers: Vec<UploadPBO>,
    /// Fences, paired with the buffers that become available once each has
    /// been signalled.
    waiting_buffers: Vec<(gl::GLsync, Vec<UploadPBO>)>,
    /// Zero-sized PBOs kept around so their GL names can be re-used.
    orphaned_buffers: Vec<PBO>,
}

impl UploadPBOPool {
    pub fn new(device: &mut Device, default_size: usize) -> Self {
        let usage_hint = match device.upload_method {
            UploadMethod::Immediate => VertexUsageHint::Stream,
            UploadMethod::PixelBuffer(usage_hint) => usage_hint,
        };
        Self {
            usage_hint,
            default_size,
            available_buffers: Vec::new(),
            returned_buffers: Vec::new(),
            waiting_buffers: Vec::new(),
            orphaned_buffers: Vec::new(),
        }
    }

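    /// Called at the start of a frame: polls the pending fences, oldest
    /// first, and moves the buffers whose fences have signalled back to the
    /// available list.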
    pub fn begin_frame(&mut self, device: &mut Device) {
        let mut first_not_signalled = self.waiting_buffers.len();
        for (i, (sync, buffers)) in self.waiting_buffers.iter_mut().enumerate() {
            match device.gl.client_wait_sync(*sync, 0, 0) {
                gl::TIMEOUT_EXPIRED => {
                    first_not_signalled = i;
                    break;
                },
                gl::ALREADY_SIGNALED | gl::CONDITION_SATISFIED => {
                    self.available_buffers.extend(buffers.drain(..));
                }
                gl::WAIT_FAILED | _ => {
                    warn!("glClientWaitSync error in UploadPBOPool::begin_frame()");
                    for buffer in buffers.drain(..) {
                        device.delete_pbo(buffer.pbo);
                    }
                }
            }
        }

        // Delete the fences whose buffers were handled above.
        for (sync, _) in self.waiting_buffers.drain(0..first_not_signalled) {
            device.gl.delete_sync(sync);
        }
    }

    pub fn end_frame(&mut self, device: &mut Device) {
        if !self.returned_buffers.is_empty() {
            let sync = device.gl.fence_sync(gl::SYNC_GPU_COMMANDS_COMPLETE, 0);
            if !sync.is_null() {
                self.waiting_buffers.push((sync, mem::replace(&mut self.returned_buffers, Vec::new())))
            } else {
                warn!("glFenceSync error in UploadPBOPool::end_frame()");

                for buffer in self.returned_buffers.drain(..) {
                    device.delete_pbo(buffer.pbo);
                }
            }
        }
    }

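    /// Obtains a PBO of at least `min_size` bytes, reusing a pooled buffer
    /// where possible. Requests up to the default size get a recyclable
    /// buffer (persistently mapped when the driver supports buffer storage);
    /// larger requests get a one-off, transiently mapped buffer.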
    fn get_pbo(&mut self, device: &mut Device, min_size: usize) -> Result<UploadPBO, String> {
        let (can_recycle, size) = if min_size <= self.default_size && device.capabilities.supports_nonzero_pbo_offsets {
            (true, self.default_size)
        } else {
            (false, min_size)
        };

        if can_recycle {
            if let Some(mut buffer) = self.available_buffers.pop() {
                assert_eq!(buffer.pbo.reserved_size, size);
                assert!(buffer.can_recycle);

                device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, buffer.pbo.id);

                match buffer.mapping {
                    PBOMapping::Unmapped => {
                        let ptr = device.gl.map_buffer_range(
                            gl::PIXEL_UNPACK_BUFFER,
                            0,
                            buffer.pbo.reserved_size as _,
                            gl::MAP_WRITE_BIT | gl::MAP_UNSYNCHRONIZED_BIT,
                        ) as *mut _;

                        let ptr = ptr::NonNull::new(ptr).ok_or_else(
                            || format!("Failed to transiently map PBO of size {} bytes", buffer.pbo.reserved_size)
                        )?;

                        buffer.mapping = PBOMapping::Transient(ptr);
                    }
                    PBOMapping::Transient(_) => {
                        unreachable!("Transiently mapped UploadPBO must be unmapped before returning to pool.");
                    }
                    PBOMapping::Persistent(_) => {
                    }
                }

                return Ok(buffer);
            }
        }

        let mut pbo = match self.orphaned_buffers.pop() {
            Some(pbo) => pbo,
            None => device.create_pbo(),
        };

        assert_eq!(pbo.reserved_size, 0);
        pbo.reserved_size = size;

        device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, pbo.id);
        let mapping = if device.capabilities.supports_buffer_storage && can_recycle {
            device.gl.buffer_storage(
                gl::PIXEL_UNPACK_BUFFER,
                pbo.reserved_size as _,
                ptr::null(),
                gl::MAP_WRITE_BIT | gl::MAP_PERSISTENT_BIT,
            );
            let ptr = device.gl.map_buffer_range(
                gl::PIXEL_UNPACK_BUFFER,
                0,
                pbo.reserved_size as _,
                gl::MAP_WRITE_BIT | gl::MAP_PERSISTENT_BIT | gl::MAP_FLUSH_EXPLICIT_BIT,
            ) as *mut _;

            let ptr = ptr::NonNull::new(ptr).ok_or_else(
                || format!("Failed to persistently map PBO of size {} bytes", pbo.reserved_size)
            )?;

            PBOMapping::Persistent(ptr)
        } else {
            device.gl.buffer_data_untyped(
                gl::PIXEL_UNPACK_BUFFER,
                pbo.reserved_size as _,
                ptr::null(),
                self.usage_hint.to_gl(),
            );
            let ptr = device.gl.map_buffer_range(
                gl::PIXEL_UNPACK_BUFFER,
                0,
                pbo.reserved_size as _,
                gl::MAP_WRITE_BIT,
            ) as *mut _;

            let ptr = ptr::NonNull::new(ptr).ok_or_else(
                || format!("Failed to transiently map PBO of size {} bytes", pbo.reserved_size)
            )?;

            PBOMapping::Transient(ptr)
        };

        Ok(UploadPBO { pbo, mapping, can_recycle })
    }

    fn return_pbo(&mut self, device: &mut Device, mut buffer: UploadPBO) {
        assert!(
            !matches!(buffer.mapping, PBOMapping::Transient(_)),
            "Transiently mapped UploadPBO must be unmapped before returning to pool.",
        );

        if buffer.can_recycle {
            self.returned_buffers.push(buffer);
        } else {
            device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, buffer.pbo.id);
            device.gl.buffer_data_untyped(
                gl::PIXEL_UNPACK_BUFFER,
                0,
                ptr::null(),
                gl::STREAM_DRAW,
            );
            buffer.pbo.reserved_size = 0;
            self.orphaned_buffers.push(buffer.pbo);
        }

        device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, 0);
    }

    pub fn on_memory_pressure(&mut self, device: &mut Device) {
        for buffer in self.available_buffers.drain(..) {
            device.delete_pbo(buffer.pbo);
        }
        for buffer in self.returned_buffers.drain(..) {
            device.delete_pbo(buffer.pbo);
        }
        for (sync, buffers) in self.waiting_buffers.drain(..) {
            device.gl.delete_sync(sync);
            for buffer in buffers {
                device.delete_pbo(buffer.pbo);
            }
        }
    }

    pub fn report_memory(&self) -> MemoryReport {
        let mut report = MemoryReport::default();
        for buffer in &self.available_buffers {
            report.texture_upload_pbos += buffer.pbo.reserved_size;
        }
        for buffer in &self.returned_buffers {
            report.texture_upload_pbos += buffer.pbo.reserved_size;
        }
        for (_, buffers) in &self.waiting_buffers {
            for buffer in buffers {
                report.texture_upload_pbos += buffer.pbo.reserved_size;
            }
        }
        report
    }

    pub fn deinit(&mut self, device: &mut Device) {
        for buffer in self.available_buffers.drain(..) {
            device.delete_pbo(buffer.pbo);
        }
        for buffer in self.returned_buffers.drain(..) {
            device.delete_pbo(buffer.pbo);
        }
        for (sync, buffers) in self.waiting_buffers.drain(..) {
            device.gl.delete_sync(sync);
            for buffer in buffers {
                device.delete_pbo(buffer.pbo);
            }
        }
        for pbo in self.orphaned_buffers.drain(..) {
            device.delete_pbo(pbo);
        }
    }
}

/// Batches a series of texture uploads, staging the pixel data in PBOs
/// obtained from a pool and submitting it to the GPU when flushed.
pub struct TextureUploader<'a> {
    /// Buffers containing uploads that still need to be flushed.
    buffers: Vec<PixelBuffer<'a>>,
    /// Pool used to obtain PBOs to fill with texture data.
    pub pbo_pool: &'a mut UploadPBOPool,
}

impl<'a> Drop for TextureUploader<'a> {
    fn drop(&mut self) {
        assert!(
            thread::panicking() || self.buffers.is_empty(),
            "TextureUploader must be flushed before it is dropped."
        );
    }
}

#[derive(Debug)]
pub struct UploadStagingBuffer<'a> {
    /// The PixelBuffer this upload is staged in.
    buffer: PixelBuffer<'a>,
    /// The offset of this upload within the PixelBuffer, in bytes.
    offset: usize,
    /// The size of this upload, in bytes.
    size: usize,
    /// The stride of the rows within the buffer, in bytes.
    stride: usize,
}

impl<'a> UploadStagingBuffer<'a> {
    pub fn get_stride(&self) -> usize {
        self.stride
    }

    pub fn get_mapping(&mut self) -> &mut [mem::MaybeUninit<u8>] {
        &mut self.buffer.mapping[self.offset..self.offset + self.size]
    }
}

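// A minimal usage sketch of the staged upload path, assuming a `device`,
// `pbo_pool`, `texture` and `rect` are already in scope (the names here are
// purely illustrative, not part of this API):
//
//     let mut uploader = device.upload_texture(&mut pbo_pool);
//     if let Ok(mut staged) = uploader.stage(&mut device, ImageFormat::RGBA8, rect.size()) {
//         // Write rows into staged.get_mapping(), honoring staged.get_stride().
//         uploader.upload_staged(&mut device, &texture, rect, None, staged);
//     }
//     uploader.flush(&mut device);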
impl<'a> TextureUploader<'a> {
    pub fn stage(
        &mut self,
        device: &mut Device,
        format: ImageFormat,
        size: DeviceIntSize,
    ) -> Result<UploadStagingBuffer<'a>, String> {
        assert!(matches!(device.upload_method, UploadMethod::PixelBuffer(_)), "Texture uploads should only be staged when using pixel buffers.");

        let (dst_size, dst_stride) = device.required_upload_size_and_stride(
            size,
            format,
        );

        // Find an existing buffer with enough space remaining, obtaining a
        // new one from the pool if required.
        let buffer_index = self.buffers.iter().position(|buffer| {
            buffer.size_used + dst_size <= buffer.inner.pbo.reserved_size
        });
        let buffer = match buffer_index {
            Some(i) => self.buffers.swap_remove(i),
            None => PixelBuffer::new(self.pbo_pool.get_pbo(device, dst_size)?),
        };

        if !device.capabilities.supports_nonzero_pbo_offsets {
            assert_eq!(buffer.size_used, 0, "PBO uploads from non-zero offset are not supported.");
        }
        assert!(buffer.size_used + dst_size <= buffer.inner.pbo.reserved_size, "PixelBuffer is too small");

        let offset = buffer.size_used;

        Ok(UploadStagingBuffer {
            buffer,
            offset,
            size: dst_size,
            stride: dst_stride,
        })
    }

    pub fn upload_staged(
        &mut self,
        device: &mut Device,
        texture: &'a Texture,
        rect: DeviceIntRect,
        format_override: Option<ImageFormat>,
        mut staging_buffer: UploadStagingBuffer<'a>,
    ) -> usize {
        let size = staging_buffer.size;

        staging_buffer.buffer.chunks.push(UploadChunk {
            rect,
            stride: Some(staging_buffer.stride as i32),
            offset: staging_buffer.offset,
            format_override,
            texture,
        });
        staging_buffer.buffer.size_used += staging_buffer.size;

        // Hand the buffer back for further staging if it still has room;
        // otherwise flush it now.
        if staging_buffer.buffer.size_used < staging_buffer.buffer.inner.pbo.reserved_size {
            self.buffers.push(staging_buffer.buffer);
        } else {
            Self::flush_buffer(device, self.pbo_pool, staging_buffer.buffer);
        }

        size
    }

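    /// Uploads pixel data to `texture`, cropping `rect` against the texture
    /// bounds. With `UploadMethod::Immediate` the data is submitted directly
    /// through `glTexSubImage2D`; with `UploadMethod::PixelBuffer` it is
    /// first copied (row by row when the source and destination strides
    /// differ) into a staging PBO.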
    pub fn upload<T>(
        &mut self,
        device: &mut Device,
        texture: &'a Texture,
        mut rect: DeviceIntRect,
        stride: Option<i32>,
        format_override: Option<ImageFormat>,
        data: *const T,
        len: usize,
    ) -> usize {
        let cropped = rect.intersection(
            &DeviceIntRect::from_size(texture.get_dimensions())
        );
        if cfg!(debug_assertions) && cropped.map_or(true, |r| r != rect) {
            warn!("Cropping texture upload {:?} to {:?}", rect, cropped);
        }
        rect = match cropped {
            None => return 0,
            Some(r) => r,
        };

        let bytes_pp = texture.format.bytes_per_pixel() as usize;
        let width_bytes = rect.width() as usize * bytes_pp;

        let src_stride = stride.map_or(width_bytes, |stride| {
            assert!(stride >= 0);
            stride as usize
        });
        let src_size = (rect.height() as usize - 1) * src_stride + width_bytes;
        assert!(src_size <= len * mem::size_of::<T>());

        match device.upload_method {
            UploadMethod::Immediate => {
                if cfg!(debug_assertions) {
                    let mut bound_buffer = [0];
                    unsafe {
                        device.gl.get_integer_v(gl::PIXEL_UNPACK_BUFFER_BINDING, &mut bound_buffer);
                    }
                    assert_eq!(bound_buffer[0], 0, "GL_PIXEL_UNPACK_BUFFER must not be bound for immediate uploads.");
                }

                Self::update_impl(device, UploadChunk {
                    rect,
                    stride: Some(src_stride as i32),
                    offset: data as _,
                    format_override,
                    texture,
                });

                width_bytes * rect.height() as usize
            }
            UploadMethod::PixelBuffer(_) => {
                let mut staging_buffer = match self.stage(device, texture.format, rect.size()) {
                    Ok(staging_buffer) => staging_buffer,
                    Err(_) => return 0,
                };
                let dst_stride = staging_buffer.get_stride();

                unsafe {
                    let src: &[mem::MaybeUninit<u8>] = slice::from_raw_parts(data as *const _, src_size);

                    if src_stride == dst_stride {
                        // The source and destination strides match, so copy
                        // the data in a single pass.
                        staging_buffer.get_mapping()[..src_size].copy_from_slice(src);
                    } else {
                        // Copy row by row into the correctly strided destination.
                        for y in 0..rect.height() as usize {
                            let src_start = y * src_stride;
                            let src_end = src_start + width_bytes;
                            let dst_start = y * staging_buffer.get_stride();
                            let dst_end = dst_start + width_bytes;

                            staging_buffer.get_mapping()[dst_start..dst_end].copy_from_slice(&src[src_start..src_end])
                        }
                    }
                }

                self.upload_staged(device, texture, rect, format_override, staging_buffer)
            }
        }
    }

4722 fn flush_buffer(device: &mut Device, pbo_pool: &mut UploadPBOPool, mut buffer: PixelBuffer) {
        device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, buffer.inner.pbo.id);
        match buffer.inner.mapping {
            PBOMapping::Unmapped => unreachable!("UploadPBO should be mapped at this stage."),
            PBOMapping::Transient(_) => {
                // Transient mappings must be unmapped before the GL can read
                // from the buffer.
                device.gl.unmap_buffer(gl::PIXEL_UNPACK_BUFFER);
                buffer.inner.mapping = PBOMapping::Unmapped;
            }
            PBOMapping::Persistent(_) => {
                // Persistent mappings stay mapped; flushing the written range
                // is sufficient to make it visible to the GL.
                device.gl.flush_mapped_buffer_range(gl::PIXEL_UNPACK_BUFFER, 0, buffer.size_used as _);
            }
        }
        buffer.flush_chunks(device);
        let pbo = mem::replace(&mut buffer.inner, UploadPBO::empty());
        pbo_pool.return_pbo(device, pbo);
    }

    /// Flushes all pending texture uploads. Must be called after all
    /// required upload() or upload_staged() calls have been made.
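    ///
    /// A hedged end-of-frame sketch (added; it assumes `uploader` was obtained
    /// from the device earlier in the frame, and note that `flush` consumes
    /// the uploader):
    ///
    /// ```ignore
    /// // ... upload() / upload_staged() calls for this frame ...
    /// uploader.flush(device);
    /// ```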
    pub fn flush(mut self, device: &mut Device) {
        for buffer in self.buffers.drain(..) {
            Self::flush_buffer(device, self.pbo_pool, buffer);
        }

        device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, 0);
    }

    fn update_impl(device: &mut Device, chunk: UploadChunk) {
        device.bind_texture(DEFAULT_TEXTURE, chunk.texture, Swizzle::default());

        let format = chunk.format_override.unwrap_or(chunk.texture.format);
        let (gl_format, bpp, data_type) = match format {
            ImageFormat::R8 => (gl::RED, 1, gl::UNSIGNED_BYTE),
            ImageFormat::R16 => (gl::RED, 2, gl::UNSIGNED_SHORT),
            ImageFormat::BGRA8 => (device.bgra_formats.external, 4, device.bgra_pixel_type),
            ImageFormat::RGBA8 => (gl::RGBA, 4, gl::UNSIGNED_BYTE),
            ImageFormat::RG8 => (gl::RG, 2, gl::UNSIGNED_BYTE),
            ImageFormat::RG16 => (gl::RG, 4, gl::UNSIGNED_SHORT),
            ImageFormat::RGBAF32 => (gl::RGBA, 16, gl::FLOAT),
            ImageFormat::RGBAI32 => (gl::RGBA_INTEGER, 16, gl::INT),
        };

        // Note: UNPACK_ROW_LENGTH is specified in pixels rather than bytes,
        // so the byte stride is converted using the format's bytes-per-pixel.
        let row_length = match chunk.stride {
            Some(value) => value / bpp,
            None => chunk.texture.size.width,
        };

        if chunk.stride.is_some() {
            device.gl.pixel_store_i(
                gl::UNPACK_ROW_LENGTH,
                row_length as _,
            );
        }

        let pos = chunk.rect.min;
        let size = chunk.rect.size();

        match chunk.texture.target {
            gl::TEXTURE_2D | gl::TEXTURE_RECTANGLE | gl::TEXTURE_EXTERNAL_OES => {
                // `chunk.offset` is relative to the currently bound
                // GL_PIXEL_UNPACK_BUFFER; for immediate uploads no PBO is
                // bound, so it carries the raw client pointer instead.
                device.gl.tex_sub_image_2d_pbo(
                    chunk.texture.target,
                    0,
                    pos.x as _,
                    pos.y as _,
                    size.width as _,
                    size.height as _,
                    gl_format,
                    data_type,
                    chunk.offset,
                );
            }
            _ => panic!("BUG: Unexpected texture target!"),
        }

        // If using tri-linear filtering, build the mip-map chain for this texture.
        if chunk.texture.filter == TextureFilter::Trilinear {
            device.gl.generate_mipmap(chunk.texture.target);
        }

        // Reset the row length to 0, otherwise the stride would apply to all
        // subsequent texture uploads.
        if chunk.stride.is_some() {
            device.gl.pixel_store_i(gl::UNPACK_ROW_LENGTH, 0 as _);
        }
    }
}

/// Reinterprets a slice of texels as their raw byte representation.
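///
/// An illustrative sketch (added, not from the original source; `u8`
/// implements `Texel`, so the bytes come back unchanged):
///
/// ```ignore
/// let texels: [u8; 4] = [0, 1, 2, 3];
/// assert_eq!(texels_to_u8_slice(&texels), &[0, 1, 2, 3]);
/// ```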
fn texels_to_u8_slice<T: Texel>(texels: &[T]) -> &[u8] {
    unsafe {
        slice::from_raw_parts(texels.as_ptr() as *const u8, texels.len() * mem::size_of::<T>())
    }
}