use super::super::shader_source::{OPTIMIZED_SHADERS, UNOPTIMIZED_SHADERS};
use api::{ImageDescriptor, ImageFormat, Parameter, BoolParameter, IntParameter, ImageRendering};
use api::{MixBlendMode, ImageBufferKind, VoidPtrToSizeFn};
use api::{CrashAnnotator, CrashAnnotation, CrashAnnotatorGuard};
use api::units::*;
use euclid::default::Transform3D;
use gleam::gl;
use crate::render_api::MemoryReport;
use crate::internal_types::{FastHashMap, RenderTargetInfo, Swizzle, SwizzleSettings};
use crate::util::round_up_to_multiple;
use crate::profiler;
use log::Level;
use smallvec::SmallVec;
use std::{
    borrow::Cow,
    cell::{Cell, RefCell},
    cmp,
    collections::hash_map::Entry,
    marker::PhantomData,
    mem,
    num::NonZeroUsize,
    os::raw::c_void,
    ops::Add,
    path::PathBuf,
    ptr,
    rc::Rc,
    slice,
    sync::Arc,
    thread,
    time::Duration,
};
use webrender_build::shader::{
    ProgramSourceDigest, ShaderKind, ShaderVersion, build_shader_main_string,
    build_shader_prefix_string, do_build_shader_string, shader_source_from_file,
};
use malloc_size_of::MallocSizeOfOps;

#[derive(Debug, Copy, Clone, PartialEq, Ord, Eq, PartialOrd)]
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
pub struct GpuFrameId(usize);

impl GpuFrameId {
    pub fn new(value: usize) -> Self {
        GpuFrameId(value)
    }
}

impl Add<usize> for GpuFrameId {
    type Output = GpuFrameId;

    fn add(self, other: usize) -> GpuFrameId {
        GpuFrameId(self.0 + other)
    }
}

pub struct TextureSlot(pub usize);

const DEFAULT_TEXTURE: TextureSlot = TextureSlot(0);

#[repr(u32)]
pub enum DepthFunction {
    Always = gl::ALWAYS,
    Less = gl::LESS,
    LessEqual = gl::LEQUAL,
}

#[repr(u32)]
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
pub enum TextureFilter {
    Nearest,
    Linear,
    Trilinear,
}

#[derive(Clone, Debug)]
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
pub struct TextureFormatPair<T> {
    pub internal: T,
    pub external: T,
}

impl<T: Copy> From<T> for TextureFormatPair<T> {
    fn from(value: T) -> Self {
        TextureFormatPair {
            internal: value,
            external: value,
        }
    }
}

#[derive(Debug)]
pub enum VertexAttributeKind {
    F32,
    U8Norm,
    U16Norm,
    I32,
    U16,
}

#[derive(Debug)]
pub struct VertexAttribute {
    pub name: &'static str,
    pub count: u32,
    pub kind: VertexAttributeKind,
}

#[derive(Debug)]
pub struct VertexDescriptor {
    pub vertex_attributes: &'static [VertexAttribute],
    pub instance_attributes: &'static [VertexAttribute],
}

enum FBOTarget {
    Read,
    Draw,
}

#[derive(Debug, Clone)]
pub enum UploadMethod {
    Immediate,
    PixelBuffer(VertexUsageHint),
}

pub unsafe trait Texel: Copy + Default {
    fn image_format() -> ImageFormat;
}

unsafe impl Texel for u8 {
    fn image_format() -> ImageFormat { ImageFormat::R8 }
}

fn depth_target_size_in_bytes(dimensions: &DeviceIntSize) -> usize {
    let pixels = dimensions.width * dimensions.height;
    (pixels as usize) * 4
}

pub fn get_gl_target(target: ImageBufferKind) -> gl::GLuint {
    match target {
        ImageBufferKind::Texture2D => gl::TEXTURE_2D,
        ImageBufferKind::TextureRect => gl::TEXTURE_RECTANGLE,
        ImageBufferKind::TextureExternal => gl::TEXTURE_EXTERNAL_OES,
        ImageBufferKind::TextureExternalBT709 => gl::TEXTURE_EXTERNAL_OES,
    }
}

pub fn from_gl_target(target: gl::GLuint) -> ImageBufferKind {
    match target {
        gl::TEXTURE_2D => ImageBufferKind::Texture2D,
        gl::TEXTURE_RECTANGLE => ImageBufferKind::TextureRect,
        gl::TEXTURE_EXTERNAL_OES => ImageBufferKind::TextureExternal,
        _ => panic!("Unexpected target {:?}", target),
    }
}

fn supports_extension(extensions: &[String], extension: &str) -> bool {
    extensions.iter().any(|s| s == extension)
}

fn get_shader_version(gl: &dyn gl::Gl) -> ShaderVersion {
    match gl.get_type() {
        gl::GlType::Gl => ShaderVersion::Gl,
        gl::GlType::Gles => ShaderVersion::Gles,
    }
}

pub fn get_unoptimized_shader_source(shader_name: &str, base_path: Option<&PathBuf>) -> Cow<'static, str> {
    if let Some(ref base) = base_path {
        let shader_path = base.join(&format!("{}.glsl", shader_name));
        Cow::Owned(shader_source_from_file(&shader_path))
    } else {
        Cow::Borrowed(
            UNOPTIMIZED_SHADERS
                .get(shader_name)
                .expect("Shader not found")
                .source
        )
    }
}

impl VertexAttributeKind {
    fn size_in_bytes(&self) -> u32 {
        match *self {
            VertexAttributeKind::F32 => 4,
            VertexAttributeKind::U8Norm => 1,
            VertexAttributeKind::U16Norm => 2,
            VertexAttributeKind::I32 => 4,
            VertexAttributeKind::U16 => 2,
        }
    }
}

impl VertexAttribute {
    fn size_in_bytes(&self) -> u32 {
        self.count * self.kind.size_in_bytes()
    }

    fn bind_to_vao(
        &self,
        attr_index: gl::GLuint,
        divisor: gl::GLuint,
        stride: gl::GLint,
        offset: gl::GLuint,
        gl: &dyn gl::Gl,
    ) {
        gl.enable_vertex_attrib_array(attr_index);
        gl.vertex_attrib_divisor(attr_index, divisor);

        match self.kind {
            VertexAttributeKind::F32 => {
                gl.vertex_attrib_pointer(
                    attr_index,
                    self.count as gl::GLint,
                    gl::FLOAT,
                    false,
                    stride,
                    offset,
                );
            }
            VertexAttributeKind::U8Norm => {
                gl.vertex_attrib_pointer(
                    attr_index,
                    self.count as gl::GLint,
                    gl::UNSIGNED_BYTE,
                    true,
                    stride,
                    offset,
                );
            }
            VertexAttributeKind::U16Norm => {
                gl.vertex_attrib_pointer(
                    attr_index,
                    self.count as gl::GLint,
                    gl::UNSIGNED_SHORT,
                    true,
                    stride,
                    offset,
                );
            }
            VertexAttributeKind::I32 => {
                gl.vertex_attrib_i_pointer(
                    attr_index,
                    self.count as gl::GLint,
                    gl::INT,
                    stride,
                    offset,
                );
            }
            VertexAttributeKind::U16 => {
                gl.vertex_attrib_i_pointer(
                    attr_index,
                    self.count as gl::GLint,
                    gl::UNSIGNED_SHORT,
                    stride,
                    offset,
                );
            }
        }
    }
}

impl VertexDescriptor {
    fn instance_stride(&self) -> u32 {
        self.instance_attributes
            .iter()
            .map(|attr| attr.size_in_bytes())
            .sum()
    }

    fn bind_attributes(
        attributes: &[VertexAttribute],
        start_index: usize,
        divisor: u32,
        gl: &dyn gl::Gl,
        vbo: VBOId,
    ) {
        vbo.bind(gl);

        let stride: u32 = attributes
            .iter()
            .map(|attr| attr.size_in_bytes())
            .sum();

        let mut offset = 0;
        for (i, attr) in attributes.iter().enumerate() {
            let attr_index = (start_index + i) as gl::GLuint;
            attr.bind_to_vao(attr_index, divisor, stride as _, offset, gl);
            offset += attr.size_in_bytes();
        }
    }

    fn bind(&self, gl: &dyn gl::Gl, main: VBOId, instance: VBOId, instance_divisor: u32) {
        Self::bind_attributes(self.vertex_attributes, 0, 0, gl, main);

        if !self.instance_attributes.is_empty() {
            Self::bind_attributes(
                self.instance_attributes,
                self.vertex_attributes.len(),
                instance_divisor,
                gl,
                instance,
            );
        }
    }
}

impl VBOId {
    fn bind(&self, gl: &dyn gl::Gl) {
        gl.bind_buffer(gl::ARRAY_BUFFER, self.0);
    }
}

impl IBOId {
    fn bind(&self, gl: &dyn gl::Gl) {
        gl.bind_buffer(gl::ELEMENT_ARRAY_BUFFER, self.0);
    }
}

impl FBOId {
    fn bind(&self, gl: &dyn gl::Gl, target: FBOTarget) {
        let target = match target {
            FBOTarget::Read => gl::READ_FRAMEBUFFER,
            FBOTarget::Draw => gl::DRAW_FRAMEBUFFER,
        };
        gl.bind_framebuffer(target, self.0);
    }
}

pub struct Stream<'a> {
    attributes: &'a [VertexAttribute],
    vbo: VBOId,
}

pub struct VBO<V> {
    id: gl::GLuint,
    target: gl::GLenum,
    allocated_count: usize,
    marker: PhantomData<V>,
}

impl<V> VBO<V> {
    pub fn allocated_count(&self) -> usize {
        self.allocated_count
    }

    pub fn stream_with<'a>(&self, attributes: &'a [VertexAttribute]) -> Stream<'a> {
        debug_assert_eq!(
            mem::size_of::<V>(),
            attributes.iter().map(|a| a.size_in_bytes() as usize).sum::<usize>()
        );
        Stream {
            attributes,
            vbo: VBOId(self.id),
        }
    }
}

impl<T> Drop for VBO<T> {
    fn drop(&mut self) {
        debug_assert!(thread::panicking() || self.id == 0);
    }
}

#[cfg_attr(feature = "replay", derive(Clone))]
#[derive(Debug)]
pub struct ExternalTexture {
    id: gl::GLuint,
    target: gl::GLuint,
    uv_rect: TexelRect,
    image_rendering: ImageRendering,
}

impl ExternalTexture {
    pub fn new(
        id: u32,
        target: ImageBufferKind,
        uv_rect: TexelRect,
        image_rendering: ImageRendering,
    ) -> Self {
        ExternalTexture {
            id,
            target: get_gl_target(target),
            uv_rect,
            image_rendering,
        }
    }

    #[cfg(feature = "replay")]
    pub fn internal_id(&self) -> gl::GLuint {
        self.id
    }

    pub fn get_uv_rect(&self) -> TexelRect {
        self.uv_rect
    }
}

bitflags! {
    #[derive(Default, Debug, Copy, PartialEq, Eq, Clone, PartialOrd, Ord, Hash)]
    pub struct TextureFlags: u32 {
        const IS_SHARED_TEXTURE_CACHE = 1 << 0;
    }
}

#[derive(Debug)]
pub struct Texture {
    id: gl::GLuint,
    target: gl::GLuint,
    format: ImageFormat,
    size: DeviceIntSize,
    filter: TextureFilter,
    flags: TextureFlags,
    active_swizzle: Cell<Swizzle>,
    fbo: Option<FBOId>,
    fbo_with_depth: Option<FBOId>,
    last_frame_used: GpuFrameId,
}

impl Texture {
    pub fn get_dimensions(&self) -> DeviceIntSize {
        self.size
    }

    pub fn get_format(&self) -> ImageFormat {
        self.format
    }

    pub fn get_filter(&self) -> TextureFilter {
        self.filter
    }

    pub fn get_target(&self) -> ImageBufferKind {
        from_gl_target(self.target)
    }

    pub fn supports_depth(&self) -> bool {
        self.fbo_with_depth.is_some()
    }

    pub fn last_frame_used(&self) -> GpuFrameId {
        self.last_frame_used
    }

    pub fn used_in_frame(&self, frame_id: GpuFrameId) -> bool {
        self.last_frame_used == frame_id
    }

    pub fn is_render_target(&self) -> bool {
        self.fbo.is_some()
    }

    pub fn used_recently(&self, current_frame_id: GpuFrameId, threshold: usize) -> bool {
        self.last_frame_used + threshold >= current_frame_id
    }
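    // Example (illustrative numbers): with last_frame_used == GpuFrameId(10)
    // and threshold == 5, used_recently returns true for any current frame id
    // up to and including GpuFrameId(15).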

    pub fn flags(&self) -> &TextureFlags {
        &self.flags
    }

    pub fn flags_mut(&mut self) -> &mut TextureFlags {
        &mut self.flags
    }

    pub fn size_in_bytes(&self) -> usize {
        let bpp = self.format.bytes_per_pixel() as usize;
        let w = self.size.width as usize;
        let h = self.size.height as usize;
        bpp * w * h
    }

    #[cfg(feature = "replay")]
    pub fn into_external(mut self) -> ExternalTexture {
        let ext = ExternalTexture {
            id: self.id,
            target: self.target,
            uv_rect: TexelRect::new(
                0.0,
                0.0,
                self.size.width as f32,
                self.size.height as f32,
            ),
            image_rendering: ImageRendering::Auto,
        };
        // Ownership of the GL handle moves to the ExternalTexture; zero the id
        // so the Drop impl (which asserts id == 0) does not fire.
        self.id = 0;
        ext
    }
}

impl Drop for Texture {
    fn drop(&mut self) {
        debug_assert!(thread::panicking() || self.id == 0);
    }
}

pub struct Program {
    id: gl::GLuint,
    u_transform: gl::GLint,
    u_texture_size: gl::GLint,
    source_info: ProgramSourceInfo,
    is_initialized: bool,
}

impl Program {
    pub fn is_initialized(&self) -> bool {
        self.is_initialized
    }
}

impl Drop for Program {
    fn drop(&mut self) {
        debug_assert!(
            thread::panicking() || self.id == 0,
            "renderer::deinit not called"
        );
    }
}

pub struct CustomVAO {
    id: gl::GLuint,
}

impl Drop for CustomVAO {
    fn drop(&mut self) {
        debug_assert!(
            thread::panicking() || self.id == 0,
            "renderer::deinit not called"
        );
    }
}

pub struct VAO {
    id: gl::GLuint,
    ibo_id: IBOId,
    main_vbo_id: VBOId,
    instance_vbo_id: VBOId,
    instance_stride: usize,
    instance_divisor: u32,
    owns_vertices_and_indices: bool,
}

impl Drop for VAO {
    fn drop(&mut self) {
        debug_assert!(
            thread::panicking() || self.id == 0,
            "renderer::deinit not called"
        );
    }
}

#[derive(Debug)]
pub struct PBO {
    id: gl::GLuint,
    reserved_size: usize,
}

impl PBO {
    pub fn get_reserved_size(&self) -> usize {
        self.reserved_size
    }
}

impl Drop for PBO {
    fn drop(&mut self) {
        debug_assert!(
            thread::panicking() || self.id == 0,
            "renderer::deinit not called or PBO not returned to pool"
        );
    }
}

pub struct BoundPBO<'a> {
    device: &'a mut Device,
    pub data: &'a [u8],
}

impl<'a> Drop for BoundPBO<'a> {
    fn drop(&mut self) {
        self.device.gl.unmap_buffer(gl::PIXEL_PACK_BUFFER);
        self.device.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, 0);
    }
}

#[derive(PartialEq, Eq, Hash, Debug, Copy, Clone)]
pub struct FBOId(gl::GLuint);

#[derive(PartialEq, Eq, Hash, Debug, Copy, Clone)]
pub struct RBOId(gl::GLuint);

#[derive(PartialEq, Eq, Hash, Debug, Copy, Clone)]
pub struct VBOId(gl::GLuint);

#[derive(PartialEq, Eq, Hash, Debug, Copy, Clone)]
struct IBOId(gl::GLuint);

#[derive(Clone, Debug)]
enum ProgramSourceType {
    Unoptimized,
    Optimized(ShaderVersion),
}

#[derive(Clone, Debug)]
pub struct ProgramSourceInfo {
    base_filename: &'static str,
    features: Vec<&'static str>,
    full_name_cstr: Rc<std::ffi::CString>,
    source_type: ProgramSourceType,
    digest: ProgramSourceDigest,
}

impl ProgramSourceInfo {
    fn new(
        device: &Device,
        name: &'static str,
        features: &[&'static str],
    ) -> Self {

        use std::collections::hash_map::DefaultHasher;
        use std::hash::Hasher;

        let mut hasher = DefaultHasher::new();
        let gl_version = get_shader_version(&*device.gl());

        hasher.write(device.capabilities.renderer_name.as_bytes());

        let full_name = Self::make_full_name(name, features);

        let optimized_source = if device.use_optimized_shaders {
            OPTIMIZED_SHADERS.get(&(gl_version, &full_name)).or_else(|| {
                warn!("Missing optimized shader source for {}", &full_name);
                None
            })
        } else {
            None
        };

        let source_type = match optimized_source {
            Some(source_and_digest) => {
                if cfg!(debug_assertions) {
                    let mut h = DefaultHasher::new();
                    h.write(source_and_digest.vert_source.as_bytes());
                    h.write(source_and_digest.frag_source.as_bytes());
                    let d: ProgramSourceDigest = h.into();
                    let digest = d.to_string();
                    debug_assert_eq!(digest, source_and_digest.digest);
                    hasher.write(digest.as_bytes());
                } else {
                    hasher.write(source_and_digest.digest.as_bytes());
                }

                ProgramSourceType::Optimized(gl_version)
            }
            None => {
                let override_path = device.resource_override_path.as_ref();
                let source_and_digest = UNOPTIMIZED_SHADERS.get(&name).expect("Shader not found");

                build_shader_prefix_string(
                    gl_version,
                    &features,
                    ShaderKind::Vertex,
                    &name,
                    &mut |s| hasher.write(s.as_bytes()),
                );

                if override_path.is_some() || cfg!(debug_assertions) {
                    let mut h = DefaultHasher::new();
                    build_shader_main_string(
                        &name,
                        &|f| get_unoptimized_shader_source(f, override_path),
                        &mut |s| h.write(s.as_bytes())
                    );
                    let d: ProgramSourceDigest = h.into();
                    let digest = format!("{}", d);
                    debug_assert!(override_path.is_some() || digest == source_and_digest.digest);
                    hasher.write(digest.as_bytes());
                } else {
                    hasher.write(source_and_digest.digest.as_bytes());
                }

                ProgramSourceType::Unoptimized
            }
        };

        ProgramSourceInfo {
            base_filename: name,
            features: features.to_vec(),
            full_name_cstr: Rc::new(std::ffi::CString::new(full_name).unwrap()),
            source_type,
            digest: hasher.into(),
        }
    }

    fn compute_source(&self, device: &Device, kind: ShaderKind) -> String {
        let full_name = self.full_name();
        match self.source_type {
            ProgramSourceType::Optimized(gl_version) => {
                let shader = OPTIMIZED_SHADERS
                    .get(&(gl_version, &full_name))
                    .unwrap_or_else(|| panic!("Missing optimized shader source for {}", full_name));

                match kind {
                    ShaderKind::Vertex => shader.vert_source.to_string(),
                    ShaderKind::Fragment => shader.frag_source.to_string(),
                }
            },
            ProgramSourceType::Unoptimized => {
                let mut src = String::new();
                device.build_shader_string(
                    &self.features,
                    kind,
                    self.base_filename,
                    |s| src.push_str(s),
                );
                src
            }
        }
    }

    fn make_full_name(base_filename: &'static str, features: &[&'static str]) -> String {
        if features.is_empty() {
            base_filename.to_string()
        } else {
            format!("{}_{}", base_filename, features.join("_"))
        }
    }

    fn full_name(&self) -> String {
        Self::make_full_name(self.base_filename, &self.features)
    }
}

#[cfg_attr(feature = "serialize_program", derive(Deserialize, Serialize))]
pub struct ProgramBinary {
    bytes: Vec<u8>,
    format: gl::GLenum,
    source_digest: ProgramSourceDigest,
}

impl ProgramBinary {
    fn new(bytes: Vec<u8>,
           format: gl::GLenum,
           source_digest: ProgramSourceDigest) -> Self {
        ProgramBinary {
            bytes,
            format,
            source_digest,
        }
    }

    pub fn source_digest(&self) -> &ProgramSourceDigest {
        &self.source_digest
    }
}

pub trait ProgramCacheObserver {
    fn save_shaders_to_disk(&self, entries: Vec<Arc<ProgramBinary>>);
    fn set_startup_shaders(&self, entries: Vec<Arc<ProgramBinary>>);
    fn try_load_shader_from_disk(&self, digest: &ProgramSourceDigest, program_cache: &Rc<ProgramCache>);
    fn notify_program_binary_failed(&self, program_binary: &Arc<ProgramBinary>);
}

struct ProgramCacheEntry {
    binary: Arc<ProgramBinary>,
    linked: bool,
}

pub struct ProgramCache {
    entries: RefCell<FastHashMap<ProgramSourceDigest, ProgramCacheEntry>>,

    program_cache_handler: Option<Box<dyn ProgramCacheObserver>>,

    pending_entries: RefCell<Vec<Arc<ProgramBinary>>>,
}

impl ProgramCache {
    pub fn new(program_cache_observer: Option<Box<dyn ProgramCacheObserver>>) -> Rc<Self> {
        Rc::new(
            ProgramCache {
                entries: RefCell::new(FastHashMap::default()),
                program_cache_handler: program_cache_observer,
                pending_entries: RefCell::new(Vec::default()),
            }
        )
    }

    fn update_disk_cache(&self, startup_complete: bool) {
        if let Some(ref handler) = self.program_cache_handler {
            if !self.pending_entries.borrow().is_empty() {
                let pending_entries = self.pending_entries.replace(Vec::default());
                handler.save_shaders_to_disk(pending_entries);
            }

            if startup_complete {
                let startup_shaders = self.entries.borrow().values()
                    .filter(|e| e.linked).map(|e| e.binary.clone())
                    .collect::<Vec<_>>();
                handler.set_startup_shaders(startup_shaders);
            }
        }
    }

    fn add_new_program_binary(&self, program_binary: Arc<ProgramBinary>) {
        self.pending_entries.borrow_mut().push(program_binary.clone());

        let digest = program_binary.source_digest.clone();
        let entry = ProgramCacheEntry {
            binary: program_binary,
            linked: true,
        };
        self.entries.borrow_mut().insert(digest, entry);
    }

    #[cfg(feature = "serialize_program")]
    pub fn load_program_binary(&self, program_binary: Arc<ProgramBinary>) {
        let digest = program_binary.source_digest.clone();
        let entry = ProgramCacheEntry {
            binary: program_binary,
            linked: false,
        };
        self.entries.borrow_mut().insert(digest, entry);
    }

    pub fn report_memory(&self, op: VoidPtrToSizeFn) -> usize {
        self.entries.borrow().values()
            .map(|e| unsafe { op(e.binary.bytes.as_ptr() as *const c_void ) })
            .sum()
    }
}

#[derive(Debug, Copy, Clone)]
pub enum VertexUsageHint {
    Static,
    Dynamic,
    Stream,
}

impl VertexUsageHint {
    fn to_gl(&self) -> gl::GLuint {
        match *self {
            VertexUsageHint::Static => gl::STATIC_DRAW,
            VertexUsageHint::Dynamic => gl::DYNAMIC_DRAW,
            VertexUsageHint::Stream => gl::STREAM_DRAW,
        }
    }
}

#[derive(Copy, Clone, Debug)]
pub struct UniformLocation(#[allow(dead_code)] gl::GLint);

impl UniformLocation {
    pub const INVALID: Self = UniformLocation(-1);
}

#[derive(Debug)]
pub struct Capabilities {
    pub supports_multisampling: bool,
    pub supports_copy_image_sub_data: bool,
    pub supports_color_buffer_float: bool,
    pub supports_buffer_storage: bool,
    pub supports_advanced_blend_equation: bool,
    pub supports_dual_source_blending: bool,
    pub supports_khr_debug: bool,
    pub supports_texture_swizzle: bool,
    pub supports_nonzero_pbo_offsets: bool,
    pub supports_texture_usage: bool,
    pub supports_render_target_partial_update: bool,
    pub supports_shader_storage_object: bool,
    pub requires_batched_texture_uploads: Option<bool>,
    pub supports_alpha_target_clears: bool,
    pub requires_alpha_target_full_clear: bool,
    pub prefers_clear_scissor: bool,
    pub supports_render_target_invalidate: bool,
    pub supports_r8_texture_upload: bool,
    pub supports_qcom_tiled_rendering: bool,
    pub uses_native_clip_mask: bool,
    pub uses_native_antialiasing: bool,
    pub supports_image_external_essl3: bool,
    pub requires_vao_rebind_after_orphaning: bool,
    pub renderer_name: String,
}

#[derive(Clone, Debug)]
pub enum ShaderError {
    Compilation(String, String), // name, error message
    Link(String, String),        // name, error message
}

struct SharedDepthTarget {
    rbo_id: RBOId,
    refcount: usize,
}

#[cfg(debug_assertions)]
impl Drop for SharedDepthTarget {
    fn drop(&mut self) {
        debug_assert!(thread::panicking() || self.refcount == 0);
    }
}

#[derive(PartialEq, Debug)]
enum TexStorageUsage {
    Never,
    NonBGRA8,
    Always,
}

#[derive(Copy, Clone, Debug)]
pub enum StrideAlignment {
    Bytes(NonZeroUsize),
    Pixels(NonZeroUsize),
}

impl StrideAlignment {
    pub fn num_bytes(&self, format: ImageFormat) -> NonZeroUsize {
        match *self {
            Self::Bytes(bytes) => bytes,
            Self::Pixels(pixels) => {
                assert!(format.bytes_per_pixel() > 0);
                NonZeroUsize::new(pixels.get() * format.bytes_per_pixel() as usize).unwrap()
            }
        }
    }
}

const RESERVE_DEPTH_BITS: i32 = 2;

pub struct Device {
    gl: Rc<dyn gl::Gl>,

    base_gl: Option<Rc<dyn gl::Gl>>,

    bound_textures: [gl::GLuint; 16],
    bound_program: gl::GLuint,
    bound_program_name: Rc<std::ffi::CString>,
    bound_vao: gl::GLuint,
    bound_read_fbo: (FBOId, DeviceIntPoint),
    bound_draw_fbo: FBOId,
    default_read_fbo: FBOId,
    default_draw_fbo: FBOId,

    depth_available: bool,

    upload_method: UploadMethod,
    use_batched_texture_uploads: bool,
    use_draw_calls_for_texture_copy: bool,
    batched_upload_threshold: i32,

    capabilities: Capabilities,

    color_formats: TextureFormatPair<ImageFormat>,
    bgra_formats: TextureFormatPair<gl::GLuint>,
    bgra_pixel_type: gl::GLuint,
    swizzle_settings: SwizzleSettings,
    depth_format: gl::GLuint,

    depth_targets: FastHashMap<DeviceIntSize, SharedDepthTarget>,

    inside_frame: bool,
    crash_annotator: Option<Box<dyn CrashAnnotator>>,
    annotate_draw_call_crashes: bool,

    resource_override_path: Option<PathBuf>,

    use_optimized_shaders: bool,

    max_texture_size: i32,
    cached_programs: Option<Rc<ProgramCache>>,

    frame_id: GpuFrameId,

    texture_storage_usage: TexStorageUsage,

    required_pbo_stride: StrideAlignment,

    requires_null_terminated_shader_source: bool,

    requires_texture_external_unbind: bool,

    is_software_webrender: bool,

    extensions: Vec<String>,

    dump_shader_source: Option<String>,

    surface_origin_is_top_left: bool,

    #[cfg(debug_assertions)]
    shader_is_ready: bool,

    pub textures_created: u32,
    pub textures_deleted: u32,
}

#[derive(Clone, Copy, Debug)]
pub enum DrawTarget {
    Default {
        rect: FramebufferIntRect,
        total_size: FramebufferIntSize,
        surface_origin_is_top_left: bool,
    },
    Texture {
        dimensions: DeviceIntSize,
        with_depth: bool,
        fbo_id: FBOId,
        id: gl::GLuint,
        target: gl::GLuint,
    },
    External {
        fbo: FBOId,
        size: FramebufferIntSize,
    },
    NativeSurface {
        offset: DeviceIntPoint,
        external_fbo_id: u32,
        dimensions: DeviceIntSize,
    },
}

impl DrawTarget {
    pub fn new_default(size: DeviceIntSize, surface_origin_is_top_left: bool) -> Self {
        let total_size = device_size_as_framebuffer_size(size);
        DrawTarget::Default {
            rect: total_size.into(),
            total_size,
            surface_origin_is_top_left,
        }
    }

    pub fn is_default(&self) -> bool {
        match *self {
            DrawTarget::Default {..} => true,
            _ => false,
        }
    }

    pub fn from_texture(
        texture: &Texture,
        with_depth: bool,
    ) -> Self {
        let fbo_id = if with_depth {
            texture.fbo_with_depth.unwrap()
        } else {
            texture.fbo.unwrap()
        };

        DrawTarget::Texture {
            dimensions: texture.get_dimensions(),
            fbo_id,
            with_depth,
            id: texture.id,
            target: texture.target,
        }
    }

    pub fn dimensions(&self) -> DeviceIntSize {
        match *self {
            DrawTarget::Default { total_size, .. } => total_size.cast_unit(),
            DrawTarget::Texture { dimensions, .. } => dimensions,
            DrawTarget::External { size, .. } => size.cast_unit(),
            DrawTarget::NativeSurface { dimensions, .. } => dimensions,
        }
    }

    pub fn offset(&self) -> DeviceIntPoint {
        match *self {
            DrawTarget::Default { .. } |
            DrawTarget::Texture { .. } |
            DrawTarget::External { .. } => {
                DeviceIntPoint::zero()
            }
            DrawTarget::NativeSurface { offset, .. } => offset,
        }
    }

    pub fn to_framebuffer_rect(&self, device_rect: DeviceIntRect) -> FramebufferIntRect {
        let mut fb_rect = device_rect_as_framebuffer_rect(&device_rect);
        match *self {
            DrawTarget::Default { ref rect, surface_origin_is_top_left, .. } => {
                if !surface_origin_is_top_left {
                    let w = fb_rect.width();
                    let h = fb_rect.height();
                    fb_rect.min.x = fb_rect.min.x + rect.min.x;
                    fb_rect.min.y = rect.max.y - fb_rect.max.y;
                    fb_rect.max.x = fb_rect.min.x + w;
                    fb_rect.max.y = fb_rect.min.y + h;
                }
            }
            DrawTarget::Texture { .. } | DrawTarget::External { .. } | DrawTarget::NativeSurface { .. } => (),
        }
        fb_rect
    }
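    // Worked example of the y-flip above (illustrative numbers): on a 100px
    // tall default target with a bottom-left origin, a device rect spanning
    // y = 10..30 maps to framebuffer y = 70..90, i.e. min.y = 100 - 30 = 70,
    // and the 20px height is preserved.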

    pub fn surface_origin_is_top_left(&self) -> bool {
        match *self {
            DrawTarget::Default { surface_origin_is_top_left, .. } => surface_origin_is_top_left,
            DrawTarget::Texture { .. } | DrawTarget::External { .. } | DrawTarget::NativeSurface { .. } => true,
        }
    }

    pub fn build_scissor_rect(
        &self,
        scissor_rect: Option<DeviceIntRect>,
    ) -> FramebufferIntRect {
        let dimensions = self.dimensions();

        match scissor_rect {
            Some(scissor_rect) => match *self {
                DrawTarget::Default { ref rect, .. } => {
                    self.to_framebuffer_rect(scissor_rect)
                        .intersection(rect)
                        .unwrap_or_else(FramebufferIntRect::zero)
                }
                DrawTarget::NativeSurface { offset, .. } => {
                    device_rect_as_framebuffer_rect(&scissor_rect.translate(offset.to_vector()))
                }
                DrawTarget::Texture { .. } | DrawTarget::External { .. } => {
                    device_rect_as_framebuffer_rect(&scissor_rect)
                }
            }
            None => {
                FramebufferIntRect::from_size(
                    device_size_as_framebuffer_size(dimensions),
                )
            }
        }
    }
}

#[derive(Clone, Copy, Debug)]
pub enum ReadTarget {
    Default,
    Texture {
        fbo_id: FBOId,
    },
    External {
        fbo: FBOId,
    },
    NativeSurface {
        fbo_id: FBOId,
        offset: DeviceIntPoint,
    },
}

impl ReadTarget {
    pub fn from_texture(
        texture: &Texture,
    ) -> Self {
        ReadTarget::Texture {
            fbo_id: texture.fbo.unwrap(),
        }
    }

    fn offset(&self) -> DeviceIntPoint {
        match *self {
            ReadTarget::Default |
            ReadTarget::Texture { .. } |
            ReadTarget::External { .. } => {
                DeviceIntPoint::zero()
            }

            ReadTarget::NativeSurface { offset, .. } => {
                offset
            }
        }
    }
}

impl From<DrawTarget> for ReadTarget {
    fn from(t: DrawTarget) -> Self {
        match t {
            DrawTarget::Default { .. } => {
                ReadTarget::Default
            }
            DrawTarget::NativeSurface { external_fbo_id, offset, .. } => {
                ReadTarget::NativeSurface {
                    fbo_id: FBOId(external_fbo_id),
                    offset,
                }
            }
            DrawTarget::Texture { fbo_id, .. } => {
                ReadTarget::Texture { fbo_id }
            }
            DrawTarget::External { fbo, .. } => {
                ReadTarget::External { fbo }
            }
        }
    }
}

fn parse_mali_version(version_string: &str) -> Option<(u32, u32, u32)> {
    let (_prefix, version_string) = version_string.split_once("v")?;
    let (v_str, version_string) = version_string.split_once(".r")?;
    let v = v_str.parse().ok()?;

    let (r_str, version_string) = version_string.split_once("p")?;
    let r = r_str.parse().ok()?;

    let (p_str, _) = version_string.split_once("-").unwrap_or((version_string, ""));
    let p = p_str.parse().ok()?;

    Some((v, r, p))
}

fn is_mali_midgard(renderer_name: &str) -> bool {
    renderer_name.starts_with("Mali-T")
}

fn is_mali_bifrost(renderer_name: &str) -> bool {
    renderer_name == "Mali-G31"
        || renderer_name == "Mali-G51"
        || renderer_name == "Mali-G71"
        || renderer_name == "Mali-G52"
        || renderer_name == "Mali-G72"
        || renderer_name == "Mali-G76"
}

fn is_mali_valhall(renderer_name: &str) -> bool {
    renderer_name.starts_with("Mali-G") && !is_mali_bifrost(renderer_name)
}

impl Device {
    pub fn new(
        mut gl: Rc<dyn gl::Gl>,
        crash_annotator: Option<Box<dyn CrashAnnotator>>,
        resource_override_path: Option<PathBuf>,
        use_optimized_shaders: bool,
        upload_method: UploadMethod,
        batched_upload_threshold: i32,
        cached_programs: Option<Rc<ProgramCache>>,
        allow_texture_storage_support: bool,
        allow_texture_swizzling: bool,
        dump_shader_source: Option<String>,
        surface_origin_is_top_left: bool,
        panic_on_gl_error: bool,
    ) -> Device {
        let mut max_texture_size = [0];
        unsafe {
            gl.get_integer_v(gl::MAX_TEXTURE_SIZE, &mut max_texture_size);
        }

        let max_texture_size = max_texture_size[0].min(16384);

        let renderer_name = gl.get_string(gl::RENDERER);
        info!("Renderer: {}", renderer_name);
        let version_string = gl.get_string(gl::VERSION);
        info!("Version: {}", version_string);
        info!("Max texture size: {}", max_texture_size);

        let mut extension_count = [0];
        unsafe {
            gl.get_integer_v(gl::NUM_EXTENSIONS, &mut extension_count);
        }
        let extension_count = extension_count[0] as gl::GLuint;
        let mut extensions = Vec::new();
        for i in 0 .. extension_count {
            extensions.push(gl.get_string_i(gl::EXTENSIONS, i));
        }

        let is_xclipse = renderer_name.starts_with("ANGLE (Samsung Xclipse");

        let supports_khr_debug =
            supports_extension(&extensions, "GL_KHR_debug")
            && !is_mali_valhall(&renderer_name)
            && !is_xclipse;
        if panic_on_gl_error || cfg!(debug_assertions) {
            gl = gl::ErrorReactingGl::wrap(gl, move |gl, name, code| {
                if supports_khr_debug {
                    Self::log_driver_messages(gl);
                }
                error!("Caught GL error {:x} at {}", code, name);
                panic!("Caught GL error {:x} at {}", code, name);
            });
        }

        if supports_extension(&extensions, "GL_ANGLE_provoking_vertex") {
            gl.provoking_vertex_angle(gl::FIRST_VERTEX_CONVENTION);
        }

        let supports_texture_usage = supports_extension(&extensions, "GL_ANGLE_texture_usage");

        let is_emulator = renderer_name.starts_with("Android Emulator");
        let avoid_tex_image = is_emulator;
        let mut gl_version = [0; 2];
        unsafe {
            gl.get_integer_v(gl::MAJOR_VERSION, &mut gl_version[0..1]);
            gl.get_integer_v(gl::MINOR_VERSION, &mut gl_version[1..2]);
        }
        info!("GL context {:?} {}.{}", gl.get_type(), gl_version[0], gl_version[1]);

        let supports_texture_storage = allow_texture_storage_support && !cfg!(target_os = "macos") &&
            match gl.get_type() {
                gl::GlType::Gl => supports_extension(&extensions, "GL_ARB_texture_storage"),
                gl::GlType::Gles => true,
            };

        let supports_gles_bgra = supports_extension(&extensions, "GL_EXT_texture_format_BGRA8888");
        let supports_texture_storage_with_gles_bgra = supports_gles_bgra
            && supports_extension(&extensions, "GL_EXT_texture_storage")
            && !renderer_name.starts_with("Intel(R) HD Graphics for BayTrail")
            && !renderer_name.starts_with("Intel(R) HD Graphics for Atom(TM) x5/x7");

        let supports_texture_swizzle = allow_texture_swizzling &&
            match gl.get_type() {
                gl::GlType::Gl => gl_version >= [3, 3] ||
                    supports_extension(&extensions, "GL_ARB_texture_swizzle"),
                gl::GlType::Gles => true,
            };

        let (color_formats, bgra_formats, bgra_pixel_type, bgra8_sampling_swizzle, texture_storage_usage) = match gl.get_type() {
            gl::GlType::Gl if supports_texture_storage && supports_texture_swizzle => (
                TextureFormatPair::from(ImageFormat::RGBA8),
                TextureFormatPair { internal: gl::RGBA8, external: gl::RGBA },
                gl::UNSIGNED_BYTE,
                Swizzle::Bgra,
                TexStorageUsage::Always,
            ),
            gl::GlType::Gl => (
                TextureFormatPair { internal: ImageFormat::BGRA8, external: ImageFormat::BGRA8 },
                TextureFormatPair { internal: gl::RGBA, external: gl::BGRA },
                gl::UNSIGNED_INT_8_8_8_8_REV,
                Swizzle::Rgba,
                TexStorageUsage::Never,
            ),
            gl::GlType::Gles if supports_texture_storage_with_gles_bgra => (
                TextureFormatPair::from(ImageFormat::BGRA8),
                TextureFormatPair { internal: gl::BGRA8_EXT, external: gl::BGRA_EXT },
                gl::UNSIGNED_BYTE,
                Swizzle::Rgba,
                TexStorageUsage::Always,
            ),
            gl::GlType::Gles if supports_texture_swizzle => (
                TextureFormatPair::from(ImageFormat::RGBA8),
                TextureFormatPair { internal: gl::RGBA8, external: gl::RGBA },
                gl::UNSIGNED_BYTE,
                Swizzle::Bgra,
                TexStorageUsage::Always,
            ),
            gl::GlType::Gles if supports_gles_bgra && !avoid_tex_image => (
                TextureFormatPair::from(ImageFormat::BGRA8),
                TextureFormatPair::from(gl::BGRA_EXT),
                gl::UNSIGNED_BYTE,
                Swizzle::Rgba,
                TexStorageUsage::NonBGRA8,
            ),
            gl::GlType::Gles => {
                warn!("Neither BGRA nor texture swizzling is supported. Images may be rendered incorrectly.");
                (
                    TextureFormatPair::from(ImageFormat::RGBA8),
                    TextureFormatPair { internal: gl::RGBA8, external: gl::RGBA },
                    gl::UNSIGNED_BYTE,
                    Swizzle::Rgba,
                    TexStorageUsage::Always,
                )
            }
        };

        let is_software_webrender = renderer_name.starts_with("Software WebRender");
        let upload_method = if is_software_webrender {
            UploadMethod::Immediate
        } else {
            upload_method
        };
        let depth_format = gl::DEPTH_COMPONENT24;

        info!("GL texture cache {:?}, bgra {:?} swizzle {:?}, texture storage {:?}, depth {:?}",
            color_formats, bgra_formats, bgra8_sampling_swizzle, texture_storage_usage, depth_format);

        let supports_copy_image_sub_data = if renderer_name.starts_with("Mali") {
            false
        } else {
            supports_extension(&extensions, "GL_EXT_copy_image") ||
            supports_extension(&extensions, "GL_ARB_copy_image")
        };

        let is_x86_powervr_rogue_g6430 = renderer_name.starts_with("PowerVR Rogue G6430")
            && cfg!(target_arch = "x86");
        let supports_color_buffer_float = match gl.get_type() {
            gl::GlType::Gl => true,
            gl::GlType::Gles if is_x86_powervr_rogue_g6430 => false,
            gl::GlType::Gles => supports_extension(&extensions, "GL_EXT_color_buffer_float"),
        };

        let is_adreno = renderer_name.starts_with("Adreno");

        let supports_buffer_storage = if is_adreno {
            false
        } else {
            supports_extension(&extensions, "GL_EXT_buffer_storage") ||
            supports_extension(&extensions, "GL_ARB_buffer_storage")
        };

        let supports_advanced_blend_equation =
            supports_extension(&extensions, "GL_KHR_blend_equation_advanced") &&
            !is_adreno;

        let supports_dual_source_blending = match gl.get_type() {
            gl::GlType::Gl => supports_extension(&extensions, "GL_ARB_blend_func_extended") &&
                supports_extension(&extensions, "GL_ARB_explicit_attrib_location"),
            gl::GlType::Gles => supports_extension(&extensions, "GL_EXT_blend_func_extended"),
        };

        let use_optimized_shaders = use_optimized_shaders && !is_software_webrender;

        let requires_null_terminated_shader_source = is_emulator || renderer_name == "Mali-T628"
            || renderer_name == "Mali-T720" || renderer_name == "Mali-T760";

        let requires_texture_external_unbind = is_emulator;

        let is_macos = cfg!(target_os = "macos");
        let is_windows_angle = cfg!(target_os = "windows")
            && renderer_name.starts_with("ANGLE");
        let is_adreno_3xx = renderer_name.starts_with("Adreno (TM) 3");

        let required_pbo_stride = if is_adreno_3xx {
            StrideAlignment::Bytes(NonZeroUsize::new(128).unwrap())
        } else if is_adreno {
            StrideAlignment::Pixels(NonZeroUsize::new(64).unwrap())
        } else if is_macos {
            StrideAlignment::Bytes(NonZeroUsize::new(256).unwrap())
        } else if is_windows_angle {
            StrideAlignment::Bytes(NonZeroUsize::new(1).unwrap())
        } else {
            StrideAlignment::Bytes(NonZeroUsize::new(4).unwrap())
        };
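        // Example with illustrative numbers: on a non-3xx Adreno the alignment
        // is Pixels(64); for an RGBA8 upload that is 64 * 4 = 256 bytes, so a
        // 100-pixel-wide row (400 bytes of data) is laid out in the PBO with a
        // 512-byte stride.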

        let supports_nonzero_pbo_offsets = !is_macos;

        let supports_render_target_partial_update =
            !is_mali_midgard(&renderer_name) && !is_mali_bifrost(&renderer_name);

        let supports_shader_storage_object = match gl.get_type() {
            gl::GlType::Gl => supports_extension(&extensions, "GL_ARB_shader_storage_buffer_object"),
            gl::GlType::Gles => gl_version >= [3, 1],
        };

        let uses_native_clip_mask = is_software_webrender;

        let uses_native_antialiasing = is_software_webrender;

        let mut android_mesa_version = None;
        if cfg!(target_os = "android") && renderer_name.starts_with("Mesa") {
            if let Some((_, mesa_version)) = version_string.split_once("Mesa ") {
                if let Some((major_str, _)) = mesa_version.split_once(".") {
                    if let Ok(major) = major_str.parse::<i32>() {
                        android_mesa_version = Some(major);
                    }
                }
            }
        }

        let supports_image_external_essl3 = match android_mesa_version {
            Some(major) if major < 20 => false,
            _ => supports_extension(&extensions, "GL_OES_EGL_image_external_essl3"),
        };

        let mut requires_batched_texture_uploads = None;
        if is_software_webrender {
            requires_batched_texture_uploads = Some(false);
        } else if renderer_name.starts_with("Mali-G") {
            requires_batched_texture_uploads = Some(true);
        }

        let supports_alpha_target_clears = !is_mali_midgard(&renderer_name);

        let is_adreno_4xx = renderer_name.starts_with("Adreno (TM) 4");
        let requires_alpha_target_full_clear = is_adreno_4xx;

        let prefers_clear_scissor = !cfg!(target_os = "android") || is_software_webrender;

        let mut supports_render_target_invalidate = true;

        let is_powervr_rogue = renderer_name.starts_with("PowerVR Rogue");
        if is_powervr_rogue {
            supports_render_target_invalidate = false;
        }

        if is_mali_valhall(&renderer_name) {
            match parse_mali_version(&version_string) {
                Some(version) if version >= (1, 36, 0) => supports_render_target_invalidate = false,
                _ => {}
            }
        }

        let supports_r8_texture_upload = if cfg!(target_os = "linux")
            && renderer_name.starts_with("AMD Radeon RX")
        {
            false
        } else {
            true
        };

        let supports_qcom_tiled_rendering = if is_adreno && version_string.contains("V@0490") {
            false
        } else if renderer_name == "Adreno (TM) 308" {
            false
        } else {
            supports_extension(&extensions, "GL_QCOM_tiled_rendering")
        };

        let requires_vao_rebind_after_orphaning = is_adreno_3xx;

        Device {
            gl,
            base_gl: None,
            crash_annotator,
            annotate_draw_call_crashes: false,
            resource_override_path,
            use_optimized_shaders,
            upload_method,
            use_batched_texture_uploads: requires_batched_texture_uploads.unwrap_or(false),
            use_draw_calls_for_texture_copy: false,
            batched_upload_threshold,

            inside_frame: false,

            capabilities: Capabilities {
                supports_multisampling: false,
                supports_copy_image_sub_data,
                supports_color_buffer_float,
                supports_buffer_storage,
                supports_advanced_blend_equation,
                supports_dual_source_blending,
                supports_khr_debug,
                supports_texture_swizzle,
                supports_nonzero_pbo_offsets,
                supports_texture_usage,
                supports_render_target_partial_update,
                supports_shader_storage_object,
                requires_batched_texture_uploads,
                supports_alpha_target_clears,
                requires_alpha_target_full_clear,
                prefers_clear_scissor,
                supports_render_target_invalidate,
                supports_r8_texture_upload,
                supports_qcom_tiled_rendering,
                uses_native_clip_mask,
                uses_native_antialiasing,
                supports_image_external_essl3,
                requires_vao_rebind_after_orphaning,
                renderer_name,
            },

            color_formats,
            bgra_formats,
            bgra_pixel_type,
            swizzle_settings: SwizzleSettings {
                bgra8_sampling_swizzle,
            },
            depth_format,

            depth_targets: FastHashMap::default(),

            bound_textures: [0; 16],
            bound_program: 0,
            bound_program_name: Rc::new(std::ffi::CString::new("").unwrap()),
            bound_vao: 0,
            bound_read_fbo: (FBOId(0), DeviceIntPoint::zero()),
            bound_draw_fbo: FBOId(0),
            default_read_fbo: FBOId(0),
            default_draw_fbo: FBOId(0),

            depth_available: true,

            max_texture_size,
            cached_programs,
            frame_id: GpuFrameId(0),
            extensions,
            texture_storage_usage,
            requires_null_terminated_shader_source,
            requires_texture_external_unbind,
            is_software_webrender,
            required_pbo_stride,
            dump_shader_source,
            surface_origin_is_top_left,

            #[cfg(debug_assertions)]
            shader_is_ready: false,

            textures_created: 0,
            textures_deleted: 0,
        }
    }

    pub fn gl(&self) -> &dyn gl::Gl {
        &*self.gl
    }

    pub fn rc_gl(&self) -> &Rc<dyn gl::Gl> {
        &self.gl
    }

    pub fn set_parameter(&mut self, param: &Parameter) {
        match param {
            Parameter::Bool(BoolParameter::PboUploads, enabled) => {
                if !self.is_software_webrender {
                    self.upload_method = if *enabled {
                        UploadMethod::PixelBuffer(crate::ONE_TIME_USAGE_HINT)
                    } else {
                        UploadMethod::Immediate
                    };
                }
            }
            Parameter::Bool(BoolParameter::BatchedUploads, enabled) => {
                if self.capabilities.requires_batched_texture_uploads.is_none() {
                    self.use_batched_texture_uploads = *enabled;
                }
            }
            Parameter::Bool(BoolParameter::DrawCallsForTextureCopy, enabled) => {
                self.use_draw_calls_for_texture_copy = *enabled;
            }
            Parameter::Int(IntParameter::BatchedUploadThreshold, threshold) => {
                self.batched_upload_threshold = *threshold;
            }
            _ => {}
        }
    }

    pub fn clamp_max_texture_size(&mut self, size: i32) {
        self.max_texture_size = self.max_texture_size.min(size);
    }

    pub fn max_texture_size(&self) -> i32 {
        self.max_texture_size
    }

    pub fn surface_origin_is_top_left(&self) -> bool {
        self.surface_origin_is_top_left
    }

    pub fn get_capabilities(&self) -> &Capabilities {
        &self.capabilities
    }

    pub fn preferred_color_formats(&self) -> TextureFormatPair<ImageFormat> {
        self.color_formats.clone()
    }

    pub fn swizzle_settings(&self) -> Option<SwizzleSettings> {
        if self.capabilities.supports_texture_swizzle {
            Some(self.swizzle_settings)
        } else {
            None
        }
    }

    pub fn depth_bits(&self) -> i32 {
        match self.depth_format {
            gl::DEPTH_COMPONENT16 => 16,
            gl::DEPTH_COMPONENT24 => 24,
            _ => panic!("Unknown depth format {:?}", self.depth_format),
        }
    }

    pub fn max_depth_ids(&self) -> i32 {
        return 1 << (self.depth_bits() - RESERVE_DEPTH_BITS);
    }

    pub fn ortho_near_plane(&self) -> f32 {
        return -self.max_depth_ids() as f32;
    }

    pub fn ortho_far_plane(&self) -> f32 {
        return (self.max_depth_ids() - 1) as f32;
    }
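    // Worked arithmetic: with the default DEPTH_COMPONENT24 format and
    // RESERVE_DEPTH_BITS == 2, max_depth_ids is 1 << 22 = 4_194_304, so the
    // orthographic near/far planes span -4194304.0 to 4194303.0.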

    pub fn required_pbo_stride(&self) -> StrideAlignment {
        self.required_pbo_stride
    }

    pub fn upload_method(&self) -> &UploadMethod {
        &self.upload_method
    }

    pub fn use_batched_texture_uploads(&self) -> bool {
        self.use_batched_texture_uploads
    }

    pub fn use_draw_calls_for_texture_copy(&self) -> bool {
        self.use_draw_calls_for_texture_copy
    }

    pub fn batched_upload_threshold(&self) -> i32 {
        self.batched_upload_threshold
    }

    pub fn reset_state(&mut self) {
        for i in 0 .. self.bound_textures.len() {
            self.bound_textures[i] = 0;
            self.gl.active_texture(gl::TEXTURE0 + i as gl::GLuint);
            self.gl.bind_texture(gl::TEXTURE_2D, 0);
        }

        self.bound_vao = 0;
        self.gl.bind_vertex_array(0);

        self.bound_read_fbo = (self.default_read_fbo, DeviceIntPoint::zero());
        self.gl.bind_framebuffer(gl::READ_FRAMEBUFFER, self.default_read_fbo.0);

        self.bound_draw_fbo = self.default_draw_fbo;
        self.gl.bind_framebuffer(gl::DRAW_FRAMEBUFFER, self.bound_draw_fbo.0);
    }

    #[cfg(debug_assertions)]
    fn print_shader_errors(source: &str, log: &str) {
        if !log.starts_with("0:") && !log.starts_with("0(") {
            return;
        }
        let end_pos = match log[2..].chars().position(|c| !c.is_digit(10)) {
            Some(pos) => 2 + pos,
            None => return,
        };
        let base_line_number = match log[2 .. end_pos].parse::<usize>() {
            Ok(number) if number >= 2 => number - 2,
            _ => return,
        };
        for (line, prefix) in source.lines().skip(base_line_number).zip(&["|",">","|"]) {
            error!("{}\t{}", prefix, line);
        }
    }
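    // Illustrative driver log (formats vary by vendor): given a message like
    // "0:57(12): error: ...", the digits after "0:" select line 57, and the
    // loop prints source lines 56-58 with the offending line marked by ">".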
2109
2110 pub fn compile_shader(
2111 &self,
2112 name: &str,
2113 shader_type: gl::GLenum,
2114 source: &String,
2115 ) -> Result<gl::GLuint, ShaderError> {
2116 debug!("compile {}", name);
2117 let id = self.gl.create_shader(shader_type);
2118
2119 let mut new_source = Cow::from(source.as_str());
2120 if self.requires_null_terminated_shader_source {
2123 new_source.to_mut().push('\0');
2124 }
2125
2126 self.gl.shader_source(id, &[new_source.as_bytes()]);
2127 self.gl.compile_shader(id);
2128 let log = self.gl.get_shader_info_log(id);
2129 let mut status = [0];
2130 unsafe {
2131 self.gl.get_shader_iv(id, gl::COMPILE_STATUS, &mut status);
2132 }
2133 if status[0] == 0 {
2134 let type_str = match shader_type {
2135 gl::VERTEX_SHADER => "vertex",
2136 gl::FRAGMENT_SHADER => "fragment",
2137 _ => panic!("Unexpected shader type {:x}", shader_type),
2138 };
2139 error!("Failed to compile {} shader: {}\n{}", type_str, name, log);
2140 #[cfg(debug_assertions)]
2141 Self::print_shader_errors(source, &log);
2142 Err(ShaderError::Compilation(name.to_string(), log))
2143 } else {
2144 if !log.is_empty() {
2145 warn!("Warnings detected on shader: {}\n{}", name, log);
2146 }
2147 Ok(id)
2148 }
2149 }
2150
2151 pub fn begin_frame(&mut self) -> GpuFrameId {
2152 debug_assert!(!self.inside_frame);
2153 self.inside_frame = true;
2154 #[cfg(debug_assertions)]
2155 {
2156 self.shader_is_ready = false;
2157 }
2158
2159 self.textures_created = 0;
2160 self.textures_deleted = 0;
2161
2162 let being_profiled = profiler::thread_is_being_profiled();
2165 let using_wrapper = self.base_gl.is_some();
2166
2167 if cfg!(any(target_arch = "arm", target_arch = "aarch64"))
2172 && cfg!(target_os = "android")
2173 && being_profiled
2174 && !using_wrapper
2175 {
2176 fn note(name: &str, duration: Duration) {
2177 profiler::add_text_marker("OpenGL Calls", name, duration);
2178 }
2179 let threshold = Duration::from_millis(1);
2180 let wrapped = gl::ProfilingGl::wrap(self.gl.clone(), threshold, note);
2181 let base = mem::replace(&mut self.gl, wrapped);
2182 self.base_gl = Some(base);
2183 } else if !being_profiled && using_wrapper {
2184 self.gl = self.base_gl.take().unwrap();
2185 }
2186
2187 let mut default_read_fbo = [0];
2189 unsafe {
2190 self.gl.get_integer_v(gl::READ_FRAMEBUFFER_BINDING, &mut default_read_fbo);
2191 }
2192 self.default_read_fbo = FBOId(default_read_fbo[0] as gl::GLuint);
2193 let mut default_draw_fbo = [0];
2194 unsafe {
2195 self.gl.get_integer_v(gl::DRAW_FRAMEBUFFER_BINDING, &mut default_draw_fbo);
2196 }
2197 self.default_draw_fbo = FBOId(default_draw_fbo[0] as gl::GLuint);
2198
2199 self.bound_program = 0;
2201 self.gl.use_program(0);
2202
2203 self.reset_state();
2205
2206 self.gl.pixel_store_i(gl::UNPACK_ALIGNMENT, 1);
2208 self.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, 0);
2209
2210 self.gl.active_texture(gl::TEXTURE0);
2212
2213 self.frame_id
2214 }
2215
2216 fn bind_texture_impl(
2217 &mut self,
2218 slot: TextureSlot,
2219 id: gl::GLuint,
2220 target: gl::GLenum,
2221 set_swizzle: Option<Swizzle>,
2222 image_rendering: Option<ImageRendering>,
2223 ) {
2224 debug_assert!(self.inside_frame);
2225
2226 if self.bound_textures[slot.0] != id || set_swizzle.is_some() || image_rendering.is_some() {
2227 self.gl.active_texture(gl::TEXTURE0 + slot.0 as gl::GLuint);
2228 if target == gl::TEXTURE_2D && self.requires_texture_external_unbind {
2231 self.gl.bind_texture(gl::TEXTURE_EXTERNAL_OES, 0);
2232 }
2233 self.gl.bind_texture(target, id);
2234 if let Some(swizzle) = set_swizzle {
2235 if self.capabilities.supports_texture_swizzle {
2236 let components = match swizzle {
2237 Swizzle::Rgba => [gl::RED, gl::GREEN, gl::BLUE, gl::ALPHA],
2238 Swizzle::Bgra => [gl::BLUE, gl::GREEN, gl::RED, gl::ALPHA],
2239 };
2240 self.gl.tex_parameter_i(target, gl::TEXTURE_SWIZZLE_R, components[0] as i32);
2241 self.gl.tex_parameter_i(target, gl::TEXTURE_SWIZZLE_G, components[1] as i32);
2242 self.gl.tex_parameter_i(target, gl::TEXTURE_SWIZZLE_B, components[2] as i32);
2243 self.gl.tex_parameter_i(target, gl::TEXTURE_SWIZZLE_A, components[3] as i32);
2244 } else {
2245 debug_assert_eq!(swizzle, Swizzle::default());
2246 }
2247 }
2248 if let Some(image_rendering) = image_rendering {
2249 let filter = match image_rendering {
2250 ImageRendering::Auto | ImageRendering::CrispEdges => gl::LINEAR,
2251 ImageRendering::Pixelated => gl::NEAREST,
2252 };
2253 self.gl.tex_parameter_i(target, gl::TEXTURE_MIN_FILTER, filter as i32);
2254 self.gl.tex_parameter_i(target, gl::TEXTURE_MAG_FILTER, filter as i32);
2255 }
2256 self.gl.active_texture(gl::TEXTURE0);
2257 self.bound_textures[slot.0] = id;
2258 }
2259 }
2260
2261 pub fn bind_texture<S>(&mut self, slot: S, texture: &Texture, swizzle: Swizzle)
2262 where
2263 S: Into<TextureSlot>,
2264 {
2265 let old_swizzle = texture.active_swizzle.replace(swizzle);
2266 let set_swizzle = if old_swizzle != swizzle {
2267 Some(swizzle)
2268 } else {
2269 None
2270 };
2271 self.bind_texture_impl(slot.into(), texture.id, texture.target, set_swizzle, None);
2272 }
2273
2274 pub fn bind_external_texture<S>(&mut self, slot: S, external_texture: &ExternalTexture)
2275 where
2276 S: Into<TextureSlot>,
2277 {
2278 self.bind_texture_impl(
2279 slot.into(),
2280 external_texture.id,
2281 external_texture.target,
2282 None,
2283 Some(external_texture.image_rendering),
2284 );
2285 }
2286
2287 pub fn bind_read_target_impl(
2288 &mut self,
2289 fbo_id: FBOId,
2290 offset: DeviceIntPoint,
2291 ) {
2292 debug_assert!(self.inside_frame);
2293
2294 if self.bound_read_fbo != (fbo_id, offset) {
2295 fbo_id.bind(self.gl(), FBOTarget::Read);
2296 }
2297
2298 self.bound_read_fbo = (fbo_id, offset);
2299 }
2300
2301 pub fn bind_read_target(&mut self, target: ReadTarget) {
2302 let fbo_id = match target {
2303 ReadTarget::Default => self.default_read_fbo,
2304 ReadTarget::Texture { fbo_id } => fbo_id,
2305 ReadTarget::External { fbo } => fbo,
2306 ReadTarget::NativeSurface { fbo_id, .. } => fbo_id,
2307 };
2308
2309 self.bind_read_target_impl(fbo_id, target.offset())
2310 }
2311
2312 fn bind_draw_target_impl(&mut self, fbo_id: FBOId) {
2313 debug_assert!(self.inside_frame);
2314
2315 if self.bound_draw_fbo != fbo_id {
2316 self.bound_draw_fbo = fbo_id;
2317 fbo_id.bind(self.gl(), FBOTarget::Draw);
2318 }
2319 }
2320
2321 pub fn reset_read_target(&mut self) {
2322 let fbo = self.default_read_fbo;
2323 self.bind_read_target_impl(fbo, DeviceIntPoint::zero());
2324 }
2325
2326
2327 pub fn reset_draw_target(&mut self) {
2328 let fbo = self.default_draw_fbo;
2329 self.bind_draw_target_impl(fbo);
2330 self.depth_available = true;
2331 }
2332
2333 pub fn bind_draw_target(
2334 &mut self,
2335 target: DrawTarget,
2336 ) {
2337 let (fbo_id, rect, depth_available) = match target {
2338 DrawTarget::Default { rect, .. } => {
2339 (self.default_draw_fbo, rect, false)
2340 }
2341 DrawTarget::Texture { dimensions, fbo_id, with_depth, .. } => {
2342 let rect = FramebufferIntRect::from_size(
2343 device_size_as_framebuffer_size(dimensions),
2344 );
2345 (fbo_id, rect, with_depth)
2346 },
2347 DrawTarget::External { fbo, size } => {
2348 (fbo, size.into(), false)
2349 }
2350 DrawTarget::NativeSurface { external_fbo_id, offset, dimensions, .. } => {
2351 (
2352 FBOId(external_fbo_id),
2353 device_rect_as_framebuffer_rect(&DeviceIntRect::from_origin_and_size(offset, dimensions)),
2354 true
2355 )
2356 }
2357 };
2358
2359 self.depth_available = depth_available;
2360 self.bind_draw_target_impl(fbo_id);
2361 self.gl.viewport(
2362 rect.min.x,
2363 rect.min.y,
2364 rect.width(),
2365 rect.height(),
2366 );
2367 }
2368
2369 pub fn create_fbo(&mut self) -> FBOId {
2372 FBOId(self.gl.gen_framebuffers(1)[0])
2373 }
2374
2375 pub fn create_fbo_for_external_texture(&mut self, texture_id: u32) -> FBOId {
2377 let fbo = self.create_fbo();
2378 fbo.bind(self.gl(), FBOTarget::Draw);
2379 self.gl.framebuffer_texture_2d(
2380 gl::DRAW_FRAMEBUFFER,
2381 gl::COLOR_ATTACHMENT0,
2382 gl::TEXTURE_2D,
2383 texture_id,
2384 0,
2385 );
2386 debug_assert_eq!(
2387 self.gl.check_frame_buffer_status(gl::DRAW_FRAMEBUFFER),
2388 gl::FRAMEBUFFER_COMPLETE,
2389 "Incomplete framebuffer",
2390 );
2391 self.bound_draw_fbo.bind(self.gl(), FBOTarget::Draw);
2392 fbo
2393 }
2394
2395 pub fn delete_fbo(&mut self, fbo: FBOId) {
2396 self.gl.delete_framebuffers(&[fbo.0]);
2397 }
2398
2399 pub fn bind_external_draw_target(&mut self, fbo_id: FBOId) {
2400 debug_assert!(self.inside_frame);
2401
2402 if self.bound_draw_fbo != fbo_id {
2403 self.bound_draw_fbo = fbo_id;
2404 fbo_id.bind(self.gl(), FBOTarget::Draw);
2405 }
2406 }

    /// Link a program, attempting to load it from the shader cache first. If
    /// no usable cached binary is found, the vertex and fragment shaders are
    /// compiled from source and the resulting binary is stored in the cache.
    pub fn link_program(
        &mut self,
        program: &mut Program,
        descriptor: &VertexDescriptor,
    ) -> Result<(), ShaderError> {
        profile_scope!("compile shader");

        let _guard = CrashAnnotatorGuard::new(
            &self.crash_annotator,
            CrashAnnotation::CompileShader,
            &program.source_info.full_name_cstr,
        );

        assert!(!program.is_initialized());
        let mut build_program = true;
        let info = &program.source_info;

        // Attempt to load a cached binary if possible.
        if let Some(ref cached_programs) = self.cached_programs {
            if cached_programs.entries.borrow().get(&program.source_info.digest).is_none() {
                if let Some(ref handler) = cached_programs.program_cache_handler {
                    handler.try_load_shader_from_disk(&program.source_info.digest, cached_programs);
                    if let Some(entry) = cached_programs.entries.borrow().get(&program.source_info.digest) {
                        self.gl.program_binary(program.id, entry.binary.format, &entry.binary.bytes);
                    }
                }
            }

            if let Some(entry) = cached_programs.entries.borrow_mut().get_mut(&info.digest) {
                let mut link_status = [0];
                unsafe {
                    self.gl.get_program_iv(program.id, gl::LINK_STATUS, &mut link_status);
                }
                if link_status[0] == 0 {
                    let error_log = self.gl.get_program_info_log(program.id);
                    error!(
                        "Failed to load a program object with a program binary: {} renderer {}\n{}",
                        &info.base_filename,
                        self.capabilities.renderer_name,
                        error_log
                    );
                    if let Some(ref program_cache_handler) = cached_programs.program_cache_handler {
                        program_cache_handler.notify_program_binary_failed(&entry.binary);
                    }
                } else {
                    entry.linked = true;
                    build_program = false;
                }
            }
        }

        // If no cached binary could be used, do a normal compile + link pass.
        if build_program {
            // Compile the vertex shader.
            let vs_source = info.compute_source(self, ShaderKind::Vertex);
            let vs_id = match self.compile_shader(&info.full_name(), gl::VERTEX_SHADER, &vs_source) {
                Ok(vs_id) => vs_id,
                Err(err) => return Err(err),
            };

            // Compile the fragment shader.
            let fs_source = info.compute_source(self, ShaderKind::Fragment);
            let fs_id =
                match self.compile_shader(&info.full_name(), gl::FRAGMENT_SHADER, &fs_source) {
                    Ok(fs_id) => fs_id,
                    Err(err) => {
                        self.gl.delete_shader(vs_id);
                        return Err(err);
                    }
                };

            // Dump the shader source to disk if requested.
            if Some(info.base_filename) == self.dump_shader_source.as_ref().map(String::as_ref) {
                let path = std::path::Path::new(info.base_filename);
                std::fs::write(path.with_extension("vert"), vs_source).unwrap();
                std::fs::write(path.with_extension("frag"), fs_source).unwrap();
            }

            // Attach shaders.
            self.gl.attach_shader(program.id, vs_id);
            self.gl.attach_shader(program.id, fs_id);

            // Bind vertex attributes.
            for (i, attr) in descriptor
                .vertex_attributes
                .iter()
                .chain(descriptor.instance_attributes.iter())
                .enumerate()
            {
                self.gl
                    .bind_attrib_location(program.id, i as gl::GLuint, attr.name);
            }

            if self.cached_programs.is_some() {
                self.gl.program_parameter_i(program.id, gl::PROGRAM_BINARY_RETRIEVABLE_HINT, gl::TRUE as gl::GLint);
            }

            // Link!
            self.gl.link_program(program.id);

            // The shaders are no longer needed once the program is linked;
            // detach and delete them.
            self.gl.detach_shader(program.id, vs_id);
            self.gl.detach_shader(program.id, fs_id);
            self.gl.delete_shader(vs_id);
            self.gl.delete_shader(fs_id);

            let mut link_status = [0];
            unsafe {
                self.gl.get_program_iv(program.id, gl::LINK_STATUS, &mut link_status);
            }
            if link_status[0] == 0 {
                let error_log = self.gl.get_program_info_log(program.id);
                error!(
                    "Failed to link shader program: {}\n{}",
                    &info.base_filename,
                    error_log
                );
                self.gl.delete_program(program.id);
                return Err(ShaderError::Link(info.base_filename.to_owned(), error_log));
            }

            // The link succeeded, so store the binary in the cache.
            if let Some(ref cached_programs) = self.cached_programs {
                if !cached_programs.entries.borrow().contains_key(&info.digest) {
                    let (buffer, format) = self.gl.get_program_binary(program.id);
                    if !buffer.is_empty() {
                        let binary = Arc::new(ProgramBinary::new(buffer, format, info.digest.clone()));
                        cached_programs.add_new_program_binary(binary);
                    }
                }
            }
        }

        program.is_initialized = true;
        program.u_transform = self.gl.get_uniform_location(program.id, "uTransform");
        program.u_texture_size = self.gl.get_uniform_location(program.id, "uTextureSize");

        Ok(())
    }
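
    // Sketch of typical usage (not from the original source; `device` and
    // `DESC` are hypothetical): a program is created first, which may preload
    // a cached binary, and is then linked against a vertex descriptor.
    //
    //     let mut program = device.create_program("example_shader", &[])?;
    //     device.link_program(&mut program, &DESC)?;
    //     device.bind_program(&program);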

    pub fn bind_program(&mut self, program: &Program) -> bool {
        debug_assert!(self.inside_frame);
        debug_assert!(program.is_initialized());
        if !program.is_initialized() {
            return false;
        }
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = true;
        }

        if self.bound_program != program.id {
            self.gl.use_program(program.id);
            self.bound_program = program.id;
            self.bound_program_name = program.source_info.full_name_cstr.clone();
        }
        true
    }

    pub fn create_texture(
        &mut self,
        target: ImageBufferKind,
        format: ImageFormat,
        mut width: i32,
        mut height: i32,
        filter: TextureFilter,
        render_target: Option<RenderTargetInfo>,
    ) -> Texture {
        debug_assert!(self.inside_frame);

        if width > self.max_texture_size || height > self.max_texture_size {
            error!("Attempting to allocate a texture of size {}x{} above the limit, trimming", width, height);
            width = width.min(self.max_texture_size);
            height = height.min(self.max_texture_size);
        }

        // Set up the texture book-keeping.
        let mut texture = Texture {
            id: self.gl.gen_textures(1)[0],
            target: get_gl_target(target),
            size: DeviceIntSize::new(width, height),
            format,
            filter,
            active_swizzle: Cell::default(),
            fbo: None,
            fbo_with_depth: None,
            last_frame_used: self.frame_id,
            flags: TextureFlags::default(),
        };
        self.bind_texture(DEFAULT_TEXTURE, &texture, Swizzle::default());
        self.set_texture_parameters(texture.target, filter);

        if self.capabilities.supports_texture_usage && render_target.is_some() {
            self.gl.tex_parameter_i(texture.target, gl::TEXTURE_USAGE_ANGLE, gl::FRAMEBUFFER_ATTACHMENT_ANGLE as gl::GLint);
        }

        let desc = self.gl_describe_format(texture.format);

        // Trilinear filtering requires a full mip chain; other filters only
        // need the base level.
        let mipmap_levels = if texture.filter == TextureFilter::Trilinear {
            let max_dimension = cmp::max(width, height);
            (max_dimension as f64).log2() as gl::GLint + 1
        } else {
            1
        };

        // Ensure no PBO is bound, so the allocation below reads its (absent)
        // data argument as a client pointer rather than a PBO offset.
        self.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, 0);

        // Allocate storage. Immutable storage via glTexStorage2D is preferred
        // where the device supports it for this format; otherwise fall back
        // to the mutable glTexImage2D path.
        let use_texture_storage = match self.texture_storage_usage {
            TexStorageUsage::Always => true,
            TexStorageUsage::NonBGRA8 => texture.format != ImageFormat::BGRA8,
            TexStorageUsage::Never => false,
        };
        if use_texture_storage {
            self.gl.tex_storage_2d(
                texture.target,
                mipmap_levels,
                desc.internal,
                texture.size.width as gl::GLint,
                texture.size.height as gl::GLint,
            );
        } else {
            self.gl.tex_image_2d(
                texture.target,
                0,
                desc.internal as gl::GLint,
                texture.size.width as gl::GLint,
                texture.size.height as gl::GLint,
                0,
                desc.external,
                desc.pixel_type,
                None,
            );
        }

        // Set up FBOs, if required.
        if let Some(rt_info) = render_target {
            self.init_fbos(&mut texture, false);
            if rt_info.has_depth {
                self.init_fbos(&mut texture, true);
            }
        }

        self.textures_created += 1;

        texture
    }
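
    // Worked example (illustrative): for a 1000x600 trilinear texture the mip
    // chain length computed above is floor(log2(1000)) + 1 = 9 + 1 = 10
    // levels, whereas Nearest/Linear textures allocate a single level.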

    fn set_texture_parameters(&mut self, target: gl::GLuint, filter: TextureFilter) {
        let mag_filter = match filter {
            TextureFilter::Nearest => gl::NEAREST,
            TextureFilter::Linear | TextureFilter::Trilinear => gl::LINEAR,
        };

        let min_filter = match filter {
            TextureFilter::Nearest => gl::NEAREST,
            TextureFilter::Linear => gl::LINEAR,
            TextureFilter::Trilinear => gl::LINEAR_MIPMAP_LINEAR,
        };

        self.gl
            .tex_parameter_i(target, gl::TEXTURE_MAG_FILTER, mag_filter as gl::GLint);
        self.gl
            .tex_parameter_i(target, gl::TEXTURE_MIN_FILTER, min_filter as gl::GLint);

        self.gl
            .tex_parameter_i(target, gl::TEXTURE_WRAP_S, gl::CLAMP_TO_EDGE as gl::GLint);
        self.gl
            .tex_parameter_i(target, gl::TEXTURE_WRAP_T, gl::CLAMP_TO_EDGE as gl::GLint);
    }

    /// Copies the entire contents of `src` into `dst`. The destination must
    /// be at least as large as the source.
    pub fn copy_entire_texture(
        &mut self,
        dst: &mut Texture,
        src: &Texture,
    ) {
        debug_assert!(self.inside_frame);
        debug_assert!(dst.size.width >= src.size.width);
        debug_assert!(dst.size.height >= src.size.height);

        self.copy_texture_sub_region(
            src,
            0,
            0,
            dst,
            0,
            0,
            src.size.width as _,
            src.size.height as _,
        );
    }

    /// Copies the specified subregion from src_texture to dest_texture.
    pub fn copy_texture_sub_region(
        &mut self,
        src_texture: &Texture,
        src_x: usize,
        src_y: usize,
        dest_texture: &Texture,
        dest_x: usize,
        dest_y: usize,
        width: usize,
        height: usize,
    ) {
        if self.capabilities.supports_copy_image_sub_data {
            assert_ne!(
                src_texture.id, dest_texture.id,
                "glCopyImageSubData's behaviour is undefined if src and dst images are identical and the rectangles overlap."
            );
            unsafe {
                self.gl.copy_image_sub_data(
                    src_texture.id,
                    src_texture.target,
                    0,
                    src_x as _,
                    src_y as _,
                    0,
                    dest_texture.id,
                    dest_texture.target,
                    0,
                    dest_x as _,
                    dest_y as _,
                    0,
                    width as _,
                    height as _,
                    1,
                );
            }
        } else {
            // Fall back to a framebuffer blit when glCopyImageSubData is not
            // supported. The rects are the same size, so the filter is moot.
            let src_offset = FramebufferIntPoint::new(src_x as i32, src_y as i32);
            let dest_offset = FramebufferIntPoint::new(dest_x as i32, dest_y as i32);
            let size = FramebufferIntSize::new(width as i32, height as i32);

            self.blit_render_target(
                ReadTarget::from_texture(src_texture),
                FramebufferIntRect::from_origin_and_size(src_offset, size),
                DrawTarget::from_texture(dest_texture, false),
                FramebufferIntRect::from_origin_and_size(dest_offset, size),
                TextureFilter::Nearest,
            );
        }
    }

    /// Notifies the device that the contents of a render target are no longer
    /// needed, allowing the driver (notably on tiled GPUs) to skip writing
    /// them back to memory.
    pub fn invalidate_render_target(&mut self, texture: &Texture) {
        if self.capabilities.supports_render_target_invalidate {
            let (fbo, attachments) = if texture.supports_depth() {
                (&texture.fbo_with_depth,
                 &[gl::COLOR_ATTACHMENT0, gl::DEPTH_ATTACHMENT] as &[gl::GLenum])
            } else {
                (&texture.fbo, &[gl::COLOR_ATTACHMENT0] as &[gl::GLenum])
            };

            if let Some(fbo_id) = fbo {
                let original_bound_fbo = self.bound_draw_fbo;
                // Temporarily bind the target FBO so the invalidate applies to
                // it, then restore the previous binding.
                self.bind_external_draw_target(*fbo_id);
                self.gl.invalidate_framebuffer(gl::FRAMEBUFFER, attachments);
                self.bind_external_draw_target(original_bound_fbo);
            }
        }
    }

    /// Notifies the device that the contents of the current depth target are
    /// no longer needed. The default framebuffer is invalidated with
    /// `gl::DEPTH`, whereas application FBOs use `gl::DEPTH_ATTACHMENT`.
    pub fn invalidate_depth_target(&mut self) {
        assert!(self.depth_available);
        let attachments = if self.bound_draw_fbo == self.default_draw_fbo {
            &[gl::DEPTH] as &[gl::GLenum]
        } else {
            &[gl::DEPTH_ATTACHMENT] as &[gl::GLenum]
        };
        self.gl.invalidate_framebuffer(gl::DRAW_FRAMEBUFFER, attachments);
    }

    /// Notifies the device that a render target is about to be reused, adding
    /// a depth attachment if the new use requires one the texture lacks.
    pub fn reuse_render_target<T: Texel>(
        &mut self,
        texture: &mut Texture,
        rt_info: RenderTargetInfo,
    ) {
        texture.last_frame_used = self.frame_id;

        // Add depth support if needed.
        if rt_info.has_depth && !texture.supports_depth() {
            self.init_fbos(texture, true);
        }
    }

    fn init_fbos(&mut self, texture: &mut Texture, with_depth: bool) {
        let (fbo, depth_rb) = if with_depth {
            let depth_target = self.acquire_depth_target(texture.get_dimensions());
            (&mut texture.fbo_with_depth, Some(depth_target))
        } else {
            (&mut texture.fbo, None)
        };

        // Generate the FBO.
        assert!(fbo.is_none());
        let fbo_id = FBOId(*self.gl.gen_framebuffers(1).first().unwrap());
        *fbo = Some(fbo_id);

        // Bind the FBO, attach the color (and optionally depth) buffers, and
        // restore the previous binding afterwards.
        let original_bound_fbo = self.bound_draw_fbo;

        self.bind_external_draw_target(fbo_id);

        self.gl.framebuffer_texture_2d(
            gl::DRAW_FRAMEBUFFER,
            gl::COLOR_ATTACHMENT0,
            texture.target,
            texture.id,
            0,
        );

        if let Some(depth_rb) = depth_rb {
            self.gl.framebuffer_renderbuffer(
                gl::DRAW_FRAMEBUFFER,
                gl::DEPTH_ATTACHMENT,
                gl::RENDERBUFFER,
                depth_rb.0,
            );
        }

        debug_assert_eq!(
            self.gl.check_frame_buffer_status(gl::DRAW_FRAMEBUFFER),
            gl::FRAMEBUFFER_COMPLETE,
            "Incomplete framebuffer",
        );

        self.bind_external_draw_target(original_bound_fbo);
    }

    fn acquire_depth_target(&mut self, dimensions: DeviceIntSize) -> RBOId {
        let gl = &self.gl;
        let depth_format = self.depth_format;
        let target = self.depth_targets.entry(dimensions).or_insert_with(|| {
            let renderbuffer_ids = gl.gen_renderbuffers(1);
            let depth_rb = renderbuffer_ids[0];
            gl.bind_renderbuffer(gl::RENDERBUFFER, depth_rb);
            gl.renderbuffer_storage(
                gl::RENDERBUFFER,
                depth_format,
                dimensions.width as _,
                dimensions.height as _,
            );
            SharedDepthTarget {
                rbo_id: RBOId(depth_rb),
                refcount: 0,
            }
        });
        target.refcount += 1;
        target.rbo_id
    }

    fn release_depth_target(&mut self, dimensions: DeviceIntSize) {
        let mut entry = match self.depth_targets.entry(dimensions) {
            Entry::Occupied(x) => x,
            Entry::Vacant(..) => panic!("Releasing unknown depth target"),
        };
        debug_assert!(entry.get().refcount != 0);
        entry.get_mut().refcount -= 1;
        if entry.get().refcount == 0 {
            let (_, target) = entry.remove_entry();
            self.gl.delete_renderbuffers(&[target.rbo_id.0]);
        }
    }
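
    // Illustrative note: depth renderbuffers are shared per dimension and
    // refcounted, so two render targets of the same size share one RBO:
    //
    //     let a = self.acquire_depth_target(size); // refcount 0 -> 1, allocates
    //     let b = self.acquire_depth_target(size); // refcount 1 -> 2, same RBOId
    //     self.release_depth_target(size);         // refcount 2 -> 1
    //     self.release_depth_target(size);         // refcount 1 -> 0, deletes the RBO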

    /// Perform a blit between `self.bound_read_fbo` and `self.bound_draw_fbo`.
    fn blit_render_target_impl(
        &mut self,
        src_rect: FramebufferIntRect,
        dest_rect: FramebufferIntRect,
        filter: TextureFilter,
    ) {
        debug_assert!(self.inside_frame);

        let filter = match filter {
            TextureFilter::Nearest => gl::NEAREST,
            TextureFilter::Linear | TextureFilter::Trilinear => gl::LINEAR,
        };

        let src_x0 = src_rect.min.x + self.bound_read_fbo.1.x;
        let src_y0 = src_rect.min.y + self.bound_read_fbo.1.y;

        self.gl.blit_framebuffer(
            src_x0,
            src_y0,
            src_x0 + src_rect.width(),
            src_y0 + src_rect.height(),
            dest_rect.min.x,
            dest_rect.min.y,
            dest_rect.max.x,
            dest_rect.max.y,
            gl::COLOR_BUFFER_BIT,
            filter,
        );
    }

    /// Perform a blit between `src_target` and `dest_target`. This overwrites
    /// the device's current read and draw framebuffer bindings.
    pub fn blit_render_target(
        &mut self,
        src_target: ReadTarget,
        src_rect: FramebufferIntRect,
        dest_target: DrawTarget,
        dest_rect: FramebufferIntRect,
        filter: TextureFilter,
    ) {
        debug_assert!(self.inside_frame);

        self.bind_read_target(src_target);

        self.bind_draw_target(dest_target);

        self.blit_render_target_impl(src_rect, dest_rect, filter);
    }

    /// Performs a blit while flipping vertically. Useful for blitting
    /// textures (which use an upside-down coordinate system relative to the
    /// screen) to the main framebuffer.
    pub fn blit_render_target_invert_y(
        &mut self,
        src_target: ReadTarget,
        src_rect: FramebufferIntRect,
        dest_target: DrawTarget,
        dest_rect: FramebufferIntRect,
    ) {
        debug_assert!(self.inside_frame);

        let mut inverted_dest_rect = dest_rect;
        inverted_dest_rect.min.y = dest_rect.max.y;
        inverted_dest_rect.max.y = dest_rect.min.y;

        self.blit_render_target(
            src_target,
            src_rect,
            dest_target,
            inverted_dest_rect,
            TextureFilter::Linear,
        );
    }
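
    // Illustrative example: with dest_rect spanning y = 0..100, the inverted
    // rect above spans y = 100..0, so glBlitFramebuffer receives swapped y
    // coordinates and performs a vertical flip as part of the blit.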

    pub fn delete_texture(&mut self, mut texture: Texture) {
        debug_assert!(self.inside_frame);
        let had_depth = texture.supports_depth();
        if let Some(fbo) = texture.fbo {
            self.gl.delete_framebuffers(&[fbo.0]);
            texture.fbo = None;
        }
        if let Some(fbo) = texture.fbo_with_depth {
            self.gl.delete_framebuffers(&[fbo.0]);
            texture.fbo_with_depth = None;
        }

        if had_depth {
            self.release_depth_target(texture.get_dimensions());
        }

        self.gl.delete_textures(&[texture.id]);

        for bound_texture in &mut self.bound_textures {
            if *bound_texture == texture.id {
                *bound_texture = 0;
            }
        }

        self.textures_deleted += 1;

        // Zero out the id to mark the texture as deleted.
        texture.id = 0;
    }

    #[cfg(feature = "replay")]
    pub fn delete_external_texture(&mut self, mut external: ExternalTexture) {
        self.gl.delete_textures(&[external.id]);
        external.id = 0;
    }

    pub fn delete_program(&mut self, mut program: Program) {
        self.gl.delete_program(program.id);
        program.id = 0;
    }

    /// Create a shader program and link it immediately.
    pub fn create_program_linked(
        &mut self,
        base_filename: &'static str,
        features: &[&'static str],
        descriptor: &VertexDescriptor,
    ) -> Result<Program, ShaderError> {
        let mut program = self.create_program(base_filename, features)?;
        self.link_program(&mut program, descriptor)?;
        Ok(program)
    }

    /// Create a shader program. The program is not usable until
    /// `link_program` has been called on it; this split allows a cached
    /// binary to be loaded into it first.
    pub fn create_program(
        &mut self,
        base_filename: &'static str,
        features: &[&'static str],
    ) -> Result<Program, ShaderError> {
        debug_assert!(self.inside_frame);

        let source_info = ProgramSourceInfo::new(self, base_filename, features);

        // Create the program object.
        let pid = self.gl.create_program();

        // Attempt to load a cached binary if possible.
        if let Some(ref cached_programs) = self.cached_programs {
            if let Some(entry) = cached_programs.entries.borrow().get(&source_info.digest) {
                self.gl.program_binary(pid, entry.binary.format, &entry.binary.bytes);
            }
        }

        // Use 0 for the uniform locations until the program is linked.
        let program = Program {
            id: pid,
            u_transform: 0,
            u_texture_size: 0,
            source_info,
            is_initialized: false,
        };

        Ok(program)
    }

    fn build_shader_string<F: FnMut(&str)>(
        &self,
        features: &[&'static str],
        kind: ShaderKind,
        base_filename: &str,
        output: F,
    ) {
        do_build_shader_string(
            get_shader_version(&*self.gl),
            features,
            kind,
            base_filename,
            &|f| get_unoptimized_shader_source(f, self.resource_override_path.as_ref()),
            output,
        )
    }

    /// Assign texture slots to the program's sampler uniforms by name. The
    /// program must already be bound.
    pub fn bind_shader_samplers<S>(&mut self, program: &Program, bindings: &[(&'static str, S)])
    where
        S: Into<TextureSlot> + Copy,
    {
        assert_eq!(self.bound_program, program.id);

        for binding in bindings {
            let u_location = self.gl.get_uniform_location(program.id, binding.0);
            if u_location != -1 {
                self.bind_program(program);
                self.gl
                    .uniform_1i(u_location, binding.1.into().0 as gl::GLint);
            }
        }
    }

    pub fn get_uniform_location(&self, program: &Program, name: &str) -> UniformLocation {
        UniformLocation(self.gl.get_uniform_location(program.id, name))
    }

    pub fn set_uniforms(
        &self,
        program: &Program,
        transform: &Transform3D<f32>,
    ) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        self.gl
            .uniform_matrix_4fv(program.u_transform, false, &transform.to_array());
    }

    /// Sets the uTextureSize uniform, if the program uses it (a location of
    /// -1 means the uniform was absent or optimized out).
    pub fn set_shader_texture_size(
        &self,
        program: &Program,
        texture_size: DeviceSize,
    ) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        if program.u_texture_size != -1 {
            self.gl.uniform_2f(program.u_texture_size, texture_size.width, texture_size.height);
        }
    }

    pub fn create_pbo(&mut self) -> PBO {
        let id = self.gl.gen_buffers(1)[0];
        PBO {
            id,
            reserved_size: 0,
        }
    }

    pub fn create_pbo_with_size(&mut self, size: usize) -> PBO {
        let mut pbo = self.create_pbo();

        self.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, pbo.id);
        self.gl.pixel_store_i(gl::PACK_ALIGNMENT, 1);
        self.gl.buffer_data_untyped(
            gl::PIXEL_PACK_BUFFER,
            size as _,
            ptr::null(),
            gl::STREAM_READ,
        );
        // Unbind the PACK buffer bound above, so later read operations don't
        // accidentally target it.
        self.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, 0);

        pbo.reserved_size = size;
        pbo
    }

    pub fn read_pixels_into_pbo(
        &mut self,
        read_target: ReadTarget,
        rect: DeviceIntRect,
        format: ImageFormat,
        pbo: &PBO,
    ) {
        let byte_size = rect.area() as usize * format.bytes_per_pixel() as usize;

        assert!(byte_size <= pbo.reserved_size);

        self.bind_read_target(read_target);

        self.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, pbo.id);
        self.gl.pixel_store_i(gl::PACK_ALIGNMENT, 1);

        let gl_format = self.gl_describe_format(format);

        unsafe {
            self.gl.read_pixels_into_pbo(
                rect.min.x as _,
                rect.min.y as _,
                rect.width() as _,
                rect.height() as _,
                gl_format.read,
                gl_format.pixel_type,
            );
        }

        self.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, 0);
    }

    /// Maps a PBO for reading on the CPU. Returns None if the buffer could
    /// not be mapped.
    pub fn map_pbo_for_readback<'a>(&'a mut self, pbo: &'a PBO) -> Option<BoundPBO<'a>> {
        self.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, pbo.id);

        let buf_ptr = match self.gl.get_type() {
            gl::GlType::Gl => {
                self.gl.map_buffer(gl::PIXEL_PACK_BUFFER, gl::READ_ONLY)
            }

            gl::GlType::Gles => {
                self.gl.map_buffer_range(
                    gl::PIXEL_PACK_BUFFER,
                    0,
                    pbo.reserved_size as _,
                    gl::MAP_READ_BIT)
            }
        };

        if buf_ptr.is_null() {
            return None;
        }

        let buffer = unsafe { slice::from_raw_parts(buf_ptr as *const u8, pbo.reserved_size) };

        Some(BoundPBO {
            device: self,
            data: buffer,
        })
    }
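
    // Sketch of an asynchronous readback flow (illustrative, not from the
    // original source; `device`, `rect`, `format`, `byte_size` and `process`
    // are hypothetical, and a `ReadTarget::Default` variant is assumed):
    //
    //     let pbo = device.create_pbo_with_size(byte_size);
    //     device.read_pixels_into_pbo(ReadTarget::Default, rect, format, &pbo);
    //     // ... later, once the GPU has likely finished ...
    //     if let Some(bound) = device.map_pbo_for_readback(&pbo) {
    //         process(bound.data);
    //     } // the BoundPBO is expected to unmap its buffer when dropped
    //     device.delete_pbo(pbo);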

    pub fn delete_pbo(&mut self, mut pbo: PBO) {
        self.gl.delete_buffers(&[pbo.id]);
        pbo.id = 0;
        pbo.reserved_size = 0
    }

    /// Returns the size and stride, in bytes, required to upload an image of
    /// the given size and format, honouring the device's required PBO stride.
    pub fn required_upload_size_and_stride(&self, size: DeviceIntSize, format: ImageFormat) -> (usize, usize) {
        assert!(size.width >= 0);
        assert!(size.height >= 0);

        let bytes_pp = format.bytes_per_pixel() as usize;
        let width_bytes = size.width as usize * bytes_pp;

        let dst_stride = round_up_to_multiple(width_bytes, self.required_pbo_stride.num_bytes(format));

        // Reserve a full stride for every row, including the last one.
        // Strictly only `(height - 1) * stride + width_bytes` is required,
        // but the rounded size keeps the arithmetic simple and sidesteps
        // drivers that may validate the full rectangle.
        let dst_size = dst_stride * size.height as usize;

        (dst_size, dst_stride)
    }
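
    // Worked example (illustrative): a 250px-wide RGBA8 image needs
    // 250 * 4 = 1000 bytes per row. If the required PBO stride were 256 bytes
    // (a hypothetical value), each row would be rounded up to
    // dst_stride = 1024 bytes, and a 40px-tall image would reserve
    // 1024 * 40 = 40960 bytes.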

    /// Create a TextureUploader for batching uploads through the PBO pool.
    /// The returned uploader must be flushed before it is dropped.
    pub fn upload_texture<'a>(
        &mut self,
        pbo_pool: &'a mut UploadPBOPool,
    ) -> TextureUploader<'a> {
        debug_assert!(self.inside_frame);

        pbo_pool.begin_frame(self);

        TextureUploader {
            buffers: Vec::new(),
            pbo_pool,
        }
    }

    /// Uploads an entire texture's worth of pixels synchronously, bypassing
    /// the PBO pool.
    pub fn upload_texture_immediate<T: Texel>(
        &mut self,
        texture: &Texture,
        pixels: &[T]
    ) {
        self.bind_texture(DEFAULT_TEXTURE, texture, Swizzle::default());
        let desc = self.gl_describe_format(texture.format);
        self.gl.tex_sub_image_2d(
            texture.target,
            0,
            0,
            0,
            texture.size.width as gl::GLint,
            texture.size.height as gl::GLint,
            desc.external,
            desc.pixel_type,
            texels_to_u8_slice(pixels),
        );
    }

    pub fn read_pixels(&mut self, img_desc: &ImageDescriptor) -> Vec<u8> {
        let desc = self.gl_describe_format(img_desc.format);
        self.gl.read_pixels(
            0, 0,
            img_desc.size.width as i32,
            img_desc.size.height as i32,
            desc.read,
            desc.pixel_type,
        )
    }

    /// Read a rectangle of pixels into the specified output slice.
    pub fn read_pixels_into(
        &mut self,
        rect: FramebufferIntRect,
        format: ImageFormat,
        output: &mut [u8],
    ) {
        let bytes_per_pixel = format.bytes_per_pixel();
        let desc = self.gl_describe_format(format);
        let size_in_bytes = (bytes_per_pixel * rect.area()) as usize;
        assert_eq!(output.len(), size_in_bytes);

        self.gl.flush();
        self.gl.read_pixels_into_buffer(
            rect.min.x as _,
            rect.min.y as _,
            rect.width() as _,
            rect.height() as _,
            desc.read,
            desc.pixel_type,
            output,
        );
    }

    /// Get the texels of a texture into the specified output slice.
    pub fn get_tex_image_into(
        &mut self,
        texture: &Texture,
        format: ImageFormat,
        output: &mut [u8],
    ) {
        self.bind_texture(DEFAULT_TEXTURE, texture, Swizzle::default());
        let desc = self.gl_describe_format(format);
        self.gl.get_tex_image_into_buffer(
            texture.target,
            0,
            desc.external,
            desc.pixel_type,
            output,
        );
    }

    /// Attaches the provided texture to the current read framebuffer binding.
    fn attach_read_texture_raw(&mut self, texture_id: gl::GLuint, target: gl::GLuint) {
        self.gl.framebuffer_texture_2d(
            gl::READ_FRAMEBUFFER,
            gl::COLOR_ATTACHMENT0,
            target,
            texture_id,
            0,
        )
    }

    pub fn attach_read_texture_external(
        &mut self, texture_id: gl::GLuint, target: ImageBufferKind
    ) {
        self.attach_read_texture_raw(texture_id, get_gl_target(target))
    }

    pub fn attach_read_texture(&mut self, texture: &Texture) {
        self.attach_read_texture_raw(texture.id, texture.target)
    }

    fn bind_vao_impl(&mut self, id: gl::GLuint) {
        debug_assert!(self.inside_frame);

        if self.bound_vao != id {
            self.bound_vao = id;
            self.gl.bind_vertex_array(id);
        }
    }

    pub fn bind_vao(&mut self, vao: &VAO) {
        self.bind_vao_impl(vao.id)
    }

    pub fn bind_custom_vao(&mut self, vao: &CustomVAO) {
        self.bind_vao_impl(vao.id)
    }

    fn create_vao_with_vbos(
        &mut self,
        descriptor: &VertexDescriptor,
        main_vbo_id: VBOId,
        instance_vbo_id: VBOId,
        instance_divisor: u32,
        ibo_id: IBOId,
        owns_vertices_and_indices: bool,
    ) -> VAO {
        let instance_stride = descriptor.instance_stride() as usize;
        let vao_id = self.gl.gen_vertex_arrays(1)[0];

        self.bind_vao_impl(vao_id);

        descriptor.bind(self.gl(), main_vbo_id, instance_vbo_id, instance_divisor);
        // Bind the IBO while the VAO is bound, so it becomes part of the VAO state.
        ibo_id.bind(self.gl());

        VAO {
            id: vao_id,
            ibo_id,
            main_vbo_id,
            instance_vbo_id,
            instance_stride,
            instance_divisor,
            owns_vertices_and_indices,
        }
    }

    pub fn create_custom_vao(
        &mut self,
        streams: &[Stream],
    ) -> CustomVAO {
        debug_assert!(self.inside_frame);

        let vao_id = self.gl.gen_vertex_arrays(1)[0];
        self.bind_vao_impl(vao_id);

        let mut attrib_index = 0;
        for stream in streams {
            VertexDescriptor::bind_attributes(
                stream.attributes,
                attrib_index,
                0,
                self.gl(),
                stream.vbo,
            );
            attrib_index += stream.attributes.len();
        }

        CustomVAO {
            id: vao_id,
        }
    }

    pub fn delete_custom_vao(&mut self, mut vao: CustomVAO) {
        self.gl.delete_vertex_arrays(&[vao.id]);
        vao.id = 0;
    }

    pub fn create_vbo<T>(&mut self) -> VBO<T> {
        let ids = self.gl.gen_buffers(1);
        VBO {
            id: ids[0],
            target: gl::ARRAY_BUFFER,
            allocated_count: 0,
            marker: PhantomData,
        }
    }

    pub fn delete_vbo<T>(&mut self, mut vbo: VBO<T>) {
        self.gl.delete_buffers(&[vbo.id]);
        vbo.id = 0;
    }

    pub fn create_vao(&mut self, descriptor: &VertexDescriptor, instance_divisor: u32) -> VAO {
        debug_assert!(self.inside_frame);

        let buffer_ids = self.gl.gen_buffers(3);
        let ibo_id = IBOId(buffer_ids[0]);
        let main_vbo_id = VBOId(buffer_ids[1]);
        let instance_vbo_id = VBOId(buffer_ids[2]);

        self.create_vao_with_vbos(descriptor, main_vbo_id, instance_vbo_id, instance_divisor, ibo_id, true)
    }

    pub fn delete_vao(&mut self, mut vao: VAO) {
        self.gl.delete_vertex_arrays(&[vao.id]);
        vao.id = 0;

        if vao.owns_vertices_and_indices {
            self.gl.delete_buffers(&[vao.ibo_id.0]);
            self.gl.delete_buffers(&[vao.main_vbo_id.0]);
        }

        self.gl.delete_buffers(&[vao.instance_vbo_id.0])
    }

    pub fn allocate_vbo<V>(
        &mut self,
        vbo: &mut VBO<V>,
        count: usize,
        usage_hint: VertexUsageHint,
    ) {
        debug_assert!(self.inside_frame);
        vbo.allocated_count = count;

        self.gl.bind_buffer(vbo.target, vbo.id);
        self.gl.buffer_data_untyped(
            vbo.target,
            (count * mem::size_of::<V>()) as _,
            ptr::null(),
            usage_hint.to_gl(),
        );
    }

    pub fn fill_vbo<V>(
        &mut self,
        vbo: &VBO<V>,
        data: &[V],
        offset: usize,
    ) {
        debug_assert!(self.inside_frame);
        assert!(offset + data.len() <= vbo.allocated_count);
        let stride = mem::size_of::<V>();

        self.gl.bind_buffer(vbo.target, vbo.id);
        self.gl.buffer_sub_data_untyped(
            vbo.target,
            (offset * stride) as _,
            (data.len() * stride) as _,
            data.as_ptr() as _,
        );
    }

    fn update_vbo_data<V>(
        &mut self,
        vbo: VBOId,
        vertices: &[V],
        usage_hint: VertexUsageHint,
    ) {
        debug_assert!(self.inside_frame);

        vbo.bind(self.gl());
        gl::buffer_data(self.gl(), gl::ARRAY_BUFFER, vertices, usage_hint.to_gl());
    }

    pub fn create_vao_with_new_instances(
        &mut self,
        descriptor: &VertexDescriptor,
        base_vao: &VAO,
    ) -> VAO {
        debug_assert!(self.inside_frame);

        let buffer_ids = self.gl.gen_buffers(1);
        let instance_vbo_id = VBOId(buffer_ids[0]);

        self.create_vao_with_vbos(
            descriptor,
            base_vao.main_vbo_id,
            instance_vbo_id,
            base_vao.instance_divisor,
            base_vao.ibo_id,
            false,
        )
    }

    pub fn update_vao_main_vertices<V>(
        &mut self,
        vao: &VAO,
        vertices: &[V],
        usage_hint: VertexUsageHint,
    ) {
        debug_assert_eq!(self.bound_vao, vao.id);
        self.update_vbo_data(vao.main_vbo_id, vertices, usage_hint)
    }

    pub fn update_vao_instances<V: Clone>(
        &mut self,
        vao: &VAO,
        instances: &[V],
        usage_hint: VertexUsageHint,
        // if `Some(count)`, each instance is repeated `count` times
        repeat: Option<NonZeroUsize>,
    ) {
        debug_assert_eq!(self.bound_vao, vao.id);
        debug_assert_eq!(vao.instance_stride as usize, mem::size_of::<V>());

        match repeat {
            Some(count) => {
                // Note: the expansion loop below writes each instance four
                // times, so this path currently assumes `count == 4`.
                let target = gl::ARRAY_BUFFER;
                self.gl.bind_buffer(target, vao.instance_vbo_id.0);
                let size = instances.len() * count.get() * mem::size_of::<V>();
                self.gl.buffer_data_untyped(
                    target,
                    size as _,
                    ptr::null(),
                    usage_hint.to_gl(),
                );

                let ptr = match self.gl.get_type() {
                    gl::GlType::Gl => {
                        self.gl.map_buffer(target, gl::WRITE_ONLY)
                    }
                    gl::GlType::Gles => {
                        self.gl.map_buffer_range(target, 0, size as _, gl::MAP_WRITE_BIT)
                    }
                };
                assert!(!ptr.is_null());

                let buffer_slice = unsafe {
                    slice::from_raw_parts_mut(ptr as *mut V, instances.len() * count.get())
                };
                for (quad, instance) in buffer_slice.chunks_mut(4).zip(instances) {
                    quad[0] = instance.clone();
                    quad[1] = instance.clone();
                    quad[2] = instance.clone();
                    quad[3] = instance.clone();
                }
                self.gl.unmap_buffer(target);
            }
            None => {
                self.update_vbo_data(vao.instance_vbo_id, instances, usage_hint);
            }
        }

        // On some devices the VAO must be unbound and rebound after the
        // attached buffer's data is orphaned, or the new contents are not
        // picked up.
        if self.capabilities.requires_vao_rebind_after_orphaning {
            self.bind_vao_impl(0);
            self.bind_vao_impl(vao.id);
        }
    }
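
    // Illustrative note: with `repeat = Some(4)` the mapped buffer above is
    // filled as
    //
    //     [i0, i0, i0, i0, i1, i1, i1, i1, ...]
    //
    // i.e. each instance is duplicated once per quad corner, so per-instance
    // data can be fetched as regular vertex attributes without instancing.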

    pub fn update_vao_indices<I>(&mut self, vao: &VAO, indices: &[I], usage_hint: VertexUsageHint) {
        debug_assert!(self.inside_frame);
        debug_assert_eq!(self.bound_vao, vao.id);

        vao.ibo_id.bind(self.gl());
        gl::buffer_data(
            self.gl(),
            gl::ELEMENT_ARRAY_BUFFER,
            indices,
            usage_hint.to_gl(),
        );
    }

    pub fn draw_triangles_u16(&mut self, first_vertex: i32, index_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        let _guard = if self.annotate_draw_call_crashes {
            Some(CrashAnnotatorGuard::new(
                &self.crash_annotator,
                CrashAnnotation::DrawShader,
                &self.bound_program_name,
            ))
        } else {
            None
        };

        self.gl.draw_elements(
            gl::TRIANGLES,
            index_count,
            gl::UNSIGNED_SHORT,
            first_vertex as u32 * 2, // byte offset: 2 bytes per u16 index
        );
    }

    pub fn draw_triangles_u32(&mut self, first_vertex: i32, index_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        let _guard = if self.annotate_draw_call_crashes {
            Some(CrashAnnotatorGuard::new(
                &self.crash_annotator,
                CrashAnnotation::DrawShader,
                &self.bound_program_name,
            ))
        } else {
            None
        };

        self.gl.draw_elements(
            gl::TRIANGLES,
            index_count,
            gl::UNSIGNED_INT,
            first_vertex as u32 * 4, // byte offset: 4 bytes per u32 index
        );
    }

    pub fn draw_nonindexed_points(&mut self, first_vertex: i32, vertex_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        let _guard = if self.annotate_draw_call_crashes {
            Some(CrashAnnotatorGuard::new(
                &self.crash_annotator,
                CrashAnnotation::DrawShader,
                &self.bound_program_name,
            ))
        } else {
            None
        };

        self.gl.draw_arrays(gl::POINTS, first_vertex, vertex_count);
    }

    pub fn draw_nonindexed_lines(&mut self, first_vertex: i32, vertex_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        let _guard = if self.annotate_draw_call_crashes {
            Some(CrashAnnotatorGuard::new(
                &self.crash_annotator,
                CrashAnnotation::DrawShader,
                &self.bound_program_name,
            ))
        } else {
            None
        };

        self.gl.draw_arrays(gl::LINES, first_vertex, vertex_count);
    }

    pub fn draw_indexed_triangles(&mut self, index_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        let _guard = if self.annotate_draw_call_crashes {
            Some(CrashAnnotatorGuard::new(
                &self.crash_annotator,
                CrashAnnotation::DrawShader,
                &self.bound_program_name,
            ))
        } else {
            None
        };

        self.gl.draw_elements(
            gl::TRIANGLES,
            index_count,
            gl::UNSIGNED_SHORT,
            0,
        );
    }

    pub fn draw_indexed_triangles_instanced_u16(&mut self, index_count: i32, instance_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        let _guard = if self.annotate_draw_call_crashes {
            Some(CrashAnnotatorGuard::new(
                &self.crash_annotator,
                CrashAnnotation::DrawShader,
                &self.bound_program_name,
            ))
        } else {
            None
        };

        self.gl.draw_elements_instanced(
            gl::TRIANGLES,
            index_count,
            gl::UNSIGNED_SHORT,
            0,
            instance_count,
        );
    }

    pub fn end_frame(&mut self) {
        self.reset_draw_target();
        self.reset_read_target();

        debug_assert!(self.inside_frame);
        self.inside_frame = false;

        self.gl.bind_texture(gl::TEXTURE_2D, 0);
        self.gl.use_program(0);

        for i in 0 .. self.bound_textures.len() {
            self.gl.active_texture(gl::TEXTURE0 + i as gl::GLuint);
            self.gl.bind_texture(gl::TEXTURE_2D, 0);
        }

        self.gl.active_texture(gl::TEXTURE0);

        self.frame_id.0 += 1;

        // Update the shader disk cache. The tenth frame is used as a
        // heuristic point by which startup shader compilation should have
        // settled down.
        if let Some(ref cache) = self.cached_programs {
            cache.update_disk_cache(self.frame_id.0 == 10);
        }
    }

    pub fn clear_target(
        &self,
        color: Option<[f32; 4]>,
        depth: Option<f32>,
        rect: Option<FramebufferIntRect>,
    ) {
        let mut clear_bits = 0;

        if let Some(color) = color {
            self.gl.clear_color(color[0], color[1], color[2], color[3]);
            clear_bits |= gl::COLOR_BUFFER_BIT;
        }

        if let Some(depth) = depth {
            if cfg!(debug_assertions) {
                let mut mask = [0];
                unsafe {
                    self.gl.get_boolean_v(gl::DEPTH_WRITEMASK, &mut mask);
                }
                assert_ne!(mask[0], 0);
            }
            self.gl.clear_depth(depth as f64);
            clear_bits |= gl::DEPTH_BUFFER_BIT;
        }

        if clear_bits != 0 {
            match rect {
                Some(rect) => {
                    self.gl.enable(gl::SCISSOR_TEST);
                    self.gl.scissor(
                        rect.min.x,
                        rect.min.y,
                        rect.width(),
                        rect.height(),
                    );
                    self.gl.clear(clear_bits);
                    self.gl.disable(gl::SCISSOR_TEST);
                }
                None => {
                    self.gl.clear(clear_bits);
                }
            }
        }
    }
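
    // Sketch (illustrative): clearing only a sub-rect scissors the clear and
    // leaves the rest of the framebuffer untouched.
    //
    //     device.clear_target(
    //         Some([0.0, 0.0, 0.0, 0.0]), // transparent black
    //         Some(1.0),                  // far depth
    //         Some(FramebufferIntRect::from_size(FramebufferIntSize::new(64, 64))),
    //     );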

    pub fn enable_depth(&self, depth_func: DepthFunction) {
        assert!(self.depth_available, "Enabling depth test without depth target");
        self.gl.enable(gl::DEPTH_TEST);
        self.gl.depth_func(depth_func as gl::GLuint);
    }

    pub fn disable_depth(&self) {
        self.gl.disable(gl::DEPTH_TEST);
    }

    pub fn enable_depth_write(&self) {
        assert!(self.depth_available, "Enabling depth write without depth target");
        self.gl.depth_mask(true);
    }

    pub fn disable_depth_write(&self) {
        self.gl.depth_mask(false);
    }

    pub fn disable_stencil(&self) {
        self.gl.disable(gl::STENCIL_TEST);
    }

    pub fn set_scissor_rect(&self, rect: FramebufferIntRect) {
        self.gl.scissor(
            rect.min.x,
            rect.min.y,
            rect.width(),
            rect.height(),
        );
    }

    pub fn enable_scissor(&self) {
        self.gl.enable(gl::SCISSOR_TEST);
    }

    pub fn disable_scissor(&self) {
        self.gl.disable(gl::SCISSOR_TEST);
    }

    pub fn enable_color_write(&self) {
        self.gl.color_mask(true, true, true, true);
    }

    pub fn disable_color_write(&self) {
        self.gl.color_mask(false, false, false, false);
    }

    pub fn set_blend(&mut self, enable: bool) {
        if enable {
            self.gl.enable(gl::BLEND);
        } else {
            self.gl.disable(gl::BLEND);
        }
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }
    }

    fn set_blend_factors(
        &mut self,
        color: (gl::GLenum, gl::GLenum),
        alpha: (gl::GLenum, gl::GLenum),
    ) {
        self.gl.blend_equation(gl::FUNC_ADD);
        if color == alpha {
            self.gl.blend_func(color.0, color.1);
        } else {
            self.gl.blend_func_separate(color.0, color.1, alpha.0, alpha.1);
        }
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }
    }

    pub fn set_blend_mode_alpha(&mut self) {
        self.set_blend_factors(
            (gl::SRC_ALPHA, gl::ONE_MINUS_SRC_ALPHA),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }

    pub fn set_blend_mode_premultiplied_alpha(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }

    pub fn set_blend_mode_premultiplied_dest_out(&mut self) {
        self.set_blend_factors(
            (gl::ZERO, gl::ONE_MINUS_SRC_ALPHA),
            (gl::ZERO, gl::ONE_MINUS_SRC_ALPHA),
        );
    }

    pub fn set_blend_mode_multiply(&mut self) {
        self.set_blend_factors(
            (gl::ZERO, gl::SRC_COLOR),
            (gl::ZERO, gl::SRC_ALPHA),
        );
    }
    pub fn set_blend_mode_subpixel_pass0(&mut self) {
        self.set_blend_factors(
            (gl::ZERO, gl::ONE_MINUS_SRC_COLOR),
            (gl::ZERO, gl::ONE_MINUS_SRC_ALPHA),
        );
    }
    pub fn set_blend_mode_subpixel_pass1(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE),
            (gl::ONE, gl::ONE),
        );
    }
    pub fn set_blend_mode_subpixel_dual_source(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE_MINUS_SRC1_COLOR),
            (gl::ONE, gl::ONE_MINUS_SRC1_ALPHA),
        );
    }
    pub fn set_blend_mode_multiply_dual_source(&mut self) {
        self.set_blend_factors(
            (gl::ONE_MINUS_DST_ALPHA, gl::ONE_MINUS_SRC1_COLOR),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }
    pub fn set_blend_mode_screen(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE_MINUS_SRC_COLOR),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }
    pub fn set_blend_mode_plus_lighter(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE),
            (gl::ONE, gl::ONE),
        );
    }
    pub fn set_blend_mode_exclusion(&mut self) {
        self.set_blend_factors(
            (gl::ONE_MINUS_DST_COLOR, gl::ONE_MINUS_SRC_COLOR),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }
    pub fn set_blend_mode_show_overdraw(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }
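
    // Illustrative arithmetic: with the premultiplied-alpha factors
    // (ONE, ONE_MINUS_SRC_ALPHA) used above, the blend equation is
    //
    //     result = src + dst * (1 - src.a)
    //
    // e.g. src = (0.5, 0, 0, 0.5) over dst = (0, 0, 1, 1) yields
    // (0.5, 0, 0.5, 1): half-coverage premultiplied red over opaque blue.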

    pub fn set_blend_mode_max(&mut self) {
        self.gl
            .blend_func_separate(gl::ONE, gl::ONE, gl::ONE, gl::ONE);
        self.gl.blend_equation_separate(gl::MAX, gl::FUNC_ADD);
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }
    }
    pub fn set_blend_mode_min(&mut self) {
        self.gl
            .blend_func_separate(gl::ONE, gl::ONE, gl::ONE, gl::ONE);
        self.gl.blend_equation_separate(gl::MIN, gl::FUNC_ADD);
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }
    }
    pub fn set_blend_mode_advanced(&mut self, mode: MixBlendMode) {
        self.gl.blend_equation(match mode {
            MixBlendMode::Normal => {
                // Blend factors only apply to the Normal mode; the KHR
                // advanced blend equations below ignore them.
                self.gl.blend_func_separate(gl::ZERO, gl::SRC_COLOR, gl::ZERO, gl::SRC_ALPHA);
                gl::FUNC_ADD
            },
            MixBlendMode::PlusLighter => {
                return self.set_blend_mode_plus_lighter();
            },
            MixBlendMode::Multiply => gl::MULTIPLY_KHR,
            MixBlendMode::Screen => gl::SCREEN_KHR,
            MixBlendMode::Overlay => gl::OVERLAY_KHR,
            MixBlendMode::Darken => gl::DARKEN_KHR,
            MixBlendMode::Lighten => gl::LIGHTEN_KHR,
            MixBlendMode::ColorDodge => gl::COLORDODGE_KHR,
            MixBlendMode::ColorBurn => gl::COLORBURN_KHR,
            MixBlendMode::HardLight => gl::HARDLIGHT_KHR,
            MixBlendMode::SoftLight => gl::SOFTLIGHT_KHR,
            MixBlendMode::Difference => gl::DIFFERENCE_KHR,
            MixBlendMode::Exclusion => gl::EXCLUSION_KHR,
            MixBlendMode::Hue => gl::HSL_HUE_KHR,
            MixBlendMode::Saturation => gl::HSL_SATURATION_KHR,
            MixBlendMode::Color => gl::HSL_COLOR_KHR,
            MixBlendMode::Luminosity => gl::HSL_LUMINOSITY_KHR,
        });
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }
    }

    pub fn supports_extension(&self, extension: &str) -> bool {
        supports_extension(&self.extensions, extension)
    }

    pub fn echo_driver_messages(&self) {
        if self.capabilities.supports_khr_debug {
            Device::log_driver_messages(self.gl());
        }
    }

    fn log_driver_messages(gl: &dyn gl::Gl) {
        for msg in gl.get_debug_messages() {
            let level = match msg.severity {
                gl::DEBUG_SEVERITY_HIGH => Level::Error,
                gl::DEBUG_SEVERITY_MEDIUM => Level::Warn,
                gl::DEBUG_SEVERITY_LOW => Level::Info,
                gl::DEBUG_SEVERITY_NOTIFICATION => Level::Debug,
                _ => Level::Trace,
            };
            let ty = match msg.ty {
                gl::DEBUG_TYPE_ERROR => "error",
                gl::DEBUG_TYPE_DEPRECATED_BEHAVIOR => "deprecated",
                gl::DEBUG_TYPE_UNDEFINED_BEHAVIOR => "undefined",
                gl::DEBUG_TYPE_PORTABILITY => "portability",
                gl::DEBUG_TYPE_PERFORMANCE => "perf",
                gl::DEBUG_TYPE_MARKER => "marker",
                gl::DEBUG_TYPE_PUSH_GROUP => "group push",
                gl::DEBUG_TYPE_POP_GROUP => "group pop",
                gl::DEBUG_TYPE_OTHER => "other",
                _ => "?",
            };
            log!(level, "({}) {}", ty, msg.message);
        }
    }

    pub fn gl_describe_format(&self, format: ImageFormat) -> FormatDesc {
        match format {
            ImageFormat::R8 => FormatDesc {
                internal: gl::R8,
                external: gl::RED,
                read: gl::RED,
                pixel_type: gl::UNSIGNED_BYTE,
            },
            ImageFormat::R16 => FormatDesc {
                internal: gl::R16,
                external: gl::RED,
                read: gl::RED,
                pixel_type: gl::UNSIGNED_SHORT,
            },
            ImageFormat::BGRA8 => {
                FormatDesc {
                    internal: self.bgra_formats.internal,
                    external: self.bgra_formats.external,
                    read: gl::BGRA,
                    pixel_type: self.bgra_pixel_type,
                }
            },
            ImageFormat::RGBA8 => {
                FormatDesc {
                    internal: gl::RGBA8,
                    external: gl::RGBA,
                    read: gl::RGBA,
                    pixel_type: gl::UNSIGNED_BYTE,
                }
            },
            ImageFormat::RGBAF32 => FormatDesc {
                internal: gl::RGBA32F,
                external: gl::RGBA,
                read: gl::RGBA,
                pixel_type: gl::FLOAT,
            },
            ImageFormat::RGBAI32 => FormatDesc {
                internal: gl::RGBA32I,
                external: gl::RGBA_INTEGER,
                read: gl::RGBA_INTEGER,
                pixel_type: gl::INT,
            },
            ImageFormat::RG8 => FormatDesc {
                internal: gl::RG8,
                external: gl::RG,
                read: gl::RG,
                pixel_type: gl::UNSIGNED_BYTE,
            },
            ImageFormat::RG16 => FormatDesc {
                internal: gl::RG16,
                external: gl::RG,
                read: gl::RG,
                pixel_type: gl::UNSIGNED_SHORT,
            },
        }
    }

    /// Generates a memory report for the resources managed by the device
    /// layer.
    pub fn report_memory(&self, size_op_funs: &MallocSizeOfOps, swgl: *mut c_void) -> MemoryReport {
        let mut report = MemoryReport::default();
        report.depth_target_textures += self.depth_targets_memory();

        #[cfg(feature = "sw_compositor")]
        if !swgl.is_null() {
            report.swgl += swgl::Context::from(swgl).report_memory(size_op_funs.size_of_op);
        }
        // Use the parameters unconditionally so builds without the
        // sw_compositor feature do not warn about unused variables.
        let _ = size_op_funs;
        let _ = swgl;
        report
    }

    pub fn depth_targets_memory(&self) -> usize {
        let mut total = 0;
        for dim in self.depth_targets.keys() {
            total += depth_target_size_in_bytes(dim);
        }

        total
    }
}
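
// A minimal sketch (not part of the original source): checks the stride
// arithmetic used by `required_upload_size_and_stride` above, with a local
// copy of the round-up formula so the example stays self-contained. The
// 256-byte stride requirement is a hypothetical value.
#[cfg(test)]
mod stride_rounding_example {
    /// Round `val` up to the next multiple of `mul` (mirrors the behaviour of
    /// `crate::util::round_up_to_multiple` for this example).
    fn round_up(val: usize, mul: usize) -> usize {
        (val + mul - 1) / mul * mul
    }

    #[test]
    fn rgba8_row_is_rounded_to_required_stride() {
        let width_px = 250;
        let bytes_pp = 4; // RGBA8
        let required_stride = 256; // hypothetical device requirement
        let dst_stride = round_up(width_px * bytes_pp, required_stride);
        assert_eq!(dst_stride, 1024);
        // A 40-row image reserves a full stride per row, including the last.
        assert_eq!(dst_stride * 40, 40960);
    }
}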

pub struct FormatDesc {
    /// Format the texel data is internally stored in within a texture.
    pub internal: gl::GLenum,
    /// Format that matches the texel data provided for uploads.
    pub external: gl::GLuint,
    /// Format to use when reading the texels back with glReadPixels.
    pub read: gl::GLuint,
    /// Associated pixel type, e.g. UNSIGNED_BYTE.
    pub pixel_type: gl::GLuint,
}

#[derive(Debug)]
struct UploadChunk<'a> {
    rect: DeviceIntRect,
    stride: Option<i32>,
    offset: usize,
    format_override: Option<ImageFormat>,
    texture: &'a Texture,
}

#[derive(Debug)]
struct PixelBuffer<'a> {
    size_used: usize,
    // A small vector avoids a heap allocation in the common single-chunk case.
    chunks: SmallVec<[UploadChunk<'a>; 1]>,
    inner: UploadPBO,
    mapping: &'a mut [mem::MaybeUninit<u8>],
}

impl<'a> PixelBuffer<'a> {
    fn new(
        pbo: UploadPBO,
    ) -> Self {
        let mapping = unsafe {
            slice::from_raw_parts_mut(pbo.mapping.get_ptr().as_ptr(), pbo.pbo.reserved_size)
        };
        Self {
            size_used: 0,
            chunks: SmallVec::new(),
            inner: pbo,
            mapping,
        }
    }

    fn flush_chunks(&mut self, device: &mut Device) {
        for chunk in self.chunks.drain(..) {
            TextureUploader::update_impl(device, chunk);
        }
    }
}

impl<'a> Drop for PixelBuffer<'a> {
    fn drop(&mut self) {
        assert_eq!(self.chunks.len(), 0, "PixelBuffer must be flushed before dropping.");
    }
}

#[derive(Debug)]
enum PBOMapping {
    Unmapped,
    Transient(ptr::NonNull<mem::MaybeUninit<u8>>),
    Persistent(ptr::NonNull<mem::MaybeUninit<u8>>),
}

impl PBOMapping {
    fn get_ptr(&self) -> ptr::NonNull<mem::MaybeUninit<u8>> {
        match self {
            PBOMapping::Unmapped => unreachable!("Cannot get pointer to unmapped PBO."),
            PBOMapping::Transient(ptr) => *ptr,
            PBOMapping::Persistent(ptr) => *ptr,
        }
    }
}

/// A PBO used for uploading texture data, managed by UploadPBOPool.
#[derive(Debug)]
struct UploadPBO {
    pbo: PBO,
    mapping: PBOMapping,
    can_recycle: bool,
}

impl UploadPBO {
    fn empty() -> Self {
        Self {
            pbo: PBO {
                id: 0,
                reserved_size: 0,
            },
            mapping: PBOMapping::Unmapped,
            can_recycle: false,
        }
    }
}

/// Allocates and recycles the PBOs used for texture uploads, using GL sync
/// objects to ensure a buffer is only reused once the GPU has finished
/// reading from it.
pub struct UploadPBOPool {
    /// Usage hint to provide to the driver when allocating buffer storage.
    usage_hint: VertexUsageHint,
    /// The size, in bytes, of the recyclable PBOs we allocate.
    default_size: usize,
    /// Buffers that are ready to be reused.
    available_buffers: Vec<UploadPBO>,
    /// Buffers returned since the last frame, not yet guarded by a sync object.
    returned_buffers: Vec<UploadPBO>,
    /// Buffers waiting for their sync object to be signalled, in submission order.
    waiting_buffers: Vec<(gl::GLsync, Vec<UploadPBO>)>,
    /// PBO names whose storage has been orphaned, kept for reallocation.
    orphaned_buffers: Vec<PBO>,
}

impl UploadPBOPool {
    pub fn new(device: &mut Device, default_size: usize) -> Self {
        let usage_hint = match device.upload_method {
            UploadMethod::Immediate => VertexUsageHint::Stream,
            UploadMethod::PixelBuffer(usage_hint) => usage_hint,
        };
        Self {
            usage_hint,
            default_size,
            available_buffers: Vec::new(),
            returned_buffers: Vec::new(),
            waiting_buffers: Vec::new(),
            orphaned_buffers: Vec::new(),
        }
    }

    /// To be called at the beginning of a frame. Moves any buffers whose sync
    /// object has been signalled (meaning the GPU is done with them) back to
    /// the available list.
    pub fn begin_frame(&mut self, device: &mut Device) {
        // waiting_buffers is in submission order, so once we encounter an
        // unsignalled sync we can stop checking the rest.
        let mut first_not_signalled = self.waiting_buffers.len();
        for (i, (sync, buffers)) in self.waiting_buffers.iter_mut().enumerate() {
            match device.gl.client_wait_sync(*sync, 0, 0) {
                gl::TIMEOUT_EXPIRED => {
                    first_not_signalled = i;
                    break;
                },
                gl::ALREADY_SIGNALED | gl::CONDITION_SATISFIED => {
                    self.available_buffers.extend(buffers.drain(..));
                }
                gl::WAIT_FAILED | _ => {
                    warn!("glClientWaitSync error in UploadPBOPool::begin_frame()");
                    for buffer in buffers.drain(..) {
                        device.delete_pbo(buffer.pbo);
                    }
                }
            }
        }

        // Delete the sync objects for the batches processed above.
        for (sync, _) in self.waiting_buffers.drain(0..first_not_signalled) {
            device.gl.delete_sync(sync);
        }
    }

    /// To be called at the end of a frame. Inserts a sync object for the
    /// buffers returned this frame, so they can be recycled once the GPU has
    /// finished with them.
    pub fn end_frame(&mut self, device: &mut Device) {
        if !self.returned_buffers.is_empty() {
            let sync = device.gl.fence_sync(gl::SYNC_GPU_COMMANDS_COMPLETE, 0);
            if !sync.is_null() {
                self.waiting_buffers.push((sync, mem::replace(&mut self.returned_buffers, Vec::new())))
            } else {
                warn!("glFenceSync error in UploadPBOPool::end_frame()");

                for buffer in self.returned_buffers.drain(..) {
                    device.delete_pbo(buffer.pbo);
                }
            }
        }
    }
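
    // Illustrative timeline (not from the original source): a buffer returned
    // during frame N is fenced in end_frame(); begin_frame() of a later frame
    // polls that fence with client_wait_sync and only then moves the buffer
    // back to `available_buffers`, guaranteeing the GPU has finished reading
    // from it before the CPU maps and overwrites it again.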

    /// Obtain a PBO, either by reusing an available recyclable one or by
    /// allocating a new one. `min_size` is the smallest acceptable size.
    fn get_pbo(&mut self, device: &mut Device, min_size: usize) -> Result<UploadPBO, String> {
        // If min_size fits in a default-sized buffer, and the device can
        // upload from non-zero PBO offsets, use a recyclable default-sized
        // buffer. Otherwise allocate a bespoke buffer that will not be
        // recycled.
        let (can_recycle, size) = if min_size <= self.default_size && device.capabilities.supports_nonzero_pbo_offsets {
            (true, self.default_size)
        } else {
            (false, min_size)
        };

        // Try to reuse an already-allocated buffer first.
        if can_recycle {
            if let Some(mut buffer) = self.available_buffers.pop() {
                assert_eq!(buffer.pbo.reserved_size, size);
                assert!(buffer.can_recycle);

                device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, buffer.pbo.id);

                match buffer.mapping {
                    PBOMapping::Unmapped => {
                        // The fence wait in begin_frame() guarantees the GPU
                        // is done with this buffer, so an unsynchronized map
                        // is safe here.
                        let ptr = device.gl.map_buffer_range(
                            gl::PIXEL_UNPACK_BUFFER,
                            0,
                            buffer.pbo.reserved_size as _,
                            gl::MAP_WRITE_BIT | gl::MAP_UNSYNCHRONIZED_BIT,
                        ) as *mut _;

                        let ptr = ptr::NonNull::new(ptr).ok_or_else(
                            || format!("Failed to transiently map PBO of size {} bytes", buffer.pbo.reserved_size)
                        )?;

                        buffer.mapping = PBOMapping::Transient(ptr);
                    }
                    PBOMapping::Transient(_) => {
                        unreachable!("Transiently mapped UploadPBO must be unmapped before returning to pool.");
                    }
                    PBOMapping::Persistent(_) => {
                        // Persistently mapped buffers stay mapped for their lifetime.
                    }
                }

                return Ok(buffer);
            }
        }

        // Otherwise allocate a new buffer, reusing an orphaned buffer name if
        // one is available.
        let mut pbo = match self.orphaned_buffers.pop() {
            Some(pbo) => pbo,
            None => device.create_pbo(),
        };

        assert_eq!(pbo.reserved_size, 0);
        pbo.reserved_size = size;

        device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, pbo.id);
        let mapping = if device.capabilities.supports_buffer_storage && can_recycle {
            device.gl.buffer_storage(
                gl::PIXEL_UNPACK_BUFFER,
                pbo.reserved_size as _,
                ptr::null(),
                gl::MAP_WRITE_BIT | gl::MAP_PERSISTENT_BIT,
            );
            let ptr = device.gl.map_buffer_range(
                gl::PIXEL_UNPACK_BUFFER,
                0,
                pbo.reserved_size as _,
                // Flush explicitly, so only the ranges actually written to
                // need to be flushed.
                gl::MAP_WRITE_BIT | gl::MAP_PERSISTENT_BIT | gl::MAP_FLUSH_EXPLICIT_BIT,
            ) as *mut _;

            let ptr = ptr::NonNull::new(ptr).ok_or_else(
                || format!("Failed to persistently map PBO of size {} bytes", pbo.reserved_size)
            )?;

            PBOMapping::Persistent(ptr)
        } else {
            device.gl.buffer_data_untyped(
                gl::PIXEL_UNPACK_BUFFER,
                pbo.reserved_size as _,
                ptr::null(),
                self.usage_hint.to_gl(),
            );
            let ptr = device.gl.map_buffer_range(
                gl::PIXEL_UNPACK_BUFFER,
                0,
                pbo.reserved_size as _,
                gl::MAP_WRITE_BIT,
            ) as *mut _;

            let ptr = ptr::NonNull::new(ptr).ok_or_else(
                || format!("Failed to transiently map PBO of size {} bytes", pbo.reserved_size)
            )?;

            PBOMapping::Transient(ptr)
        };

        Ok(UploadPBO { pbo, mapping, can_recycle })
    }

    /// Returns a PBO to the pool. Transiently mapped buffers must have been
    /// unmapped by the caller first.
    fn return_pbo(&mut self, device: &mut Device, mut buffer: UploadPBO) {
        assert!(
            !matches!(buffer.mapping, PBOMapping::Transient(_)),
            "Transiently mapped UploadPBO must be unmapped before returning to pool.",
        );

        if buffer.can_recycle {
            self.returned_buffers.push(buffer);
        } else {
            // Orphan the storage of non-recyclable buffers, keeping the name
            // around for later reallocation.
            device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, buffer.pbo.id);
            device.gl.buffer_data_untyped(
                gl::PIXEL_UNPACK_BUFFER,
                0,
                ptr::null(),
                gl::STREAM_DRAW,
            );
            buffer.pbo.reserved_size = 0;
            self.orphaned_buffers.push(buffer.pbo);
        }

        device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, 0);
    }

    /// Frees all pooled buffers in response to memory pressure. (Orphaned
    /// buffer names are kept, as they hold no storage.)
    pub fn on_memory_pressure(&mut self, device: &mut Device) {
        for buffer in self.available_buffers.drain(..) {
            device.delete_pbo(buffer.pbo);
        }
        for buffer in self.returned_buffers.drain(..) {
            device.delete_pbo(buffer.pbo)
        }
        for (sync, buffers) in self.waiting_buffers.drain(..) {
            device.gl.delete_sync(sync);
            for buffer in buffers {
                device.delete_pbo(buffer.pbo)
            }
        }
    }

    /// Returns the GPU memory consumed by the upload PBOs in the pool.
    pub fn report_memory(&self) -> MemoryReport {
        let mut report = MemoryReport::default();
        for buffer in &self.available_buffers {
            report.texture_upload_pbos += buffer.pbo.reserved_size;
        }
        for buffer in &self.returned_buffers {
            report.texture_upload_pbos += buffer.pbo.reserved_size;
        }
        for (_, buffers) in &self.waiting_buffers {
            for buffer in buffers {
                report.texture_upload_pbos += buffer.pbo.reserved_size;
            }
        }
        report
    }

    pub fn deinit(&mut self, device: &mut Device) {
        for buffer in self.available_buffers.drain(..) {
            device.delete_pbo(buffer.pbo);
        }
        for buffer in self.returned_buffers.drain(..) {
            device.delete_pbo(buffer.pbo)
        }
        for (sync, buffers) in self.waiting_buffers.drain(..) {
            device.gl.delete_sync(sync);
            for buffer in buffers {
                device.delete_pbo(buffer.pbo)
            }
        }
        for pbo in self.orphaned_buffers.drain(..) {
            device.delete_pbo(pbo);
        }
    }
}

/// Used to perform a series of texture uploads. Pixel data is staged into
/// pooled PBOs, and the actual GL upload calls are deferred until the
/// uploader is flushed.
pub struct TextureUploader<'a> {
    /// Pixel buffers containing uploads that still need to be flushed.
    buffers: Vec<PixelBuffer<'a>>,
    /// Pool used to obtain PBOs to fill with texture data.
    pub pbo_pool: &'a mut UploadPBOPool,
}

impl<'a> Drop for TextureUploader<'a> {
    fn drop(&mut self) {
        assert!(
            thread::panicking() || self.buffers.is_empty(),
            "TextureUploader must be flushed before it is dropped."
        );
    }
}

/// A staging region of mapped PBO memory that texture data can be written to
/// prior to upload.
#[derive(Debug)]
pub struct UploadStagingBuffer<'a> {
    /// The PixelBuffer providing the backing memory.
    buffer: PixelBuffer<'a>,
    /// The offset of this staging region within the pixel buffer.
    offset: usize,
    /// The size of this staging region, in bytes.
    size: usize,
    /// The stride the image data must be written with.
    stride: usize,
}

impl<'a> UploadStagingBuffer<'a> {
    /// Returns the required stride of the data to be written.
    pub fn get_stride(&self) -> usize {
        self.stride
    }

    /// Returns the slice that the texture data should be written into.
    pub fn get_mapping(&mut self) -> &mut [mem::MaybeUninit<u8>] {
        &mut self.buffer.mapping[self.offset..self.offset + self.size]
    }
}

impl<'a> TextureUploader<'a> {
    /// Acquires a staging buffer sized for an image of the given format and
    /// dimensions, reusing an in-flight pixel buffer with spare capacity
    /// where possible.
    pub fn stage(
        &mut self,
        device: &mut Device,
        format: ImageFormat,
        size: DeviceIntSize,
    ) -> Result<UploadStagingBuffer<'a>, String> {
        assert!(matches!(device.upload_method, UploadMethod::PixelBuffer(_)), "Texture uploads should only be staged when using pixel buffers.");

        // Find the total size and stride required for the data.
        let (dst_size, dst_stride) = device.required_upload_size_and_stride(
            size,
            format,
        );

        // Find a pixel buffer with enough room remaining, or obtain a new one.
        let buffer_index = self.buffers.iter().position(|buffer| {
            buffer.size_used + dst_size <= buffer.inner.pbo.reserved_size
        });
        let buffer = match buffer_index {
            Some(i) => self.buffers.swap_remove(i),
            None => PixelBuffer::new(self.pbo_pool.get_pbo(device, dst_size)?),
        };

        if !device.capabilities.supports_nonzero_pbo_offsets {
            assert_eq!(buffer.size_used, 0, "PBO uploads from non-zero offset are not supported.");
        }
        assert!(buffer.size_used + dst_size <= buffer.inner.pbo.reserved_size, "PixelBuffer is too small");

        let offset = buffer.size_used;

        Ok(UploadStagingBuffer {
            buffer,
            offset,
            size: dst_size,
            stride: dst_stride,
        })
    }

    /// Records an upload of previously staged data to the given texture. The
    /// actual GL calls are deferred until the uploader is flushed.
    pub fn upload_staged(
        &mut self,
        device: &mut Device,
        texture: &'a Texture,
        rect: DeviceIntRect,
        format_override: Option<ImageFormat>,
        mut staging_buffer: UploadStagingBuffer<'a>,
    ) -> usize {
        let size = staging_buffer.size;

        staging_buffer.buffer.chunks.push(UploadChunk {
            rect,
            stride: Some(staging_buffer.stride as i32),
            offset: staging_buffer.offset,
            format_override,
            texture,
        });
        staging_buffer.buffer.size_used += staging_buffer.size;

        // If the buffer still has room, keep it around for subsequent
        // uploads; otherwise flush it now.
        if staging_buffer.buffer.size_used < staging_buffer.buffer.inner.pbo.reserved_size {
            self.buffers.push(staging_buffer.buffer);
        } else {
            Self::flush_buffer(device, self.pbo_pool, staging_buffer.buffer);
        }

        size
    }
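
    // Sketch of the staging flow (not from the original source; `device`,
    // `pool`, `texture`, `rect` and the pixel-writing step are hypothetical):
    //
    //     let mut uploader = device.upload_texture(&mut pool);
    //     let mut staging = uploader.stage(&mut device, texture.format, rect.size())?;
    //     // write the pixel rows into staging.get_mapping(), honouring
    //     // staging.get_stride() between rows
    //     uploader.upload_staged(&mut device, &texture, rect, None, staging);
    //     uploader.flush(&mut device);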
4636
4637 pub fn upload<T>(
4639 &mut self,
4640 device: &mut Device,
4641 texture: &'a Texture,
4642 mut rect: DeviceIntRect,
4643 stride: Option<i32>,
4644 format_override: Option<ImageFormat>,
4645 data: *const T,
4646 len: usize,
4647 ) -> usize {
        let cropped = rect.intersection(
            &DeviceIntRect::from_size(texture.get_dimensions())
        );
        if cfg!(debug_assertions) && cropped.map_or(true, |r| r != rect) {
            warn!("Cropping texture upload {:?} to {:?}", rect, cropped);
        }
        rect = match cropped {
            None => return 0,
            Some(r) => r,
        };

        let bytes_pp = texture.format.bytes_per_pixel() as usize;
        let width_bytes = rect.width() as usize * bytes_pp;

        let src_stride = stride.map_or(width_bytes, |stride| {
            assert!(stride >= 0);
            stride as usize
        });
        let src_size = (rect.height() as usize - 1) * src_stride + width_bytes;
        assert!(src_size <= len * mem::size_of::<T>());

        match device.upload_method {
            UploadMethod::Immediate => {
                if cfg!(debug_assertions) {
                    let mut bound_buffer = [0];
                    unsafe {
                        device.gl.get_integer_v(gl::PIXEL_UNPACK_BUFFER_BINDING, &mut bound_buffer);
                    }
                    assert_eq!(bound_buffer[0], 0, "GL_PIXEL_UNPACK_BUFFER must not be bound for immediate uploads.");
                }

                Self::update_impl(device, UploadChunk {
                    rect,
                    stride: Some(src_stride as i32),
                    offset: data as _,
                    format_override,
                    texture,
                });

                width_bytes * rect.height() as usize
            }
            UploadMethod::PixelBuffer(_) => {
                let mut staging_buffer = match self.stage(device, texture.format, rect.size()) {
                    Ok(staging_buffer) => staging_buffer,
                    Err(_) => return 0,
                };
                let dst_stride = staging_buffer.get_stride();

                unsafe {
                    let src: &[mem::MaybeUninit<u8>] = slice::from_raw_parts(data as *const _, src_size);

                    if src_stride == dst_stride {
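                        // The source stride already matches the required
                        // destination stride, so copy the data in one go.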
                        staging_buffer.get_mapping()[..src_size].copy_from_slice(src);
                    } else {
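                        // Copy the data row by row so that it ends up with
                        // the required stride in the staging buffer.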
                        for y in 0..rect.height() as usize {
                            let src_start = y * src_stride;
                            let src_end = src_start + width_bytes;
                            let dst_start = y * staging_buffer.get_stride();
                            let dst_end = dst_start + width_bytes;

                            staging_buffer.get_mapping()[dst_start..dst_end].copy_from_slice(&src[src_start..src_end]);
                        }
                    }
                }

                self.upload_staged(device, texture, rect, format_override, staging_buffer)
            }
        }
    }

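    /// Unmaps a pixel buffer, submits its pending upload chunks, and returns
    /// its PBO to the pool.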
    fn flush_buffer(device: &mut Device, pbo_pool: &mut UploadPBOPool, mut buffer: PixelBuffer) {
        device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, buffer.inner.pbo.id);
        match buffer.inner.mapping {
            PBOMapping::Unmapped => unreachable!("UploadPBO should be mapped at this stage."),
            PBOMapping::Transient(_) => {
                device.gl.unmap_buffer(gl::PIXEL_UNPACK_BUFFER);
                buffer.inner.mapping = PBOMapping::Unmapped;
            }
            PBOMapping::Persistent(_) => {
                device.gl.flush_mapped_buffer_range(gl::PIXEL_UNPACK_BUFFER, 0, buffer.size_used as _);
            }
        }
        buffer.flush_chunks(device);
        let pbo = mem::replace(&mut buffer.inner, UploadPBO::empty());
        pbo_pool.return_pbo(device, pbo);
    }

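    /// Flushes all pending texture uploads. Must be called after all required
    /// `upload()` or `upload_staged()` calls have been made.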
    pub fn flush(mut self, device: &mut Device) {
        for buffer in self.buffers.drain(..) {
            Self::flush_buffer(device, self.pbo_pool, buffer);
        }

        device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, 0);
    }

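    /// Issues the GL upload call for a single chunk, sourcing the data either
    /// from client memory (immediate uploads) or from the currently bound PBO.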
    fn update_impl(device: &mut Device, chunk: UploadChunk) {
        device.bind_texture(DEFAULT_TEXTURE, chunk.texture, Swizzle::default());

        let format = chunk.format_override.unwrap_or(chunk.texture.format);
        let (gl_format, bpp, data_type) = match format {
            ImageFormat::R8 => (gl::RED, 1, gl::UNSIGNED_BYTE),
            ImageFormat::R16 => (gl::RED, 2, gl::UNSIGNED_SHORT),
            ImageFormat::BGRA8 => (device.bgra_formats.external, 4, device.bgra_pixel_type),
            ImageFormat::RGBA8 => (gl::RGBA, 4, gl::UNSIGNED_BYTE),
            ImageFormat::RG8 => (gl::RG, 2, gl::UNSIGNED_BYTE),
            ImageFormat::RG16 => (gl::RG, 4, gl::UNSIGNED_SHORT),
            ImageFormat::RGBAF32 => (gl::RGBA, 16, gl::FLOAT),
            ImageFormat::RGBAI32 => (gl::RGBA_INTEGER, 16, gl::INT),
        };

        let row_length = match chunk.stride {
            Some(value) => value / bpp,
            None => chunk.texture.size.width,
        };

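        // When the data has a custom stride, tell GL the row length in pixels
        // so that it skips any padding bytes between rows.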
        if chunk.stride.is_some() {
            device.gl.pixel_store_i(
                gl::UNPACK_ROW_LENGTH,
                row_length as _,
            );
        }

        let pos = chunk.rect.min;
        let size = chunk.rect.size();

        match chunk.texture.target {
            gl::TEXTURE_2D | gl::TEXTURE_RECTANGLE | gl::TEXTURE_EXTERNAL_OES => {
                device.gl.tex_sub_image_2d_pbo(
                    chunk.texture.target,
                    0,
                    pos.x as _,
                    pos.y as _,
                    size.width as _,
                    size.height as _,
                    gl_format,
                    data_type,
                    chunk.offset,
                );
            }
            _ => panic!("BUG: Unexpected texture target!"),
        }

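        // If using trilinear filtering, build the mipmap chain for this texture.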
        if chunk.texture.filter == TextureFilter::Trilinear {
            device.gl.generate_mipmap(chunk.texture.target);
        }

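        // Reset the row length, otherwise the stride would apply to
        // subsequent texture uploads.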
        if chunk.stride.is_some() {
            device.gl.pixel_store_i(gl::UNPACK_ROW_LENGTH, 0 as _);
        }
    }
}

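/// Reinterprets a slice of texels as a slice of raw bytes.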
fn texels_to_u8_slice<T: Texel>(texels: &[T]) -> &[u8] {
    unsafe {
        slice::from_raw_parts(texels.as_ptr() as *const u8, texels.len() * mem::size_of::<T>())
    }
}