image_webp/
decoder.rs

use byteorder_lite::{LittleEndian, ReadBytesExt};
use quick_error::quick_error;

use std::collections::HashMap;
use std::io::{self, BufRead, Cursor, Read, Seek};
use std::num::NonZeroU16;
use std::ops::Range;

use crate::extended::{self, get_alpha_predictor, read_alpha_chunk, WebPExtendedInfo};

use super::lossless::LosslessDecoder;
use super::vp8::Vp8Decoder;

quick_error! {
    /// Errors that can occur when attempting to decode a WebP image
    #[derive(Debug)]
    #[non_exhaustive]
    pub enum DecodingError {
        /// An IO error occurred while reading the file
        IoError(err: io::Error) {
            from()
            display("IO Error: {}", err)
            source(err)
        }

        /// RIFF's "RIFF" signature not found or invalid
        RiffSignatureInvalid(err: [u8; 4]) {
            display("Invalid RIFF signature: {err:x?}")
        }

        /// WebP's "WEBP" signature not found or invalid
        WebpSignatureInvalid(err: [u8; 4]) {
            display("Invalid WebP signature: {err:x?}")
        }

        /// An expected chunk was missing
        ChunkMissing {
            display("An expected chunk was missing")
        }

        /// Chunk Header was incorrect or invalid in its usage
        ChunkHeaderInvalid(err: [u8; 4]) {
            display("Invalid Chunk header: {err:x?}")
        }

        #[allow(deprecated)]
        #[deprecated]
        /// Some bits were invalid
        ReservedBitSet {
            display("Reserved bits set")
        }

        /// The ALPH chunk preprocessing info flag was invalid
        InvalidAlphaPreprocessing {
            display("Alpha chunk preprocessing flag invalid")
        }

        /// Invalid compression method
        InvalidCompressionMethod {
            display("Invalid compression method")
        }

        /// Alpha chunk doesn't match the frame's size
        AlphaChunkSizeMismatch {
            display("Alpha chunk size mismatch")
        }

        /// Image is too large, either for the platform's pointer size or generally
        ImageTooLarge {
            display("Image too large")
        }

        /// Frame would go out of the canvas
        FrameOutsideImage {
            display("Frame outside image")
        }

        /// Signature of 0x2f not found
        LosslessSignatureInvalid(err: u8) {
            display("Invalid lossless signature: {err:x?}")
        }

        /// Version Number was not zero
        VersionNumberInvalid(err: u8) {
            display("Invalid lossless version number: {err}")
        }

        /// Invalid color cache bits
        InvalidColorCacheBits(err: u8) {
            display("Invalid color cache bits: {err}")
        }

        /// An invalid Huffman code was encountered
        HuffmanError {
            display("Invalid Huffman code")
        }

        /// The bitstream was somehow corrupt
        BitStreamError {
            display("Corrupt bitstream")
        }

        /// The transforms specified were invalid
        TransformError {
            display("Invalid transform")
        }

        /// VP8's `[0x9D, 0x01, 0x2A]` magic not found or invalid
        Vp8MagicInvalid(err: [u8; 3]) {
            display("Invalid VP8 magic: {err:x?}")
        }

        /// VP8 Decoder initialisation wasn't provided with enough data
        NotEnoughInitData {
            display("Not enough VP8 init data")
        }

        /// At time of writing, only the YUV colour-space encoded as `0` is specified
        ColorSpaceInvalid(err: u8) {
            display("Invalid VP8 color space: {err}")
        }

        /// LUMA prediction mode was not recognised
        LumaPredictionModeInvalid(err: i8) {
            display("Invalid VP8 luma prediction mode: {err}")
        }

        /// Intra-prediction mode was not recognised
        IntraPredictionModeInvalid(err: i8) {
            display("Invalid VP8 intra prediction mode: {err}")
        }

        /// Chroma prediction mode was not recognised
        ChromaPredictionModeInvalid(err: i8) {
            display("Invalid VP8 chroma prediction mode: {err}")
        }

        /// Inconsistent image sizes
        InconsistentImageSizes {
            display("Inconsistent image sizes")
        }

        /// The file may be valid, but this crate doesn't support decoding it.
        UnsupportedFeature(err: String) {
            display("Unsupported feature: {err}")
        }

        /// Invalid function call or parameter
        InvalidParameter(err: String) {
            display("Invalid parameter: {err}")
        }

        /// Memory limit exceeded
        MemoryLimitExceeded {
            display("Memory limit exceeded")
        }

        /// Invalid chunk size
        InvalidChunkSize {
            display("Invalid chunk size")
        }

        /// No more frames in image
        NoMoreFrames {
            display("No more frames")
        }
    }
}

/// All possible RIFF chunks in a WebP image file
#[allow(clippy::upper_case_acronyms)]
#[derive(Debug, Clone, Copy, PartialEq, Hash, Eq)]
pub(crate) enum WebPRiffChunk {
    RIFF,
    WEBP,
    VP8,
    VP8L,
    VP8X,
    ANIM,
    ANMF,
    ALPH,
    ICCP,
    EXIF,
    XMP,
    Unknown([u8; 4]),
}

impl WebPRiffChunk {
    pub(crate) const fn from_fourcc(chunk_fourcc: [u8; 4]) -> Self {
        match &chunk_fourcc {
            b"RIFF" => Self::RIFF,
            b"WEBP" => Self::WEBP,
            b"VP8 " => Self::VP8,
            b"VP8L" => Self::VP8L,
            b"VP8X" => Self::VP8X,
            b"ANIM" => Self::ANIM,
            b"ANMF" => Self::ANMF,
            b"ALPH" => Self::ALPH,
            b"ICCP" => Self::ICCP,
            b"EXIF" => Self::EXIF,
            b"XMP " => Self::XMP,
            _ => Self::Unknown(chunk_fourcc),
        }
    }

    pub(crate) const fn to_fourcc(self) -> [u8; 4] {
        match self {
            Self::RIFF => *b"RIFF",
            Self::WEBP => *b"WEBP",
            Self::VP8 => *b"VP8 ",
            Self::VP8L => *b"VP8L",
            Self::VP8X => *b"VP8X",
            Self::ANIM => *b"ANIM",
            Self::ANMF => *b"ANMF",
            Self::ALPH => *b"ALPH",
            Self::ICCP => *b"ICCP",
            Self::EXIF => *b"EXIF",
            Self::XMP => *b"XMP ",
            Self::Unknown(fourcc) => fourcc,
        }
    }

    pub(crate) const fn is_unknown(self) -> bool {
        matches!(self, Self::Unknown(_))
    }
}

// enum WebPImage {
//     Lossy(VP8Frame),
//     Lossless(LosslessFrame),
//     Extended(ExtendedImage),
// }

enum ImageKind {
    Lossy,
    Lossless,
    Extended(WebPExtendedInfo),
}

struct AnimationState {
    next_frame: u32,
    next_frame_start: u64,
    dispose_next_frame: bool,
    previous_frame_width: u32,
    previous_frame_height: u32,
    previous_frame_x_offset: u32,
    previous_frame_y_offset: u32,
    canvas: Option<Vec<u8>>,
}
impl Default for AnimationState {
    fn default() -> Self {
        Self {
            next_frame: 0,
            next_frame_start: 0,
            dispose_next_frame: true,
            previous_frame_width: 0,
            previous_frame_height: 0,
            previous_frame_x_offset: 0,
            previous_frame_y_offset: 0,
            canvas: None,
        }
    }
}

/// Number of times that an animation loops.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum LoopCount {
    /// The animation loops forever.
    Forever,
    /// Each frame of the animation is displayed the specified number of times.
    Times(NonZeroU16),
}

/// WebP decoder configuration options
#[derive(Clone)]
#[non_exhaustive]
pub struct WebPDecodeOptions {
    /// The upsampling method used when converting lossy YUV data to RGB
    ///
    /// Defaults to `Bilinear`.
    pub lossy_upsampling: UpsamplingMethod,
}

impl Default for WebPDecodeOptions {
    fn default() -> Self {
        Self {
            lossy_upsampling: UpsamplingMethod::Bilinear,
        }
    }
}

/// Methods for upsampling the chroma values in lossy decoding
///
/// The chroma red and blue planes are encoded in VP8 at half the size of the luma plane,
/// so they must be upsampled to produce a value for each pixel in the image.
#[derive(Clone, Copy, Default)]
pub enum UpsamplingMethod {
    /// Fancy upsampling
    ///
    /// Does bilinear interpolation using the 4 values nearest to the pixel, weighting based on the distance
    /// from the pixel.
    #[default]
    Bilinear,
    /// Simple upsampling, just uses the closest u/v value to the pixel when upsampling
    ///
    /// Matches the -nofancy option in dwebp.
    /// Should be faster but may lead to slightly jagged edges.
    Simple,
}

/// WebP image format decoder.
pub struct WebPDecoder<R> {
    r: R,
    memory_limit: usize,

    width: u32,
    height: u32,

    kind: ImageKind,
    animation: AnimationState,

    is_lossy: bool,
    has_alpha: bool,
    num_frames: u32,
    loop_count: LoopCount,
    loop_duration: u64,

    chunks: HashMap<WebPRiffChunk, Range<u64>>,

    webp_decode_options: WebPDecodeOptions,
}

impl<R: BufRead + Seek> WebPDecoder<R> {
    /// Create a new `WebPDecoder` from the reader `r`. The decoder performs many small reads, so the
    /// reader should be buffered.
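    ///
    /// # Examples
    ///
    /// A minimal usage sketch (not taken from the crate's own docs; it assumes the decoder type
    /// is re-exported at the crate root as `image_webp::WebPDecoder` and that an `image.webp`
    /// file exists):
    ///
    /// ```no_run
    /// use std::fs::File;
    /// use std::io::BufReader;
    ///
    /// use image_webp::WebPDecoder;
    ///
    /// // Wrap the file in a `BufReader`, since the decoder performs many small reads.
    /// let file = BufReader::new(File::open("image.webp")?);
    /// let decoder = WebPDecoder::new(file)?;
    /// let (width, height) = decoder.dimensions();
    /// println!("{width}x{height}, alpha: {}", decoder.has_alpha());
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// ```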
    pub fn new(r: R) -> Result<Self, DecodingError> {
        Self::new_with_options(r, WebPDecodeOptions::default())
    }

    /// Create a new `WebPDecoder` from the reader `r` with the given `WebPDecodeOptions`. The
    /// decoder performs many small reads, so the reader should be buffered.
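    ///
    /// # Examples
    ///
    /// A sketch of overriding the default upsampling method (assuming `WebPDecodeOptions` and
    /// `UpsamplingMethod` are re-exported at the crate root):
    ///
    /// ```no_run
    /// use image_webp::{UpsamplingMethod, WebPDecodeOptions, WebPDecoder};
    ///
    /// // `WebPDecodeOptions` is `#[non_exhaustive]`, so start from the defaults and set fields.
    /// let mut options = WebPDecodeOptions::default();
    /// options.lossy_upsampling = UpsamplingMethod::Simple;
    ///
    /// # let reader = std::io::Cursor::new(Vec::<u8>::new());
    /// let decoder = WebPDecoder::new_with_options(reader, options)?;
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// ```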
    pub fn new_with_options(
        r: R,
        webp_decode_options: WebPDecodeOptions,
    ) -> Result<Self, DecodingError> {
        let mut decoder = Self {
            r,
            width: 0,
            height: 0,
            num_frames: 0,
            kind: ImageKind::Lossy,
            chunks: HashMap::new(),
            animation: Default::default(),
            memory_limit: usize::MAX,
            is_lossy: false,
            has_alpha: false,
            loop_count: LoopCount::Times(NonZeroU16::new(1).unwrap()),
            loop_duration: 0,
            webp_decode_options,
        };
        decoder.read_data()?;
        Ok(decoder)
    }

    fn read_data(&mut self) -> Result<(), DecodingError> {
        let (WebPRiffChunk::RIFF, riff_size, _) = read_chunk_header(&mut self.r)? else {
            return Err(DecodingError::ChunkHeaderInvalid(*b"RIFF"));
        };

        match &read_fourcc(&mut self.r)? {
            WebPRiffChunk::WEBP => {}
            fourcc => return Err(DecodingError::WebpSignatureInvalid(fourcc.to_fourcc())),
        }

        let (chunk, chunk_size, chunk_size_rounded) = read_chunk_header(&mut self.r)?;
        let start = self.r.stream_position()?;

        match chunk {
            WebPRiffChunk::VP8 => {
                let tag = self.r.read_u24::<LittleEndian>()?;

                let keyframe = tag & 1 == 0;
                if !keyframe {
                    return Err(DecodingError::UnsupportedFeature(
                        "Non-keyframe frames".to_owned(),
                    ));
                }

                let mut tag = [0u8; 3];
                self.r.read_exact(&mut tag)?;
                if tag != [0x9d, 0x01, 0x2a] {
                    return Err(DecodingError::Vp8MagicInvalid(tag));
                }

                let w = self.r.read_u16::<LittleEndian>()?;
                let h = self.r.read_u16::<LittleEndian>()?;

                self.width = u32::from(w & 0x3FFF);
                self.height = u32::from(h & 0x3FFF);
                if self.width == 0 || self.height == 0 {
                    return Err(DecodingError::InconsistentImageSizes);
                }

                self.chunks
                    .insert(WebPRiffChunk::VP8, start..start + chunk_size);
                self.kind = ImageKind::Lossy;
                self.is_lossy = true;
            }
            WebPRiffChunk::VP8L => {
                let signature = self.r.read_u8()?;
                if signature != 0x2f {
                    return Err(DecodingError::LosslessSignatureInvalid(signature));
                }

                let header = self.r.read_u32::<LittleEndian>()?;
                let version = header >> 29;
                if version != 0 {
                    return Err(DecodingError::VersionNumberInvalid(version as u8));
                }

                self.width = (1 + header) & 0x3FFF;
                self.height = (1 + (header >> 14)) & 0x3FFF;
                self.chunks
                    .insert(WebPRiffChunk::VP8L, start..start + chunk_size);
                self.kind = ImageKind::Lossless;
                self.has_alpha = (header >> 28) & 1 != 0;
            }
            WebPRiffChunk::VP8X => {
                let mut info = extended::read_extended_header(&mut self.r)?;
                self.width = info.canvas_width;
                self.height = info.canvas_height;

                let mut position = start + chunk_size_rounded;
                let max_position = position + riff_size.saturating_sub(12);
                self.r.seek(io::SeekFrom::Start(position))?;

                while position < max_position {
                    match read_chunk_header(&mut self.r) {
                        Ok((chunk, chunk_size, chunk_size_rounded)) => {
                            let range = position + 8..position + 8 + chunk_size;
                            position += 8 + chunk_size_rounded;

                            if !chunk.is_unknown() {
                                self.chunks.entry(chunk).or_insert(range);
                            }

                            if chunk == WebPRiffChunk::ANMF {
                                self.num_frames += 1;
                                if chunk_size < 24 {
                                    return Err(DecodingError::InvalidChunkSize);
                                }

                                self.r.seek_relative(12)?;
                                let duration = self.r.read_u32::<LittleEndian>()? & 0xffffff;
                                self.loop_duration =
                                    self.loop_duration.wrapping_add(u64::from(duration));

                                // If the image is animated, the image data chunk will be inside the
                                // ANMF chunks, so we must inspect them to determine whether the
                                // image contains any lossy image data. VP8 chunks store lossy data
                                // and the spec says that lossless images SHOULD NOT contain ALPH
                                // chunks, so we treat both as indicators of lossy images.
                                if !self.is_lossy {
                                    let (subchunk, ..) = read_chunk_header(&mut self.r)?;
                                    if let WebPRiffChunk::VP8 | WebPRiffChunk::ALPH = subchunk {
                                        self.is_lossy = true;
                                    }
                                    self.r.seek_relative(chunk_size_rounded as i64 - 24)?;
                                } else {
                                    self.r.seek_relative(chunk_size_rounded as i64 - 16)?;
                                }

                                continue;
                            }

                            self.r.seek_relative(chunk_size_rounded as i64)?;
                        }
                        Err(DecodingError::IoError(e))
                            if e.kind() == io::ErrorKind::UnexpectedEof =>
                        {
                            break;
                        }
                        Err(e) => return Err(e),
                    }
                }
                self.is_lossy = self.is_lossy || self.chunks.contains_key(&WebPRiffChunk::VP8);

                // NOTE: We allow malformed images that have `info.icc_profile` set without a ICCP chunk,
                // because this is relatively common.
                if info.animation
                    && (!self.chunks.contains_key(&WebPRiffChunk::ANIM)
                        || !self.chunks.contains_key(&WebPRiffChunk::ANMF))
                    || info.exif_metadata && !self.chunks.contains_key(&WebPRiffChunk::EXIF)
                    || info.xmp_metadata && !self.chunks.contains_key(&WebPRiffChunk::XMP)
                    || !info.animation
                        && self.chunks.contains_key(&WebPRiffChunk::VP8)
                            == self.chunks.contains_key(&WebPRiffChunk::VP8L)
                {
                    return Err(DecodingError::ChunkMissing);
                }

                // Decode ANIM chunk.
                if info.animation {
                    match self.read_chunk(WebPRiffChunk::ANIM, 6) {
                        Ok(Some(chunk)) => {
                            let mut cursor = Cursor::new(chunk);
                            cursor.read_exact(&mut info.background_color_hint)?;
                            self.loop_count = match cursor.read_u16::<LittleEndian>()? {
                                0 => LoopCount::Forever,
                                n => LoopCount::Times(NonZeroU16::new(n).unwrap()),
                            };
                            self.animation.next_frame_start =
                                self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8;
                        }
                        Ok(None) => return Err(DecodingError::ChunkMissing),
                        Err(DecodingError::MemoryLimitExceeded) => {
                            return Err(DecodingError::InvalidChunkSize)
                        }
                        Err(e) => return Err(e),
                    }
                }

                // If the image is animated, the image data chunk will be inside the ANMF chunks. We
                // store the ALPH, VP8, and VP8L chunks (as applicable) of the first frame in the
                // hashmap so that we can read them later.
                if let Some(range) = self.chunks.get(&WebPRiffChunk::ANMF).cloned() {
                    let mut position = range.start + 16;
                    self.r.seek(io::SeekFrom::Start(position))?;
                    for _ in 0..2 {
                        let (subchunk, subchunk_size, subchunk_size_rounded) =
                            read_chunk_header(&mut self.r)?;
                        let subrange = position + 8..position + 8 + subchunk_size;
                        self.chunks.entry(subchunk).or_insert(subrange.clone());

                        position += 8 + subchunk_size_rounded;
                        if position + 8 > range.end {
                            break;
                        }
                    }
                }

                self.has_alpha = info.alpha;
                self.kind = ImageKind::Extended(info);
            }
            _ => return Err(DecodingError::ChunkHeaderInvalid(chunk.to_fourcc())),
        };

        Ok(())
    }

    /// Sets the maximum amount of memory that the decoder is allowed to allocate at once.
    ///
    /// TODO: Some allocations currently ignore this limit.
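    ///
    /// # Examples
    ///
    /// A small sketch; the 64 MiB figure is only an illustrative choice:
    ///
    /// ```no_run
    /// # use image_webp::WebPDecoder;
    /// # let reader = std::io::Cursor::new(Vec::<u8>::new());
    /// let mut decoder = WebPDecoder::new(reader)?;
    /// // Refuse single allocations (e.g. metadata chunk reads) larger than 64 MiB.
    /// decoder.set_memory_limit(64 * 1024 * 1024);
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// ```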
    pub fn set_memory_limit(&mut self, limit: usize) {
        self.memory_limit = limit;
    }

    /// Returns the background color specified in the image file, if the image is an extended and animated WebP.
    pub fn background_color_hint(&self) -> Option<[u8; 4]> {
        if let ImageKind::Extended(info) = &self.kind {
            Some(info.background_color_hint)
        } else {
            None
        }
    }

    /// Sets the background color if the image is an extended and animated WebP.
    pub fn set_background_color(&mut self, color: [u8; 4]) -> Result<(), DecodingError> {
        if let ImageKind::Extended(info) = &mut self.kind {
            info.background_color = Some(color);
            Ok(())
        } else {
            Err(DecodingError::InvalidParameter(
                "Background color can only be set on animated webp".to_owned(),
            ))
        }
    }

    /// Returns the (width, height) of the image in pixels.
    pub fn dimensions(&self) -> (u32, u32) {
        (self.width, self.height)
    }

    /// Returns whether the image has an alpha channel. If so, the pixel format is Rgba8 and
    /// otherwise Rgb8.
    pub fn has_alpha(&self) -> bool {
        self.has_alpha
    }

    /// Returns true if the image is animated.
    pub fn is_animated(&self) -> bool {
        match &self.kind {
            ImageKind::Lossy | ImageKind::Lossless => false,
            ImageKind::Extended(extended) => extended.animation,
        }
    }

    /// Returns whether the image is lossy. For animated images, this is true if any frame is lossy.
    pub fn is_lossy(&mut self) -> bool {
        self.is_lossy
    }

    /// Returns the number of frames of a single loop of the animation, or zero if the image is not
    /// animated.
    pub fn num_frames(&self) -> u32 {
        self.num_frames
    }

    /// Returns the number of times the animation should loop.
    pub fn loop_count(&self) -> LoopCount {
        self.loop_count
    }

    /// Returns the total duration of one loop through the animation in milliseconds, or zero if the
    /// image is not animated.
    ///
    /// This is the sum of the durations of all individual frames of the image.
    pub fn loop_duration(&self) -> u64 {
        self.loop_duration
    }

    fn read_chunk(
        &mut self,
        chunk: WebPRiffChunk,
        max_size: usize,
    ) -> Result<Option<Vec<u8>>, DecodingError> {
        match self.chunks.get(&chunk) {
            Some(range) => {
                if range.end - range.start > max_size as u64 {
                    return Err(DecodingError::MemoryLimitExceeded);
                }

                self.r.seek(io::SeekFrom::Start(range.start))?;
                let mut data = vec![0; (range.end - range.start) as usize];
                self.r.read_exact(&mut data)?;
                Ok(Some(data))
            }
            None => Ok(None),
        }
    }

    /// Returns the raw bytes of the ICC profile, or None if there is no ICC profile.
    pub fn icc_profile(&mut self) -> Result<Option<Vec<u8>>, DecodingError> {
        self.read_chunk(WebPRiffChunk::ICCP, self.memory_limit)
    }

    /// Returns the raw bytes of the EXIF metadata, or None if there is no EXIF metadata.
    pub fn exif_metadata(&mut self) -> Result<Option<Vec<u8>>, DecodingError> {
        self.read_chunk(WebPRiffChunk::EXIF, self.memory_limit)
    }

    /// Returns the raw bytes of the XMP metadata, or None if there is no XMP metadata.
    pub fn xmp_metadata(&mut self) -> Result<Option<Vec<u8>>, DecodingError> {
        self.read_chunk(WebPRiffChunk::XMP, self.memory_limit)
    }

    /// Returns the number of bytes required to store the image or a single frame, or None if that
    /// would take more than `usize::MAX` bytes.
    pub fn output_buffer_size(&self) -> Option<usize> {
        let bytes_per_pixel = if self.has_alpha() { 4 } else { 3 };
        (self.width as usize)
            .checked_mul(self.height as usize)?
            .checked_mul(bytes_per_pixel)
    }

    /// Returns the raw bytes of the image. For animated images, this is the first frame.
    ///
    /// Fails with `ImageTooLarge` if `buf` has a length different from `output_buffer_size()`.
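    ///
    /// # Examples
    ///
    /// A usage sketch (assuming the decoder type is re-exported at the crate root and that an
    /// `image.webp` file exists):
    ///
    /// ```no_run
    /// use std::fs::File;
    /// use std::io::BufReader;
    ///
    /// use image_webp::WebPDecoder;
    ///
    /// let mut decoder = WebPDecoder::new(BufReader::new(File::open("image.webp")?))?;
    ///
    /// // Size the buffer with `output_buffer_size()`; the pixels are Rgb8 or Rgba8 depending
    /// // on `has_alpha()`.
    /// let mut buf = vec![0; decoder.output_buffer_size().expect("image too large")];
    /// decoder.read_image(&mut buf)?;
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// ```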
    pub fn read_image(&mut self, buf: &mut [u8]) -> Result<(), DecodingError> {
        if Some(buf.len()) != self.output_buffer_size() {
            return Err(DecodingError::ImageTooLarge);
        }

        if self.is_animated() {
            let saved = std::mem::take(&mut self.animation);
            self.animation.next_frame_start =
                self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8;
            let result = self.read_frame(buf);
            self.animation = saved;
            result?;
        } else if let Some(range) = self.chunks.get(&WebPRiffChunk::VP8L) {
            let mut decoder = LosslessDecoder::new(range_reader(&mut self.r, range.clone())?);

            if self.has_alpha {
                decoder.decode_frame(self.width, self.height, false, buf)?;
            } else {
                let mut data = vec![0; self.width as usize * self.height as usize * 4];
                decoder.decode_frame(self.width, self.height, false, &mut data)?;
                for (rgba_val, chunk) in data.chunks_exact(4).zip(buf.chunks_exact_mut(3)) {
                    chunk.copy_from_slice(&rgba_val[..3]);
                }
            }
        } else {
            let range = self
                .chunks
                .get(&WebPRiffChunk::VP8)
                .ok_or(DecodingError::ChunkMissing)?;
            let reader = range_reader(&mut self.r, range.start..range.end)?;
            let frame = Vp8Decoder::decode_frame(reader)?;
            if u32::from(frame.width) != self.width || u32::from(frame.height) != self.height {
                return Err(DecodingError::InconsistentImageSizes);
            }

            if self.has_alpha() {
                frame.fill_rgba(buf, self.webp_decode_options.lossy_upsampling);

                let range = self
                    .chunks
                    .get(&WebPRiffChunk::ALPH)
                    .ok_or(DecodingError::ChunkMissing)?
                    .clone();
                let alpha_chunk = read_alpha_chunk(
                    &mut range_reader(&mut self.r, range)?,
                    self.width as u16,
                    self.height as u16,
                )?;

                for y in 0..frame.height {
                    for x in 0..frame.width {
                        let predictor: u8 = get_alpha_predictor(
                            x.into(),
                            y.into(),
                            frame.width.into(),
                            alpha_chunk.filtering_method,
                            buf,
                        );

                        let alpha_index =
                            usize::from(y) * usize::from(frame.width) + usize::from(x);
                        let buffer_index = alpha_index * 4 + 3;

                        buf[buffer_index] = predictor.wrapping_add(alpha_chunk.data[alpha_index]);
                    }
                }
            } else {
                frame.fill_rgb(buf, self.webp_decode_options.lossy_upsampling);
            }
        }

        Ok(())
    }

    /// Reads the next frame of the animation.
    ///
    /// The frame contents are written into `buf` and the method returns the duration of the frame
    /// in milliseconds. If there are no more frames, the method returns
    /// `DecodingError::NoMoreFrames` and `buf` is left unchanged.
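    ///
    /// # Examples
    ///
    /// A sketch of decoding every frame of one loop of an animation (assuming the decoder type
    /// is re-exported at the crate root and that an `animation.webp` file exists):
    ///
    /// ```no_run
    /// use std::fs::File;
    /// use std::io::BufReader;
    ///
    /// use image_webp::WebPDecoder;
    ///
    /// let mut decoder = WebPDecoder::new(BufReader::new(File::open("animation.webp")?))?;
    /// assert!(decoder.is_animated());
    ///
    /// let mut buf = vec![0; decoder.output_buffer_size().expect("image too large")];
    /// for _ in 0..decoder.num_frames() {
    ///     // Each call composites the next frame onto the canvas and returns its duration in
    ///     // milliseconds.
    ///     let duration_ms = decoder.read_frame(&mut buf)?;
    ///     println!("frame displayed for {duration_ms} ms");
    /// }
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// ```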
    ///
    /// # Panics
    ///
    /// Panics if the image is not animated.
    pub fn read_frame(&mut self, buf: &mut [u8]) -> Result<u32, DecodingError> {
        assert!(self.is_animated());
        assert_eq!(Some(buf.len()), self.output_buffer_size());

        if self.animation.next_frame == self.num_frames {
            return Err(DecodingError::NoMoreFrames);
        }

        let ImageKind::Extended(info) = &self.kind else {
            unreachable!()
        };

        self.r
            .seek(io::SeekFrom::Start(self.animation.next_frame_start))?;

        let anmf_size = match read_chunk_header(&mut self.r)? {
            (WebPRiffChunk::ANMF, size, _) if size >= 32 => size,
            _ => return Err(DecodingError::ChunkHeaderInvalid(*b"ANMF")),
        };

        // Read ANMF chunk
        let frame_x = extended::read_3_bytes(&mut self.r)? * 2;
        let frame_y = extended::read_3_bytes(&mut self.r)? * 2;
        let frame_width = extended::read_3_bytes(&mut self.r)? + 1;
        let frame_height = extended::read_3_bytes(&mut self.r)? + 1;
        if frame_width > 16384 || frame_height > 16384 {
            return Err(DecodingError::ImageTooLarge);
        }
        if frame_x + frame_width > self.width || frame_y + frame_height > self.height {
            return Err(DecodingError::FrameOutsideImage);
        }
        let duration = extended::read_3_bytes(&mut self.r)?;
        let frame_info = self.r.read_u8()?;
        let use_alpha_blending = frame_info & 0b00000010 == 0;
        let dispose = frame_info & 0b00000001 != 0;

        let clear_color = if self.animation.dispose_next_frame {
            info.background_color
        } else {
            None
        };

        // Read normal bitstream now
        let (chunk, chunk_size, chunk_size_rounded) = read_chunk_header(&mut self.r)?;
        if chunk_size_rounded + 24 > anmf_size {
            return Err(DecodingError::ChunkHeaderInvalid(chunk.to_fourcc()));
        }

        let (frame, frame_has_alpha): (Vec<u8>, bool) = match chunk {
            WebPRiffChunk::VP8 => {
                let reader = (&mut self.r).take(chunk_size);
                let raw_frame = Vp8Decoder::decode_frame(reader)?;
                if u32::from(raw_frame.width) != frame_width
                    || u32::from(raw_frame.height) != frame_height
                {
                    return Err(DecodingError::InconsistentImageSizes);
                }
                let mut rgb_frame = vec![0; frame_width as usize * frame_height as usize * 3];
                raw_frame.fill_rgb(&mut rgb_frame, self.webp_decode_options.lossy_upsampling);
                (rgb_frame, false)
            }
            WebPRiffChunk::VP8L => {
                let reader = (&mut self.r).take(chunk_size);
                let mut lossless_decoder = LosslessDecoder::new(reader);
                let mut rgba_frame = vec![0; frame_width as usize * frame_height as usize * 4];
                lossless_decoder.decode_frame(frame_width, frame_height, false, &mut rgba_frame)?;
                (rgba_frame, true)
            }
            WebPRiffChunk::ALPH => {
                if chunk_size_rounded + 32 > anmf_size {
                    return Err(DecodingError::ChunkHeaderInvalid(chunk.to_fourcc()));
                }

                // read alpha
                let next_chunk_start = self.r.stream_position()? + chunk_size_rounded;
                let mut reader = (&mut self.r).take(chunk_size);
                let alpha_chunk =
                    read_alpha_chunk(&mut reader, frame_width as u16, frame_height as u16)?;

                // read opaque
                self.r.seek(io::SeekFrom::Start(next_chunk_start))?;
                let (next_chunk, next_chunk_size, _) = read_chunk_header(&mut self.r)?;
                if chunk_size + next_chunk_size + 32 > anmf_size {
                    return Err(DecodingError::ChunkHeaderInvalid(next_chunk.to_fourcc()));
                }

                let frame = Vp8Decoder::decode_frame((&mut self.r).take(next_chunk_size))?;

                let mut rgba_frame = vec![0; frame_width as usize * frame_height as usize * 4];
                frame.fill_rgba(&mut rgba_frame, self.webp_decode_options.lossy_upsampling);

                for y in 0..frame.height {
                    for x in 0..frame.width {
                        let predictor: u8 = get_alpha_predictor(
                            x.into(),
                            y.into(),
                            frame.width.into(),
                            alpha_chunk.filtering_method,
                            &rgba_frame,
                        );

                        let alpha_index =
                            usize::from(y) * usize::from(frame.width) + usize::from(x);
                        let buffer_index = alpha_index * 4 + 3;

                        rgba_frame[buffer_index] =
                            predictor.wrapping_add(alpha_chunk.data[alpha_index]);
                    }
                }

                (rgba_frame, true)
            }
            _ => return Err(DecodingError::ChunkHeaderInvalid(chunk.to_fourcc())),
        };

        // fill starting canvas with clear color
        if self.animation.canvas.is_none() {
            self.animation.canvas = {
                let mut canvas = vec![0; (self.width * self.height * 4) as usize];
                if let Some(color) = info.background_color.as_ref() {
                    canvas
                        .chunks_exact_mut(4)
                        .for_each(|c| c.copy_from_slice(color))
                }
                Some(canvas)
            }
        }
        extended::composite_frame(
            self.animation.canvas.as_mut().unwrap(),
            self.width,
            self.height,
            clear_color,
            &frame,
            frame_x,
            frame_y,
            frame_width,
            frame_height,
            frame_has_alpha,
            use_alpha_blending,
            self.animation.previous_frame_width,
            self.animation.previous_frame_height,
            self.animation.previous_frame_x_offset,
            self.animation.previous_frame_y_offset,
        );

        self.animation.previous_frame_width = frame_width;
        self.animation.previous_frame_height = frame_height;
        self.animation.previous_frame_x_offset = frame_x;
        self.animation.previous_frame_y_offset = frame_y;

        self.animation.dispose_next_frame = dispose;
        self.animation.next_frame_start += anmf_size + 8;
        self.animation.next_frame += 1;

        if self.has_alpha() {
            buf.copy_from_slice(self.animation.canvas.as_ref().unwrap());
        } else {
            for (b, c) in buf
                .chunks_exact_mut(3)
                .zip(self.animation.canvas.as_ref().unwrap().chunks_exact(4))
            {
                b.copy_from_slice(&c[..3]);
            }
        }

        Ok(duration)
    }

    /// Resets the animation to the first frame.
    ///
    /// # Panics
    ///
    /// Panics if the image is not animated.
    pub fn reset_animation(&mut self) {
        assert!(self.is_animated());

        self.animation.next_frame = 0;
        self.animation.next_frame_start = self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8;
        self.animation.dispose_next_frame = true;
    }

    /// Sets the upsampling method that is used in lossy decoding
    pub fn set_lossy_upsampling(&mut self, upsampling_method: UpsamplingMethod) {
        self.webp_decode_options.lossy_upsampling = upsampling_method;
    }
}

pub(crate) fn range_reader<R: BufRead + Seek>(
    mut r: R,
    range: Range<u64>,
) -> Result<impl BufRead, DecodingError> {
    r.seek(io::SeekFrom::Start(range.start))?;
    Ok(r.take(range.end - range.start))
}

pub(crate) fn read_fourcc<R: BufRead>(mut r: R) -> Result<WebPRiffChunk, DecodingError> {
    let mut chunk_fourcc = [0; 4];
    r.read_exact(&mut chunk_fourcc)?;
    Ok(WebPRiffChunk::from_fourcc(chunk_fourcc))
}

pub(crate) fn read_chunk_header<R: BufRead>(
    mut r: R,
) -> Result<(WebPRiffChunk, u64, u64), DecodingError> {
    let chunk = read_fourcc(&mut r)?;
    let chunk_size = r.read_u32::<LittleEndian>()?;
    let chunk_size_rounded = chunk_size.saturating_add(chunk_size & 1);
    Ok((chunk, chunk_size.into(), chunk_size_rounded.into()))
}

#[cfg(test)]
mod tests {
    use super::*;
    const RGB_BPP: usize = 3;

    #[test]
    fn add_with_overflow_size() {
        let bytes = vec![
            0x52, 0x49, 0x46, 0x46, 0xaf, 0x37, 0x80, 0x47, 0x57, 0x45, 0x42, 0x50, 0x6c, 0x64,
            0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xfb, 0x7e, 0x73, 0x00, 0x06, 0x00, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65,
            0x40, 0xfb, 0xff, 0xff, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65,
            0x00, 0x00, 0x00, 0x00, 0x62, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x49,
            0x49, 0x54, 0x55, 0x50, 0x4c, 0x54, 0x59, 0x50, 0x45, 0x33, 0x37, 0x44, 0x4d, 0x46,
        ];

        let data = std::io::Cursor::new(bytes);

        let _ = WebPDecoder::new(data);
    }

    #[test]
    fn decode_2x2_single_color_image() {
        // Image data created from imagemagick and output of xxd:
        // $ convert -size 2x2 xc:#f00 red.webp
        // $ xxd -g 1 red.webp | head

        const NUM_PIXELS: usize = 2 * 2 * RGB_BPP;
        // 2x2 red pixel image
        let bytes = [
            0x52, 0x49, 0x46, 0x46, 0x3c, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50, 0x56, 0x50,
            0x38, 0x20, 0x30, 0x00, 0x00, 0x00, 0xd0, 0x01, 0x00, 0x9d, 0x01, 0x2a, 0x02, 0x00,
            0x02, 0x00, 0x02, 0x00, 0x34, 0x25, 0xa0, 0x02, 0x74, 0xba, 0x01, 0xf8, 0x00, 0x03,
            0xb0, 0x00, 0xfe, 0xf0, 0xc4, 0x0b, 0xff, 0x20, 0xb9, 0x61, 0x75, 0xc8, 0xd7, 0xff,
            0x20, 0x3f, 0xe4, 0x07, 0xfc, 0x80, 0xff, 0xf8, 0xf2, 0x00, 0x00, 0x00,
        ];

        let mut data = [0; NUM_PIXELS];
        let mut decoder = WebPDecoder::new(std::io::Cursor::new(bytes)).unwrap();
        decoder.read_image(&mut data).unwrap();

        // All pixels are the same value
        let first_pixel = &data[..RGB_BPP];
        assert!(data.chunks_exact(3).all(|ch| ch.iter().eq(first_pixel)));
    }

    #[test]
    fn decode_3x3_single_color_image() {
        // Test that any odd pixel "tail" is decoded properly

        const NUM_PIXELS: usize = 3 * 3 * RGB_BPP;
        // 3x3 red pixel image
        let bytes = [
            0x52, 0x49, 0x46, 0x46, 0x3c, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50, 0x56, 0x50,
            0x38, 0x20, 0x30, 0x00, 0x00, 0x00, 0xd0, 0x01, 0x00, 0x9d, 0x01, 0x2a, 0x03, 0x00,
            0x03, 0x00, 0x02, 0x00, 0x34, 0x25, 0xa0, 0x02, 0x74, 0xba, 0x01, 0xf8, 0x00, 0x03,
            0xb0, 0x00, 0xfe, 0xf0, 0xc4, 0x0b, 0xff, 0x20, 0xb9, 0x61, 0x75, 0xc8, 0xd7, 0xff,
            0x20, 0x3f, 0xe4, 0x07, 0xfc, 0x80, 0xff, 0xf8, 0xf2, 0x00, 0x00, 0x00,
        ];

        let mut data = [0; NUM_PIXELS];
        let mut decoder = WebPDecoder::new(std::io::Cursor::new(bytes)).unwrap();
        decoder.read_image(&mut data).unwrap();

        // All pixels are the same value
        let first_pixel = &data[..RGB_BPP];
        assert!(data.chunks_exact(3).all(|ch| ch.iter().eq(first_pixel)));
    }
}