1 mod stream;
2 mod zlib;
3 
4 use self::stream::{get_info, CHUNCK_BUFFER_SIZE};
5 pub use self::stream::{Decoded, DecodingError, StreamingDecoder};
6 
7 use std::borrow;
8 use std::io::{BufRead, BufReader, Read, Write};
9 use std::mem;
10 use std::ops::Range;
11 
12 use crate::chunk;
13 use crate::common::{BitDepth, BytesPerPixel, ColorType, Info, Transformations};
14 use crate::filter::{unfilter, FilterType};
15 use crate::utils;
16 
17 /*
18 pub enum InterlaceHandling {
19     /// Outputs the raw rows
20     RawRows,
21     /// Fill the missing pixels from the existing ones
22     Rectangle,
23     /// Only fill the needed pixels
24     Sparkle
25 }
26 */
27 
28 /// Output info, as returned by `Decoder::read_info`
29 #[derive(Debug, PartialEq, Eq)]
30 pub struct OutputInfo {
31     pub width: u32,
32     pub height: u32,
33     pub color_type: ColorType,
34     pub bit_depth: BitDepth,
35     pub line_size: usize,
36 }
37 
38 impl OutputInfo {
39     /// Returns the size needed to hold a decoded frame
40     pub fn buffer_size(&self) -> usize {
41         self.line_size * self.height as usize
42     }
43 }
44 
45 #[derive(Clone, Copy, Debug)]
46 /// Limits on the resources the `Decoder` is allowed to use
47 pub struct Limits {
48     /// Maximum number of bytes the decoder is allowed to allocate; the default is 64 MiB
49     pub bytes: usize,
50 }
51 
52 impl Default for Limits {
53     fn default() -> Limits {
54         Limits {
55             bytes: 1024 * 1024 * 64,
56         }
57     }
58 }
59 
60 /// PNG Decoder
61 pub struct Decoder<R: Read> {
62     /// Reader
63     r: R,
64     /// Output transformations
65     transform: Transformations,
66     /// Limits on resources the Decoder is allowed to use
67     limits: Limits,
68 }
69 
70 struct InterlacedRow<'data> {
71     data: &'data [u8],
72     interlace: InterlaceInfo,
73 }
74 
75 enum InterlaceInfo {
76     None,
77     Adam7 { pass: u8, line: u32, width: u32 },
78 }
79 
80 impl<R: Read> Decoder<R> {
81     pub fn new(r: R) -> Decoder<R> {
82         Decoder::new_with_limits(r, Limits::default())
83     }
84 
85     pub fn new_with_limits(r: R, limits: Limits) -> Decoder<R> {
86         Decoder {
87             r,
88             transform: crate::Transformations::EXPAND
89                 | crate::Transformations::SCALE_16
90                 | crate::Transformations::STRIP_16,
91             limits,
92         }
93     }
94 
95     /// Limit resource usage
96     ///
97     /// ```
98     /// use std::fs::File;
99     /// use png::{Decoder, Limits};
100     /// // This image is 32x32 pixels, so the decoder will allocate more than four bytes
101     /// let mut limits = Limits::default();
102     /// limits.bytes = 4;
103     /// let mut decoder = Decoder::new_with_limits(File::open("tests/pngsuite/basi0g01.png").unwrap(), limits);
104     /// assert!(decoder.read_info().is_err());
105     /// // This image is 32x32 pixels, so the decoder will allocate less than 10 KiB
106     /// let mut limits = Limits::default();
107     /// limits.bytes = 10*1024;
108     /// let mut decoder = Decoder::new_with_limits(File::open("tests/pngsuite/basi0g01.png").unwrap(), limits);
109     /// assert!(decoder.read_info().is_ok());
110     /// ```
111     pub fn set_limits(&mut self, limits: Limits) {
112         self.limits = limits;
113     }
114 
115     /// Reads all metadata until the first IDAT chunk
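    ///
    /// A minimal sketch of the usual flow (it reuses the repository test image from the
    /// `set_limits` example above; any readable PNG works):
    ///
    /// ```no_run
    /// use std::fs::File;
    /// use png::Decoder;
    ///
    /// let decoder = Decoder::new(File::open("tests/pngsuite/basi0g01.png").unwrap());
    /// // `info` describes the decoded output, `reader` yields rows or whole frames.
    /// let (info, _reader) = decoder.read_info().unwrap();
    /// assert_eq!((info.width, info.height), (32, 32));
    /// ```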
116     pub fn read_info(self) -> Result<(OutputInfo, Reader<R>), DecodingError> {
117         let mut r = Reader::new(self.r, StreamingDecoder::new(), self.transform, self.limits);
118         r.init()?;
119 
120         let color_type = r.info().color_type;
121         let bit_depth = r.info().bit_depth;
122         if color_type.is_combination_invalid(bit_depth) {
123             return Err(DecodingError::Format(
124                 format!(
125                     "Invalid color/depth combination in header: {:?}/{:?}",
126                     color_type, bit_depth
127                 )
128                 .into(),
129             ));
130         }
131 
132         // Check if the output buffer can be represented at all.
133         if r.checked_output_buffer_size().is_none() {
134             return Err(DecodingError::LimitsExceeded);
135         }
136 
137         let (ct, bits) = r.output_color_type();
138         let info = {
139             let info = r.info();
140             OutputInfo {
141                 width: info.width,
142                 height: info.height,
143                 color_type: ct,
144                 bit_depth: bits,
145                 line_size: r.output_line_size(info.width),
146             }
147         };
148         Ok((info, r))
149     }
150 
151     /// Set the allowed and performed transformations.
152     ///
153     /// A transformation is a pre-processing on the raw image data modifying content or encoding.
154     /// Many options have an impact on memory or CPU usage during decoding.
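    ///
    /// For example, to receive the raw encoded data without any expansion or scaling
    /// (a sketch; the test-image path is only borrowed from the example above):
    ///
    /// ```no_run
    /// use std::fs::File;
    /// use png::{Decoder, Transformations};
    ///
    /// let mut decoder = Decoder::new(File::open("tests/pngsuite/basi0g01.png").unwrap());
    /// // IDENTITY disables palette expansion and 16-bit scaling/stripping.
    /// decoder.set_transformations(Transformations::IDENTITY);
    /// let (_info, _reader) = decoder.read_info().unwrap();
    /// ```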
155     pub fn set_transformations(&mut self, transform: Transformations) {
156         self.transform = transform;
157     }
158 }
159 
160 struct ReadDecoder<R: Read> {
161     reader: BufReader<R>,
162     decoder: StreamingDecoder,
163     at_eof: bool,
164 }
165 
166 impl<R: Read> ReadDecoder<R> {
167     /// Returns the next decoded chunk. If the chunk is an ImageData chunk, its contents are written
168     /// into image_data.
169     fn decode_next(&mut self, image_data: &mut Vec<u8>) -> Result<Option<Decoded>, DecodingError> {
170         while !self.at_eof {
171             let (consumed, result) = {
172                 let buf = self.reader.fill_buf()?;
173                 if buf.is_empty() {
174                     return Err(DecodingError::Format("unexpected EOF".into()));
175                 }
176                 self.decoder.update(buf, image_data)?
177             };
178             self.reader.consume(consumed);
179             match result {
180                 Decoded::Nothing => (),
181                 Decoded::ImageEnd => self.at_eof = true,
182                 result => return Ok(Some(result)),
183             }
184         }
185         Ok(None)
186     }
187 
188     fn finished_decoding(&mut self) -> Result<(), DecodingError> {
189         while !self.at_eof {
190             let buf = self.reader.fill_buf()?;
191             if buf.is_empty() {
192                 return Err(DecodingError::Format("unexpected EOF after image".into()));
193             }
194             let (consumed, event) = self.decoder.update(buf, &mut vec![])?;
195             self.reader.consume(consumed);
196             match event {
197                 Decoded::Nothing => (),
198                 Decoded::ImageEnd => self.at_eof = true,
199                 // ignore more data
200                 Decoded::ChunkComplete(_, _) | Decoded::ChunkBegin(_, _) | Decoded::ImageData => {}
201                 Decoded::ImageDataFlushed => return Ok(()),
202                 Decoded::PartialChunk(_) => {}
203                 new => unreachable!("{:?}", new),
204             }
205         }
206 
207         Err(DecodingError::Format("unexpected EOF after image".into()))
208     }
209 
210     fn info(&self) -> Option<&Info> {
211         get_info(&self.decoder)
212     }
213 }
214 
215 /// PNG reader (mostly high-level interface)
216 ///
217 /// Provides a high-level interface that iterates over lines or whole images.
218 pub struct Reader<R: Read> {
219     decoder: ReadDecoder<R>,
220     bpp: BytesPerPixel,
221     subframe: SubframeInfo,
222     /// Number of frame control chunks read.
223     /// By the APNG specification the total number must equal the count specified in the animation
224     /// control chunk. The IDAT image _may_ have such a chunk applying to it.
225     fctl_read: u32,
226     next_frame: SubframeIdx,
227     /// Previous raw line
228     prev: Vec<u8>,
229     /// Current raw line
230     current: Vec<u8>,
231     /// Start index of the current scan line.
232     scan_start: usize,
233     /// Output transformations
234     transform: Transformations,
235     /// Processed line
236     processed: Vec<u8>,
237     limits: Limits,
238 }
239 
240 /// The subframe specific information.
241 ///
242 /// In APNG the frames are constructed by combining the previous frame and a new subframe (through a
243 /// combination of `dispose_op` and `blend_op`). These subframes specify individual dimension
244 /// information and reuse the global interlace options. This struct encapsulates the state of where
245 /// in a particular IDAT-frame or subframe we are.
246 struct SubframeInfo {
247     width: u32,
248     rowlen: usize,
249     interlace: InterlaceIter,
250     consumed_and_flushed: bool,
251 }
252 
253 #[derive(Clone)]
254 enum InterlaceIter {
255     None(Range<u32>),
256     Adam7(utils::Adam7Iterator),
257 }
258 
259 /// Denote a frame as given by sequence numbers.
260 #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
261 enum SubframeIdx {
262     /// The info has not yet been decoded.
263     Uninit,
264     /// The initial frame in an IDAT chunk without fcTL chunk applying to it.
265     /// Note that this variant precedes `Some` as IDAT frames precede fdAT frames and all fdAT
266     /// frames must have an fcTL applying to them.
267     Initial,
268     /// An IDAT frame with fcTL or an fdAT frame.
269     Some(u32),
270     /// The past-the-end index.
271     End,
272 }
273 
274 macro_rules! get_info(
275     ($this:expr) => {
276         $this.decoder.info().unwrap()
277     }
278 );
279 
280 impl<R: Read> Reader<R> {
281     /// Creates a new PNG reader
282     fn new(r: R, d: StreamingDecoder, t: Transformations, limits: Limits) -> Reader<R> {
283         Reader {
284             decoder: ReadDecoder {
285                 reader: BufReader::with_capacity(CHUNCK_BUFFER_SIZE, r),
286                 decoder: d,
287                 at_eof: false,
288             },
289             bpp: BytesPerPixel::One,
290             subframe: SubframeInfo::not_yet_init(),
291             fctl_read: 0,
292             next_frame: SubframeIdx::Initial,
293             prev: Vec::new(),
294             current: Vec::new(),
295             scan_start: 0,
296             transform: t,
297             processed: Vec::new(),
298             limits,
299         }
300     }
301 
302     /// Reads all metadata until the next frame data starts.
303     /// Requires IHDR before the IDAT and fcTL before fdAT.
304     fn init(&mut self) -> Result<(), DecodingError> {
305         if self.next_frame == self.subframe_idx() {
306             return Ok(());
307         } else if self.next_frame == SubframeIdx::End {
308             return Err(DecodingError::Other("End of image has been reached".into()));
309         }
310 
311         loop {
312             match self.decoder.decode_next(&mut Vec::new())? {
313                 Some(Decoded::ChunkBegin(_, chunk::IDAT))
314                 | Some(Decoded::ChunkBegin(_, chunk::fdAT)) => break,
315                 Some(Decoded::FrameControl(_)) => {
316                     self.subframe = SubframeInfo::new(self.info());
317                     // The next frame is the one to which this chunk applies.
318                     self.next_frame = SubframeIdx::Some(self.fctl_read);
319                     // TODO: what about overflow here? That would imply there are more fctl chunks
320                     // than can be specified in the animation control but also that we have read
321                     // several gigabytes of data.
322                     self.fctl_read += 1;
323                 }
324                 None => return Err(DecodingError::Format("IDAT chunk missing".into())),
325                 Some(Decoded::Header { .. }) => {
326                     self.validate_buffer_sizes()?;
327                 }
328                 // Ignore all other chunk events. Any other chunk may be between IDAT chunks, fdAT
329                 // chunks and their control chunks.
330                 _ => {}
331             }
332         }
333         {
334             let info = match self.decoder.info() {
335                 Some(info) => info,
336                 None => return Err(DecodingError::Format("IHDR chunk missing".into())),
337             };
338             self.bpp = info.bpp_in_prediction();
339             // Check if the output buffer can be represented at all.
340             // Now we can init the subframe info.
341             // TODO: reuse the results obtained during the above check.
342             self.subframe = SubframeInfo::new(info);
343         }
344         self.allocate_out_buf()?;
345         self.prev = vec![0; self.subframe.rowlen];
346         Ok(())
347     }
348 
349     fn reset_current(&mut self) {
350         self.current.clear();
351         self.scan_start = 0;
352     }
353 
354     /// Get information on the image.
355     ///
356     /// The structure will change as new frames of an animated image are decoded.
357     pub fn info(&self) -> &Info {
358         get_info!(self)
359     }
360 
361     /// Get the subframe index of the current info.
362     fn subframe_idx(&self) -> SubframeIdx {
363         let info = match self.decoder.info() {
364             None => return SubframeIdx::Uninit,
365             Some(info) => info,
366         };
367 
368         match info.frame_control() {
369             None => SubframeIdx::Initial,
370             Some(_) => SubframeIdx::Some(self.fctl_read - 1),
371         }
372     }
373 
374     /// Call after decoding an image, to advance expected state to the next.
375     fn finished_frame(&mut self) {
376         // Should only be called after frame is done, so we have an info.
377         let info = self.info();
378 
379         let past_end_subframe = match info.animation_control() {
380             // a non-APNG has no subframes
381             None => 0,
382             // otherwise the count is the past-the-end index. It cannot be 0 per the spec.
383             Some(ac) => ac.num_frames,
384         };
385 
386         self.next_frame = match self.next_frame {
387             SubframeIdx::Uninit => unreachable!("Next frame can never be initial"),
388             SubframeIdx::End => unreachable!("Next frame called when already at image end"),
389             // Reached the end of non-animated image.
390             SubframeIdx::Initial if past_end_subframe == 0 => SubframeIdx::End,
391             // An animated image, expecting first subframe.
392             SubframeIdx::Initial => SubframeIdx::Some(0),
393             // This was the last subframe, slightly fuzzy condition in case of programmer error.
394             SubframeIdx::Some(idx) if past_end_subframe <= idx + 1 => SubframeIdx::End,
395             // Expecting next subframe.
396             SubframeIdx::Some(idx) => SubframeIdx::Some(idx + 1),
397         }
398     }
399 
400     /// Decodes the next frame into `buf`.
401     ///
402     /// Note that this decodes raw subframes that need to be mixed according to blend-op and
403     /// dispose-op by the caller.
404     ///
405     /// The caller must always provide a buffer large enough to hold a complete frame (the APNG
406     /// specification restricts subframes to the dimensions given in the image header). The region
407     /// that has been written can be checked afterwards by calling `info` after a successful call and
408     /// inspecting the `frame_control` data. This requirement may be lifted in a later version of
409     /// `png`.
410     ///
411     /// Output lines will be written as a row-major, packed matrix with the width and height of the read
412     /// frame (or subframe); all samples are in big-endian byte order where this matters.
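    ///
    /// A sketch of decoding a single frame into a caller-provided buffer (the test-image
    /// path is only an assumption):
    ///
    /// ```no_run
    /// use std::fs::File;
    /// use png::Decoder;
    ///
    /// let decoder = Decoder::new(File::open("tests/pngsuite/basi0g01.png").unwrap());
    /// let (info, mut reader) = decoder.read_info().unwrap();
    /// // The buffer must be able to hold a complete frame at the header dimensions.
    /// let mut buf = vec![0; info.buffer_size()];
    /// reader.next_frame(&mut buf).unwrap();
    /// // For APNG subframes, `reader.info().frame_control()` describes the written region.
    /// ```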
413     pub fn next_frame(&mut self, buf: &mut [u8]) -> Result<(), DecodingError> {
414         // Advance until we've read the info / fcTL for this frame.
415         self.init()?;
416         // TODO 16 bit
417         let (color_type, bit_depth) = self.output_color_type();
418         if buf.len() < self.output_buffer_size() {
419             return Err(DecodingError::Other(
420                 "supplied buffer is too small to hold the image".into(),
421             ));
422         }
423 
424         self.reset_current();
425         let width = self.info().width;
426         if get_info!(self).interlaced {
427             while let Some((row, adam7)) = self.next_interlaced_row()? {
428                 let (pass, line, _) = adam7.unwrap();
429                 let samples = color_type.samples() as u8;
430                 utils::expand_pass(buf, width, row, pass, line, samples * (bit_depth as u8));
431             }
432         } else {
433             let mut len = 0;
434             while let Some(row) = self.next_row()? {
435                 len += (&mut buf[len..]).write(row)?;
436             }
437         }
438         // Advance over the rest of data for this (sub-)frame.
439         if !self.subframe.consumed_and_flushed {
440             self.decoder.finished_decoding()?;
441         }
442         // Advance our state to expect the next frame.
443         self.finished_frame();
444         Ok(())
445     }
446 
447     /// Returns the next processed row of the image
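    ///
    /// A row-by-row sketch (again assuming the repository test image):
    ///
    /// ```no_run
    /// use std::fs::File;
    /// use png::Decoder;
    ///
    /// let decoder = Decoder::new(File::open("tests/pngsuite/basi0g01.png").unwrap());
    /// let (_info, mut reader) = decoder.read_info().unwrap();
    /// while let Some(row) = reader.next_row().unwrap() {
    ///     // Each `row` is one unfiltered, transformed scanline.
    ///     assert!(!row.is_empty());
    /// }
    /// ```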
448     pub fn next_row(&mut self) -> Result<Option<&[u8]>, DecodingError> {
449         self.next_interlaced_row().map(|v| v.map(|v| v.0))
450     }
451 
452     /// Returns the next processed row of the image together with its Adam7 interlace
453     /// information (`(pass, line, width)`), which is `None` for non-interlaced images.
453     pub fn next_interlaced_row(
454         &mut self,
455     ) -> Result<Option<(&[u8], Option<(u8, u32, u32)>)>, DecodingError> {
456         match self.next_interlaced_row_impl() {
457             Err(err) => Err(err),
458             Ok(None) => Ok(None),
459             Ok(Some(row)) => {
460                 let interlace = match row.interlace {
461                     InterlaceInfo::None => None,
462                     InterlaceInfo::Adam7 { pass, line, width } => Some((pass, line, width)),
463                 };
464 
465                 Ok(Some((row.data, interlace)))
466             }
467         }
468     }
469 
470     /// Fetch the next interlaced row and filter it according to our own transformations.
471     fn next_interlaced_row_impl(&mut self) -> Result<Option<InterlacedRow<'_>>, DecodingError> {
472         use crate::common::ColorType::*;
473         let transform = self.transform;
474 
475         if transform == crate::Transformations::IDENTITY {
476             return self.next_raw_interlaced_row();
477         }
478 
479         // swap buffer to circumvent borrow issues
480         let mut buffer = mem::replace(&mut self.processed, Vec::new());
481         let (got_next, adam7) = if let Some(row) = self.next_raw_interlaced_row()? {
482             (&mut buffer[..]).write_all(row.data)?;
483             (true, row.interlace)
484         } else {
485             (false, InterlaceInfo::None)
486         };
487         // swap back
488         let _ = mem::replace(&mut self.processed, buffer);
489 
490         if !got_next {
491             return Ok(None);
492         }
493 
494         let (color_type, bit_depth, trns) = {
495             let info = get_info!(self);
496             (info.color_type, info.bit_depth as u8, info.trns.is_some())
497         };
498         let output_buffer = if let InterlaceInfo::Adam7 { width, .. } = adam7 {
499             let width = self
500                 .line_size(width)
501                 .expect("Adam7 interlaced rows are shorter than the buffer.");
502             &mut self.processed[..width]
503         } else {
504             &mut *self.processed
505         };
506 
507         let mut len = output_buffer.len();
508         if transform.contains(crate::Transformations::EXPAND) {
509             match color_type {
510                 Indexed => expand_paletted(output_buffer, get_info!(self))?,
511                 Grayscale | GrayscaleAlpha if bit_depth < 8 => {
512                     expand_gray_u8(output_buffer, get_info!(self))
513                 }
514                 Grayscale | RGB if trns => {
515                     let channels = color_type.samples();
516                     let trns = get_info!(self).trns.as_ref().unwrap();
517                     if bit_depth == 8 {
518                         utils::expand_trns_line(output_buffer, &*trns, channels);
519                     } else {
520                         utils::expand_trns_line16(output_buffer, &*trns, channels);
521                     }
522                 }
523                 _ => (),
524             }
525         }
526 
527         if bit_depth == 16
528             && transform
529                 .intersects(crate::Transformations::SCALE_16 | crate::Transformations::STRIP_16)
530         {
531             len /= 2;
532             for i in 0..len {
533                 output_buffer[i] = output_buffer[2 * i];
534             }
535         }
536 
537         Ok(Some(InterlacedRow {
538             data: &output_buffer[..len],
539             interlace: adam7,
540         }))
541     }
542 
543     /// Returns the color type and the number of bits per sample
544     /// of the data returned by `Reader::next_row` and `Reader::next_frame`.
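    ///
    /// For instance, with the default transformations a 1-bit grayscale image is reported
    /// as 8-bit grayscale (a sketch; the test-image path is an assumption):
    ///
    /// ```no_run
    /// use std::fs::File;
    /// use png::{BitDepth, ColorType, Decoder};
    ///
    /// let decoder = Decoder::new(File::open("tests/pngsuite/basi0g01.png").unwrap());
    /// let (_info, mut reader) = decoder.read_info().unwrap();
    /// let (color, depth) = reader.output_color_type();
    /// assert_eq!((color, depth), (ColorType::Grayscale, BitDepth::Eight));
    /// ```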
545     pub fn output_color_type(&mut self) -> (ColorType, BitDepth) {
546         self.imm_output_color_type()
547     }
548 
549     pub(crate) fn imm_output_color_type(&self) -> (ColorType, BitDepth) {
550         use crate::common::ColorType::*;
551         let t = self.transform;
552         let info = get_info!(self);
553         if t == crate::Transformations::IDENTITY {
554             (info.color_type, info.bit_depth)
555         } else {
556             let bits = match info.bit_depth as u8 {
557                 16 if t.intersects(
558                     crate::Transformations::SCALE_16 | crate::Transformations::STRIP_16,
559                 ) =>
560                 {
561                     8
562                 }
563                 n if n < 8 && t.contains(crate::Transformations::EXPAND) => 8,
564                 n => n,
565             };
566             let color_type = if t.contains(crate::Transformations::EXPAND) {
567                 let has_trns = info.trns.is_some();
568                 match info.color_type {
569                     Grayscale if has_trns => GrayscaleAlpha,
570                     RGB if has_trns => RGBA,
571                     Indexed if has_trns => RGBA,
572                     Indexed => RGB,
573                     ct => ct,
574                 }
575             } else {
576                 info.color_type
577             };
578             (color_type, BitDepth::from_u8(bits).unwrap())
579         }
580     }
581 
582     /// Returns the number of bytes required to hold a deinterlaced image frame
583     /// that is decoded using the given input transformations.
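    ///
    /// A sketch of sizing the frame buffer from the reader itself (path assumed as above);
    /// for the main image this matches `OutputInfo::buffer_size`:
    ///
    /// ```no_run
    /// use std::fs::File;
    /// use png::Decoder;
    ///
    /// let decoder = Decoder::new(File::open("tests/pngsuite/basi0g01.png").unwrap());
    /// let (_info, mut reader) = decoder.read_info().unwrap();
    /// let mut buf = vec![0; reader.output_buffer_size()];
    /// reader.next_frame(&mut buf).unwrap();
    /// ```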
584     pub fn output_buffer_size(&self) -> usize {
585         let (width, height) = get_info!(self).size();
586         let size = self.output_line_size(width);
587         size * height as usize
588     }
589 
590     fn validate_buffer_sizes(&self) -> Result<(), DecodingError> {
591         // Check if the decoding buffer of a single raw line has a valid size.
592         if self.info().checked_raw_row_length().is_none() {
593             return Err(DecodingError::LimitsExceeded);
594         }
595 
596         // Check if the output buffer has a valid size.
597         if self.checked_output_buffer_size().is_none() {
598             return Err(DecodingError::LimitsExceeded);
599         }
600 
601         Ok(())
602     }
603 
604     fn checked_output_buffer_size(&self) -> Option<usize> {
605         let (width, height) = get_info!(self).size();
606         let (color, depth) = self.imm_output_color_type();
607         let rowlen = color.checked_raw_row_length(depth, width)? - 1;
608         let height: usize = std::convert::TryFrom::try_from(height).ok()?;
609         rowlen.checked_mul(height)
610     }
611 
612     /// Returns the number of bytes required to hold a deinterlaced row.
613     pub fn output_line_size(&self, width: u32) -> usize {
614         let (color, depth) = self.imm_output_color_type();
615         color.raw_row_length_from_width(depth, width) - 1
616     }
617 
618     /// Returns the number of bytes required to decode a deinterlaced row.
619     fn line_size(&self, width: u32) -> Option<usize> {
620         use crate::common::ColorType::*;
621         let t = self.transform;
622         let info = get_info!(self);
623         let trns = info.trns.is_some();
624 
625         let expanded = if info.bit_depth == BitDepth::Sixteen {
626             BitDepth::Sixteen
627         } else {
628             BitDepth::Eight
629         };
630         // The color type and depth representing the decoded line
631         // TODO 16 bit
632         let (color, depth) = match info.color_type {
633             Indexed if trns && t.contains(Transformations::EXPAND) => (RGBA, expanded),
634             Indexed if t.contains(Transformations::EXPAND) => (RGB, expanded),
635             RGB if trns && t.contains(Transformations::EXPAND) => (RGBA, expanded),
636             Grayscale if trns && t.contains(Transformations::EXPAND) => (GrayscaleAlpha, expanded),
637             Grayscale if t.contains(Transformations::EXPAND) => (Grayscale, expanded),
638             GrayscaleAlpha if t.contains(Transformations::EXPAND) => (GrayscaleAlpha, expanded),
639             other => (other, info.bit_depth),
640         };
641 
642         // Without the filter method byte
643         color.checked_raw_row_length(depth, width).map(|n| n - 1)
644     }
645 
646     fn allocate_out_buf(&mut self) -> Result<(), DecodingError> {
647         let width = self.subframe.width;
648         let bytes = self.limits.bytes;
649         let buflen = match self.line_size(width) {
650             Some(buflen) if buflen <= bytes => buflen,
651             // Should we differentiate between platform limits and others?
652             _ => return Err(DecodingError::LimitsExceeded),
653         };
654         self.processed.resize(buflen, 0u8);
655         Ok(())
656     }
657 
658     fn next_pass(&mut self) -> Option<(usize, InterlaceInfo)> {
659         match self.subframe.interlace {
660             InterlaceIter::Adam7(ref mut adam7) => {
661                 let last_pass = adam7.current_pass();
662                 let (pass, line, width) = adam7.next()?;
663                 let rowlen = get_info!(self).raw_row_length_from_width(width);
664                 if last_pass != pass {
665                     self.prev.clear();
666                     self.prev.resize(rowlen, 0u8);
667                 }
668                 Some((rowlen, InterlaceInfo::Adam7 { pass, line, width }))
669             }
670             InterlaceIter::None(ref mut height) => {
671                 let _ = height.next()?;
672                 Some((self.subframe.rowlen, InterlaceInfo::None))
673             }
674         }
675     }
676 
677     /// Returns the next raw scanline of the image interlace pass.
678     /// The scanline is unfiltered (reconstructed) against the previous scanline according to the specification.
679     fn next_raw_interlaced_row(&mut self) -> Result<Option<InterlacedRow<'_>>, DecodingError> {
680         let bpp = self.bpp;
681         let (rowlen, passdata) = match self.next_pass() {
682             Some((rowlen, passdata)) => (rowlen, passdata),
683             None => return Ok(None),
684         };
685         loop {
686             if self.current.len() - self.scan_start >= rowlen {
687                 let row = &mut self.current[self.scan_start..];
688                 let filter = match FilterType::from_u8(row[0]) {
689                     None => {
690                         self.scan_start += rowlen;
691                         return Err(DecodingError::Format(
692                             format!("invalid filter method ({})", row[0]).into(),
693                         ));
694                     }
695                     Some(filter) => filter,
696                 };
697 
698                 if let Err(message) =
699                     unfilter(filter, bpp, &self.prev[1..rowlen], &mut row[1..rowlen])
700                 {
701                     return Err(DecodingError::Format(borrow::Cow::Borrowed(message)));
702                 }
703 
704                 self.prev[..rowlen].copy_from_slice(&row[..rowlen]);
705                 self.scan_start += rowlen;
706 
707                 return Ok(Some(InterlacedRow {
708                     data: &self.prev[1..rowlen],
709                     interlace: passdata,
710                 }));
711             } else {
712                 if self.subframe.consumed_and_flushed {
713                     return Err(DecodingError::Format(
714                         "not enough data for image".into(),
715                     ));
716                 }
717 
718                 // Clear the current buffer before appending more data.
719                 if self.scan_start > 0 {
720                     self.current.drain(..self.scan_start).for_each(drop);
721                     self.scan_start = 0;
722                 }
723 
724                 let val = self.decoder.decode_next(&mut self.current)?;
725                 match val {
726                     Some(Decoded::ImageData) => {}
727                     Some(Decoded::ImageDataFlushed) => {
728                         self.subframe.consumed_and_flushed = true;
729                     }
730                     None => {
731                         if !self.current.is_empty() {
732                             return Err(DecodingError::Format("file truncated".into()));
733                         } else {
734                             return Ok(None);
735                         }
736                     }
737                     _ => (),
738                 }
739             }
740         }
741     }
742 }
743 
744 impl SubframeInfo {
745     fn not_yet_init() -> Self {
746         SubframeInfo {
747             width: 0,
748             rowlen: 0,
749             interlace: InterlaceIter::None(0..0),
750             consumed_and_flushed: false,
751         }
752     }
753 
754     fn new(info: &Info) -> Self {
755         // The APNG fcTL chunk overrides width and height.
756         // All other data is set by the main info struct.
757         let (width, height) = if let Some(fc) = info.frame_control {
758             (fc.width, fc.height)
759         } else {
760             (info.width, info.height)
761         };
762 
763         let interlace = if info.interlaced {
764             InterlaceIter::Adam7(utils::Adam7Iterator::new(width, height))
765         } else {
766             InterlaceIter::None(0..height)
767         };
768 
769         SubframeInfo {
770             width,
771             rowlen: info.raw_row_length_from_width(width),
772             interlace,
773             consumed_and_flushed: false,
774         }
775     }
776 }
777 
778 fn expand_paletted(buffer: &mut [u8], info: &Info) -> Result<(), DecodingError> {
779     if let Some(palette) = info.palette.as_ref() {
780         if let BitDepth::Sixteen = info.bit_depth {
781             Err(DecodingError::Format(
782                 "Bit depth '16' is not valid for paletted images".into(),
783             ))
784         } else {
785             let black = [0, 0, 0];
786             if let Some(ref trns) = info.trns {
787                 utils::unpack_bits(buffer, 4, info.bit_depth as u8, |i, chunk| {
788                     let (rgb, a) = (
789                         palette
790                             .get(3 * i as usize..3 * i as usize + 3)
791                             .unwrap_or(&black),
792                         *trns.get(i as usize).unwrap_or(&0xFF),
793                     );
794                     chunk[0] = rgb[0];
795                     chunk[1] = rgb[1];
796                     chunk[2] = rgb[2];
797                     chunk[3] = a;
798                 });
799             } else {
800                 utils::unpack_bits(buffer, 3, info.bit_depth as u8, |i, chunk| {
801                     let rgb = palette
802                         .get(3 * i as usize..3 * i as usize + 3)
803                         .unwrap_or(&black);
804                     chunk[0] = rgb[0];
805                     chunk[1] = rgb[1];
806                     chunk[2] = rgb[2];
807                 })
808             }
809             Ok(())
810         }
811     } else {
812         Err(DecodingError::Format("missing palette".into()))
813     }
814 }
815 
816 fn expand_gray_u8(buffer: &mut [u8], info: &Info) {
817     let rescale = true;
818     let scaling_factor = if rescale {
819         (255) / ((1u16 << info.bit_depth as u8) - 1) as u8
820     } else {
821         1
822     };
823     if let Some(ref trns) = info.trns {
824         utils::unpack_bits(buffer, 2, info.bit_depth as u8, |pixel, chunk| {
825             if pixel == trns[0] {
826                 chunk[1] = 0
827             } else {
828                 chunk[1] = 0xFF
829             }
830             chunk[0] = pixel * scaling_factor
831         })
832     } else {
833         utils::unpack_bits(buffer, 1, info.bit_depth as u8, |val, chunk| {
834             chunk[0] = val * scaling_factor
835         })
836     }
837 }
838 
839 #[cfg(test)]
840 mod tests {
841     use super::Decoder;
842     use std::io::{BufRead, Read, Result};
843     use std::mem::discriminant;
844 
845     /// A reader that reads at most `cap` bytes at a time.
846     struct SmalBuf<R: BufRead> {
847         inner: R,
848         cap: usize,
849     }
850 
851     impl<R: BufRead> SmalBuf<R> {
852         fn new(inner: R, cap: usize) -> Self {
853             SmalBuf { inner, cap }
854         }
855     }
856 
857     impl<R: BufRead> Read for SmalBuf<R> {
858         fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
859             let len = buf.len().min(self.cap);
860             self.inner.read(&mut buf[..len])
861         }
862     }
863 
864     impl<R: BufRead> BufRead for SmalBuf<R> {
865         fn fill_buf(&mut self) -> Result<&[u8]> {
866             let buf = self.inner.fill_buf()?;
867             let len = buf.len().min(self.cap);
868             Ok(&buf[..len])
869         }
870 
871         fn consume(&mut self, amt: usize) {
872             assert!(amt <= self.cap);
873             self.inner.consume(amt)
874         }
875     }
876 
877     #[test]
878     fn no_data_dup_on_finish() {
879         const IMG: &[u8] = include_bytes!(concat!(
880             env!("CARGO_MANIFEST_DIR"),
881             "/tests/bugfixes/x_issue#214.png"
882         ));
883 
884         let (info, mut normal) = Decoder::new(IMG).read_info().unwrap();
885 
886         let mut buffer = vec![0; info.buffer_size()];
887         let normal = normal.next_frame(&mut buffer).unwrap_err();
888 
889         let smal = Decoder::new(SmalBuf::new(IMG, 1))
890             .read_info()
891             .unwrap()
892             .1
893             .next_frame(&mut buffer)
894             .unwrap_err();
895 
896         assert_eq!(discriminant(&normal), discriminant(&smal));
897     }
898 }
899