From d85907b93294f4d6604aeef2306eb4e899b457bc Mon Sep 17 00:00:00 2001 From: Jonathan Behrens <fintelia@gmail.com> Date: Wed, 14 Aug 2024 13:19:11 -0700 Subject: [PATCH] Bump MSRV to 1.80 --- .github/workflows/rust.yml | 8 ++++---- Cargo.toml | 5 ++++- src/decoder.rs | 33 +++++++++++++-------------------- src/encoder.rs | 2 +- src/lossless_transform.rs | 13 +------------ 5 files changed, 23 insertions(+), 38 deletions(-) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 442ceb1..ce9dba4 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -14,14 +14,14 @@ jobs: strategy: fail-fast: false matrix: - rust: ["1.67.1", nightly, beta, stable] + rust: ["1.80.1", nightly, beta, stable] steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@nightly - if: ${{ matrix.rust == '1.67.1' }} + if: ${{ matrix.rust == '1.80.1' }} - name: Generate Cargo.lock with minimal-version dependencies - if: ${{ matrix.rust == '1.67.1' }} + if: ${{ matrix.rust == '1.80.1' }} run: cargo -Zminimal-versions generate-lockfile - uses: dtolnay/rust-toolchain@v1 @@ -34,7 +34,7 @@ jobs: - name: build run: cargo build -v - name: test - if: ${{ matrix.rust != '1.67.1' }} + if: ${{ matrix.rust != '1.80.1' }} run: cargo test -v && cargo doc -v test_big_endian: diff --git a/Cargo.toml b/Cargo.toml index 0ebe94c..23cc468 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,7 +3,7 @@ name = "image-webp" version = "0.1.3" edition = "2021" license = "MIT OR Apache-2.0" -rust-version = "1.67.1" +rust-version = "1.80.1" description = "WebP encoding and decoding in pure Rust" homepage = "https://github.com/image-rs/image-webp" @@ -15,9 +15,12 @@ include = ["/src", "LICENSE-APACHE", "LICENSE-MIT", "README.md"] [dependencies] byteorder-lite = "0.1.0" quick-error = "2.0.1" [dev-dependencies] paste = "1.0.14" png = "0.17.12" rand = "0.8.5" webp = "0.3.0" + +[lints.rust] +unexpected_cfgs = { level = "allow", check-cfg = ['cfg(has_foo)'] } diff 
--git a/src/decoder.rs b/src/decoder.rs index 8e80fea..6cf4ef2 100644 --- a/src/decoder.rs +++ b/src/decoder.rs @@ -2,7 +2,7 @@ use byteorder_lite::{LittleEndian, ReadBytesExt}; use quick_error::quick_error; use std::collections::HashMap; -use std::io::{self, BufReader, Cursor, Read, Seek}; +use std::io::{self, BufRead, Cursor, Read, Seek}; use std::num::NonZeroU16; use std::ops::Range; @@ -276,7 +276,7 @@ pub struct WebPDecoder<R> { chunks: HashMap<WebPRiffChunk, Range<u64>>, } -impl<R: Read + Seek> WebPDecoder<R> { +impl<R: BufRead + Seek> WebPDecoder<R> { /// Create a new WebPDecoder from the reader `r`. The decoder performs many small reads, so the /// reader should be buffered. pub fn new(r: R) -> Result<WebPDecoder<R>, DecodingError> { @@ -370,15 +370,8 @@ impl<R: Read + Seek> WebPDecoder<R> { let max_position = position + riff_size.saturating_sub(12); self.r.seek(io::SeekFrom::Start(position))?; - // Resist denial of service attacks by using a BufReader. In most images there - // should be a very small number of chunks. However, nothing prevents a malicious - // image from having an extremely large number of "unknown" chunks. Issuing - // millions of reads and seeks against the underlying reader might be very - // expensive. - let mut reader = BufReader::with_capacity(64 << 10, &mut self.r); - while position < max_position { - match read_chunk_header(&mut reader) { + match read_chunk_header(&mut self.r) { Ok((chunk, chunk_size, chunk_size_rounded)) => { let range = position + 8..position + 8 + chunk_size; position += 8 + chunk_size_rounded; @@ -393,8 +386,8 @@ impl<R: Read + Seek> WebPDecoder<R> { return Err(DecodingError::InvalidChunkSize); } - reader.seek_relative(12)?; - let duration = reader.read_u32::<LittleEndian>()? & 0xffffff; + self.r.seek_relative(12)?; + let duration = self.r.read_u32::<LittleEndian>()? & 0xffffff; self.loop_duration = self.loop_duration.wrapping_add(u64::from(duration)); @@ -404,19 +397,19 @@ // and the spec says that lossless images SHOULD NOT contain ALPH // chunks, so we treat both as indicators of lossy images. if !self.is_lossy { - let (subchunk, ..) 
= read_chunk_header(&mut reader)?; + let (subchunk, ..) = read_chunk_header(&mut self.r)?; if let WebPRiffChunk::VP8 | WebPRiffChunk::ALPH = subchunk { self.is_lossy = true; } - reader.seek_relative(chunk_size_rounded as i64 - 24)?; + self.r.seek_relative(chunk_size_rounded as i64 - 24)?; } else { - reader.seek_relative(chunk_size_rounded as i64 - 16)?; + self.r.seek_relative(chunk_size_rounded as i64 - 16)?; } continue; } - reader.seek_relative(chunk_size_rounded as i64)?; + self.r.seek_relative(chunk_size_rounded as i64)?; } Err(DecodingError::IoError(e)) if e.kind() == io::ErrorKind::UnexpectedEof => @@ -849,21 +842,21 @@ impl<R: Read + Seek> WebPDecoder<R> { } } -pub(crate) fn range_reader<R: Read + Seek>( +pub(crate) fn range_reader<R: BufRead + Seek>( mut r: R, range: Range<u64>, -) -> Result<impl Read, DecodingError> { +) -> Result<impl BufRead, DecodingError> { r.seek(io::SeekFrom::Start(range.start))?; Ok(r.take(range.end - range.start)) } -pub(crate) fn read_fourcc<R: Read>(mut r: R) -> Result<WebPRiffChunk, DecodingError> { +pub(crate) fn read_fourcc<R: BufRead>(mut r: R) -> Result<WebPRiffChunk, DecodingError> { let mut chunk_fourcc = [0; 4]; r.read_exact(&mut chunk_fourcc)?; Ok(WebPRiffChunk::from_fourcc(chunk_fourcc)) } -pub(crate) fn read_chunk_header<R: Read>( +pub(crate) fn read_chunk_header<R: BufRead>( mut r: R, ) -> Result<(WebPRiffChunk, u64, u64), DecodingError> { let chunk = read_fourcc(&mut r)?; diff --git a/src/encoder.rs b/src/encoder.rs index f135ff7..048f616 100644 --- a/src/encoder.rs +++ b/src/encoder.rs @@ -286,7 +286,7 @@ fn write_huffman_tree<W: Write>( fn length_to_symbol(len: u16) -> (u16, u8) { let len = len - 1; - let highest_bit = 15 - len.leading_zeros() as u16; // TODO: use ilog2 once MSRV >= 1.67 + let highest_bit = len.ilog2() as u16; let second_highest_bit = (len >> (highest_bit - 1)) & 1; let extra_bits = highest_bit - 1; let symbol = 2 * highest_bit + second_highest_bit; diff --git a/src/lossless_transform.rs b/src/lossless_transform.rs index 842d517..23ec915 100644 --- a/src/lossless_transform.rs +++ b/src/lossless_transform.rs @@ -241,17 +241,6 @@ pub(crate) fn apply_color_indexing_transform( table_size: u16, table_data: &[u8], ) { - // 
TODO: Replace with built-in div_ceil when MSRV is 1.73+ - fn div_ceil(a: u16, b: u16) -> u16 { - let d = a / b; - let r = a % b; - if r > 0 && b > 0 { - d + 1 - } else { - d - } - } - if table_size > 16 { let mut table = table_data.chunks_exact(4).collect::<Vec<_>>(); table.resize(256, &[0; 4]); @@ -289,7 +278,7 @@ pub(crate) fn apply_color_indexing_transform( let table = table.chunks_exact(4 << width_bits).collect::<Vec<_>>(); let entry_size = 4 << width_bits; - let index_image_width = div_ceil(width, 1 << width_bits) as usize; + let index_image_width = width.div_ceil(1 << width_bits) as usize; let final_entry_size = width as usize * 4 - entry_size * (index_image_width - 1); for y in (0..height as usize).rev() {