Skip to content

Commit

Permalink
Bump MSRV to 1.80
Browse files Browse the repository at this point in the history
  • Loading branch information
fintelia committed Aug 14, 2024
1 parent d49ef2d commit d85907b
Show file tree
Hide file tree
Showing 5 changed files with 24 additions and 38 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/rust.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,14 +14,14 @@ jobs:
strategy:
fail-fast: false
matrix:
rust: ["1.67.1", nightly, beta, stable]
rust: ["1.80.1", nightly, beta, stable]
steps:
- uses: actions/checkout@v4

- uses: dtolnay/rust-toolchain@nightly
if: ${{ matrix.rust == '1.67.1' }}
if: ${{ matrix.rust == '1.80.1' }}
- name: Generate Cargo.lock with minimal-version dependencies
if: ${{ matrix.rust == '1.67.1' }}
if: ${{ matrix.rust == '1.80.1' }}
run: cargo -Zminimal-versions generate-lockfile

- uses: dtolnay/rust-toolchain@v1
Expand All @@ -34,7 +34,7 @@ jobs:
- name: build
run: cargo build -v
- name: test
if: ${{ matrix.rust != '1.67.1' }}
if: ${{ matrix.rust != '1.80.1' }}
run: cargo test -v && cargo doc -v

test_big_endian:
Expand Down
6 changes: 5 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ name = "image-webp"
version = "0.1.3"
edition = "2021"
license = "MIT OR Apache-2.0"
rust-version = "1.67.1"
rust-version = "1.80.1"

description = "WebP encoding and decoding in pure Rust"
homepage = "https://github.com/image-rs/image-webp"
Expand All @@ -15,9 +15,13 @@ include = ["/src", "LICENSE-APACHE", "LICENSE-MIT", "README.md"]
[dependencies]
byteorder-lite = "0.1.0"
quick-error = "2.0.1"
innumerable = { path = "../innumerable" }  # NOTE(review): path-only dependency with no version — this breaks `cargo publish`, is not referenced by any other hunk in this diff, and is unrelated to the MSRV bump; confirm this line actually belongs in this commit

[dev-dependencies]
paste = "1.0.14"
png = "0.17.12"
rand = "0.8.5"
webp = "0.3.0"

[lints.rust]
unexpected_cfgs = { level = "allow", check-cfg = ['cfg(has_foo)'] }
33 changes: 13 additions & 20 deletions src/decoder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ use byteorder_lite::{LittleEndian, ReadBytesExt};
use quick_error::quick_error;

use std::collections::HashMap;
use std::io::{self, BufReader, Cursor, Read, Seek};
use std::io::{self, BufRead, Cursor, Read, Seek};
use std::num::NonZeroU16;
use std::ops::Range;

Expand Down Expand Up @@ -276,7 +276,7 @@ pub struct WebPDecoder<R> {
chunks: HashMap<WebPRiffChunk, Range<u64>>,
}

impl<R: Read + Seek> WebPDecoder<R> {
impl<R: BufRead + Seek> WebPDecoder<R> {
/// Create a new WebPDecoder from the reader `r`. The decoder performs many small reads, so the
/// reader should be buffered.
pub fn new(r: R) -> Result<WebPDecoder<R>, DecodingError> {
Expand Down Expand Up @@ -370,15 +370,8 @@ impl<R: Read + Seek> WebPDecoder<R> {
let max_position = position + riff_size.saturating_sub(12);
self.r.seek(io::SeekFrom::Start(position))?;

// Resist denial of service attacks by using a BufReader. In most images there
// should be a very small number of chunks. However, nothing prevents a malicious
// image from having an extremely large number of "unknown" chunks. Issuing
// millions of reads and seeks against the underlying reader might be very
// expensive.
let mut reader = BufReader::with_capacity(64 << 10, &mut self.r);

while position < max_position {
match read_chunk_header(&mut reader) {
match read_chunk_header(&mut self.r) {
Ok((chunk, chunk_size, chunk_size_rounded)) => {
let range = position + 8..position + 8 + chunk_size;
position += 8 + chunk_size_rounded;
Expand All @@ -393,8 +386,8 @@ impl<R: Read + Seek> WebPDecoder<R> {
return Err(DecodingError::InvalidChunkSize);
}

reader.seek_relative(12)?;
let duration = reader.read_u32::<LittleEndian>()? & 0xffffff;
self.r.seek_relative(12)?;
let duration = self.r.read_u32::<LittleEndian>()? & 0xffffff;
self.loop_duration =
self.loop_duration.wrapping_add(u64::from(duration));

Expand All @@ -404,19 +397,19 @@ impl<R: Read + Seek> WebPDecoder<R> {
// and the spec says that lossless images SHOULD NOT contain ALPH
// chunks, so we treat both as indicators of lossy images.
if !self.is_lossy {
let (subchunk, ..) = read_chunk_header(&mut reader)?;
let (subchunk, ..) = read_chunk_header(&mut self.r)?;
if let WebPRiffChunk::VP8 | WebPRiffChunk::ALPH = subchunk {
self.is_lossy = true;
}
reader.seek_relative(chunk_size_rounded as i64 - 24)?;
self.r.seek_relative(chunk_size_rounded as i64 - 24)?;
} else {
reader.seek_relative(chunk_size_rounded as i64 - 16)?;
self.r.seek_relative(chunk_size_rounded as i64 - 16)?;
}

continue;
}

reader.seek_relative(chunk_size_rounded as i64)?;
self.r.seek_relative(chunk_size_rounded as i64)?;
}
Err(DecodingError::IoError(e))
if e.kind() == io::ErrorKind::UnexpectedEof =>
Expand Down Expand Up @@ -849,21 +842,21 @@ impl<R: Read + Seek> WebPDecoder<R> {
}
}

pub(crate) fn range_reader<R: Read + Seek>(
pub(crate) fn range_reader<R: BufRead + Seek>(
mut r: R,
range: Range<u64>,
) -> Result<impl Read, DecodingError> {
) -> Result<impl BufRead, DecodingError> {
r.seek(io::SeekFrom::Start(range.start))?;
Ok(r.take(range.end - range.start))
}

pub(crate) fn read_fourcc<R: Read>(mut r: R) -> Result<WebPRiffChunk, DecodingError> {
pub(crate) fn read_fourcc<R: BufRead>(mut r: R) -> Result<WebPRiffChunk, DecodingError> {
let mut chunk_fourcc = [0; 4];
r.read_exact(&mut chunk_fourcc)?;
Ok(WebPRiffChunk::from_fourcc(chunk_fourcc))
}

pub(crate) fn read_chunk_header<R: Read>(
pub(crate) fn read_chunk_header<R: BufRead>(
mut r: R,
) -> Result<(WebPRiffChunk, u64, u64), DecodingError> {
let chunk = read_fourcc(&mut r)?;
Expand Down
2 changes: 1 addition & 1 deletion src/encoder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -286,7 +286,7 @@ fn write_huffman_tree<W: Write>(

fn length_to_symbol(len: u16) -> (u16, u8) {
let len = len - 1;
let highest_bit = 15 - len.leading_zeros() as u16; // TODO: use ilog2 once MSRV >= 1.67
let highest_bit = len.ilog2() as u16;
let second_highest_bit = (len >> (highest_bit - 1)) & 1;
let extra_bits = highest_bit - 1;
let symbol = 2 * highest_bit + second_highest_bit;
Expand Down
13 changes: 1 addition & 12 deletions src/lossless_transform.rs
Original file line number Diff line number Diff line change
Expand Up @@ -241,17 +241,6 @@ pub(crate) fn apply_color_indexing_transform(
table_size: u16,
table_data: &[u8],
) {
// TODO: Replace with built-in div_ceil when MSRV is 1.73+
fn div_ceil(a: u16, b: u16) -> u16 {
let d = a / b;
let r = a % b;
if r > 0 && b > 0 {
d + 1
} else {
d
}
}

if table_size > 16 {
let mut table = table_data.chunks_exact(4).collect::<Vec<_>>();
table.resize(256, &[0; 4]);
Expand Down Expand Up @@ -289,7 +278,7 @@ pub(crate) fn apply_color_indexing_transform(
let table = table.chunks_exact(4 << width_bits).collect::<Vec<_>>();

let entry_size = 4 << width_bits;
let index_image_width = div_ceil(width, 1 << width_bits) as usize;
let index_image_width = width.div_ceil(1 << width_bits) as usize;
let final_entry_size = width as usize * 4 - entry_size * (index_image_width - 1);

for y in (0..height as usize).rev() {
Expand Down

0 comments on commit d85907b

Please sign in to comment.