Skip to content

Commit 7707060

Browse files
authored
Merge pull request #286 from image-rs/clippy
Resolve all the clippy lints and add it to CI
2 parents cc75d05 + b2bcbba commit 7707060

File tree

15 files changed

+81
-74
lines changed

15 files changed

+81
-74
lines changed

.github/workflows/rust.yml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,10 @@ jobs:
4545
- name: Cargo check
4646
run: cargo check --features "${{ matrix.feature }}" --no-default-features
4747

48+
- name: Cargo clippy
49+
if: ${{ matrix.feature != '' }}
50+
run: cargo clippy --no-deps --all-features --all-targets -- -D warnings
51+
4852
other_archs:
4953
# github actions does not support 32-bit or big endian systems directly, but
5054
# it does support QEMU. so we install qemu, then build and run the tests on

src/bytecast.rs

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,9 @@
1010
//! the unsafe code guidelines).
1111
//!
1212
//! TODO: Would like to use std-lib here.
13+
// Until we implement predictors for f16, we don't need f16_as_ne_bytes. (Due to the macro we do
14+
// not apply this directly to the functions). And rust-version is not new enough for `expect`.
15+
#![allow(dead_code)]
1316
use std::{mem, slice};
1417

1518
use half::f16;

src/decoder/ifd.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -445,7 +445,7 @@ impl Entry {
445445
// 2b: the value is at most 4 bytes or doesn't fit in the offset field.
446446
return Ok(match self.type_ {
447447
Type::BYTE => Byte(self.offset[0]),
448-
Type::SBYTE => SignedByte(i8::from(self.offset[0] as i8)),
448+
Type::SBYTE => SignedByte(self.offset[0] as i8),
449449
Type::UNDEFINED => Byte(self.offset[0]),
450450
Type::SHORT => Short(self.r(bo).read_u16()?),
451451
Type::SSHORT => SignedShort(self.r(bo).read_i16()?),

src/decoder/image.rs

Lines changed: 7 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -29,10 +29,10 @@ pub(crate) struct TileAttributes {
2929

3030
impl TileAttributes {
3131
pub fn tiles_across(&self) -> usize {
32-
(self.image_width + self.tile_width - 1) / self.tile_width
32+
self.image_width.div_ceil(self.tile_width)
3333
}
3434
pub fn tiles_down(&self) -> usize {
35-
(self.image_height + self.tile_length - 1) / self.tile_length
35+
self.image_height.div_ceil(self.tile_length)
3636
}
3737
fn padding_right(&self) -> usize {
3838
(self.tile_width - self.image_width % self.tile_width) % self.tile_width
@@ -625,12 +625,12 @@ impl Image {
625625
let chunk_row_bits = (u64::from(chunk_dims.0) * u64::from(self.bits_per_sample))
626626
.checked_mul(samples as u64)
627627
.ok_or(TiffError::LimitsExceeded)?;
628-
let chunk_row_bytes: usize = ((chunk_row_bits + 7) / 8).try_into()?;
628+
let chunk_row_bytes: usize = chunk_row_bits.div_ceil(8).try_into()?;
629629

630630
let data_row_bits = (u64::from(data_dims.0) * u64::from(self.bits_per_sample))
631631
.checked_mul(samples as u64)
632632
.ok_or(TiffError::LimitsExceeded)?;
633-
let data_row_bytes: usize = ((data_row_bits + 7) / 8).try_into()?;
633+
let data_row_bytes: usize = data_row_bits.div_ceil(8).try_into()?;
634634

635635
// TODO: Should these return errors instead?
636636
assert!(output_row_stride >= data_row_bytes);
@@ -643,11 +643,11 @@ impl Image {
643643
self.jpeg_tables.as_deref().map(|a| &**a),
644644
)?;
645645

646-
if output_row_stride == chunk_row_bytes as usize {
646+
if output_row_stride == chunk_row_bytes {
647647
let tile = &mut buf[..chunk_row_bytes * data_dims.1 as usize];
648648
reader.read_exact(tile)?;
649649

650-
for row in tile.chunks_mut(chunk_row_bytes as usize) {
650+
for row in tile.chunks_mut(chunk_row_bytes) {
651651
super::fix_endianness_and_predict(
652652
row,
653653
color_type.bit_depth(),
@@ -678,11 +678,7 @@ impl Image {
678678
}
679679
}
680680
} else {
681-
for (_i, row) in buf
682-
.chunks_mut(output_row_stride)
683-
.take(data_dims.1 as usize)
684-
.enumerate()
685-
{
681+
for row in buf.chunks_mut(output_row_stride).take(data_dims.1 as usize) {
686682
let row = &mut row[..data_row_bytes];
687683
reader.read_exact(row)?;
688684

src/decoder/mod.rs

Lines changed: 8 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -206,6 +206,7 @@ pub enum ChunkType {
206206

207207
/// Decoding limits
208208
#[derive(Clone, Debug)]
209+
#[non_exhaustive]
209210
pub struct Limits {
210211
/// The maximum size of any `DecodingResult` in bytes, the default is
211212
/// 256MiB. If the entire image is decoded at once, then this will
@@ -218,10 +219,6 @@ pub struct Limits {
218219
/// Maximum size for intermediate buffer which may be used to limit the amount of data read per
219220
/// segment even if the entire image is decoded at once.
220221
pub intermediate_buffer_size: usize,
221-
/// The purpose of this is to prevent all the fields of the struct from
222-
/// being public, as this would make adding new fields a major version
223-
/// bump.
224-
_non_exhaustive: (),
225222
}
226223

227224
impl Limits {
@@ -234,10 +231,9 @@ impl Limits {
234231
/// naturally, the machine running the program does not have infinite memory.
235232
pub fn unlimited() -> Limits {
236233
Limits {
237-
decoding_buffer_size: usize::max_value(),
238-
ifd_value_size: usize::max_value(),
239-
intermediate_buffer_size: usize::max_value(),
240-
_non_exhaustive: (),
234+
decoding_buffer_size: usize::MAX,
235+
ifd_value_size: usize::MAX,
236+
intermediate_buffer_size: usize::MAX,
241237
}
242238
}
243239
}
@@ -248,7 +244,6 @@ impl Default for Limits {
248244
decoding_buffer_size: 256 * 1024 * 1024,
249245
intermediate_buffer_size: 128 * 1024 * 1024,
250246
ifd_value_size: 1024 * 1024,
251-
_non_exhaustive: (),
252247
}
253248
}
254249
}
@@ -861,7 +856,8 @@ impl<R: Read + Seek> Decoder<R> {
861856
let row_samples = if bits_per_sample >= 8 {
862857
width
863858
} else {
864-
((((width as u64) * bits_per_sample as u64) + 7) / 8)
859+
((width as u64) * bits_per_sample as u64)
860+
.div_ceil(8)
865861
.try_into()
866862
.map_err(|_| TiffError::LimitsExceeded)?
867863
};
@@ -960,12 +956,12 @@ impl<R: Read + Seek> Decoder<R> {
960956
let output_row_bits = (width as u64 * self.image.bits_per_sample as u64)
961957
.checked_mul(samples as u64)
962958
.ok_or(TiffError::LimitsExceeded)?;
963-
let output_row_stride: usize = ((output_row_bits + 7) / 8).try_into()?;
959+
let output_row_stride: usize = output_row_bits.div_ceil(8).try_into()?;
964960

965961
let chunk_row_bits = (chunk_dimensions.0 as u64 * self.image.bits_per_sample as u64)
966962
.checked_mul(samples as u64)
967963
.ok_or(TiffError::LimitsExceeded)?;
968-
let chunk_row_bytes: usize = ((chunk_row_bits + 7) / 8).try_into()?;
964+
let chunk_row_bytes: usize = chunk_row_bits.div_ceil(8).try_into()?;
969965

970966
let chunks_across = ((width - 1) / chunk_dimensions.0 + 1) as usize;
971967

src/decoder/stream.rs

Lines changed: 11 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -135,20 +135,17 @@ impl<R: Read> EndianReader<R> {
135135
}
136136
}
137137

138-
///
139-
/// # READERS
140-
///
141-
142-
///
143-
/// ## Deflate Reader
144-
///
138+
//
139+
// # READERS
140+
//
145141

142+
/// Type alias for the deflate Reader
146143
#[cfg(feature = "deflate")]
147144
pub type DeflateReader<R> = flate2::read::ZlibDecoder<R>;
148145

149-
///
150-
/// ## LZW Reader
151-
///
146+
//
147+
// ## LZW Reader
148+
//
152149

153150
/// Reader that decompresses LZW streams
154151
#[cfg(feature = "lzw")]
@@ -207,10 +204,11 @@ impl<R: Read> Read for LZWReader<R> {
207204
}
208205
}
209206

210-
///
211-
/// ## PackBits Reader
212-
///
207+
//
208+
// ## PackBits Reader
209+
//
213210

211+
/// Internal state machine for the PackBitsReader.
214212
enum PackBitsReaderState {
215213
Header,
216214
Literal,

src/directory.rs

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,8 @@ pub struct Directory {
2727
}
2828

2929
impl Directory {
30+
/// Create a directory in an initial state without entries. Note that an empty directory can
31+
/// not be encoded in a file, it must contain at least one entry.
3032
pub fn empty() -> Self {
3133
Directory {
3234
entries: BTreeMap::new(),
@@ -72,6 +74,12 @@ impl Directory {
7274
self.entries.len()
7375
}
7476

77+
/// Check if there are any entries in this directory. Note that an empty directory can not be
78+
/// encoded in the file, it must contain at least one entry.
79+
pub fn is_empty(&self) -> bool {
80+
self.entries.is_empty()
81+
}
82+
7583
/// Get the pointer to the next IFD, if it was defined.
7684
pub fn next(&self) -> Option<IfdPointer> {
7785
self.next_ifd.map(|n| IfdPointer(n.get()))

src/encoder/compression/deflate.rs

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -11,21 +11,17 @@ pub struct Deflate {
1111
/// It allows trading compression ratio for compression speed.
1212
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
1313
#[non_exhaustive]
14+
#[derive(Default)]
1415
pub enum DeflateLevel {
1516
/// The fastest possible compression mode.
1617
Fast = 1,
1718
/// The conservative choice between speed and ratio.
19+
#[default]
1820
Balanced = 6,
1921
/// The best compression available with Deflate.
2022
Best = 9,
2123
}
2224

23-
impl Default for DeflateLevel {
24-
fn default() -> Self {
25-
DeflateLevel::Balanced
26-
}
27-
}
28-
2925
impl Deflate {
3026
/// Create a new deflate compressor with a specific level of compression.
3127
pub fn with_level(level: DeflateLevel) -> Self {

src/encoder/mod.rs

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -296,7 +296,7 @@ impl<'a, W: 'a + Write + Seek, K: TiffKind> DirectoryEncoder<'a, W, K> {
296296
}
297297

298298
let entry = Self::write_value(
299-
&mut self.writer,
299+
self.writer,
300300
&DirectoryEntry {
301301
data_type: <T>::FIELD_TYPE,
302302
count: value.count().try_into()?,
@@ -370,14 +370,14 @@ impl<'a, W: 'a + Write + Seek, K: TiffKind> DirectoryEncoder<'a, W, K> {
370370

371371
if bytes.len() > in_entry_bytes {
372372
let offset = writer.offset();
373-
writer.write_bytes(&bytes)?;
373+
writer.write_bytes(bytes)?;
374374

375375
let offset = K::convert_offset(offset)?;
376376
offset_bytes[..offset.bytes()].copy_from_slice(&offset.data());
377377
} else {
378378
// Note: we have indicated our native byte order in the header, hence this
379379
// corresponds to our byte order no matter the value type.
380-
offset_bytes[..bytes.len()].copy_from_slice(&bytes);
380+
offset_bytes[..bytes.len()].copy_from_slice(bytes);
381381
}
382382

383383
// Undoing some hidden type. Offset is either u32 or u64. Due to the trait API being public
@@ -532,11 +532,11 @@ impl<'a, W: 'a + Write + Seek, T: ColorType, K: TiffKind> ImageEncoder<'a, W, T,
532532
let rows_per_strip = {
533533
match compression.tag() {
534534
CompressionMethod::PackBits => 1, // Each row must be packed separately. Do not compress across row boundaries
535-
_ => (1_000_000 + row_bytes - 1) / row_bytes,
535+
_ => 1_000_000_u64.div_ceil(row_bytes),
536536
}
537537
};
538538

539-
let strip_count = (u64::from(height) + rows_per_strip - 1) / rows_per_strip;
539+
let strip_count = u64::from(height).div_ceil(rows_per_strip);
540540

541541
encoder.write_tag(Tag::ImageWidth, width)?;
542542
encoder.write_tag(Tag::ImageLength, height)?;
@@ -705,7 +705,7 @@ impl<'a, W: 'a + Write + Seek, T: ColorType, K: TiffKind> ImageEncoder<'a, W, T,
705705
self.encoder.write_tag(Tag::RowsPerStrip, value)?;
706706

707707
let value: u64 = value as u64;
708-
self.strip_count = (self.height as u64 + value - 1) / value;
708+
self.strip_count = (self.height as u64).div_ceil(value);
709709
self.rows_per_strip = value;
710710

711711
Ok(())

src/encoder/tiff_value.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -450,7 +450,7 @@ impl TiffValue for str {
450450
}
451451
}
452452

453-
impl<'a, T: TiffValue + ?Sized> TiffValue for &'a T {
453+
impl<T: TiffValue + ?Sized> TiffValue for &'_ T {
454454
const BYTE_LEN: u8 = T::BYTE_LEN;
455455
const FIELD_TYPE: Type = T::FIELD_TYPE;
456456

0 commit comments

Comments
 (0)