From f24c6c7ea5e2ea6558b6a1923540c0bff95eb199 Mon Sep 17 00:00:00 2001 From: exfalso <0slemi0@gmail.com> Date: Wed, 10 May 2023 17:14:32 +0200 Subject: [PATCH 1/6] Add by_name_seek() for Stored zips --- src/lib.rs | 1 + src/read.rs | 430 ++++++++++++++++++++++++++++++--------------- src/read/stream.rs | 2 +- src/write.rs | 5 +- 4 files changed, 296 insertions(+), 142 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index 7f3e7a01..bed39e1a 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -26,6 +26,7 @@ pub use crate::compression::{CompressionMethod, SUPPORTED_COMPRESSION_METHODS}; pub use crate::read::ZipArchive; +pub use crate::read::HasZipMetadata; pub use crate::types::DateTime; pub use crate::write::ZipWriter; diff --git a/src/read.rs b/src/read.rs index b702b4f2..4125d374 100644 --- a/src/read.rs +++ b/src/read.rs @@ -12,7 +12,7 @@ use crate::zipcrypto::{ZipCryptoReader, ZipCryptoReaderValid, ZipCryptoValidator use byteorder::{LittleEndian, ReadBytesExt}; use std::borrow::Cow; use std::collections::HashMap; -use std::io::{self, prelude::*}; +use std::io::{self, prelude::*, SeekFrom}; use std::path::Path; use std::sync::Arc; @@ -52,6 +52,7 @@ pub(crate) mod zip_archive { /// ```no_run /// use std::io::prelude::*; /// fn list_zip_contents(reader: impl Read + Seek) -> zip::result::ZipResult<()> { + /// use zip::HasZipMetadata; /// let mut zip = zip::ZipArchive::new(reader)?; /// /// for i in 0..zip.len() { @@ -183,6 +184,65 @@ pub struct ZipFile<'a> { reader: ZipFileReader<'a>, } +/// A struct for reading and seeking a zip file +pub struct ZipFileSeek<'a, R> { + data: Cow<'a, ZipFileData>, + reader: ZipFileSeekReader<'a, R>, +} + +enum ZipFileSeekReader<'a, R> { + Raw(SeekableTake<'a, R>), +} + +struct SeekableTake<'a, R> { + inner: &'a mut R, + inner_starting_offset: u64, + length: u64, + current_offset: u64, +} + + +impl <'a, R: Seek> SeekableTake<'a, R> { + pub fn new(inner: &'a mut R, length: u64) -> io::Result { + let inner_starting_offset = inner.seek(SeekFrom::Current(0))?; + Ok(Self { + inner, + inner_starting_offset, + length, + current_offset: 0 + }) + } +} + +impl <'a, R: Seek> Seek for SeekableTake<'a, R> { + fn seek(&mut self, pos: SeekFrom) -> io::Result { + let offset = match pos { + SeekFrom::Start(offset) => Some(offset), + SeekFrom::End(offset) => self.length.checked_add_signed(offset), + SeekFrom::Current(offset) => self.current_offset.checked_add_signed(offset), + }; + match offset { + None => { + Err(io::Error::new(io::ErrorKind::InvalidInput, "invalid seek to a negative or overflowing position")) + } + Some(offset) => { + let clamped_offset = std::cmp::min(self.length, offset); + let new_inner_offset = self.inner.seek(SeekFrom::Start(self.inner_starting_offset + clamped_offset))?; + self.current_offset = new_inner_offset - self.inner_starting_offset; + Ok(new_inner_offset) + } + } + } +} + +impl <'a, R: Read> Read for SeekableTake<'a, R> { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + let written = self.inner.take(self.length - self.current_offset).read(buf)?; + self.current_offset += written as u64; + Ok(written) + } +} + fn find_content<'a>( data: &ZipFileData, reader: &'a mut (impl Read + Seek), @@ -205,6 +265,29 @@ fn find_content<'a>( Ok((reader as &mut dyn Read).take(data.compressed_size)) } +fn find_content_seek<'a, R: Read + Seek>( + data: &ZipFileData, + reader: &'a mut R, +) -> ZipResult> { + // Parse local header + reader.seek(io::SeekFrom::Start(data.header_start))?; + let signature = reader.read_u32::()?; + if signature != 
spec::LOCAL_FILE_HEADER_SIGNATURE { + return Err(ZipError::InvalidArchive("Invalid local file header")); + } + + reader.seek(io::SeekFrom::Current(22))?; + let file_name_length = reader.read_u16::()? as u64; + let extra_field_length = reader.read_u16::()? as u64; + let magic_and_header = 4 + 22 + 2 + 2; + let data_start = data.header_start + magic_and_header + file_name_length + extra_field_length; + data.data_start.store(data_start); + + reader.seek(io::SeekFrom::Start(data_start))?; + + Ok(SeekableTake::new(reader, data.compressed_size)?) +} + #[allow(clippy::too_many_arguments)] fn make_crypto_reader<'a>( compression_method: crate::compression::CompressionMethod, @@ -533,6 +616,40 @@ impl ZipArchive { Ok(self.by_name_with_optional_password(name, None)?.unwrap()) } + /// Search for a file entry by name and return a seekable object. + pub fn by_name_seek(&mut self, name: &str) -> ZipResult> { + let index = match self.shared.names_map.get(name) { + Some(index) => *index, + None => { + return Err(ZipError::FileNotFound); + } + }; + self.by_index_seek(index) + } + + /// Search for a file entry by index and return a seekable object. + pub fn by_index_seek(&mut self, index: usize) -> ZipResult> { + let reader = &mut self.reader; + self.shared + .files + .get(index) + .ok_or(ZipError::FileNotFound) + .and_then(move |data| { + let seek_reader = match data.compression_method { + CompressionMethod::Stored => { + ZipFileSeekReader::Raw(find_content_seek(data, reader)?) + } + _ => { + return Err(ZipError::UnsupportedArchive("Seekable compressed files are not yet supported")) + } + }; + Ok(ZipFileSeek { + reader: seek_reader, + data: Cow::Borrowed(data), + }) + }) + } + fn by_name_with_optional_password<'a>( &'a mut self, name: &str, @@ -816,6 +933,151 @@ fn parse_extra_field(file: &mut ZipFileData) -> ZipResult<()> { Ok(()) } +/// A trait for exposing file metadata inside the zip. +pub trait HasZipMetadata { + /// Get the file metadata + fn get_metadata(&self) -> &ZipFileData; + + /// Get the version of the file + fn version_made_by(&self) -> (u8, u8) { + ( + self.get_metadata().version_made_by / 10, + self.get_metadata().version_made_by % 10, + ) + } + + /// Get the name of the file + /// + /// # Warnings + /// + /// It is dangerous to use this name directly when extracting an archive. + /// It may contain an absolute path (`/etc/shadow`), or break out of the + /// current directory (`../runtime`). Carelessly writing to these paths + /// allows an attacker to craft a ZIP archive that will overwrite critical + /// files. + /// + /// You can use the [`ZipFile::enclosed_name`] method to validate the name + /// as a safe path. + fn name(&self) -> &str { + &self.get_metadata().file_name + } + + /// Get the name of the file, in the raw (internal) byte representation. + /// + /// The encoding of this data is currently undefined. + fn name_raw(&self) -> &[u8] { + &self.get_metadata().file_name_raw + } + + /// Get the name of the file in a sanitized form. It truncates the name to the first NULL byte, + /// removes a leading '/' and removes '..' parts. + #[deprecated( + since = "0.5.7", + note = "by stripping `..`s from the path, the meaning of paths can change. + `mangled_name` can be used if this behaviour is desirable" + )] + fn sanitized_name(&self) -> ::std::path::PathBuf { + self.mangled_name() + } + + /// Rewrite the path, ignoring any path components with special meaning. 
+ /// + /// - Absolute paths are made relative + /// - [`ParentDir`]s are ignored + /// - Truncates the filename at a NULL byte + /// + /// This is appropriate if you need to be able to extract *something* from + /// any archive, but will easily misrepresent trivial paths like + /// `foo/../bar` as `foo/bar` (instead of `bar`). Because of this, + /// [`ZipFile::enclosed_name`] is the better option in most scenarios. + /// + /// [`ParentDir`]: `Component::ParentDir` + fn mangled_name(&self) -> ::std::path::PathBuf { + self.get_metadata().file_name_sanitized() + } + + /// Ensure the file path is safe to use as a [`Path`]. + /// + /// - It can't contain NULL bytes + /// - It can't resolve to a path outside the current directory + /// > `foo/../bar` is fine, `foo/../../bar` is not. + /// - It can't be an absolute path + /// + /// This will read well-formed ZIP files correctly, and is resistant + /// to path-based exploits. It is recommended over + /// [`ZipFile::mangled_name`]. + fn enclosed_name(&self) -> Option<&Path> { + self.get_metadata().enclosed_name() + } + + /// Get the comment of the file + fn comment(&self) -> &str { + &self.get_metadata().file_comment + } + + /// Get the compression method used to store the file + fn compression(&self) -> CompressionMethod { + self.get_metadata().compression_method + } + + /// Get the size of the file, in bytes, in the archive + fn compressed_size(&self) -> u64 { + self.get_metadata().compressed_size + } + + /// Get the size of the file, in bytes, when uncompressed + fn size(&self) -> u64 { + self.get_metadata().uncompressed_size + } + + /// Get the time the file was last modified + fn last_modified(&self) -> DateTime { + self.get_metadata().last_modified_time + } + /// Returns whether the file is actually a directory + fn is_dir(&self) -> bool { + self.name() + .chars() + .rev() + .next() + .map_or(false, |c| c == '/' || c == '\\') + } + + /// Returns whether the file is a regular file + fn is_file(&self) -> bool { + !self.is_dir() + } + + /// Get unix mode for the file + fn unix_mode(&self) -> Option { + self.get_metadata().unix_mode() + } + + /// Get the CRC32 hash of the original file + fn crc32(&self) -> u32 { + self.get_metadata().crc32 + } + + /// Get the extra data of the zip header for this file + fn extra_data(&self) -> &[u8] { + &self.get_metadata().extra_field + } + + /// Get the starting offset of the data of the compressed file + fn data_start(&self) -> u64 { + self.get_metadata().data_start.load() + } + + /// Get the starting offset of the zip header for this file + fn header_start(&self) -> u64 { + self.get_metadata().header_start + } + /// Get the starting offset of the zip header in the central directory for this file + fn central_header_start(&self) -> u64 { + self.get_metadata().central_header_start + } +} + /// Methods for retrieving information on zip files impl<'a> ZipFile<'a> { fn get_reader(&mut self) -> &mut ZipFileReader<'a> { @@ -834,144 +1096,11 @@ impl<'a> ZipFile<'a> { } &mut self.reader } +} - /// Get the version of the file - pub fn version_made_by(&self) -> (u8, u8) { - ( - self.data.version_made_by / 10, - self.data.version_made_by % 10, - ) - } - - /// Get the name of the file - /// - /// # Warnings - /// - /// It is dangerous to use this name directly when extracting an archive. - /// It may contain an absolute path (`/etc/shadow`), or break out of the - /// current directory (`../runtime`). 
Carelessly writing to these paths - /// allows an attacker to craft a ZIP archive that will overwrite critical - /// files. - /// - /// You can use the [`ZipFile::enclosed_name`] method to validate the name - /// as a safe path. - pub fn name(&self) -> &str { - &self.data.file_name - } - - /// Get the name of the file, in the raw (internal) byte representation. - /// - /// The encoding of this data is currently undefined. - pub fn name_raw(&self) -> &[u8] { - &self.data.file_name_raw - } - - /// Get the name of the file in a sanitized form. It truncates the name to the first NULL byte, - /// removes a leading '/' and removes '..' parts. - #[deprecated( - since = "0.5.7", - note = "by stripping `..`s from the path, the meaning of paths can change. - `mangled_name` can be used if this behaviour is desirable" - )] - pub fn sanitized_name(&self) -> ::std::path::PathBuf { - self.mangled_name() - } - - /// Rewrite the path, ignoring any path components with special meaning. - /// - /// - Absolute paths are made relative - /// - [`ParentDir`]s are ignored - /// - Truncates the filename at a NULL byte - /// - /// This is appropriate if you need to be able to extract *something* from - /// any archive, but will easily misrepresent trivial paths like - /// `foo/../bar` as `foo/bar` (instead of `bar`). Because of this, - /// [`ZipFile::enclosed_name`] is the better option in most scenarios. - /// - /// [`ParentDir`]: `Component::ParentDir` - pub fn mangled_name(&self) -> ::std::path::PathBuf { - self.data.file_name_sanitized() - } - - /// Ensure the file path is safe to use as a [`Path`]. - /// - /// - It can't contain NULL bytes - /// - It can't resolve to a path outside the current directory - /// > `foo/../bar` is fine, `foo/../../bar` is not. - /// - It can't be an absolute path - /// - /// This will read well-formed ZIP files correctly, and is resistant - /// to path-based exploits. It is recommended over - /// [`ZipFile::mangled_name`]. 
- pub fn enclosed_name(&self) -> Option<&Path> { - self.data.enclosed_name() - } - - /// Get the comment of the file - pub fn comment(&self) -> &str { - &self.data.file_comment - } - - /// Get the compression method used to store the file - pub fn compression(&self) -> CompressionMethod { - self.data.compression_method - } - - /// Get the size of the file, in bytes, in the archive - pub fn compressed_size(&self) -> u64 { - self.data.compressed_size - } - - /// Get the size of the file, in bytes, when uncompressed - pub fn size(&self) -> u64 { - self.data.uncompressed_size - } - - /// Get the time the file was last modified - pub fn last_modified(&self) -> DateTime { - self.data.last_modified_time - } - /// Returns whether the file is actually a directory - pub fn is_dir(&self) -> bool { - self.name() - .chars() - .rev() - .next() - .map_or(false, |c| c == '/' || c == '\\') - } - - /// Returns whether the file is a regular file - pub fn is_file(&self) -> bool { - !self.is_dir() - } - - /// Get unix mode for the file - pub fn unix_mode(&self) -> Option { - self.data.unix_mode() - } - - /// Get the CRC32 hash of the original file - pub fn crc32(&self) -> u32 { - self.data.crc32 - } - - /// Get the extra data of the zip header for this file - pub fn extra_data(&self) -> &[u8] { - &self.data.extra_field - } - - /// Get the starting offset of the data of the compressed file - pub fn data_start(&self) -> u64 { - self.data.data_start.load() - } - - /// Get the starting offset of the zip header for this file - pub fn header_start(&self) -> u64 { - self.data.header_start - } - /// Get the starting offset of the zip header in the central directory for this file - pub fn central_header_start(&self) -> u64 { - self.data.central_header_start +impl <'a> HasZipMetadata for ZipFile<'a> { + fn get_metadata(&self) -> &ZipFileData { + self.data.as_ref() } } @@ -981,6 +1110,28 @@ impl<'a> Read for ZipFile<'a> { } } +impl <'a, R: Read> Read for ZipFileSeek<'a, R> { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + match &mut self.reader { + ZipFileSeekReader::Raw(r) => r.read(buf), + } + } +} + +impl <'a, R: Seek> Seek for ZipFileSeek<'a, R> { + fn seek(&mut self, pos: SeekFrom) -> io::Result { + match &mut self.reader { + ZipFileSeekReader::Raw(r) => r.seek(pos), + } + } +} + +impl <'a, R> HasZipMetadata for ZipFileSeek<'a, R> { + fn get_metadata(&self) -> &ZipFileData { + self.data.as_ref() + } +} + impl<'a> Drop for ZipFile<'a> { fn drop(&mut self) { // self.data is Owned, this reader is constructed by a streaming reader. @@ -1167,6 +1318,7 @@ mod test { #[test] fn zip_contents() { use super::ZipArchive; + use super::HasZipMetadata; use std::io; let mut v = Vec::new(); @@ -1194,6 +1346,7 @@ mod test { #[test] fn zip_clone() { use super::ZipArchive; + use super::HasZipMetadata; use std::io::{self, Read}; let mut v = Vec::new(); @@ -1235,6 +1388,7 @@ mod test { #[test] fn file_and_dir_predicates() { use super::ZipArchive; + use super::HasZipMetadata; use std::io; let mut v = Vec::new(); diff --git a/src/read/stream.rs b/src/read/stream.rs index 5a01b23f..c1b677da 100644 --- a/src/read/stream.rs +++ b/src/read/stream.rs @@ -4,7 +4,7 @@ use std::path::Path; use super::{ central_header_to_zip_file_inner, read_zipfile_from_stream, spec, ZipError, ZipFile, - ZipFileData, ZipResult, + ZipFileData, ZipResult, HasZipMetadata, }; use byteorder::{LittleEndian, ReadBytesExt}; diff --git a/src/write.rs b/src/write.rs index 3f41c4d6..50394c36 100644 --- a/src/write.rs +++ b/src/write.rs @@ -1,13 +1,12 @@ //! 
Types for creating ZIP archives use crate::compression::CompressionMethod; -use crate::read::{central_header_to_zip_file, ZipArchive, ZipFile}; +use crate::read::{central_header_to_zip_file, ZipArchive, ZipFile, HasZipMetadata}; use crate::result::{ZipError, ZipResult}; use crate::spec; use crate::types::{AtomicU64, DateTime, System, ZipFileData, DEFAULT_VERSION}; use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; use crc32fast::Hasher; -use std::convert::TryInto; use std::default::Default; use std::io; use std::io::prelude::*; @@ -493,7 +492,7 @@ impl ZipWriter { /// /// ``` /// use byteorder::{LittleEndian, WriteBytesExt}; - /// use zip::{ZipArchive, ZipWriter, result::ZipResult}; + /// use zip::{ZipArchive, ZipWriter, result::ZipResult, HasZipMetadata}; /// use zip::{write::FileOptions, CompressionMethod}; /// use std::io::{Write, Cursor}; /// From 614879506c3410017cc172c3d71061af3f79760b Mon Sep 17 00:00:00 2001 From: Chris Hennick <4961925+Pr0methean@users.noreply.github.com> Date: Sun, 5 May 2024 17:53:18 -0700 Subject: [PATCH 2/6] chore: Fix merge --- src/lib.rs | 2 +- src/read.rs | 83 ++++++++++++++++++++++++---------------------- src/read/stream.rs | 4 +-- src/write.rs | 6 ++-- 4 files changed, 49 insertions(+), 46 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index 3e92667b..5acadcb9 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -28,8 +28,8 @@ #![warn(missing_docs)] pub use crate::compression::{CompressionMethod, SUPPORTED_COMPRESSION_METHODS}; -pub use crate::read::ZipArchive; pub use crate::read::HasZipMetadata; +pub use crate::read::ZipArchive; pub use crate::types::DateTime; pub use crate::write::ZipWriter; diff --git a/src/read.rs b/src/read.rs index f03b58a8..8aa0d075 100644 --- a/src/read.rs +++ b/src/read.rs @@ -12,8 +12,7 @@ use crate::spec; use crate::types::{AesMode, AesVendorVersion, DateTime, System, ZipFileData}; use crate::zipcrypto::{ZipCryptoReader, ZipCryptoReaderValid, ZipCryptoValidator}; use indexmap::IndexMap; -use std::borrow::{Borrow, Cow}; -use std::collections::HashMap; +use std::borrow::Cow; use std::io::{self, prelude::*, SeekFrom}; use std::ops::Deref; use std::path::{Path, PathBuf}; @@ -221,20 +220,19 @@ struct SeekableTake<'a, R> { current_offset: u64, } - -impl <'a, R: Seek> SeekableTake<'a, R> { +impl<'a, R: Seek> SeekableTake<'a, R> { pub fn new(inner: &'a mut R, length: u64) -> io::Result { - let inner_starting_offset = inner.seek(SeekFrom::Current(0))?; + let inner_starting_offset = inner.stream_position()?; Ok(Self { inner, inner_starting_offset, length, - current_offset: 0 + current_offset: 0, }) } } -impl <'a, R: Seek> Seek for SeekableTake<'a, R> { +impl<'a, R: Seek> Seek for SeekableTake<'a, R> { fn seek(&mut self, pos: SeekFrom) -> io::Result { let offset = match pos { SeekFrom::Start(offset) => Some(offset), @@ -242,12 +240,15 @@ impl <'a, R: Seek> Seek for SeekableTake<'a, R> { SeekFrom::Current(offset) => self.current_offset.checked_add_signed(offset), }; match offset { - None => { - Err(io::Error::new(io::ErrorKind::InvalidInput, "invalid seek to a negative or overflowing position")) - } + None => Err(io::Error::new( + io::ErrorKind::InvalidInput, + "invalid seek to a negative or overflowing position", + )), Some(offset) => { let clamped_offset = std::cmp::min(self.length, offset); - let new_inner_offset = self.inner.seek(SeekFrom::Start(self.inner_starting_offset + clamped_offset))?; + let new_inner_offset = self + .inner + .seek(SeekFrom::Start(self.inner_starting_offset + clamped_offset))?; self.current_offset = 
new_inner_offset - self.inner_starting_offset; Ok(new_inner_offset) } @@ -255,9 +256,12 @@ impl <'a, R: Seek> Seek for SeekableTake<'a, R> { } } -impl <'a, R: Read> Read for SeekableTake<'a, R> { +impl<'a, R: Read> Read for SeekableTake<'a, R> { fn read(&mut self, buf: &mut [u8]) -> io::Result { - let written = self.inner.take(self.length - self.current_offset).read(buf)?; + let written = self + .inner + .take(self.length - self.current_offset) + .read(buf)?; self.current_offset += written as u64; Ok(written) } @@ -297,17 +301,17 @@ fn find_content_seek<'a, R: Read + Seek>( ) -> ZipResult> { // Parse local header reader.seek(io::SeekFrom::Start(data.header_start))?; - let signature = reader.read_u32::()?; + let signature = reader.read_u32_le()?; if signature != spec::LOCAL_FILE_HEADER_SIGNATURE { return Err(ZipError::InvalidArchive("Invalid local file header")); } reader.seek(io::SeekFrom::Current(22))?; - let file_name_length = reader.read_u16::()? as u64; - let extra_field_length = reader.read_u16::()? as u64; + let file_name_length = reader.read_u16_le()? as u64; + let extra_field_length = reader.read_u16_le()? as u64; let magic_and_header = 4 + 22 + 2 + 2; let data_start = data.header_start + magic_and_header + file_name_length + extra_field_length; - data.data_start.store(data_start); + data.data_start.get_or_init(|| data_start); reader.seek(io::SeekFrom::Start(data_start))?; @@ -822,13 +826,7 @@ impl ZipArchive { /// Search for a file entry by name and return a seekable object. pub fn by_name_seek(&mut self, name: &str) -> ZipResult> { - let index = match self.shared.names_map.get(name) { - Some(index) => *index, - None => { - return Err(ZipError::FileNotFound); - } - }; - self.by_index_seek(index) + self.by_index_seek(self.index_for_name(name).ok_or(ZipError::FileNotFound)?) } /// Search for a file entry by index and return a seekable object. @@ -836,15 +834,17 @@ impl ZipArchive { let reader = &mut self.reader; self.shared .files - .get(index) + .get_index(index) .ok_or(ZipError::FileNotFound) - .and_then(move |data| { + .and_then(move |(_, data)| { let seek_reader = match data.compression_method { CompressionMethod::Stored => { ZipFileSeekReader::Raw(find_content_seek(data, reader)?) 
} _ => { - return Err(ZipError::UnsupportedArchive("Seekable compressed files are not yet supported")) + return Err(ZipError::UnsupportedArchive( + "Seekable compressed files are not yet supported", + )) } }; Ok(ZipFileSeek { @@ -1272,7 +1272,10 @@ pub trait HasZipMetadata { /// Get the extra data of the zip header for this file fn extra_data(&self) -> Option<&[u8]> { - self.get_metadata().extra_field.as_ref().map(|v| v.deref().deref()) + self.get_metadata() + .extra_field + .as_ref() + .map(|v| v.deref().deref()) } /// Get the starting offset of the data of the compressed file @@ -1308,12 +1311,6 @@ impl<'a> ZipFile<'a> { } &mut self.reader } -} - -impl <'a> HasZipMetadata for ZipFile<'a> { - fn get_metadata(&self) -> &ZipFileData { - self.data.as_ref() - } /// iterate through all extra fields pub fn extra_data_fields(&self) -> impl Iterator { @@ -1321,13 +1318,19 @@ impl <'a> HasZipMetadata for ZipFile<'a> { } } +impl<'a> HasZipMetadata for ZipFile<'a> { + fn get_metadata(&self) -> &ZipFileData { + self.data.as_ref() + } +} + impl<'a> Read for ZipFile<'a> { fn read(&mut self, buf: &mut [u8]) -> io::Result { self.get_reader().read(buf) } } -impl <'a, R: Read> Read for ZipFileSeek<'a, R> { +impl<'a, R: Read> Read for ZipFileSeek<'a, R> { fn read(&mut self, buf: &mut [u8]) -> io::Result { match &mut self.reader { ZipFileSeekReader::Raw(r) => r.read(buf), @@ -1335,7 +1338,7 @@ impl <'a, R: Read> Read for ZipFileSeek<'a, R> { } } -impl <'a, R: Seek> Seek for ZipFileSeek<'a, R> { +impl<'a, R: Seek> Seek for ZipFileSeek<'a, R> { fn seek(&mut self, pos: SeekFrom) -> io::Result { match &mut self.reader { ZipFileSeekReader::Raw(r) => r.seek(pos), @@ -1343,7 +1346,7 @@ impl <'a, R: Seek> Seek for ZipFileSeek<'a, R> { } } -impl <'a, R> HasZipMetadata for ZipFileSeek<'a, R> { +impl<'a, R> HasZipMetadata for ZipFileSeek<'a, R> { fn get_metadata(&self) -> &ZipFileData { self.data.as_ref() } @@ -1534,8 +1537,8 @@ mod test { #[test] fn zip_contents() { - use super::ZipArchive; use super::HasZipMetadata; + use super::ZipArchive; let mut v = Vec::new(); v.extend_from_slice(include_bytes!("../tests/data/mimetype.zip")); @@ -1560,8 +1563,8 @@ mod test { #[test] fn zip_clone() { - use super::ZipArchive; use super::HasZipMetadata; + use super::ZipArchive; use std::io::Read; let mut v = Vec::new(); @@ -1602,8 +1605,8 @@ mod test { #[test] fn file_and_dir_predicates() { - use super::ZipArchive; use super::HasZipMetadata; + use super::ZipArchive; let mut v = Vec::new(); v.extend_from_slice(include_bytes!("../tests/data/files_and_dirs.zip")); diff --git a/src/read/stream.rs b/src/read/stream.rs index 03c9d371..325d5706 100644 --- a/src/read/stream.rs +++ b/src/read/stream.rs @@ -4,8 +4,8 @@ use std::io::{self, Read}; use std::path::{Path, PathBuf}; use super::{ - central_header_to_zip_file_inner, read_zipfile_from_stream, spec, ZipError, ZipFile, - ZipFileData, ZipResult, HasZipMetadata, + central_header_to_zip_file_inner, read_zipfile_from_stream, spec, HasZipMetadata, ZipError, + ZipFile, ZipFileData, ZipResult, }; /// Stream decoder for zip. diff --git a/src/write.rs b/src/write.rs index df4ae513..437cd1ad 100644 --- a/src/write.rs +++ b/src/write.rs @@ -1,9 +1,9 @@ //! 
Types for creating ZIP archives use crate::compression::CompressionMethod; -use crate::read::{find_content, central_header_to_zip_file, ZipArchive, ZipFile}; +use crate::read::{find_content, ZipArchive, ZipFile, ZipFileReader}; use crate::result::{ZipError, ZipResult}; -use crate::spec; +use crate::{HasZipMetadata, spec}; use crate::types::{ffi, DateTime, System, ZipFileData, DEFAULT_VERSION}; #[cfg(any(feature = "_deflate-any", feature = "bzip2", feature = "zstd",))] use core::num::NonZeroU64; @@ -1810,7 +1810,7 @@ mod test { use crate::types::DateTime; use crate::write::SimpleFileOptions; use crate::CompressionMethod::Stored; - use crate::ZipArchive; + use crate::{HasZipMetadata, ZipArchive}; use std::io; use std::io::{Cursor, Read, Write}; use std::path::PathBuf; From 16dc2482f2906a93d7bd73fb8cdda710e8b28118 Mon Sep 17 00:00:00 2001 From: Chris Hennick <4961925+Pr0methean@users.noreply.github.com> Date: Sun, 5 May 2024 17:57:15 -0700 Subject: [PATCH 3/6] chore: Fix unused import warning for TryInto with no features --- src/write.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/write.rs b/src/write.rs index 437cd1ad..fcb0b890 100644 --- a/src/write.rs +++ b/src/write.rs @@ -9,7 +9,6 @@ use crate::types::{ffi, DateTime, System, ZipFileData, DEFAULT_VERSION}; use core::num::NonZeroU64; use crc32fast::Hasher; use indexmap::IndexMap; -use std::convert::TryInto; use std::default::Default; use std::io; use std::io::prelude::*; @@ -221,6 +220,7 @@ impl arbitrary::Arbitrary<'_> for FileOptions { match options.compression_method { #[cfg(feature = "deflate-zopfli")] CompressionMethod::Deflated => { + use core::convert::TryInto; if bool::arbitrary(u)? { let level = u.int_in_range(0..=24)?; options.compression_level = Some(level); @@ -400,6 +400,8 @@ impl FileOptions { impl Default for FileOptions { /// Construct a new FileOptions object fn default() -> Self { + #[cfg(feature = "time")] + use core::convert::TryInto; Self { compression_method: Default::default(), compression_level: None, @@ -1602,6 +1604,7 @@ fn clamp_opt>( value: T, range: std::ops::RangeInclusive, ) -> Option { + use core::convert::TryInto; if range.contains(&value.try_into().ok()?) 
{ Some(value) } else { From 1f84a81fdbec82891dc5aaeb0816877ace6905a2 Mon Sep 17 00:00:00 2001 From: Chris Hennick <4961925+Pr0methean@users.noreply.github.com> Date: Sun, 5 May 2024 17:57:59 -0700 Subject: [PATCH 4/6] style: cargo fmt --all --- src/write.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/write.rs b/src/write.rs index fcb0b890..f531798e 100644 --- a/src/write.rs +++ b/src/write.rs @@ -3,8 +3,8 @@ use crate::compression::CompressionMethod; use crate::read::{find_content, ZipArchive, ZipFile, ZipFileReader}; use crate::result::{ZipError, ZipResult}; -use crate::{HasZipMetadata, spec}; use crate::types::{ffi, DateTime, System, ZipFileData, DEFAULT_VERSION}; +use crate::{spec, HasZipMetadata}; #[cfg(any(feature = "_deflate-any", feature = "bzip2", feature = "zstd",))] use core::num::NonZeroU64; use crc32fast::Hasher; From 57f01ba9468ba475ab8aadf439837593cb5d1b88 Mon Sep 17 00:00:00 2001 From: Chris Hennick <4961925+Pr0methean@users.noreply.github.com> Date: Sat, 6 Jul 2024 14:26:37 -0700 Subject: [PATCH 5/6] chore: Fix build errors --- fuzz/fuzz_targets/fuzz_write.rs | 162 +++++++++++++++++++------------- src/read.rs | 57 ++++------- src/read/stream.rs | 4 +- src/write.rs | 6 +- 4 files changed, 120 insertions(+), 109 deletions(-) diff --git a/fuzz/fuzz_targets/fuzz_write.rs b/fuzz/fuzz_targets/fuzz_write.rs index 414de08d..fa281a4f 100755 --- a/fuzz/fuzz_targets/fuzz_write.rs +++ b/fuzz/fuzz_targets/fuzz_write.rs @@ -2,9 +2,9 @@ use arbitrary::Arbitrary; use core::fmt::{Debug, Formatter}; -use std::borrow::Cow; use libfuzzer_sys::fuzz_target; use replace_with::replace_with_or_abort; +use std::borrow::Cow; use std::io::{Cursor, Read, Seek, Write}; use std::path::PathBuf; use tikv_jemallocator::Jemalloc; @@ -27,16 +27,16 @@ pub enum BasicFileOperation<'k> { ShallowCopy(Box>), DeepCopy(Box>), MergeWithOtherFile { - operations: Box<[(FileOperation<'k>, bool)]> + operations: Box<[(FileOperation<'k>, bool)]>, }, - SetArchiveComment(Box<[u8]>) + SetArchiveComment(Box<[u8]>), } #[derive(Arbitrary, Clone, Debug, Eq, PartialEq)] pub enum ReopenOption { DoNotReopen, ViaFinish, - ViaFinishIntoReadable + ViaFinishIntoReadable, } #[derive(Arbitrary, Clone)] @@ -47,78 +47,105 @@ pub struct FileOperation<'k> { // 'abort' flag is separate, to prevent trying to copy an aborted file } -impl <'k> FileOperation<'k> { +impl<'k> FileOperation<'k> { fn get_path(&self) -> Option> { match &self.basic { BasicFileOperation::SetArchiveComment(_) => None, BasicFileOperation::WriteDirectory(_) => Some(Cow::Owned(self.path.join("/"))), - BasicFileOperation::MergeWithOtherFile { operations } => - operations.iter().flat_map(|(op, abort)| if !abort { op.get_path() } else { None }).next(), - _ => Some(Cow::Borrowed(&self.path)) + BasicFileOperation::MergeWithOtherFile { operations } => operations + .iter() + .flat_map(|(op, abort)| if !abort { op.get_path() } else { None }) + .next(), + _ => Some(Cow::Borrowed(&self.path)), } } } -impl <'k> Debug for FileOperation<'k> { +impl<'k> Debug for FileOperation<'k> { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match &self.basic { - BasicFileOperation::WriteNormalFile {contents, options} => { - f.write_fmt(format_args!("let options = {:?};\n\ - writer.start_file_from_path({:?}, options)?;\n", options, self.path))?; + BasicFileOperation::WriteNormalFile { contents, options } => { + f.write_fmt(format_args!( + "let options = {:?};\n\ + writer.start_file_from_path({:?}, options)?;\n", + options, self.path + ))?; for content_slice in 
contents { f.write_fmt(format_args!("writer.write_all(&({:?}))?;\n", content_slice))?; } - }, + } BasicFileOperation::WriteDirectory(options) => { - f.write_fmt(format_args!("let options = {:?};\n\ + f.write_fmt(format_args!( + "let options = {:?};\n\ writer.add_directory_from_path({:?}, options)?;\n", - options, self.path))?; - }, - BasicFileOperation::WriteSymlinkWithTarget {target, options} => { - f.write_fmt(format_args!("let options = {:?};\n\ + options, self.path + ))?; + } + BasicFileOperation::WriteSymlinkWithTarget { target, options } => { + f.write_fmt(format_args!( + "let options = {:?};\n\ writer.add_symlink_from_path({:?}, {:?}, options)?;\n", - options, self.path, target.to_owned()))?; - }, + options, + self.path, + target.to_owned() + ))?; + } BasicFileOperation::ShallowCopy(base) => { let Some(base_path) = base.get_path() else { - return Ok(()) + return Ok(()); }; - f.write_fmt(format_args!("{:?}writer.shallow_copy_file_from_path({:?}, {:?})?;\n", base, base_path, self.path))?; - }, + f.write_fmt(format_args!( + "{:?}writer.shallow_copy_file_from_path({:?}, {:?})?;\n", + base, base_path, self.path + ))?; + } BasicFileOperation::DeepCopy(base) => { let Some(base_path) = base.get_path() else { - return Ok(()) + return Ok(()); }; - f.write_fmt(format_args!("{:?}writer.deep_copy_file_from_path({:?}, {:?})?;\n", base, base_path, self.path))?; - }, - BasicFileOperation::MergeWithOtherFile {operations} => { - f.write_str("let sub_writer = {\n\ + f.write_fmt(format_args!( + "{:?}writer.deep_copy_file_from_path({:?}, {:?})?;\n", + base, base_path, self.path + ))?; + } + BasicFileOperation::MergeWithOtherFile { operations } => { + f.write_str( + "let sub_writer = {\n\ let mut writer = ZipWriter::new(Cursor::new(Vec::new()));\n\ - writer.set_flush_on_finish_file(false);\n")?; - operations.iter().map(|op| { - f.write_fmt(format_args!("{:?}", op.0))?; - if op.1 { - f.write_str("writer.abort_file()?;\n") - } else { - Ok(()) - } - }).collect::>()?; - f.write_str("writer\n\ + writer.set_flush_on_finish_file(false);\n", + )?; + operations + .iter() + .map(|op| { + f.write_fmt(format_args!("{:?}", op.0))?; + if op.1 { + f.write_str("writer.abort_file()?;\n") + } else { + Ok(()) + } + }) + .collect::>()?; + f.write_str( + "writer\n\ };\n\ - writer.merge_archive(sub_writer.finish_into_readable()?)?;\n")?; - }, + writer.merge_archive(sub_writer.finish_into_readable()?)?;\n", + )?; + } BasicFileOperation::SetArchiveComment(comment) => { - f.write_fmt(format_args!("writer.set_raw_comment({:?}.into());\n", comment))?; + f.write_fmt(format_args!( + "writer.set_raw_comment({:?}.into());\n", + comment + ))?; } } match &self.reopen { ReopenOption::DoNotReopen => Ok(()), ReopenOption::ViaFinish => { f.write_str("writer = ZipWriter::new_append(writer.finish()?)?;\n") - }, - ReopenOption::ViaFinishIntoReadable => { - f.write_str("writer = ZipWriter::new_append(writer.finish_into_readable()?.into_inner())?;\n") } + ReopenOption::ViaFinishIntoReadable => f.write_str( + "writer = ZipWriter::new_append(writer.finish_into_readable()?.into_inner())?;\n", + ), } } } @@ -129,19 +156,23 @@ pub struct FuzzTestCase<'k> { flush_on_finish_file: bool, } -impl <'k> Debug for FuzzTestCase<'k> { +impl<'k> Debug for FuzzTestCase<'k> { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.write_fmt(format_args!( "let mut writer = ZipWriter::new(Cursor::new(Vec::new()));\n\ - writer.set_flush_on_finish_file({:?});\n", self.flush_on_finish_file))?; - self.operations.iter().map(|op| { - f.write_fmt(format_args!("{:?}", 
op.0))?; - if op.1 { - f.write_str("writer.abort_file()?;\n") - } else { - Ok(()) - } - }) + writer.set_flush_on_finish_file({:?});\n", + self.flush_on_finish_file + ))?; + self.operations + .iter() + .map(|op| { + f.write_fmt(format_args!("{:?}", op.0))?; + if op.1 { + f.write_str("writer.abort_file()?;\n") + } else { + Ok(()) + } + }) .collect::>()?; f.write_str("writer\n") } @@ -154,8 +185,8 @@ fn deduplicate_paths(copy: &mut Cow, original: &PathBuf) { let mut new_name = name.to_owned(); new_name.push("_copy"); copy.with_file_name(new_name) - }, - None => copy.with_file_name("copy") + } + None => copy.with_file_name("copy"), }; *copy = Cow::Owned(new_path); } @@ -166,7 +197,7 @@ fn do_operation<'k, T>( operation: &FileOperation<'k>, abort: bool, flush_on_finish_file: bool, - files_added: &mut usize + files_added: &mut usize, ) -> Result<(), Box> where T: Read + Write + Seek, @@ -175,9 +206,7 @@ where let mut path = Cow::Borrowed(&operation.path); match &operation.basic { BasicFileOperation::WriteNormalFile { - contents, - options, - .. + contents, options, .. } => { let uncompressed_size = contents.iter().map(|chunk| chunk.len()).sum::(); let mut options = (*options).to_owned(); @@ -225,12 +254,12 @@ where &operation, *abort, false, - &mut inner_files_added + &mut inner_files_added, ); }); writer.merge_archive(other_writer.finish_into_readable()?)?; *files_added += inner_files_added; - }, + } BasicFileOperation::SetArchiveComment(comment) => { writer.set_raw_comment(comment.clone()); } @@ -250,14 +279,15 @@ where zip::ZipWriter::new_append(old_writer.finish().unwrap()).unwrap() }); assert!(writer.get_raw_comment().starts_with(&old_comment)); - }, + } ReopenOption::ViaFinishIntoReadable => { let old_comment = writer.get_raw_comment().to_owned(); replace_with_or_abort(writer, |old_writer: zip::ZipWriter| { - zip::ZipWriter::new_append(old_writer.finish_into_readable().unwrap().into_inner()).unwrap() + zip::ZipWriter::new_append(old_writer.finish_into_readable().unwrap().into_inner()) + .unwrap() }); assert!(writer.get_raw_comment().starts_with(&old_comment)); - }, + } } Ok(()) } @@ -279,7 +309,7 @@ fuzz_target!(|test_case: FuzzTestCase| { &operation, *abort, test_case.flush_on_finish_file, - &mut files_added + &mut files_added, ); } if final_reopen { diff --git a/src/read.rs b/src/read.rs index d758ce19..ab6d6174 100644 --- a/src/read.rs +++ b/src/read.rs @@ -343,8 +343,11 @@ fn find_content_seek<'a, R: Read + Seek>( reader: &'a mut R, ) -> ZipResult> { // Parse local header - reader.seek(io::SeekFrom::Start(find_data_start(data, reader)?))?; - SeekableTake::new(reader, data.compressed_size) + let data_start = find_data_start(data, reader)?; + reader.seek(io::SeekFrom::Start(data_start))?; + + // Explicit Ok and ? are needed to convert io::Error to ZipError + Ok(SeekableTake::new(reader, data.compressed_size)?) } fn find_data_start( @@ -1584,7 +1587,7 @@ impl<'a> ZipFile<'a> { } /// Get the version of the file - fn version_made_by(&self) -> (u8, u8) { + pub fn version_made_by(&self) -> (u8, u8) { ( self.get_metadata().version_made_by / 10, self.get_metadata().version_made_by % 10, @@ -1603,14 +1606,14 @@ impl<'a> ZipFile<'a> { /// /// You can use the [`ZipFile::enclosed_name`] method to validate the name /// as a safe path. - fn name(&self) -> &str { + pub fn name(&self) -> &str { &self.get_metadata().file_name } /// Get the name of the file, in the raw (internal) byte representation. /// /// The encoding of this data is currently undefined. 
- fn name_raw(&self) -> &[u8] { + pub fn name_raw(&self) -> &[u8] { &self.get_metadata().file_name_raw } @@ -1621,7 +1624,7 @@ impl<'a> ZipFile<'a> { note = "by stripping `..`s from the path, the meaning of paths can change. `mangled_name` can be used if this behaviour is desirable" )] - fn sanitized_name(&self) -> PathBuf { + pub fn sanitized_name(&self) -> PathBuf { self.mangled_name() } @@ -1637,7 +1640,7 @@ impl<'a> ZipFile<'a> { /// [`ZipFile::enclosed_name`] is the better option in most scenarios. /// /// [`ParentDir`]: `Component::ParentDir` - fn mangled_name(&self) -> PathBuf { + pub fn mangled_name(&self) -> PathBuf { self.get_metadata().file_name_sanitized() } @@ -1651,27 +1654,27 @@ impl<'a> ZipFile<'a> { /// This will read well-formed ZIP files correctly, and is resistant /// to path-based exploits. It is recommended over /// [`ZipFile::mangled_name`]. - fn enclosed_name(&self) -> Option { + pub fn enclosed_name(&self) -> Option { self.get_metadata().enclosed_name() } /// Get the comment of the file - fn comment(&self) -> &str { + pub fn comment(&self) -> &str { &self.get_metadata().file_comment } /// Get the compression method used to store the file - fn compression(&self) -> CompressionMethod { + pub fn compression(&self) -> CompressionMethod { self.get_metadata().compression_method } /// Get the size of the file, in bytes, in the archive - fn compressed_size(&self) -> u64 { + pub fn compressed_size(&self) -> u64 { self.get_metadata().compressed_size } /// Get the size of the file, in bytes, when uncompressed - fn size(&self) -> u64 { + pub fn size(&self) -> u64 { self.get_metadata().uncompressed_size } @@ -1696,17 +1699,17 @@ impl<'a> ZipFile<'a> { } /// Get unix mode for the file - fn unix_mode(&self) -> Option { + pub fn unix_mode(&self) -> Option { self.get_metadata().unix_mode() } /// Get the CRC32 hash of the original file - fn crc32(&self) -> u32 { + pub fn crc32(&self) -> u32 { self.get_metadata().crc32 } /// Get the extra data of the zip header for this file - fn extra_data(&self) -> Option<&[u8]> { + pub fn extra_data(&self) -> Option<&[u8]> { self.get_metadata() .extra_field .as_ref() @@ -1719,34 +1722,17 @@ impl<'a> ZipFile<'a> { } /// Get the starting offset of the zip header for this file - fn header_start(&self) -> u64 { + pub fn header_start(&self) -> u64 { self.get_metadata().header_start } /// Get the starting offset of the zip header in the central directory for this file - fn central_header_start(&self) -> u64 { + pub fn central_header_start(&self) -> u64 { self.get_metadata().central_header_start } } /// Methods for retrieving information on zip files impl<'a> ZipFile<'a> { - fn get_reader(&mut self) -> &mut ZipFileReader<'a> { - if let ZipFileReader::NoReader = self.reader { - let data = &self.data; - let crypto_reader = self.crypto_reader.take().expect("Invalid reader state"); - self.reader = make_reader(data.compression_method, data.crc32, crypto_reader) - } - &mut self.reader - } - - pub(crate) fn get_raw_reader(&mut self) -> &mut dyn Read { - if let ZipFileReader::NoReader = self.reader { - let crypto_reader = self.crypto_reader.take().expect("Invalid reader state"); - self.reader = ZipFileReader::Raw(crypto_reader.into_inner()) - } - &mut self.reader - } - /// iterate through all extra fields pub fn extra_data_fields(&self) -> impl Iterator { self.data.extra_fields.iter() @@ -1916,7 +1902,6 @@ mod test { #[test] fn zip_contents() { - use super::HasZipMetadata; use super::ZipArchive; let mut v = Vec::new(); @@ -1942,7 +1927,6 @@ mod test { #[test] fn 
zip_clone() { - use super::HasZipMetadata; use super::ZipArchive; use std::io::Read; @@ -1984,7 +1968,6 @@ mod test { #[test] fn file_and_dir_predicates() { - use super::HasZipMetadata; use super::ZipArchive; let mut v = Vec::new(); diff --git a/src/read/stream.rs b/src/read/stream.rs index 449267b8..7fb76e70 100644 --- a/src/read/stream.rs +++ b/src/read/stream.rs @@ -3,8 +3,8 @@ use std::io::{self, Read}; use std::path::{Path, PathBuf}; use super::{ - central_header_to_zip_file_inner, read_zipfile_from_stream, spec, HasZipMetadata, ZipCentralEntryBlock, - ZipError, ZipFile, ZipFileData, ZipResult, + central_header_to_zip_file_inner, read_zipfile_from_stream, ZipCentralEntryBlock, ZipError, + ZipFile, ZipFileData, ZipResult, }; use crate::spec::FixedSizeBlock; diff --git a/src/write.rs b/src/write.rs index c02872e7..0fcb55af 100644 --- a/src/write.rs +++ b/src/write.rs @@ -7,7 +7,7 @@ use crate::read::{ find_content, parse_single_extra_field, Config, ZipArchive, ZipFile, ZipFileReader, }; use crate::result::{ZipError, ZipResult}; -use crate::spec::{self, FixedSizeBlock, HasZipMetadata, Zip32CDEBlock}; +use crate::spec::{self, FixedSizeBlock, Zip32CDEBlock}; #[cfg(feature = "aes-crypto")] use crate::types::AesMode; use crate::types::{ @@ -545,8 +545,6 @@ impl<'k> FileOptions<'k, ExtendedFileOptions> { impl<'k, T: FileOptionExtension> Default for FileOptions<'k, T> { /// Construct a new FileOptions object fn default() -> Self { - #[cfg(feature = "time")] - use core::convert::TryInto; Self { compression_method: Default::default(), compression_level: None, @@ -1978,7 +1976,7 @@ mod test { use crate::write::SimpleFileOptions; use crate::zipcrypto::ZipCryptoKeys; use crate::CompressionMethod::Stored; - use crate::{HasZipMetadata, ZipArchive}; + use crate::ZipArchive; use std::io; use std::io::{Cursor, Read, Write}; use std::marker::PhantomData; From 43db4be237693b5f1de7ab1e69db74f8625ee200 Mon Sep 17 00:00:00 2001 From: Chris Hennick <4961925+Pr0methean@users.noreply.github.com> Date: Mon, 15 Jul 2024 09:04:41 -0700 Subject: [PATCH 6/6] Update src/write.rs Signed-off-by: Chris Hennick <4961925+Pr0methean@users.noreply.github.com> --- src/write.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/write.rs b/src/write.rs index 0fcb55af..a8a31a53 100644 --- a/src/write.rs +++ b/src/write.rs @@ -1807,7 +1807,6 @@ fn clamp_opt>( value: T, range: std::ops::RangeInclusive, ) -> Option { - use core::convert::TryInto; if range.contains(&value.try_into().ok()?) { Some(value) } else {
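
Usage note (illustrative, not part of the patches above): a minimal sketch of how the seekable-entry API added in this series might be called. The archive path, the entry name "data.bin", and the 16-byte tail read are hypothetical; only entries written with CompressionMethod::Stored can be opened this way, since by_index_seek rejects compressed entries with ZipError::UnsupportedArchive.

use std::fs::File;
use std::io::{Read, Seek, SeekFrom};

// Sketch only: assumes the archive contains a Stored (uncompressed) entry
// named "data.bin"; both the name and the tail length are hypothetical.
fn read_stored_entry_tail(archive_path: &str) -> zip::result::ZipResult<Vec<u8>> {
    let file = File::open(archive_path)?;
    let mut archive = zip::ZipArchive::new(file)?;

    // by_name_seek only succeeds for entries stored without compression;
    // compressed entries return ZipError::UnsupportedArchive.
    let mut entry = archive.by_name_seek("data.bin")?;

    // Seek within the entry as if it were a standalone file (offsets are
    // clamped to the entry's length), then read the remaining bytes.
    entry.seek(SeekFrom::End(-16))?;
    let mut tail = Vec::new();
    entry.read_to_end(&mut tail)?;
    Ok(tail)
}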