Merge branch 'master' into feature/add_zstd_compression
commit 63e714f622
15 changed files with 89 additions and 69 deletions
.github/workflows/ci.yaml (vendored) | 19

@@ -39,6 +39,25 @@ jobs:
           command: test
           args: --all
 
+  clippy:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v2
+
+      - uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          toolchain: nightly
+          override: true
+          components: clippy
+
+      - name: clippy
+        uses: actions-rs/cargo@v1
+        with:
+          command: clippy
+          args: --all-targets --all-features -- -D warnings
+
   check_fmt_and_docs:
     name: Checking fmt and docs
     runs-on: ubuntu-latest

@@ -1,4 +1,3 @@
-
 # Contributor Covenant Code of Conduct
 
 ## Our Pledge

@@ -59,5 +59,6 @@ fn real_main() -> i32 {
             }
         }
     }
-    return 0;
+
+    0
 }

@@ -27,5 +27,5 @@ fn real_main() -> i32 {
     file.read_to_string(&mut contents).unwrap();
     println!("{}", contents);
 
-    return 0;
+    0
 }

@@ -49,5 +49,6 @@ fn real_main() -> i32 {
             );
         }
     }
-    return 0;
+
+    0
 }

@@ -30,5 +30,6 @@ fn real_main() -> i32 {
             }
         }
     }
-    return 0;
+
+    0
 }

@@ -59,7 +59,7 @@ fn real_main() -> i32 {
         }
     }
 
-    return 0;
+    0
 }
 
 fn zip_dir<T>(

@@ -92,7 +92,7 @@ where
             f.read_to_end(&mut buffer)?;
             zip.write_all(&*buffer)?;
             buffer.clear();
-        } else if name.as_os_str().len() != 0 {
+        } else if !name.as_os_str().is_empty() {
             // Only if not root! Avoids path spec / warning
             // and mapname conversion failed error on unzip
             println!("adding dir {:?} as {:?} ...", path, name);

@@ -116,7 +116,7 @@ fn doit(
     let path = Path::new(dst_file);
     let file = File::create(&path).unwrap();
 
-    let walkdir = WalkDir::new(src_dir.to_string());
+    let walkdir = WalkDir::new(src_dir);
     let it = walkdir.into_iter();
 
     zip_dir(&mut it.filter_map(|e| e.ok()), src_dir, file, method)?;

@@ -18,7 +18,7 @@ fn real_main() -> i32 {
         Err(e) => println!("Error: {:?}", e),
     }
 
-    return 0;
+    0
 }
 
 fn doit(filename: &str) -> zip::result::ZipResult<()> {

@@ -42,7 +42,7 @@ fn doit(filename: &str) -> zip::result::ZipResult<()> {
     Ok(())
 }
 
-const LOREM_IPSUM : &'static [u8] = b"Lorem ipsum dolor sit amet, consectetur adipiscing elit. In tellus elit, tristique vitae mattis egestas, ultricies vitae risus. Quisque sit amet quam ut urna aliquet
+const LOREM_IPSUM : &[u8] = b"Lorem ipsum dolor sit amet, consectetur adipiscing elit. In tellus elit, tristique vitae mattis egestas, ultricies vitae risus. Quisque sit amet quam ut urna aliquet
 molestie. Proin blandit ornare dui, a tempor nisl accumsan in. Praesent a consequat felis. Morbi metus diam, auctor in auctor vel, feugiat id odio. Curabitur ex ex,
 dictum quis auctor quis, suscipit id lorem. Aliquam vestibulum dolor nec enim vehicula, porta tristique augue tincidunt. Vivamus ut gravida est. Sed pellentesque, dolor
 vitae tristique consectetur, neque lectus pulvinar dui, sed feugiat purus diam id lectus. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per

@@ -136,7 +136,7 @@ mod test {
 
     #[test]
     fn from_eq_to() {
-        for v in 0..(::std::u16::MAX as u32 + 1) {
+        for v in 0..(u16::MAX as u32 + 1) {
             #[allow(deprecated)]
             let from = CompressionMethod::from_u16(v as u16);
             #[allow(deprecated)]

@@ -146,20 +146,19 @@ mod test {
     }
 
     fn methods() -> Vec<CompressionMethod> {
-        let mut methods = Vec::new();
-        methods.push(CompressionMethod::Stored);
-        #[cfg(any(
-            feature = "deflate",
-            feature = "deflate-miniz",
-            feature = "deflate-zlib"
-        ))]
-        methods.push(CompressionMethod::Deflated);
-        #[cfg(feature = "bzip2")]
-        methods.push(CompressionMethod::Bzip2);
-        #[cfg(feature = "zstd")]
-        methods.push(CompressionMethod::Zstd);
-
-        methods
+        vec![
+            CompressionMethod::Stored,
+            #[cfg(any(
+                feature = "deflate",
+                feature = "deflate-miniz",
+                feature = "deflate-zlib"
+            ))]
+            CompressionMethod::Deflated,
+            #[cfg(feature = "bzip2")]
+            CompressionMethod::Bzip2,
+            #[cfg(feature = "zstd")]
+            CompressionMethod::Zstd,
+        ]
     }
 
     #[test]

@@ -6,7 +6,8 @@ pub trait FromCp437 {
     type Target;
 
     /// Function that does the conversion from cp437.
-    /// Gennerally allocations will be avoided if all data falls into the ASCII range.
+    /// Generally allocations will be avoided if all data falls into the ASCII range.
+    #[allow(clippy::wrong_self_convention)]
     fn from_cp437(self) -> Self::Target;
 }
 

src/read.rs | 33

@@ -198,11 +198,11 @@ fn make_crypto_reader<'a>(
     Ok(Ok(reader))
 }
 
-fn make_reader<'a>(
+fn make_reader(
     compression_method: CompressionMethod,
     crc32: u32,
-    reader: CryptoReader<'a>,
-) -> ZipFileReader<'a> {
+    reader: CryptoReader,
+) -> ZipFileReader {
     match compression_method {
         CompressionMethod::Stored => ZipFileReader::Stored(Crc32Reader::new(reader, crc32)),
         #[cfg(any(

@@ -317,7 +317,7 @@ impl<R: Read + io::Seek> ZipArchive<R> {
         let directory_start = footer
             .central_directory_offset
             .checked_add(archive_offset)
-            .ok_or_else(|| {
+            .ok_or({
                 ZipError::InvalidArchive("Invalid central directory size or offset")
             })?;
 

@@ -346,7 +346,7 @@ impl<R: Read + io::Seek> ZipArchive<R> {
         let mut files = Vec::new();
         let mut names_map = HashMap::new();
 
-        if let Err(_) = reader.seek(io::SeekFrom::Start(directory_start)) {
+        if reader.seek(io::SeekFrom::Start(directory_start)).is_err() {
             return Err(ZipError::InvalidArchive(
                 "Could not seek to start of central directory",
             ));

@@ -471,14 +471,14 @@ impl<R: Read + io::Seek> ZipArchive<R> {
     }
 
     /// Get a contained file by index
-    pub fn by_index<'a>(&'a mut self, file_number: usize) -> ZipResult<ZipFile<'a>> {
+    pub fn by_index(&mut self, file_number: usize) -> ZipResult<ZipFile<'_>> {
         Ok(self
             .by_index_with_optional_password(file_number, None)?
             .unwrap())
     }
 
     /// Get a contained file by index without decompressing it
-    pub fn by_index_raw<'a>(&'a mut self, file_number: usize) -> ZipResult<ZipFile<'a>> {
+    pub fn by_index_raw(&mut self, file_number: usize) -> ZipResult<ZipFile<'_>> {
         let reader = &mut self.reader;
         self.files
             .get_mut(file_number)

@@ -1034,7 +1034,7 @@ mod test {
         let mut v = Vec::new();
         v.extend_from_slice(include_bytes!("../tests/data/zip64_demo.zip"));
         let reader = ZipArchive::new(io::Cursor::new(v)).unwrap();
-        assert!(reader.len() == 1);
+        assert_eq!(reader.len(), 1);
     }
 
     #[test]

@@ -1045,7 +1045,7 @@ mod test {
         let mut v = Vec::new();
         v.extend_from_slice(include_bytes!("../tests/data/mimetype.zip"));
         let mut reader = ZipArchive::new(io::Cursor::new(v)).unwrap();
-        assert!(reader.comment() == b"");
+        assert_eq!(reader.comment(), b"");
         assert_eq!(reader.by_index(0).unwrap().central_header_start(), 77);
     }
 

@@ -1058,9 +1058,8 @@ mod test {
         v.extend_from_slice(include_bytes!("../tests/data/mimetype.zip"));
         let mut reader = io::Cursor::new(v);
         loop {
-            match read_zipfile_from_stream(&mut reader).unwrap() {
-                None => break,
-                _ => (),
+            if read_zipfile_from_stream(&mut reader).unwrap().is_none() {
+                break;
             }
         }
     }

@@ -1096,14 +1095,14 @@ mod test {
         let mut buf3 = [0; 5];
         let mut buf4 = [0; 5];
 
-        file1.read(&mut buf1).unwrap();
-        file2.read(&mut buf2).unwrap();
-        file1.read(&mut buf3).unwrap();
-        file2.read(&mut buf4).unwrap();
+        file1.read_exact(&mut buf1).unwrap();
+        file2.read_exact(&mut buf2).unwrap();
+        file1.read_exact(&mut buf3).unwrap();
+        file2.read_exact(&mut buf4).unwrap();
 
         assert_eq!(buf1, buf2);
         assert_eq!(buf3, buf4);
-        assert!(buf1 != buf3);
+        assert_ne!(buf1, buf3);
     }
 
     #[test]

src/types.rs | 16

@@ -65,7 +65,7 @@ impl DateTime {
         let seconds = (timepart & 0b0000000000011111) << 1;
         let minutes = (timepart & 0b0000011111100000) >> 5;
         let hours = (timepart & 0b1111100000000000) >> 11;
-        let days = (datepart & 0b0000000000011111) >> 0;
+        let days = datepart & 0b0000000000011111;
         let months = (datepart & 0b0000000111100000) >> 5;
         let years = (datepart & 0b1111111000000000) >> 9;
 

@@ -88,6 +88,7 @@ impl DateTime {
     /// * hour: [0, 23]
     /// * minute: [0, 59]
     /// * second: [0, 60]
+    #[allow(clippy::result_unit_err)]
     pub fn from_date_and_time(
         year: u16,
         month: u8,

@@ -96,8 +97,7 @@ impl DateTime {
         minute: u8,
         second: u8,
     ) -> Result<DateTime, ()> {
-        if year >= 1980
-            && year <= 2107
+        if (1980..=2107).contains(&year)
             && month >= 1
             && month <= 12
             && day >= 1

@@ -123,6 +123,7 @@ impl DateTime {
     /// Converts a OffsetDateTime object to a DateTime
     ///
     /// Returns `Err` when this object is out of bounds
+    #[allow(clippy::result_unit_err)]
     pub fn from_time(dt: OffsetDateTime) -> Result<DateTime, ()> {
         if dt.year() >= 1980 && dt.year() <= 2107 {
             Ok(DateTime {

@@ -256,10 +257,7 @@ impl ZipFileData {
 
         ::std::path::Path::new(&filename)
             .components()
-            .filter(|component| match *component {
-                ::std::path::Component::Normal(..) => true,
-                _ => false,
-            })
+            .filter(|component| matches!(*component, ::std::path::Component::Normal(..)))
             .fold(::std::path::PathBuf::new(), |mut path, ref cur| {
                 path.push(cur.as_os_str());
                 path

@@ -325,6 +323,7 @@ mod test {
     }
 
     #[test]
+    #[allow(clippy::unusual_byte_groupings)]
     fn datetime_default() {
         use super::DateTime;
         let dt = DateTime::default();

@@ -333,6 +332,7 @@ mod test {
     }
 
     #[test]
+    #[allow(clippy::unusual_byte_groupings)]
     fn datetime_max() {
         use super::DateTime;
         let dt = DateTime::from_date_and_time(2107, 12, 31, 23, 59, 60).unwrap();

@@ -394,7 +394,7 @@ mod test {
 
         #[cfg(feature = "time")]
         assert_eq!(
-            format!("{}", dt.to_time().unwrap().format(&Rfc3339).unwrap()),
+            dt.to_time().unwrap().format(&Rfc3339).unwrap(),
             "2018-11-17T10:38:30Z"
         );
     }

src/write.rs | 20

@@ -242,7 +242,10 @@ impl<A: Read + Write + io::Seek> ZipWriter<A> {
         let (archive_offset, directory_start, number_of_files) =
             ZipArchive::get_directory_counts(&mut readwriter, &footer, cde_start_pos)?;
 
-        if let Err(_) = readwriter.seek(io::SeekFrom::Start(directory_start)) {
+        if readwriter
+            .seek(io::SeekFrom::Start(directory_start))
+            .is_err()
+        {
             return Err(ZipError::InvalidArchive(
                 "Could not seek to start of central directory",
             ));

@@ -312,7 +315,7 @@ impl<W: Write + io::Seek> ZipWriter<W> {
     {
         self.finish_file()?;
 
-        let raw_values = raw_values.unwrap_or_else(|| ZipRawValues {
+        let raw_values = raw_values.unwrap_or(ZipRawValues {
             crc32: 0,
             compressed_size: 0,
             uncompressed_size: 0,

@@ -553,7 +556,7 @@ impl<W: Write + io::Seek> ZipWriter<W> {
         }
         let file = self.files.last_mut().unwrap();
 
-        validate_extra_data(&file)?;
+        validate_extra_data(file)?;
 
         if !self.writing_to_central_extra_field_only {
             let writer = self.inner.get_plain();

@@ -871,10 +874,7 @@ impl<W: Write + io::Seek> GenericZipWriter<W> {
     }
 
     fn is_closed(&self) -> bool {
-        match *self {
-            GenericZipWriter::Closed => true,
-            _ => false,
-        }
+        matches!(*self, GenericZipWriter::Closed)
     }
 
     fn get_plain(&mut self) -> &mut W {

@@ -950,7 +950,7 @@ fn write_local_file_header<T: Write>(writer: &mut T, file: &ZipFileData) -> ZipR
     writer.write_all(file.file_name.as_bytes())?;
     // zip64 extra field
     if file.large_file {
-        write_local_zip64_extra_field(writer, &file)?;
+        write_local_zip64_extra_field(writer, file)?;
     }
 
     Ok(())

@@ -1068,7 +1068,7 @@ fn validate_extra_data(file: &ZipFileData) -> ZipResult<()> {
         )));
     }
 
-    while data.len() > 0 {
+    while !data.is_empty() {
         let left = data.len();
         if left < 4 {
             return Err(ZipError::Io(io::Error::new(

@@ -1248,7 +1248,7 @@ mod test {
         };
         writer.start_file("mimetype", options).unwrap();
         writer
-            .write(b"application/vnd.oasis.opendocument.text")
+            .write_all(b"application/vnd.oasis.opendocument.text")
             .unwrap();
         let result = writer.finish().unwrap();
 

@@ -47,7 +47,7 @@ impl ZipCryptoKeys {
     }
 
     fn crc32(crc: Wrapping<u32>, input: u8) -> Wrapping<u32> {
-        return (crc >> 8) ^ Wrapping(CRCTABLE[((crc & Wrapping(0xff)).0 as u8 ^ input) as usize]);
+        (crc >> 8) ^ Wrapping(CRCTABLE[((crc & Wrapping(0xff)).0 as u8 ^ input) as usize])
     }
 }
 

@@ -71,7 +71,7 @@ impl<R: std::io::Read> ZipCryptoReader<R> {
     /// password byte sequence that is unrepresentable in UTF-8.
    pub fn new(file: R, password: &[u8]) -> ZipCryptoReader<R> {
        let mut result = ZipCryptoReader {
-            file: file,
+            file,
            keys: ZipCryptoKeys::new(),
        };
 

@@ -129,11 +129,11 @@ pub struct ZipCryptoReaderValid<R> {
 }
 
 impl<R: std::io::Read> std::io::Read for ZipCryptoReaderValid<R> {
-    fn read(&mut self, mut buf: &mut [u8]) -> std::io::Result<usize> {
+    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
         // Note: There might be potential for optimization. Inspiration can be found at:
         // https://github.com/kornelski/7z/blob/master/CPP/7zip/Crypto/ZipCrypto.cpp
 
-        let result = self.reader.file.read(&mut buf);
+        let result = self.reader.file.read(buf);
         for byte in buf.iter_mut() {
             *byte = self.reader.keys.decrypt_byte(*byte);
         }

@@ -100,7 +100,7 @@ fn read_zip<R: Read + Seek>(zip_file: R) -> zip::result::ZipResult<zip::ZipArchi
         "test_with_extra_data/🐢.txt",
         ENTRY_NAME,
     ];
-    let expected_file_names = HashSet::from_iter(expected_file_names.iter().map(|&v| v));
+    let expected_file_names = HashSet::from_iter(expected_file_names.iter().copied());
     let file_names = archive.file_names().collect::<HashSet<_>>();
     assert_eq!(file_names, expected_file_names);
 

@@ -134,17 +134,17 @@ fn check_zip_contents(zip_file: &mut Cursor<Vec<u8>>, name: &str) {
 
 fn check_zip_file_contents<R: Read + Seek>(archive: &mut zip::ZipArchive<R>, name: &str) {
     let file_contents: String = read_zip_file(archive, name).unwrap();
-    assert!(file_contents.as_bytes() == LOREM_IPSUM);
+    assert_eq!(file_contents.as_bytes(), LOREM_IPSUM);
 }
 
-const LOREM_IPSUM : &'static [u8] = b"Lorem ipsum dolor sit amet, consectetur adipiscing elit. In tellus elit, tristique vitae mattis egestas, ultricies vitae risus. Quisque sit amet quam ut urna aliquet
+const LOREM_IPSUM : &[u8] = b"Lorem ipsum dolor sit amet, consectetur adipiscing elit. In tellus elit, tristique vitae mattis egestas, ultricies vitae risus. Quisque sit amet quam ut urna aliquet
 molestie. Proin blandit ornare dui, a tempor nisl accumsan in. Praesent a consequat felis. Morbi metus diam, auctor in auctor vel, feugiat id odio. Curabitur ex ex,
 dictum quis auctor quis, suscipit id lorem. Aliquam vestibulum dolor nec enim vehicula, porta tristique augue tincidunt. Vivamus ut gravida est. Sed pellentesque, dolor
 vitae tristique consectetur, neque lectus pulvinar dui, sed feugiat purus diam id lectus. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per
 inceptos himenaeos. Maecenas feugiat velit in ex ultrices scelerisque id id neque.
 ";
 
-const EXTRA_DATA: &'static [u8] = b"Extra Data";
+const EXTRA_DATA: &[u8] = b"Extra Data";
 
 const ENTRY_NAME: &str = "test/lorem_ipsum.txt";