Run cargo fmt

Ryan Levick 2020-06-15 10:44:39 +02:00
parent a973913ebd
commit ebb07348ee
19 changed files with 614 additions and 536 deletions
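The reformatted examples below lean on the builder-style FileOptions API that rustfmt wraps across lines; as a rough, self-contained sketch of that usage (the archive contents and file name here are illustrative, not taken from this commit):

use std::io::{Cursor, Write};
use zip::write::FileOptions;
use zip::{CompressionMethod, ZipWriter};

fn main() -> zip::result::ZipResult<()> {
    // Write the archive into an in-memory buffer instead of a file on disk.
    let mut writer = ZipWriter::new(Cursor::new(Vec::new()));

    // rustfmt breaks long builder chains like this one onto separate lines.
    let options = FileOptions::default()
        .compression_method(CompressionMethod::Stored)
        .unix_permissions(0o644);

    writer.start_file("hello.txt", options)?;
    writer.write_all(b"Hello, World!\n")?;
    writer.finish()?;
    Ok(())
}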

View file

@@ -9,8 +9,8 @@ use zip::{ZipArchive, ZipWriter};
fn generate_random_archive(size: usize) -> Vec<u8> {
    let data = Vec::new();
    let mut writer = ZipWriter::new(Cursor::new(data));
    let options =
        zip::write::FileOptions::default().compression_method(zip::CompressionMethod::Stored);

    writer.start_file("random.dat", options).unwrap();
    let mut bytes = vec![0u8; size];

View file

@@ -1,5 +1,5 @@
use std::fs;
use std::io;

fn main() {
    std::process::exit(real_main());
@@ -28,10 +28,19 @@ fn real_main() -> i32 {
        }

        if (&*file.name()).ends_with('/') {
            println!(
                "File {} extracted to \"{}\"",
                i,
                outpath.as_path().display()
            );
            fs::create_dir_all(&outpath).unwrap();
        } else {
            println!(
                "File {} extracted to \"{}\" ({} bytes)",
                i,
                outpath.as_path().display(),
                file.size()
            );
            if let Some(p) = outpath.parent() {
                if !p.exists() {
                    fs::create_dir_all(&p).unwrap();

View file

@@ -1,12 +1,10 @@
use std::io::prelude::*;

fn main() {
    std::process::exit(real_main());
}

fn real_main() -> i32 {
    let args: Vec<_> = std::env::args().collect();
    if args.len() < 2 {
        println!("Usage: {} <filename>", args[0]);
@@ -16,11 +14,13 @@ fn real_main() -> i32
    let zipfile = std::fs::File::open(&fname).unwrap();

    let mut archive = zip::ZipArchive::new(zipfile).unwrap();

    let mut file = match archive.by_name("test/lorem_ipsum.txt") {
        Ok(file) => file,
        Err(..) => {
            println!("File test/lorem_ipsum.txt not found");
            return 2;
        }
    };

    let mut contents = String::new();

View file

@@ -29,9 +29,18 @@ fn real_main() -> i32 {
        }

        if (&*file.name()).ends_with('/') {
            println!(
                "Entry {} is a directory with name \"{}\"",
                i,
                outpath.as_path().display()
            );
        } else {
            println!(
                "Entry {} is a file with name \"{}\" ({} bytes)",
                i,
                outpath.as_path().display(),
                file.size()
            );
        }
    }
    return 0;

View file

@@ -12,17 +12,22 @@ fn real_main() -> i32 {
    loop {
        match zip::read::read_zipfile_from_stream(&mut stdin_handle) {
            Ok(Some(mut file)) => {
                println!(
                    "{}: {} bytes ({} bytes packed)",
                    file.name(),
                    file.size(),
                    file.compressed_size()
                );
                match file.read(&mut buf) {
                    Ok(n) => println!("The first {} bytes are: {:?}", n, &buf[0..n]),
                    Err(e) => println!("Could not read the file: {:?}", e),
                };
            }
            Ok(None) => break,
            Err(e) => {
                println!("Error encountered while reading zip: {:?}", e);
                return 1;
            }
        }
    }
    return 0;

View file

@@ -1,42 +1,45 @@
use std::io::prelude::*;
use std::io::{Seek, Write};
use std::iter::Iterator;
use zip::result::ZipError;
use zip::write::FileOptions;

use std::fs::File;
use std::path::Path;
use walkdir::{DirEntry, WalkDir};

fn main() {
    std::process::exit(real_main());
}

const METHOD_STORED: Option<zip::CompressionMethod> = Some(zip::CompressionMethod::Stored);

#[cfg(feature = "deflate")]
const METHOD_DEFLATED: Option<zip::CompressionMethod> = Some(zip::CompressionMethod::Deflated);
#[cfg(not(feature = "deflate"))]
const METHOD_DEFLATED: Option<zip::CompressionMethod> = None;

#[cfg(feature = "bzip2")]
const METHOD_BZIP2: Option<zip::CompressionMethod> = Some(zip::CompressionMethod::Bzip2);
#[cfg(not(feature = "bzip2"))]
const METHOD_BZIP2: Option<zip::CompressionMethod> = None;

fn real_main() -> i32 {
    let args: Vec<_> = std::env::args().collect();
    if args.len() < 3 {
        println!(
            "Usage: {} <source_directory> <destination_zipfile>",
            args[0]
        );
        return 1;
    }

    let src_dir = &*args[1];
    let dst_file = &*args[2];
    for &method in [METHOD_STORED, METHOD_DEFLATED, METHOD_BZIP2].iter() {
        if method.is_none() {
            continue;
        }
        match doit(src_dir, dst_file, method.unwrap()) {
            Ok(_) => println!("done: {} written to {}", src_dir, dst_file),
            Err(e) => println!("Error: {:?}", e),
@@ -46,9 +49,14 @@ fn real_main() -> i32 {
    return 0;
}

fn zip_dir<T>(
    it: &mut dyn Iterator<Item = DirEntry>,
    prefix: &str,
    writer: T,
    method: zip::CompressionMethod,
) -> zip::result::ZipResult<()>
where
    T: Write + Seek,
{
    let mut zip = zip::ZipWriter::new(writer);
    let options = FileOptions::default()
@@ -81,7 +89,11 @@ fn zip_dir<T>(it: &mut dyn Iterator<Item=DirEntry>, prefix: &str, writer: T, met
    Result::Ok(())
}

fn doit(
    src_dir: &str,
    dst_file: &str,
    method: zip::CompressionMethod,
) -> zip::result::ZipResult<()> {
    if !Path::new(src_dir).is_dir() {
        return Err(ZipError::FileNotFound);
    }

View file

@@ -1,13 +1,11 @@
use std::io::prelude::*;
use zip::write::FileOptions;

fn main() {
    std::process::exit(real_main());
}

fn real_main() -> i32 {
    let args: Vec<_> = std::env::args().collect();
    if args.len() < 2 {
        println!("Usage: {} <filename>", args[0]);
@@ -15,8 +13,7 @@ fn real_main() -> i32
    }

    let filename = &*args[1];
    match doit(filename) {
        Ok(_) => println!("File written to {}", filename),
        Err(e) => println!("Error: {:?}", e),
    }
@@ -24,8 +21,7 @@ fn real_main() -> i32
    return 0;
}

fn doit(filename: &str) -> zip::result::ZipResult<()> {
    let path = std::path::Path::new(filename);
    let file = std::fs::File::create(&path).unwrap();
@@ -33,7 +29,9 @@ fn doit(filename: &str) -> zip::result::ZipResult<()>
    zip.add_directory("test/", Default::default())?;

    let options = FileOptions::default()
        .compression_method(zip::CompressionMethod::Stored)
        .unix_permissions(0o755);
    zip.start_file("test/☃.txt", options)?;
    zip.write_all(b"Hello, World!\n")?;

View file

@@ -4,8 +4,7 @@ use std::fmt;
/// Compression methods for the contents of a ZIP file.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum CompressionMethod {
    /// The file is stored (no compression)
    Stored,
    /// Deflate in pure rust
@@ -57,8 +56,7 @@ mod test {
    #[test]
    fn from_eq_to() {
        for v in 0..(::std::u16::MAX as u32 + 1) {
            let from = CompressionMethod::from_u16(v as u16);
            let to = from.to_u16() as u32;
            assert_eq!(v, to);
@@ -68,12 +66,13 @@ mod test {
    fn methods() -> Vec<CompressionMethod> {
        let mut methods = Vec::new();
        methods.push(CompressionMethod::Stored);
        #[cfg(feature = "deflate")]
        methods.push(CompressionMethod::Deflated);
        #[cfg(feature = "bzip2")]
        methods.push(CompressionMethod::Bzip2);
        methods
    }

    #[test]
    fn to_eq_from() {
        fn check_match(method: CompressionMethod) {

View file

@@ -13,12 +13,10 @@ pub trait FromCp437 {
impl<'a> FromCp437 for &'a [u8] {
    type Target = ::std::borrow::Cow<'a, str>;

    fn from_cp437(self) -> Self::Target {
        if self.iter().all(|c| *c < 0x80) {
            ::std::str::from_utf8(self).unwrap().into()
        } else {
            self.iter().map(|c| to_char(*c)).collect::<String>().into()
        }
    }
@@ -30,18 +28,15 @@ impl FromCp437 for Vec<u8> {
    fn from_cp437(self) -> Self::Target {
        if self.iter().all(|c| *c < 0x80) {
            String::from_utf8(self).unwrap()
        } else {
            self.into_iter().map(|c| to_char(c)).collect()
        }
    }
}

fn to_char(input: u8) -> char {
    let output = match input {
        0x00..=0x7f => input as u32,
        0x80 => 0x00c7,
        0x81 => 0x00fc,
        0x82 => 0x00e9,
@@ -175,20 +170,17 @@ fn to_char(input: u8) -> char
}

#[cfg(test)]
mod test {
    #[test]
    fn to_char_valid() {
        for i in 0x00_u32..0x100 {
            super::to_char(i as u8);
        }
    }

    #[test]
    fn ascii() {
        for i in 0x00..0x80 {
            assert_eq!(super::to_char(i), i as char);
        }
    }

View file

@@ -6,28 +6,23 @@ use std::io::prelude::*;
use crc32fast::Hasher;

/// Reader that validates the CRC32 when it reaches the EOF.
pub struct Crc32Reader<R> {
    inner: R,
    hasher: Hasher,
    check: u32,
}

impl<R> Crc32Reader<R> {
    /// Get a new Crc32Reader which check the inner reader against checksum.
    pub fn new(inner: R, checksum: u32) -> Crc32Reader<R> {
        Crc32Reader {
            inner: inner,
            hasher: Hasher::new(),
            check: checksum,
        }
    }

    fn check_matches(&self) -> bool {
        self.check == self.hasher.clone().finalize()
    }
@@ -36,13 +31,12 @@ impl<R> Crc32Reader<R>
    }
}

impl<R: Read> Read for Crc32Reader<R> {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        let count = match self.inner.read(buf) {
            Ok(0) if !buf.is_empty() && !self.check_matches() => {
                return Err(io::Error::new(io::ErrorKind::Other, "Invalid checksum"))
            }
            Ok(n) => n,
            Err(e) => return Err(e),
        };
@@ -52,14 +46,12 @@ impl<R: Read> Read for Crc32Reader<R>
}

#[cfg(test)]
mod test {
    use super::*;
    use std::io::Read;

    #[test]
    fn test_empty_reader() {
        let data: &[u8] = b"";
        let mut buf = [0; 1];
@@ -67,12 +59,15 @@ mod test
        assert_eq!(reader.read(&mut buf).unwrap(), 0);

        let mut reader = Crc32Reader::new(data, 1);
        assert!(reader
            .read(&mut buf)
            .unwrap_err()
            .to_string()
            .contains("Invalid checksum"));
    }

    #[test]
    fn test_byte_by_byte() {
        let data: &[u8] = b"1234";
        let mut buf = [0; 1];
@@ -87,8 +82,7 @@ mod test
    }

    #[test]
    fn test_zero_read() {
        let data: &[u8] = b"1234";
        let mut buf = [0; 5];

View file

@@ -2,16 +2,16 @@
#![warn(missing_docs)]

pub use crate::compression::CompressionMethod;
pub use crate::read::ZipArchive;
pub use crate::types::DateTime;
pub use crate::write::ZipWriter;

mod compression;
mod cp437;
mod crc32;
pub mod read;
pub mod result;
mod spec;
mod types;
pub mod write;

View file

@@ -1,17 +1,17 @@
//! Structs for reading a ZIP archive

use crate::compression::CompressionMethod;
use crate::crc32::Crc32Reader;
use crate::result::{ZipError, ZipResult};
use crate::spec;
use std::borrow::Cow;
use std::collections::HashMap;
use std::io;
use std::io::prelude::*;

use crate::cp437::FromCp437;
use crate::types::{DateTime, System, ZipFileData};
use podio::{LittleEndian, ReadPodExt};

#[cfg(feature = "deflate")]
use flate2::read::DeflateDecoder;
@@ -51,8 +51,7 @@ mod ffi {
/// println!("Result: {:?}", doit());
/// ```
#[derive(Clone, Debug)]
pub struct ZipArchive<R: Read + io::Seek> {
    reader: R,
    files: Vec<ZipFileData>,
    names_map: HashMap<String, usize>,
@@ -75,70 +74,64 @@ pub struct ZipFile<'a> {
    reader: ZipFileReader<'a>,
}

fn unsupported_zip_error<T>(detail: &'static str) -> ZipResult<T> {
    Err(ZipError::UnsupportedArchive(detail))
}

fn make_reader<'a>(
    compression_method: crate::compression::CompressionMethod,
    crc32: u32,
    reader: io::Take<&'a mut dyn io::Read>,
) -> ZipResult<ZipFileReader<'a>> {
    match compression_method {
        CompressionMethod::Stored => Ok(ZipFileReader::Stored(Crc32Reader::new(reader, crc32))),
        #[cfg(feature = "deflate")]
        CompressionMethod::Deflated => {
            let deflate_reader = DeflateDecoder::new(reader);
            Ok(ZipFileReader::Deflated(Crc32Reader::new(
                deflate_reader,
                crc32,
            )))
        }
        #[cfg(feature = "bzip2")]
        CompressionMethod::Bzip2 => {
            let bzip2_reader = BzDecoder::new(reader);
            Ok(ZipFileReader::Bzip2(Crc32Reader::new(bzip2_reader, crc32)))
        }
        _ => unsupported_zip_error("Compression method not supported"),
    }
}

impl<R: Read + io::Seek> ZipArchive<R> {
    /// Get the directory start offset and number of files. This is done in a
    /// separate function to ease the control flow design.
    fn get_directory_counts(
        reader: &mut R,
        footer: &spec::CentralDirectoryEnd,
        cde_start_pos: u64,
    ) -> ZipResult<(u64, u64, usize)> {
        // See if there's a ZIP64 footer. The ZIP64 locator if present will
        // have its signature 20 bytes in front of the standard footer. The
        // standard footer, in turn, is 22+N bytes large, where N is the
        // comment length. Therefore:
        let zip64locator = if reader
            .seek(io::SeekFrom::End(
                -(20 + 22 + footer.zip_file_comment.len() as i64),
            ))
            .is_ok()
        {
            match spec::Zip64CentralDirectoryEndLocator::parse(reader) {
                Ok(loc) => Some(loc),
                Err(ZipError::InvalidArchive(_)) => {
                    // No ZIP64 header; that's actually fine. We're done here.
                    None
                }
                Err(e) => {
                    // Yikes, a real problem
                    return Err(e);
                }
            }
        } else {
            // Empty Zip files will have nothing else so this error might be fine. If
            // not, we'll find out soon.
            None
@@ -150,19 +143,24 @@ impl<R: Read+io::Seek> ZipArchive<R>
                // offsets all being too small. Get the amount of error by comparing
                // the actual file position we found the CDE at with the offset
                // recorded in the CDE.
                let archive_offset = cde_start_pos
                    .checked_sub(footer.central_directory_size as u64)
                    .and_then(|x| x.checked_sub(footer.central_directory_offset as u64))
                    .ok_or(ZipError::InvalidArchive(
                        "Invalid central directory size or offset",
                    ))?;

                let directory_start = footer.central_directory_offset as u64 + archive_offset;
                let number_of_files = footer.number_of_files_on_this_disk as usize;
                return Ok((archive_offset, directory_start, number_of_files));
            }
            Some(locator64) => {
                // If we got here, this is indeed a ZIP64 file.
                if footer.disk_number as u32 != locator64.disk_with_central_directory {
                    return unsupported_zip_error(
                        "Support for multi-disk files is not implemented",
                    );
                }

                // We need to reassess `archive_offset`. We know where the ZIP64
@@ -175,19 +173,28 @@ impl<R: Read+io::Seek> ZipArchive<R>
                let search_upper_bound = cde_start_pos
                    .checked_sub(60) // minimum size of Zip64CentralDirectoryEnd + Zip64CentralDirectoryEndLocator
                    .ok_or(ZipError::InvalidArchive(
                        "File cannot contain ZIP64 central directory end",
                    ))?;
                let (footer, archive_offset) = spec::Zip64CentralDirectoryEnd::find_and_parse(
                    reader,
                    locator64.end_of_central_directory_offset,
                    search_upper_bound,
                )?;

                if footer.disk_number != footer.disk_with_central_directory {
                    return unsupported_zip_error(
                        "Support for multi-disk files is not implemented",
                    );
                }

                let directory_start = footer.central_directory_offset + archive_offset;
                Ok((
                    archive_offset,
                    directory_start,
                    footer.number_of_files as usize,
                ))
            }
        }
    }
@@ -195,9 +202,8 @@ impl<R: Read+io::Seek> ZipArchive<R>
    pub fn new(mut reader: R) -> ZipResult<ZipArchive<R>> {
        let (footer, cde_start_pos) = spec::CentralDirectoryEnd::find_and_parse(&mut reader)?;

        if footer.disk_number != footer.disk_with_central_directory {
            return unsupported_zip_error("Support for multi-disk files is not implemented");
        }

        let (archive_offset, directory_start, number_of_files) =
@@ -207,11 +213,12 @@ impl<R: Read+io::Seek> ZipArchive<R>
        let mut names_map = HashMap::new();

        if let Err(_) = reader.seek(io::SeekFrom::Start(directory_start)) {
            return Err(ZipError::InvalidArchive(
                "Could not seek to start of central directory",
            ));
        }

        for _ in 0..number_of_files {
            let file = central_header_to_zip_file(&mut reader, archive_offset)?;
            names_map.insert(file.file_name.clone(), files.len());
            files.push(file);
@@ -238,8 +245,7 @@ impl<R: Read+io::Seek> ZipArchive<R>
    /// }
    /// }
    /// ```
    pub fn len(&self) -> usize {
        self.files.len()
    }
@@ -262,62 +268,66 @@ impl<R: Read+io::Seek> ZipArchive<R>
    }

    /// Search for a file entry by name
    pub fn by_name<'a>(&'a mut self, name: &str) -> ZipResult<ZipFile<'a>> {
        let index = match self.names_map.get(name) {
            Some(index) => *index,
            None => {
                return Err(ZipError::FileNotFound);
            }
        };
        self.by_index(index)
    }

    /// Get a contained file by index
    pub fn by_index<'a>(&'a mut self, file_number: usize) -> ZipResult<ZipFile<'a>> {
        if file_number >= self.files.len() {
            return Err(ZipError::FileNotFound);
        }
        let ref mut data = self.files[file_number];

        if data.encrypted {
            return unsupported_zip_error("Encrypted files are not supported");
        }

        // Parse local header
        self.reader.seek(io::SeekFrom::Start(data.header_start))?;
        let signature = self.reader.read_u32::<LittleEndian>()?;
        if signature != spec::LOCAL_FILE_HEADER_SIGNATURE {
            return Err(ZipError::InvalidArchive("Invalid local file header"));
        }

        self.reader.seek(io::SeekFrom::Current(22))?;
        let file_name_length = self.reader.read_u16::<LittleEndian>()? as u64;
        let extra_field_length = self.reader.read_u16::<LittleEndian>()? as u64;
        let magic_and_header = 4 + 22 + 2 + 2;
        data.data_start =
            data.header_start + magic_and_header + file_name_length + extra_field_length;

        self.reader.seek(io::SeekFrom::Start(data.data_start))?;
        let limit_reader = (self.reader.by_ref() as &mut dyn Read).take(data.compressed_size);

        Ok(ZipFile {
            reader: make_reader(data.compression_method, data.crc32, limit_reader)?,
            data: Cow::Borrowed(data),
        })
    }

    /// Unwrap and return the inner reader object
    ///
    /// The position of the reader is undefined.
    pub fn into_inner(self) -> R {
        self.reader
    }
}

fn central_header_to_zip_file<R: Read + io::Seek>(
    reader: &mut R,
    archive_offset: u64,
) -> ZipResult<ZipFileData> {
    // Parse central header
    let signature = reader.read_u32::<LittleEndian>()?;
    if signature != spec::CENTRAL_DIRECTORY_HEADER_SIGNATURE {
        return Err(ZipError::InvalidArchive("Invalid Central Directory header"));
    }

    let version_made_by = reader.read_u16::<LittleEndian>()?;
@@ -340,22 +350,19 @@ fn central_header_to_zip_file<R: Read+io::Seek>(reader: &mut R, archive_offset:
    let offset = reader.read_u32::<LittleEndian>()? as u64;
    let file_name_raw = ReadPodExt::read_exact(reader, file_name_length)?;
    let extra_field = ReadPodExt::read_exact(reader, extra_field_length)?;
    let file_comment_raw = ReadPodExt::read_exact(reader, file_comment_length)?;

    let file_name = match is_utf8 {
        true => String::from_utf8_lossy(&*file_name_raw).into_owned(),
        false => file_name_raw.clone().from_cp437(),
    };
    let file_comment = match is_utf8 {
        true => String::from_utf8_lossy(&*file_comment_raw).into_owned(),
        false => file_comment_raw.from_cp437(),
    };

    // Construct the result
    let mut result = ZipFileData {
        system: System::from_u8((version_made_by >> 8) as u8),
        version_made_by: version_made_by as u8,
        encrypted: encrypted,
@@ -373,7 +380,7 @@ fn central_header_to_zip_file<R: Read+io::Seek>(reader: &mut R, archive_offset:
    };

    match parse_extra_field(&mut result, &*extra_field) {
        Ok(..) | Err(ZipError::Io(..)) => {}
        Err(e) => Err(e)?,
    }
@@ -383,17 +390,14 @@ fn central_header_to_zip_file<R: Read+io::Seek>(reader: &mut R, archive_offset:
    Ok(result)
}

fn parse_extra_field(file: &mut ZipFileData, data: &[u8]) -> ZipResult<()> {
    let mut reader = io::Cursor::new(data);

    while (reader.position() as usize) < data.len() {
        let kind = reader.read_u16::<LittleEndian>()?;
        let len = reader.read_u16::<LittleEndian>()?;
        let mut len_left = len as i64;
        match kind {
            // Zip64 extended information extra field
            0x0001 => {
                if file.uncompressed_size == 0xFFFFFFFF {
@@ -410,8 +414,8 @@ fn parse_extra_field(file: &mut ZipFileData, data: &[u8]) -> ZipResult<()>
                }
                // Unparsed fields:
                // u32: disk start number
            }
            _ => {}
        }

        // We could also check for < 0 to check for errors
@@ -440,7 +444,10 @@ impl<'a> ZipFile<'a> {
    }

    /// Get the version of the file
    pub fn version_made_by(&self) -> (u8, u8) {
        (
            self.data.version_made_by / 10,
            self.data.version_made_by % 10,
        )
    }

    /// Get the name of the file
    pub fn name(&self) -> &str {
@@ -477,7 +484,11 @@ impl<'a> ZipFile<'a> {
    }

    /// Returns whether the file is actually a directory
    pub fn is_dir(&self) -> bool {
        self.name()
            .chars()
            .rev()
            .next()
            .map_or(false, |c| c == '/' || c == '\\')
    }

    /// Returns whether the file is a regular file
    pub fn is_file(&self) -> bool {
@@ -490,9 +501,7 @@ impl<'a> ZipFile<'a> {
        }

        match self.data.system {
            System::Unix => Some(self.data.external_attributes >> 16),
            System::Dos => {
                // Interpret MSDOS directory bit
                let mut mode = if 0x10 == (self.data.external_attributes & 0x10) {
@@ -505,7 +514,7 @@ impl<'a> ZipFile<'a> {
                    mode &= 0o0555;
                }
                Some(mode)
            }
            _ => None,
        }
    }
@@ -521,9 +530,9 @@ impl<'a> ZipFile<'a> {
}

impl<'a> Read for ZipFile<'a> {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.get_reader().read(buf)
    }
}

impl<'a> Drop for ZipFile<'a> {
@@ -531,7 +540,7 @@ impl<'a> Drop for ZipFile<'a> {
        // self.data is Owned, this reader is constructed by a streaming reader.
        // In this case, we want to exhaust the reader so that the next file is accessible.
        if let Cow::Owned(_) = self.data {
            let mut buffer = [0; 1 << 16];

            // Get the inner `Take` reader so all decompression and CRC calculation is skipped.
            let innerreader = ::std::mem::replace(&mut self.reader, ZipFileReader::NoReader);
@@ -548,7 +557,10 @@ impl<'a> Drop for ZipFile<'a> {
                match reader.read(&mut buffer) {
                    Ok(0) => break,
                    Ok(_) => (),
                    Err(e) => panic!(
                        "Could not consume all of the output of the current ZipFile: {:?}",
                        e
                    ),
                }
            }
        }
@@ -571,7 +583,9 @@ impl<'a> Drop for ZipFile<'a> {
/// * `comment`: set to an empty string
/// * `data_start`: set to 0
/// * `external_attributes`: `unix_mode()`: will return None
pub fn read_zipfile_from_stream<'a, R: io::Read>(
    reader: &'a mut R,
) -> ZipResult<Option<ZipFile<'_>>> {
    let signature = reader.read_u32::<LittleEndian>()?;

    match signature {
@@ -597,14 +611,12 @@ pub fn read_zipfile_from_stream<'a, R: io::Read>(reader: &'a mut R) -> ZipResult
    let file_name_raw = ReadPodExt::read_exact(reader, file_name_length)?;
    let extra_field = ReadPodExt::read_exact(reader, extra_field_length)?;

    let file_name = match is_utf8 {
        true => String::from_utf8_lossy(&*file_name_raw).into_owned(),
        false => file_name_raw.clone().from_cp437(),
    };

    let mut result = ZipFileData {
        system: System::from_u8((version_made_by >> 8) as u8),
        version_made_by: version_made_by as u8,
        encrypted: encrypted,
@@ -615,7 +627,7 @@ pub fn read_zipfile_from_stream<'a, R: io::Read>(reader: &'a mut R) -> ZipResult
        uncompressed_size: uncompressed_size as u64,
        file_name: file_name,
        file_name_raw: file_name_raw,
        file_comment: String::new(), // file comment is only available in the central directory
        // header_start and data start are not available, but also don't matter, since seeking is
        // not available.
        header_start: 0,
@@ -627,12 +639,12 @@ pub fn read_zipfile_from_stream<'a, R: io::Read>(reader: &'a mut R) -> ZipResult
    };

    match parse_extra_field(&mut result, &extra_field) {
        Ok(..) | Err(ZipError::Io(..)) => {}
        Err(e) => Err(e)?,
    }

    if encrypted {
        return unsupported_zip_error("Encrypted files are not supported");
    }
    if using_data_descriptor {
        return unsupported_zip_error("The file length is not available in the local header");
@@ -644,7 +656,7 @@ pub fn read_zipfile_from_stream<'a, R: io::Read>(reader: &'a mut R) -> ZipResult
    let result_compression_method = result.compression_method;
    Ok(Some(ZipFile {
        data: Cow::Owned(result),
        reader: make_reader(result_compression_method, result_crc32, limit_reader)?,
    }))
}

@@ -652,8 +664,8 @@ pub fn read_zipfile_from_stream<'a, R: io::Read>(reader: &'a mut R) -> ZipResult
mod test {
    #[test]
    fn invalid_offset() {
        use super::ZipArchive;
        use std::io;

        let mut v = Vec::new();
        v.extend_from_slice(include_bytes!("../tests/data/invalid_offset.zip"));
@@ -663,8 +675,8 @@ mod test {
    #[test]
    fn zip64_with_leading_junk() {
        use super::ZipArchive;
        use std::io;

        let mut v = Vec::new();
        v.extend_from_slice(include_bytes!("../tests/data/zip64_demo.zip"));
@@ -674,8 +686,8 @@ mod test {
    #[test]
    fn zip_comment() {
        use super::ZipArchive;
        use std::io;

        let mut v = Vec::new();
        v.extend_from_slice(include_bytes!("../tests/data/mimetype.zip"));
@@ -685,8 +697,8 @@ mod test {
    #[test]
    fn zip_read_streaming() {
        use super::read_zipfile_from_stream;
        use std::io;

        let mut v = Vec::new();
        v.extend_from_slice(include_bytes!("../tests/data/mimetype.zip"));
@@ -701,8 +713,8 @@ mod test {
    #[test]
    fn zip_clone() {
        use super::ZipArchive;
        use std::io::{self, Read};

        let mut v = Vec::new();
        v.extend_from_slice(include_bytes!("../tests/data/mimetype.zip"));
@@ -713,7 +725,17 @@ mod test {
        let mut file2 = reader2.by_index(0).unwrap();

        let t = file1.last_modified();
        assert_eq!(
            (
                t.year(),
                t.month(),
                t.day(),
                t.hour(),
                t.minute(),
                t.second()
            ),
            (1980, 1, 1, 0, 0, 0)
        );
        let mut buf1 = [0; 5];
        let mut buf2 = [0; 5];

View file

@@ -10,8 +10,7 @@ pub type ZipResult<T> = Result<T, ZipError>;
/// Error type for Zip
#[derive(Debug)]
pub enum ZipError {
    /// An Error caused by I/O
    Io(io::Error),
@@ -25,57 +24,43 @@ pub enum ZipError
    FileNotFound,
}

impl ZipError {
    fn detail(&self) -> ::std::borrow::Cow<'_, str> {
        use std::error::Error;

        match *self {
            ZipError::Io(ref io_err) => {
                ("Io Error: ".to_string() + (io_err as &dyn error::Error).description()).into()
            }
            ZipError::InvalidArchive(msg) | ZipError::UnsupportedArchive(msg) => {
                (self.description().to_string() + ": " + msg).into()
            }
            ZipError::FileNotFound => self.description().into(),
        }
    }
}

impl convert::From<io::Error> for ZipError {
    fn from(err: io::Error) -> ZipError {
        ZipError::Io(err)
    }
}

impl convert::From<ZipError> for io::Error {
    fn from(err: ZipError) -> io::Error {
        io::Error::new(io::ErrorKind::Other, err)
    }
}

impl fmt::Display for ZipError {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        fmt.write_str(&*self.detail())
    }
}

impl error::Error for ZipError {
    fn description(&self) -> &str {
        match *self {
            ZipError::Io(ref io_err) => (io_err as &dyn error::Error).description(),
            ZipError::InvalidArchive(..) => "Invalid Zip archive",
            ZipError::UnsupportedArchive(..) => "Unsupported Zip archive",
@@ -83,10 +68,8 @@ impl error::Error for ZipError
        }
    }

    fn cause(&self) -> Option<&dyn error::Error> {
        match *self {
            ZipError::Io(ref io_err) => Some(io_err as &dyn error::Error),
            _ => None,
        }

View file

@@ -1,16 +1,15 @@
use crate::result::{ZipError, ZipResult};
use podio::{LittleEndian, ReadPodExt, WritePodExt};
use std::io;
use std::io::prelude::*;

pub const LOCAL_FILE_HEADER_SIGNATURE: u32 = 0x04034b50;
pub const CENTRAL_DIRECTORY_HEADER_SIGNATURE: u32 = 0x02014b50;
const CENTRAL_DIRECTORY_END_SIGNATURE: u32 = 0x06054b50;
pub const ZIP64_CENTRAL_DIRECTORY_END_SIGNATURE: u32 = 0x06064b50;
const ZIP64_CENTRAL_DIRECTORY_END_LOCATOR_SIGNATURE: u32 = 0x07064b50;

pub struct CentralDirectoryEnd {
    pub disk_number: u16,
    pub disk_with_central_directory: u16,
    pub number_of_files_on_this_disk: u16,
@@ -20,14 +19,11 @@ pub struct CentralDirectoryEnd
    pub zip_file_comment: Vec<u8>,
}

impl CentralDirectoryEnd {
    pub fn parse<T: Read>(reader: &mut T) -> ZipResult<CentralDirectoryEnd> {
        let magic = reader.read_u32::<LittleEndian>()?;
        if magic != CENTRAL_DIRECTORY_END_SIGNATURE {
            return Err(ZipError::InvalidArchive("Invalid digital signature header"));
        }
        let disk_number = reader.read_u16::<LittleEndian>()?;
        let disk_with_central_directory = reader.read_u16::<LittleEndian>()?;
@@ -38,40 +34,41 @@ impl CentralDirectoryEnd
        let zip_file_comment_length = reader.read_u16::<LittleEndian>()? as usize;
        let zip_file_comment = ReadPodExt::read_exact(reader, zip_file_comment_length)?;

        Ok(CentralDirectoryEnd {
            disk_number: disk_number,
            disk_with_central_directory: disk_with_central_directory,
            number_of_files_on_this_disk: number_of_files_on_this_disk,
            number_of_files: number_of_files,
            central_directory_size: central_directory_size,
            central_directory_offset: central_directory_offset,
            zip_file_comment: zip_file_comment,
        })
    }

    pub fn find_and_parse<T: Read + io::Seek>(
        reader: &mut T,
    ) -> ZipResult<(CentralDirectoryEnd, u64)> {
        const HEADER_SIZE: u64 = 22;
        const BYTES_BETWEEN_MAGIC_AND_COMMENT_SIZE: u64 = HEADER_SIZE - 6;
        let file_length = reader.seek(io::SeekFrom::End(0))?;

        let search_upper_bound = file_length
            .checked_sub(HEADER_SIZE + ::std::u16::MAX as u64)
            .unwrap_or(0);

        if file_length < HEADER_SIZE {
            return Err(ZipError::InvalidArchive("Invalid zip header"));
        }

        let mut pos = file_length - HEADER_SIZE;
        while pos >= search_upper_bound {
            reader.seek(io::SeekFrom::Start(pos as u64))?;

            if reader.read_u32::<LittleEndian>()? == CENTRAL_DIRECTORY_END_SIGNATURE {
                reader.seek(io::SeekFrom::Current(
                    BYTES_BETWEEN_MAGIC_AND_COMMENT_SIZE as i64,
                ))?;
                let comment_length = reader.read_u16::<LittleEndian>()? as u64;
                if file_length - pos - HEADER_SIZE == comment_length {
                    let cde_start_pos = reader.seek(io::SeekFrom::Start(pos as u64))?;
                    return CentralDirectoryEnd::parse(reader).map(|cde| (cde, cde_start_pos));
                }
@@ -81,11 +78,12 @@ impl CentralDirectoryEnd
                None => break,
            };
        }
        Err(ZipError::InvalidArchive(
            "Could not find central directory end",
        ))
    }

    pub fn write<T: Write>(&self, writer: &mut T) -> ZipResult<()> {
        writer.write_u32::<LittleEndian>(CENTRAL_DIRECTORY_END_SIGNATURE)?;
        writer.write_u16::<LittleEndian>(self.disk_number)?;
        writer.write_u16::<LittleEndian>(self.disk_with_central_directory)?;
@@ -99,37 +97,33 @@ impl CentralDirectoryEnd
    }
}

pub struct Zip64CentralDirectoryEndLocator {
    pub disk_with_central_directory: u32,
    pub end_of_central_directory_offset: u64,
    pub number_of_disks: u32,
}

impl Zip64CentralDirectoryEndLocator {
    pub fn parse<T: Read>(reader: &mut T) -> ZipResult<Zip64CentralDirectoryEndLocator> {
        let magic = reader.read_u32::<LittleEndian>()?;
        if magic != ZIP64_CENTRAL_DIRECTORY_END_LOCATOR_SIGNATURE {
            return Err(ZipError::InvalidArchive(
                "Invalid zip64 locator digital signature header",
            ));
        }
        let disk_with_central_directory = reader.read_u32::<LittleEndian>()?;
        let end_of_central_directory_offset = reader.read_u64::<LittleEndian>()?;
        let number_of_disks = reader.read_u32::<LittleEndian>()?;

        Ok(Zip64CentralDirectoryEndLocator {
            disk_with_central_directory: disk_with_central_directory,
            end_of_central_directory_offset: end_of_central_directory_offset,
            number_of_disks: number_of_disks,
        })
    }
}

pub struct Zip64CentralDirectoryEnd {
    pub version_made_by: u16,
    pub version_needed_to_extract: u16,
    pub disk_number: u32,
@@ -141,20 +135,18 @@ pub struct Zip64CentralDirectoryEnd
    //pub extensible_data_sector: Vec<u8>, <-- We don't do anything with this at the moment.
}

impl Zip64CentralDirectoryEnd {
    pub fn find_and_parse<T: Read + io::Seek>(
        reader: &mut T,
        nominal_offset: u64,
        search_upper_bound: u64,
    ) -> ZipResult<(Zip64CentralDirectoryEnd, u64)> {
        let mut pos = nominal_offset;

        while pos <= search_upper_bound {
            reader.seek(io::SeekFrom::Start(pos))?;

            if reader.read_u32::<LittleEndian>()? == ZIP64_CENTRAL_DIRECTORY_END_SIGNATURE {
                let archive_offset = pos - nominal_offset;

                let _record_size = reader.read_u64::<LittleEndian>()?;
@@ -169,22 +161,26 @@ impl Zip64CentralDirectoryEnd
                let central_directory_size = reader.read_u64::<LittleEndian>()?;
                let central_directory_offset = reader.read_u64::<LittleEndian>()?;

                return Ok((
                    Zip64CentralDirectoryEnd {
                        version_made_by: version_made_by,
                        version_needed_to_extract: version_needed_to_extract,
                        disk_number: disk_number,
                        disk_with_central_directory: disk_with_central_directory,
                        number_of_files_on_this_disk: number_of_files_on_this_disk,
                        number_of_files: number_of_files,
                        central_directory_size: central_directory_size,
                        central_directory_offset: central_directory_offset,
                    },
                    archive_offset,
                ));
            }
            pos += 1;
        }

        Err(ZipError::InvalidArchive(
            "Could not find ZIP64 central directory end",
        ))
    }
}

View file

@ -1,8 +1,7 @@
//! Types that specify what is contained in a ZIP. //! Types that specify what is contained in a ZIP.
#[derive(Clone, Copy, Debug, PartialEq)] #[derive(Clone, Copy, Debug, PartialEq)]
pub enum System pub enum System {
{
Dos = 0, Dos = 0,
Unix = 3, Unix = 3,
Unknown, Unknown,
@ -11,8 +10,7 @@ pub enum System
} }
impl System { impl System {
pub fn from_u8(system: u8) -> System pub fn from_u8(system: u8) -> System {
{
use self::System::*; use self::System::*;
match system { match system {
@ -59,10 +57,10 @@ impl DateTime {
pub fn from_msdos(datepart: u16, timepart: u16) -> DateTime { pub fn from_msdos(datepart: u16, timepart: u16) -> DateTime {
let seconds = (timepart & 0b0000000000011111) << 1; let seconds = (timepart & 0b0000000000011111) << 1;
let minutes = (timepart & 0b0000011111100000) >> 5; let minutes = (timepart & 0b0000011111100000) >> 5;
let hours = (timepart & 0b1111100000000000) >> 11; let hours = (timepart & 0b1111100000000000) >> 11;
let days = (datepart & 0b0000000000011111) >> 0; let days = (datepart & 0b0000000000011111) >> 0;
let months = (datepart & 0b0000000111100000) >> 5; let months = (datepart & 0b0000000111100000) >> 5;
let years = (datepart & 0b1111111000000000) >> 9; let years = (datepart & 0b1111111000000000) >> 9;
DateTime { DateTime {
year: (years + 1980) as u16, year: (years + 1980) as u16,
@ -83,10 +81,20 @@ impl DateTime {
/// * hour: [0, 23] /// * hour: [0, 23]
/// * minute: [0, 59] /// * minute: [0, 59]
/// * second: [0, 60] /// * second: [0, 60]
pub fn from_date_and_time(year: u16, month: u8, day: u8, hour: u8, minute: u8, second: u8) -> Result<DateTime, ()> { pub fn from_date_and_time(
if year >= 1980 && year <= 2107 year: u16,
&& month >= 1 && month <= 12 month: u8,
&& day >= 1 && day <= 31 day: u8,
hour: u8,
minute: u8,
second: u8,
) -> Result<DateTime, ()> {
if year >= 1980
&& year <= 2107
&& month >= 1
&& month <= 12
&& day >= 1
&& day <= 31
&& hour <= 23 && hour <= 23
&& minute <= 59 && minute <= 59
&& second <= 60 && second <= 60
@ -99,8 +107,7 @@ impl DateTime {
minute: minute, minute: minute,
second: second, second: second,
}) })
} } else {
else {
Err(()) Err(())
} }
} }
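
The bit masks in from_msdos above unpack the MS-DOS timestamp format: the time word stores seconds/2 in its low 5 bits, minutes in the next 6 and hours in the top 5, while the date word stores the day, the month and the number of years since 1980. A worked example, assuming the month/day/hour/minute accessors that accompany year() and second() in the tests below:

    // datepart 0x4D71 = 0b0100110_1011_10001 -> 38 years (1980 + 38 = 2018), month 11, day 17
    // timepart 0x54CF = 0b01010_100110_01111 -> hours 10, minutes 38, seconds 15 * 2 = 30
    let dt = DateTime::from_msdos(0x4D71, 0x54CF);
    assert_eq!((dt.year(), dt.month(), dt.day()), (2018, 11, 17));
    assert_eq!((dt.hour(), dt.minute(), dt.second()), (10, 38, 30));
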
@ -110,12 +117,18 @@ impl DateTime {
/// ///
/// Returns `Err` when this object is out of bounds /// Returns `Err` when this object is out of bounds
pub fn from_time(tm: ::time::Tm) -> Result<DateTime, ()> { pub fn from_time(tm: ::time::Tm) -> Result<DateTime, ()> {
if tm.tm_year >= 80 && tm.tm_year <= 207 if tm.tm_year >= 80
&& tm.tm_mon >= 0 && tm.tm_mon <= 11 && tm.tm_year <= 207
&& tm.tm_mday >= 1 && tm.tm_mday <= 31 && tm.tm_mon >= 0
&& tm.tm_hour >= 0 && tm.tm_hour <= 23 && tm.tm_mon <= 11
&& tm.tm_min >= 0 && tm.tm_min <= 59 && tm.tm_mday >= 1
&& tm.tm_sec >= 0 && tm.tm_sec <= 60 && tm.tm_mday <= 31
&& tm.tm_hour >= 0
&& tm.tm_hour <= 23
&& tm.tm_min >= 0
&& tm.tm_min <= 59
&& tm.tm_sec >= 0
&& tm.tm_sec <= 60
{ {
Ok(DateTime { Ok(DateTime {
year: (tm.tm_year + 1900) as u16, year: (tm.tm_year + 1900) as u16,
@ -125,8 +138,7 @@ impl DateTime {
minute: tm.tm_min as u8, minute: tm.tm_min as u8,
second: tm.tm_sec as u8, second: tm.tm_sec as u8,
}) })
} } else {
else {
Err(()) Err(())
} }
} }
@ -154,7 +166,7 @@ impl DateTime {
tm_mon: self.month as i32 - 1, tm_mon: self.month as i32 - 1,
tm_year: self.year as i32 - 1900, tm_year: self.year as i32 - 1900,
tm_isdst: -1, tm_isdst: -1,
.. ::time::empty_tm() ..::time::empty_tm()
} }
} }
@ -193,8 +205,7 @@ pub const DEFAULT_VERSION: u8 = 46;
/// Structure representing a ZIP file. /// Structure representing a ZIP file.
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct ZipFileData pub struct ZipFileData {
{
/// Compatibility of the file attribute information /// Compatibility of the file attribute information
pub system: System, pub system: System,
/// Specification version /// Specification version
@ -230,7 +241,8 @@ impl ZipFileData {
let no_null_filename = match self.file_name.find('\0') { let no_null_filename = match self.file_name.find('\0') {
Some(index) => &self.file_name[0..index], Some(index) => &self.file_name[0..index],
None => &self.file_name, None => &self.file_name,
}.to_string(); }
.to_string();
// zip files can contain both / and \ as separators regardless of the OS // zip files can contain both / and \ as separators regardless of the OS
// and as we want to return a sanitized PathBuf that only supports the // and as we want to return a sanitized PathBuf that only supports the
@ -295,7 +307,10 @@ mod test {
data_start: 0, data_start: 0,
external_attributes: 0, external_attributes: 0,
}; };
assert_eq!(data.file_name_sanitized(), ::std::path::PathBuf::from("path/etc/passwd")); assert_eq!(
data.file_name_sanitized(),
::std::path::PathBuf::from("path/etc/passwd")
);
} }
#[test] #[test]
@ -399,7 +414,10 @@ mod test {
assert_eq!(dt.second(), 30); assert_eq!(dt.second(), 30);
#[cfg(feature = "time")] #[cfg(feature = "time")]
assert_eq!(format!("{}", dt.to_time().rfc3339()), "2018-11-17T10:38:30Z"); assert_eq!(
format!("{}", dt.to_time().rfc3339()),
"2018-11-17T10:38:30Z"
);
} }
#[test] #[test]
@ -414,7 +432,10 @@ mod test {
assert_eq!(dt.second(), 62); assert_eq!(dt.second(), 62);
#[cfg(feature = "time")] #[cfg(feature = "time")]
assert_eq!(format!("{}", dt.to_time().rfc3339()), "2107-15-31T31:63:62Z"); assert_eq!(
format!("{}", dt.to_time().rfc3339()),
"2107-15-31T31:63:62Z"
);
let dt = DateTime::from_msdos(0x0000, 0x0000); let dt = DateTime::from_msdos(0x0000, 0x0000);
assert_eq!(dt.year(), 1980); assert_eq!(dt.year(), 1980);
@ -425,7 +446,10 @@ mod test {
assert_eq!(dt.second(), 0); assert_eq!(dt.second(), 0);
#[cfg(feature = "time")] #[cfg(feature = "time")]
assert_eq!(format!("{}", dt.to_time().rfc3339()), "1980-00-00T00:00:00Z"); assert_eq!(
format!("{}", dt.to_time().rfc3339()),
"1980-00-00T00:00:00Z"
);
} }
#[cfg(feature = "time")] #[cfg(feature = "time")]
View file
@ -1,15 +1,15 @@
//! Structs for creating a new zip archive //! Structs for creating a new zip archive
use crate::compression::CompressionMethod; use crate::compression::CompressionMethod;
use crate::types::{ZipFileData, System, DEFAULT_VERSION, DateTime}; use crate::result::{ZipError, ZipResult};
use crate::spec; use crate::spec;
use crate::types::{DateTime, System, ZipFileData, DEFAULT_VERSION};
use crc32fast::Hasher; use crc32fast::Hasher;
use crate::result::{ZipResult, ZipError}; use podio::{LittleEndian, WritePodExt};
use std::default::Default; use std::default::Default;
use std::io; use std::io;
use std::io::prelude::*; use std::io::prelude::*;
use std::mem; use std::mem;
use podio::{WritePodExt, LittleEndian};
#[cfg(feature = "deflate")] #[cfg(feature = "deflate")]
use flate2::write::DeflateEncoder; use flate2::write::DeflateEncoder;
@ -17,8 +17,7 @@ use flate2::write::DeflateEncoder;
#[cfg(feature = "bzip2")] #[cfg(feature = "bzip2")]
use bzip2::write::BzEncoder; use bzip2::write::BzEncoder;
enum GenericZipWriter<W: Write + io::Seek> enum GenericZipWriter<W: Write + io::Seek> {
{
Closed, Closed,
Storer(W), Storer(W),
#[cfg(feature = "deflate")] #[cfg(feature = "deflate")]
@ -51,8 +50,7 @@ enum GenericZipWriter<W: Write + io::Seek>
/// ///
/// println!("Result: {:?}", doit().unwrap()); /// println!("Result: {:?}", doit().unwrap());
/// ``` /// ```
pub struct ZipWriter<W: Write + io::Seek> pub struct ZipWriter<W: Write + io::Seek> {
{
inner: GenericZipWriter<W>, inner: GenericZipWriter<W>,
files: Vec<ZipFileData>, files: Vec<ZipFileData>,
stats: ZipWriterStats, stats: ZipWriterStats,
@ -61,8 +59,7 @@ pub struct ZipWriter<W: Write + io::Seek>
} }
#[derive(Default)] #[derive(Default)]
struct ZipWriterStats struct ZipWriterStats {
{
hasher: Hasher, hasher: Hasher,
start: u64, start: u64,
bytes_written: u64, bytes_written: u64,
@ -80,10 +77,14 @@ impl FileOptions {
/// Construct a new FileOptions object /// Construct a new FileOptions object
pub fn default() -> FileOptions { pub fn default() -> FileOptions {
FileOptions { FileOptions {
#[cfg(feature = "deflate")] compression_method: CompressionMethod::Deflated, #[cfg(feature = "deflate")]
#[cfg(not(feature = "deflate"))] compression_method: CompressionMethod::Stored, compression_method: CompressionMethod::Deflated,
#[cfg(feature = "time")] last_modified_time: DateTime::from_time(time::now()).unwrap_or(DateTime::default()), #[cfg(not(feature = "deflate"))]
#[cfg(not(feature = "time"))] last_modified_time: DateTime::default(), compression_method: CompressionMethod::Stored,
#[cfg(feature = "time")]
last_modified_time: DateTime::from_time(time::now()).unwrap_or(DateTime::default()),
#[cfg(not(feature = "time"))]
last_modified_time: DateTime::default(),
permissions: None, permissions: None,
} }
} }
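
The cfg attributes above make the default compression method Deflated only when the deflate feature is enabled (Stored otherwise), and the default timestamp the current time only when the time feature is on. A usage sketch overriding both defaults with the builder methods used elsewhere in this diff:

    let options = FileOptions::default()
        .compression_method(CompressionMethod::Stored)
        .last_modified_time(DateTime::from_date_and_time(2020, 6, 15, 10, 44, 39).unwrap());
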
@ -124,53 +125,53 @@ impl Default for FileOptions {
} }
} }
impl<W: Write+io::Seek> Write for ZipWriter<W> impl<W: Write + io::Seek> Write for ZipWriter<W> {
{ fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> if !self.writing_to_file {
{ return Err(io::Error::new(
if !self.writing_to_file { return Err(io::Error::new(io::ErrorKind::Other, "No file has been started")) } io::ErrorKind::Other,
match self.inner.ref_mut() "No file has been started",
{ ));
}
match self.inner.ref_mut() {
Some(ref mut w) => { Some(ref mut w) => {
let write_result = w.write(buf); let write_result = w.write(buf);
if let Ok(count) = write_result { if let Ok(count) = write_result {
self.stats.update(&buf[0..count]); self.stats.update(&buf[0..count]);
} }
write_result write_result
} }
None => Err(io::Error::new(io::ErrorKind::BrokenPipe, "ZipWriter was already closed")), None => Err(io::Error::new(
io::ErrorKind::BrokenPipe,
"ZipWriter was already closed",
)),
} }
} }
fn flush(&mut self) -> io::Result<()> fn flush(&mut self) -> io::Result<()> {
{ match self.inner.ref_mut() {
match self.inner.ref_mut()
{
Some(ref mut w) => w.flush(), Some(ref mut w) => w.flush(),
None => Err(io::Error::new(io::ErrorKind::BrokenPipe, "ZipWriter was already closed")), None => Err(io::Error::new(
io::ErrorKind::BrokenPipe,
"ZipWriter was already closed",
)),
} }
} }
} }
impl ZipWriterStats impl ZipWriterStats {
{ fn update(&mut self, buf: &[u8]) {
fn update(&mut self, buf: &[u8])
{
self.hasher.update(buf); self.hasher.update(buf);
self.bytes_written += buf.len() as u64; self.bytes_written += buf.len() as u64;
} }
} }
impl<W: Write+io::Seek> ZipWriter<W> impl<W: Write + io::Seek> ZipWriter<W> {
{
/// Initializes the ZipWriter. /// Initializes the ZipWriter.
/// ///
/// Before writing to this object, the start_file command should be called. /// Before writing to this object, the start_file command should be called.
pub fn new(inner: W) -> ZipWriter<W> pub fn new(inner: W) -> ZipWriter<W> {
{ ZipWriter {
ZipWriter
{
inner: GenericZipWriter::Storer(inner), inner: GenericZipWriter::Storer(inner),
files: Vec::new(), files: Vec::new(),
stats: Default::default(), stats: Default::default(),
@ -180,13 +181,17 @@ impl<W: Write+io::Seek> ZipWriter<W>
} }
/// Set ZIP archive comment. Defaults to 'zip-rs' if not set. /// Set ZIP archive comment. Defaults to 'zip-rs' if not set.
pub fn set_comment<S>(&mut self, comment: S) where S: Into<String> { pub fn set_comment<S>(&mut self, comment: S)
where
S: Into<String>,
{
self.comment = comment.into(); self.comment = comment.into();
} }
/// Start a new file for with the requested options. /// Start a new file for with the requested options.
fn start_entry<S>(&mut self, name: S, options: FileOptions) -> ZipResult<()> fn start_entry<S>(&mut self, name: S, options: FileOptions) -> ZipResult<()>
where S: Into<String> where
S: Into<String>,
{ {
self.finish_file()?; self.finish_file()?;
@ -197,8 +202,7 @@ impl<W: Write+io::Seek> ZipWriter<W>
let permissions = options.permissions.unwrap_or(0o100644); let permissions = options.permissions.unwrap_or(0o100644);
let file_name = name.into(); let file_name = name.into();
let file_name_raw = file_name.clone().into_bytes(); let file_name_raw = file_name.clone().into_bytes();
let mut file = ZipFileData let mut file = ZipFileData {
{
system: System::Unix, system: System::Unix,
version_made_by: DEFAULT_VERSION, version_made_by: DEFAULT_VERSION,
encrypted: false, encrypted: false,
@ -231,13 +235,11 @@ impl<W: Write+io::Seek> ZipWriter<W>
Ok(()) Ok(())
} }
fn finish_file(&mut self) -> ZipResult<()> fn finish_file(&mut self) -> ZipResult<()> {
{
self.inner.switch_to(CompressionMethod::Stored)?; self.inner.switch_to(CompressionMethod::Stored)?;
let writer = self.inner.get_plain(); let writer = self.inner.get_plain();
let file = match self.files.last_mut() let file = match self.files.last_mut() {
{
None => return Ok(()), None => return Ok(()),
Some(f) => f, Some(f) => f,
}; };
@ -256,7 +258,8 @@ impl<W: Write+io::Seek> ZipWriter<W>
/// Starts a file. /// Starts a file.
pub fn start_file<S>(&mut self, name: S, mut options: FileOptions) -> ZipResult<()> pub fn start_file<S>(&mut self, name: S, mut options: FileOptions) -> ZipResult<()>
where S: Into<String> where
S: Into<String>,
{ {
if options.permissions.is_none() { if options.permissions.is_none() {
options.permissions = Some(0o644); options.permissions = Some(0o644);
@ -271,7 +274,11 @@ impl<W: Write+io::Seek> ZipWriter<W>
/// ///
/// This function ensures that the '/' path seperator is used. It also ignores all non 'Normal' /// This function ensures that the '/' path seperator is used. It also ignores all non 'Normal'
/// Components, such as a starting '/' or '..' and '.'. /// Components, such as a starting '/' or '..' and '.'.
pub fn start_file_from_path(&mut self, path: &std::path::Path, options: FileOptions) -> ZipResult<()> { pub fn start_file_from_path(
&mut self,
path: &std::path::Path,
options: FileOptions,
) -> ZipResult<()> {
self.start_file(path_to_string(path), options) self.start_file(path_to_string(path), options)
} }
@ -279,7 +286,8 @@ impl<W: Write+io::Seek> ZipWriter<W>
/// ///
/// You can't write data to the file afterwards. /// You can't write data to the file afterwards.
pub fn add_directory<S>(&mut self, name: S, mut options: FileOptions) -> ZipResult<()> pub fn add_directory<S>(&mut self, name: S, mut options: FileOptions) -> ZipResult<()>
where S: Into<String> where
S: Into<String>,
{ {
if options.permissions.is_none() { if options.permissions.is_none() {
options.permissions = Some(0o755); options.permissions = Some(0o755);
@ -303,7 +311,11 @@ impl<W: Write+io::Seek> ZipWriter<W>
/// ///
/// This function ensures that the '/' path seperator is used. It also ignores all non 'Normal' /// This function ensures that the '/' path seperator is used. It also ignores all non 'Normal'
/// Components, such as a starting '/' or '..' and '.'. /// Components, such as a starting '/' or '..' and '.'.
pub fn add_directory_from_path(&mut self, path: &std::path::Path, options: FileOptions) -> ZipResult<()> { pub fn add_directory_from_path(
&mut self,
path: &std::path::Path,
options: FileOptions,
) -> ZipResult<()> {
self.add_directory(path_to_string(path.into()), options) self.add_directory(path_to_string(path.into()), options)
} }
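
Taken together, a caller drives these methods in order: add_directory for explicit directory entries, start_file or start_file_from_path for each member, the Write impl for its contents, and finish to emit the central directory. A minimal sketch from the user's side (function name, entry names and contents are illustrative only):

    fn build_sample_archive() -> zip::result::ZipResult<Vec<u8>> {
        use std::io::{Cursor, Write};
        use zip::write::{FileOptions, ZipWriter};

        let mut zip = ZipWriter::new(Cursor::new(Vec::new()));
        zip.add_directory("docs/", FileOptions::default())?;
        zip.start_file_from_path(std::path::Path::new("docs/readme.txt"), FileOptions::default())?;
        zip.write_all(b"hello")?;
        Ok(zip.finish()?.into_inner())
    }
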
@ -311,29 +323,25 @@ impl<W: Write+io::Seek> ZipWriter<W>
/// ///
/// This will return the writer, but one should normally not append any data to the end of the file. /// This will return the writer, but one should normally not append any data to the end of the file.
/// Note that the zipfile will also be finished on drop. /// Note that the zipfile will also be finished on drop.
pub fn finish(&mut self) -> ZipResult<W> pub fn finish(&mut self) -> ZipResult<W> {
{
self.finalize()?; self.finalize()?;
let inner = mem::replace(&mut self.inner, GenericZipWriter::Closed); let inner = mem::replace(&mut self.inner, GenericZipWriter::Closed);
Ok(inner.unwrap()) Ok(inner.unwrap())
} }
fn finalize(&mut self) -> ZipResult<()> fn finalize(&mut self) -> ZipResult<()> {
{
self.finish_file()?; self.finish_file()?;
{ {
let writer = self.inner.get_plain(); let writer = self.inner.get_plain();
let central_start = writer.seek(io::SeekFrom::Current(0))?; let central_start = writer.seek(io::SeekFrom::Current(0))?;
for file in self.files.iter() for file in self.files.iter() {
{
write_central_directory_header(writer, file)?; write_central_directory_header(writer, file)?;
} }
let central_size = writer.seek(io::SeekFrom::Current(0))? - central_start; let central_size = writer.seek(io::SeekFrom::Current(0))? - central_start;
let footer = spec::CentralDirectoryEnd let footer = spec::CentralDirectoryEnd {
{
disk_number: 0, disk_number: 0,
disk_with_central_directory: 0, disk_with_central_directory: 0,
number_of_files_on_this_disk: self.files.len() as u16, number_of_files_on_this_disk: self.files.len() as u16,
@ -350,12 +358,9 @@ impl<W: Write+io::Seek> ZipWriter<W>
} }
} }
impl<W: Write+io::Seek> Drop for ZipWriter<W> impl<W: Write + io::Seek> Drop for ZipWriter<W> {
{ fn drop(&mut self) {
fn drop(&mut self) if !self.inner.is_closed() {
{
if !self.inner.is_closed()
{
if let Err(e) = self.finalize() { if let Err(e) = self.finalize() {
let _ = write!(&mut io::stderr(), "ZipWriter drop failed: {:?}", e); let _ = write!(&mut io::stderr(), "ZipWriter drop failed: {:?}", e);
} }
@ -363,34 +368,43 @@ impl<W: Write+io::Seek> Drop for ZipWriter<W>
} }
} }
impl<W: Write+io::Seek> GenericZipWriter<W> impl<W: Write + io::Seek> GenericZipWriter<W> {
{ fn switch_to(&mut self, compression: CompressionMethod) -> ZipResult<()> {
fn switch_to(&mut self, compression: CompressionMethod) -> ZipResult<()>
{
match self.current_compression() { match self.current_compression() {
Some(method) if method == compression => return Ok(()), Some(method) if method == compression => return Ok(()),
None => Err(io::Error::new(io::ErrorKind::BrokenPipe, "ZipWriter was already closed"))?, None => Err(io::Error::new(
_ => {}, io::ErrorKind::BrokenPipe,
"ZipWriter was already closed",
))?,
_ => {}
} }
let bare = match mem::replace(self, GenericZipWriter::Closed) let bare = match mem::replace(self, GenericZipWriter::Closed) {
{
GenericZipWriter::Storer(w) => w, GenericZipWriter::Storer(w) => w,
#[cfg(feature = "deflate")] #[cfg(feature = "deflate")]
GenericZipWriter::Deflater(w) => w.finish()?, GenericZipWriter::Deflater(w) => w.finish()?,
#[cfg(feature = "bzip2")] #[cfg(feature = "bzip2")]
GenericZipWriter::Bzip2(w) => w.finish()?, GenericZipWriter::Bzip2(w) => w.finish()?,
GenericZipWriter::Closed => Err(io::Error::new(io::ErrorKind::BrokenPipe, "ZipWriter was already closed"))?, GenericZipWriter::Closed => Err(io::Error::new(
io::ErrorKind::BrokenPipe,
"ZipWriter was already closed",
))?,
}; };
*self = match compression *self = match compression {
{
CompressionMethod::Stored => GenericZipWriter::Storer(bare), CompressionMethod::Stored => GenericZipWriter::Storer(bare),
#[cfg(feature = "deflate")] #[cfg(feature = "deflate")]
CompressionMethod::Deflated => GenericZipWriter::Deflater(DeflateEncoder::new(bare, flate2::Compression::default())), CompressionMethod::Deflated => GenericZipWriter::Deflater(DeflateEncoder::new(
bare,
flate2::Compression::default(),
)),
#[cfg(feature = "bzip2")] #[cfg(feature = "bzip2")]
CompressionMethod::Bzip2 => GenericZipWriter::Bzip2(BzEncoder::new(bare, bzip2::Compression::Default)), CompressionMethod::Bzip2 => {
CompressionMethod::Unsupported(..) => return Err(ZipError::UnsupportedArchive("Unsupported compression")), GenericZipWriter::Bzip2(BzEncoder::new(bare, bzip2::Compression::Default))
}
CompressionMethod::Unsupported(..) => {
return Err(ZipError::UnsupportedArchive("Unsupported compression"))
}
}; };
Ok(()) Ok(())
@ -407,19 +421,15 @@ impl<W: Write+io::Seek> GenericZipWriter<W>
} }
} }
fn is_closed(&self) -> bool fn is_closed(&self) -> bool {
{ match *self {
match *self
{
GenericZipWriter::Closed => true, GenericZipWriter::Closed => true,
_ => false, _ => false,
} }
} }
fn get_plain(&mut self) -> &mut W fn get_plain(&mut self) -> &mut W {
{ match *self {
match *self
{
GenericZipWriter::Storer(ref mut w) => w, GenericZipWriter::Storer(ref mut w) => w,
_ => panic!("Should have switched to stored beforehand"), _ => panic!("Should have switched to stored beforehand"),
} }
@ -436,24 +446,25 @@ impl<W: Write+io::Seek> GenericZipWriter<W>
} }
} }
fn unwrap(self) -> W fn unwrap(self) -> W {
{ match self {
match self
{
GenericZipWriter::Storer(w) => w, GenericZipWriter::Storer(w) => w,
_ => panic!("Should have switched to stored beforehand"), _ => panic!("Should have switched to stored beforehand"),
} }
} }
} }
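
switch_to is what allows consecutive entries to use different compression methods: it finishes the current encoder, recovers the inner writer and re-wraps it for the next entry. From the caller's side that is invisible; a sketch under the assumption that the deflate feature (and with it CompressionMethod::Deflated) is enabled:

    fn mixed_compression() -> zip::result::ZipResult<()> {
        use std::io::{Cursor, Write};
        use zip::write::{FileOptions, ZipWriter};
        use zip::CompressionMethod;

        let mut zip = ZipWriter::new(Cursor::new(Vec::new()));
        zip.start_file("stored.txt", FileOptions::default().compression_method(CompressionMethod::Stored))?;
        zip.write_all(b"kept as-is")?;
        zip.start_file("deflated.txt", FileOptions::default().compression_method(CompressionMethod::Deflated))?;
        zip.write_all(b"run through the deflate encoder")?;
        zip.finish()?;
        Ok(())
    }
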
fn write_local_file_header<T: Write>(writer: &mut T, file: &ZipFileData) -> ZipResult<()> fn write_local_file_header<T: Write>(writer: &mut T, file: &ZipFileData) -> ZipResult<()> {
{
// local file header signature // local file header signature
writer.write_u32::<LittleEndian>(spec::LOCAL_FILE_HEADER_SIGNATURE)?; writer.write_u32::<LittleEndian>(spec::LOCAL_FILE_HEADER_SIGNATURE)?;
// version needed to extract // version needed to extract
writer.write_u16::<LittleEndian>(file.version_needed())?; writer.write_u16::<LittleEndian>(file.version_needed())?;
// general purpose bit flag // general purpose bit flag
let flag = if !file.file_name.is_ascii() { 1u16 << 11 } else { 0 }; let flag = if !file.file_name.is_ascii() {
1u16 << 11
} else {
0
};
writer.write_u16::<LittleEndian>(flag)?; writer.write_u16::<LittleEndian>(flag)?;
// Compression method // Compression method
writer.write_u16::<LittleEndian>(file.compression_method.to_u16())?; writer.write_u16::<LittleEndian>(file.compression_method.to_u16())?;
@ -479,9 +490,11 @@ fn write_local_file_header<T: Write>(writer: &mut T, file: &ZipFileData) -> ZipR
Ok(()) Ok(())
} }
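
The only general purpose flag this writer sets is bit 11, which declares the file name (and comment) to be UTF-8; it is set exactly when the name is not pure ASCII. A tiny illustration of that computation, with a hypothetical helper name:

    fn general_purpose_flag(file_name: &str) -> u16 {
        if !file_name.is_ascii() { 1u16 << 11 } else { 0 }
    }

    #[test]
    fn utf8_flag_is_bit_11() {
        assert_eq!(general_purpose_flag("lorem_ipsum.txt"), 0);
        assert_eq!(general_purpose_flag("☃.txt"), 0x0800); // 1 << 11
    }
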
fn update_local_file_header<T: Write+io::Seek>(writer: &mut T, file: &ZipFileData) -> ZipResult<()> fn update_local_file_header<T: Write + io::Seek>(
{ writer: &mut T,
const CRC32_OFFSET : u64 = 14; file: &ZipFileData,
) -> ZipResult<()> {
const CRC32_OFFSET: u64 = 14;
writer.seek(io::SeekFrom::Start(file.header_start + CRC32_OFFSET))?; writer.seek(io::SeekFrom::Start(file.header_start + CRC32_OFFSET))?;
writer.write_u32::<LittleEndian>(file.crc32)?; writer.write_u32::<LittleEndian>(file.crc32)?;
writer.write_u32::<LittleEndian>(file.compressed_size as u32)?; writer.write_u32::<LittleEndian>(file.compressed_size as u32)?;
@ -489,8 +502,7 @@ fn update_local_file_header<T: Write+io::Seek>(writer: &mut T, file: &ZipFileDat
Ok(()) Ok(())
} }
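
CRC32_OFFSET is 14 because of the fixed layout of the local file header written above; the crc-32 field is followed immediately by the two size fields that update_local_file_header patches once the data has been written. For reference:

    // Local file header layout (byte offsets from header_start):
    //   0  signature "PK\x03\x04"     (4 bytes)
    //   4  version needed to extract  (2)
    //   6  general purpose bit flag   (2)
    //   8  compression method         (2)
    //  10  last mod file time         (2)
    //  12  last mod file date         (2)
    //  14  crc-32                     (4)  <- CRC32_OFFSET
    //  18  compressed size            (4)
    //  22  uncompressed size          (4)
    //  26  file name length           (2)
    //  28  extra field length         (2)
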
fn write_central_directory_header<T: Write>(writer: &mut T, file: &ZipFileData) -> ZipResult<()> fn write_central_directory_header<T: Write>(writer: &mut T, file: &ZipFileData) -> ZipResult<()> {
{
// central file header signature // central file header signature
writer.write_u32::<LittleEndian>(spec::CENTRAL_DIRECTORY_HEADER_SIGNATURE)?; writer.write_u32::<LittleEndian>(spec::CENTRAL_DIRECTORY_HEADER_SIGNATURE)?;
// version made by // version made by
@ -499,7 +511,11 @@ fn write_central_directory_header<T: Write>(writer: &mut T, file: &ZipFileData)
// version needed to extract // version needed to extract
writer.write_u16::<LittleEndian>(file.version_needed())?; writer.write_u16::<LittleEndian>(file.version_needed())?;
// general puprose bit flag // general puprose bit flag
let flag = if !file.file_name.is_ascii() { 1u16 << 11 } else { 0 }; let flag = if !file.file_name.is_ascii() {
1u16 << 11
} else {
0
};
writer.write_u16::<LittleEndian>(flag)?; writer.write_u16::<LittleEndian>(flag)?;
// compression method // compression method
writer.write_u16::<LittleEndian>(file.compression_method.to_u16())?; writer.write_u16::<LittleEndian>(file.compression_method.to_u16())?;
@ -537,8 +553,7 @@ fn write_central_directory_header<T: Write>(writer: &mut T, file: &ZipFileData)
Ok(()) Ok(())
} }
fn build_extra_field(_file: &ZipFileData) -> ZipResult<Vec<u8>> fn build_extra_field(_file: &ZipFileData) -> ZipResult<Vec<u8>> {
{
let writer = Vec::new(); let writer = Vec::new();
// Future work // Future work
Ok(writer) Ok(writer)
@ -562,11 +577,11 @@ fn path_to_string(path: &std::path::Path) -> String {
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use std::io;
use std::io::Write;
use crate::types::DateTime;
use super::{FileOptions, ZipWriter}; use super::{FileOptions, ZipWriter};
use crate::compression::CompressionMethod; use crate::compression::CompressionMethod;
use crate::types::DateTime;
use std::io;
use std::io::Write;
#[test] #[test]
fn write_empty_zip() { fn write_empty_zip() {
@ -574,24 +589,38 @@ mod test {
writer.set_comment("ZIP"); writer.set_comment("ZIP");
let result = writer.finish().unwrap(); let result = writer.finish().unwrap();
assert_eq!(result.get_ref().len(), 25); assert_eq!(result.get_ref().len(), 25);
assert_eq!(*result.get_ref(), [80, 75, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 90, 73, 80]); assert_eq!(
*result.get_ref(),
[80, 75, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 90, 73, 80]
);
} }
#[test] #[test]
fn write_zip_dir() { fn write_zip_dir() {
let mut writer = ZipWriter::new(io::Cursor::new(Vec::new())); let mut writer = ZipWriter::new(io::Cursor::new(Vec::new()));
writer.add_directory("test", FileOptions::default().last_modified_time( writer
DateTime::from_date_and_time(2018, 8, 15, 20, 45, 6).unwrap() .add_directory(
)).unwrap(); "test",
assert!(writer.write(b"writing to a directory is not allowed, and will not write any data").is_err()); FileOptions::default().last_modified_time(
DateTime::from_date_and_time(2018, 8, 15, 20, 45, 6).unwrap(),
),
)
.unwrap();
assert!(writer
.write(b"writing to a directory is not allowed, and will not write any data")
.is_err());
let result = writer.finish().unwrap(); let result = writer.finish().unwrap();
assert_eq!(result.get_ref().len(), 114); assert_eq!(result.get_ref().len(), 114);
assert_eq!(*result.get_ref(), &[ assert_eq!(
80u8, 75, 3, 4, 20, 0, 0, 0, 0, 0, 163, 165, 15, 77, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 116, *result.get_ref(),
101, 115, 116, 47, 80, 75, 1, 2, 46, 3, 20, 0, 0, 0, 0, 0, 163, 165, 15, 77, 0, 0, 0, 0, 0, 0, 0, 0, 0, &[
0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 237, 65, 0, 0, 0, 0, 116, 101, 115, 116, 47, 80, 75, 5, 6, 80u8, 75, 3, 4, 20, 0, 0, 0, 0, 0, 163, 165, 15, 77, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 1, 0, 51, 0, 0, 0, 35, 0, 0, 0, 6, 0, 122, 105, 112, 45, 114, 115 0, 0, 5, 0, 0, 0, 116, 101, 115, 116, 47, 80, 75, 1, 2, 46, 3, 20, 0, 0, 0, 0, 0,
] as &[u8]); 163, 165, 15, 77, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 237, 65, 0, 0, 0, 0, 116, 101, 115, 116, 47, 80, 75, 5, 6, 0, 0, 0, 0, 1, 0,
1, 0, 51, 0, 0, 0, 35, 0, 0, 0, 6, 0, 122, 105, 112, 45, 114, 115
] as &[u8]
);
} }
#[test] #[test]
@ -603,7 +632,9 @@ mod test {
permissions: Some(33188), permissions: Some(33188),
}; };
writer.start_file("mimetype", options).unwrap(); writer.start_file("mimetype", options).unwrap();
writer.write(b"application/vnd.oasis.opendocument.text").unwrap(); writer
.write(b"application/vnd.oasis.opendocument.text")
.unwrap();
let result = writer.finish().unwrap(); let result = writer.finish().unwrap();
assert_eq!(result.get_ref().len(), 159); assert_eq!(result.get_ref().len(), 159);
let mut v = Vec::new(); let mut v = Vec::new();
@ -614,8 +645,10 @@ mod test {
#[test] #[test]
fn path_to_string() { fn path_to_string() {
let mut path = std::path::PathBuf::new(); let mut path = std::path::PathBuf::new();
#[cfg(windows)] path.push(r"C:\"); #[cfg(windows)]
#[cfg(unix)] path.push("/"); path.push(r"C:\");
#[cfg(unix)]
path.push("/");
path.push("windows"); path.push("windows");
path.push(".."); path.push("..");
path.push("."); path.push(".");
View file
@ -1,8 +1,8 @@
use std::collections::HashSet;
use std::io::prelude::*; use std::io::prelude::*;
use zip::write::FileOptions;
use std::io::Cursor; use std::io::Cursor;
use std::iter::FromIterator; use std::iter::FromIterator;
use std::collections::HashSet; use zip::write::FileOptions;
// This test asserts that after creating a zip file, then reading its contents back out, // This test asserts that after creating a zip file, then reading its contents back out,
// the extracted data will *always* be exactly the same as the original data. // the extracted data will *always* be exactly the same as the original data.
@ -38,7 +38,7 @@ fn write_to_zip_file(file: &mut Cursor<Vec<u8>>) -> zip::result::ZipResult<()> {
fn read_zip_file(zip_file: &mut Cursor<Vec<u8>>) -> zip::result::ZipResult<String> { fn read_zip_file(zip_file: &mut Cursor<Vec<u8>>) -> zip::result::ZipResult<String> {
let mut archive = zip::ZipArchive::new(zip_file).unwrap(); let mut archive = zip::ZipArchive::new(zip_file).unwrap();
let expected_file_names = [ "test/", "test/☃.txt", "test/lorem_ipsum.txt" ]; let expected_file_names = ["test/", "test/☃.txt", "test/lorem_ipsum.txt"];
let expected_file_names = HashSet::from_iter(expected_file_names.iter().copied()); let expected_file_names = HashSet::from_iter(expected_file_names.iter().copied());
let file_names = archive.file_names().collect::<HashSet<_>>(); let file_names = archive.file_names().collect::<HashSet<_>>();
assert_eq!(file_names, expected_file_names); assert_eq!(file_names, expected_file_names);
View file
@ -1,26 +1,21 @@
use zip::read::ZipArchive;
use std::io::Cursor; use std::io::Cursor;
use zip::read::ZipArchive;
const BUF : &[u8] = &[ const BUF: &[u8] = &[
0x50, 0x4b, 0x03, 0x04, 0x0a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x50, 0x4b, 0x03, 0x04, 0x0a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x12, 0x00, 0x1c, 0x00, 0x69, 0x6e,
0x00, 0x00, 0x12, 0x00, 0x1c, 0x00, 0x69, 0x6e, 0x76, 0x61, 0x6c, 0x69, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2f,
0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2f, 0x55, 0x54, 0x09, 0x00, 0x03, 0xf4, 0x5c, 0x88, 0x5a, 0xf4, 0x5c, 0x88, 0x5a, 0x75, 0x78, 0x0b,
0x55, 0x54, 0x09, 0x00, 0x03, 0xf4, 0x5c, 0x88, 0x5a, 0xf4, 0x5c, 0x88, 0x00, 0x01, 0x04, 0xe8, 0x03, 0x00, 0x00, 0x04, 0x0a, 0x00, 0x00, 0x00, 0x50, 0x4b, 0x01, 0x02,
0x5a, 0x75, 0x78, 0x0b, 0x00, 0x01, 0x04, 0xe8, 0x03, 0x00, 0x00, 0x04, 0x1e, 0x03, 0x0a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x0a, 0x00, 0x00, 0x00, 0x50, 0x4b, 0x01, 0x02, 0x1e, 0x03, 0x0a, 0x00, 0x00, // time part: 0 seconds, 0 minutes, 0 hours
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // date part: day 0 (invalid), month 0 (invalid), year 0 (1980)
0x00, 0x00, // time part: 0 seconds, 0 minutes, 0 hours 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x12, 0x00, 0x18, 0x00,
0x00, 0x00, // date part: day 0 (invalid), month 0 (invalid), year 0 (1980) 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0xed, 0x41, 0x00, 0x00, 0x00, 0x00, 0x69, 0x6e,
0x00, 0x00, 0x00, 0x00, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2f,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x12, 0x00, 0x18, 0x00, 0x55, 0x54, 0x05, 0x00, 0x03, 0xf4, 0x5c, 0x88, 0x5a, 0x75, 0x78, 0x0b, 0x00, 0x01, 0x04, 0xe8,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0xed, 0x41, 0x00, 0x00, 0x03, 0x00, 0x00, 0x04, 0x0a, 0x00, 0x00, 0x00, 0x50, 0x4b, 0x05, 0x06, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x69, 0x6e, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x5f, 0x74, 0x69, 0x01, 0x00, 0x01, 0x00, 0x58, 0x00, 0x00, 0x00, 0x4c, 0x00, 0x00, 0x00, 0x00, 0x00,
0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2f, 0x55, 0x54, 0x05, 0x00,
0x03, 0xf4, 0x5c, 0x88, 0x5a, 0x75, 0x78, 0x0b, 0x00, 0x01, 0x04, 0xe8,
0x03, 0x00, 0x00, 0x04, 0x0a, 0x00, 0x00, 0x00, 0x50, 0x4b, 0x05, 0x06,
0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x58, 0x00, 0x00, 0x00,
0x4c, 0x00, 0x00, 0x00, 0x00, 0x00
]; ];
#[test] #[test]
View file
@ -53,10 +53,10 @@
// 22c400260 00 00 50 4b 05 06 00 00 00 00 03 00 03 00 27 01 |..PK..........'.| // 22c400260 00 00 50 4b 05 06 00 00 00 00 03 00 03 00 27 01 |..PK..........'.|
// 22c400270 00 00 ff ff ff ff 00 00 |........| // 22c400270 00 00 ff ff ff ff 00 00 |........|
// 22c400278 // 22c400278
use std::io::{self, Seek, SeekFrom, Read}; use std::io::{self, Read, Seek, SeekFrom};
const BLOCK1_LENGTH : u64 = 0x60; const BLOCK1_LENGTH: u64 = 0x60;
const BLOCK1 : [u8; BLOCK1_LENGTH as usize] = [ const BLOCK1: [u8; BLOCK1_LENGTH as usize] = [
0x50, 0x4b, 0x03, 0x04, 0x2d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1b, 0x6e, 0x51, 0x4d, 0x66, 0x82, 0x50, 0x4b, 0x03, 0x04, 0x2d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1b, 0x6e, 0x51, 0x4d, 0x66, 0x82,
0x13, 0xda, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x08, 0x00, 0x30, 0x00, 0x7a, 0x65, 0x13, 0xda, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x08, 0x00, 0x30, 0x00, 0x7a, 0x65,
0x72, 0x6f, 0x34, 0x34, 0x30, 0x30, 0x55, 0x54, 0x09, 0x00, 0x03, 0xa5, 0x21, 0xc7, 0x5b, 0xdb, 0x72, 0x6f, 0x34, 0x34, 0x30, 0x30, 0x55, 0x54, 0x09, 0x00, 0x03, 0xa5, 0x21, 0xc7, 0x5b, 0xdb,
@ -65,8 +65,8 @@ const BLOCK1 : [u8; BLOCK1_LENGTH as usize] = [
0x00, 0x13, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x13, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
]; ];
const BLOCK2_LENGTH : u64 = 0x50; const BLOCK2_LENGTH: u64 = 0x50;
const BLOCK2 : [u8; BLOCK2_LENGTH as usize] = [ const BLOCK2: [u8; BLOCK2_LENGTH as usize] = [
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x50, 0x4b, 0x03, 0x04, 0x0a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x50, 0x4b, 0x03, 0x04, 0x0a, 0x00, 0x00, 0x00, 0x00, 0x00,
0x2b, 0x6e, 0x51, 0x4d, 0x98, 0x23, 0x28, 0x4b, 0x00, 0x00, 0x40, 0x06, 0x00, 0x00, 0x40, 0x06, 0x2b, 0x6e, 0x51, 0x4d, 0x98, 0x23, 0x28, 0x4b, 0x00, 0x00, 0x40, 0x06, 0x00, 0x00, 0x40, 0x06,
0x07, 0x00, 0x1c, 0x00, 0x7a, 0x65, 0x72, 0x6f, 0x31, 0x30, 0x30, 0x55, 0x54, 0x09, 0x00, 0x03, 0x07, 0x00, 0x1c, 0x00, 0x7a, 0x65, 0x72, 0x6f, 0x31, 0x30, 0x30, 0x55, 0x54, 0x09, 0x00, 0x03,
@ -74,8 +74,8 @@ const BLOCK2 : [u8; BLOCK2_LENGTH as usize] = [
0x00, 0x00, 0x04, 0xe8, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0xe8, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
]; ];
const BLOCK3_LENGTH : u64 = 0x60; const BLOCK3_LENGTH: u64 = 0x60;
const BLOCK3 : [u8; BLOCK3_LENGTH as usize] = [ const BLOCK3: [u8; BLOCK3_LENGTH as usize] = [
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x50, 0x4b, 0x03, 0x04, 0x2d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x50, 0x4b, 0x03, 0x04, 0x2d, 0x00, 0x00, 0x00, 0x00,
0x00, 0x3b, 0x6e, 0x51, 0x4d, 0x66, 0x82, 0x13, 0xda, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x3b, 0x6e, 0x51, 0x4d, 0x66, 0x82, 0x13, 0xda, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0x0a, 0x00, 0x30, 0x00, 0x7a, 0x65, 0x72, 0x6f, 0x34, 0x34, 0x30, 0x30, 0x5f, 0x32, 0x55, 0xff, 0x0a, 0x00, 0x30, 0x00, 0x7a, 0x65, 0x72, 0x6f, 0x34, 0x34, 0x30, 0x30, 0x5f, 0x32, 0x55,
@ -84,8 +84,8 @@ const BLOCK3 : [u8; BLOCK3_LENGTH as usize] = [
0x00, 0x00, 0x13, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x13, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x13, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x13, 0x01, 0x00, 0x00, 0x00, 0x00,
]; ];
const BLOCK4_LENGTH : u64 = 0x198; const BLOCK4_LENGTH: u64 = 0x198;
const BLOCK4 : [u8; BLOCK4_LENGTH as usize] = [ const BLOCK4: [u8; BLOCK4_LENGTH as usize] = [
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x50, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x50,
0x4b, 0x01, 0x02, 0x1e, 0x03, 0x2d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1b, 0x6e, 0x51, 0x4d, 0x66, 0x4b, 0x01, 0x02, 0x1e, 0x03, 0x2d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1b, 0x6e, 0x51, 0x4d, 0x66,
0x82, 0x13, 0xda, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x08, 0x00, 0x2c, 0x00, 0x00, 0x82, 0x13, 0xda, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x08, 0x00, 0x2c, 0x00, 0x00,
@ -114,17 +114,17 @@ const BLOCK4 : [u8; BLOCK4_LENGTH as usize] = [
0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00,
]; ];
const BLOCK1_START : u64 = 0x000000000; const BLOCK1_START: u64 = 0x000000000;
const BLOCK2_START : u64 = 0x113000050; const BLOCK2_START: u64 = 0x113000050;
const BLOCK3_START : u64 = 0x119400090; const BLOCK3_START: u64 = 0x119400090;
const BLOCK4_START : u64 = 0x22c4000e0; const BLOCK4_START: u64 = 0x22c4000e0;
const BLOCK1_END : u64 = BLOCK1_START + BLOCK1_LENGTH - 1; const BLOCK1_END: u64 = BLOCK1_START + BLOCK1_LENGTH - 1;
const BLOCK2_END : u64 = BLOCK2_START + BLOCK2_LENGTH - 1; const BLOCK2_END: u64 = BLOCK2_START + BLOCK2_LENGTH - 1;
const BLOCK3_END : u64 = BLOCK3_START + BLOCK3_LENGTH - 1; const BLOCK3_END: u64 = BLOCK3_START + BLOCK3_LENGTH - 1;
const BLOCK4_END : u64 = BLOCK4_START + BLOCK4_LENGTH - 1; const BLOCK4_END: u64 = BLOCK4_START + BLOCK4_LENGTH - 1;
const TOTAL_LENGTH : u64 = BLOCK4_START + BLOCK4_LENGTH; const TOTAL_LENGTH: u64 = BLOCK4_START + BLOCK4_LENGTH;
struct Zip64File { struct Zip64File {
pointer: u64, pointer: u64,
@ -139,20 +139,22 @@ impl Zip64File {
impl Seek for Zip64File { impl Seek for Zip64File {
fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> { fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
match pos { match pos {
SeekFrom::Start(offset) => { self.pointer = offset; }, SeekFrom::Start(offset) => {
self.pointer = offset;
}
SeekFrom::End(offset) => { SeekFrom::End(offset) => {
if offset > 0 || offset < -(TOTAL_LENGTH as i64) { if offset > 0 || offset < -(TOTAL_LENGTH as i64) {
return Err(io::Error::new(io::ErrorKind::Other, "Invalid seek offset")); return Err(io::Error::new(io::ErrorKind::Other, "Invalid seek offset"));
} }
self.pointer = (TOTAL_LENGTH as i64 + offset) as u64; self.pointer = (TOTAL_LENGTH as i64 + offset) as u64;
}, }
SeekFrom::Current(offset) => { SeekFrom::Current(offset) => {
let seekpos = self.pointer as i64 + offset; let seekpos = self.pointer as i64 + offset;
if seekpos < 0 || seekpos as u64 > TOTAL_LENGTH { if seekpos < 0 || seekpos as u64 > TOTAL_LENGTH {
return Err(io::Error::new(io::ErrorKind::Other, "Invalid seek offset")); return Err(io::Error::new(io::ErrorKind::Other, "Invalid seek offset"));
} }
self.pointer = seekpos as u64; self.pointer = seekpos as u64;
}, }
} }
Ok(self.pointer) Ok(self.pointer)
} }
@ -164,21 +166,21 @@ impl Read for Zip64File {
return Ok(0); return Ok(0);
} }
match self.pointer { match self.pointer {
BLOCK1_START ..= BLOCK1_END => { BLOCK1_START..=BLOCK1_END => {
buf[0] = BLOCK1[(self.pointer - BLOCK1_START) as usize]; buf[0] = BLOCK1[(self.pointer - BLOCK1_START) as usize];
}, }
BLOCK2_START ..= BLOCK2_END => { BLOCK2_START..=BLOCK2_END => {
buf[0] = BLOCK2[(self.pointer - BLOCK2_START) as usize]; buf[0] = BLOCK2[(self.pointer - BLOCK2_START) as usize];
}, }
BLOCK3_START ..= BLOCK3_END => { BLOCK3_START..=BLOCK3_END => {
buf[0] = BLOCK3[(self.pointer - BLOCK3_START) as usize]; buf[0] = BLOCK3[(self.pointer - BLOCK3_START) as usize];
}, }
BLOCK4_START ..= BLOCK4_END => { BLOCK4_START..=BLOCK4_END => {
buf[0] = BLOCK4[(self.pointer - BLOCK4_START) as usize]; buf[0] = BLOCK4[(self.pointer - BLOCK4_START) as usize];
}, }
_ => { _ => {
buf[0] = 0; buf[0] = 0;
}, }
} }
self.pointer += 1; self.pointer += 1;
Ok(1) Ok(1)
@ -194,7 +196,12 @@ fn zip64_large() {
for i in 0..archive.len() { for i in 0..archive.len() {
let mut file = archive.by_index(i).unwrap(); let mut file = archive.by_index(i).unwrap();
let outpath = file.sanitized_name(); let outpath = file.sanitized_name();
println!("Entry {} has name \"{}\" ({} bytes)", i, outpath.as_path().display(), file.size()); println!(
"Entry {} has name \"{}\" ({} bytes)",
i,
outpath.as_path().display(),
file.size()
);
match file.read_exact(&mut buf) { match file.read_exact(&mut buf) {
Ok(()) => println!("The first {} bytes are: {:?}", buf.len(), buf), Ok(()) => println!("The first {} bytes are: {:?}", buf.len(), buf),