Merge remote-tracking branch 'zip-next/master' into deflate64-next

Commit: 4812d77e5b
Author: anatawa12
Date: 2023-10-13 19:58:44 +09:00
Signature: no known key found for this signature in database (GPG key ID: 9CA909848B8E4EA6)

42 changed files with 2250 additions and 979 deletions


@@ -1,7 +1,11 @@
 version: 2
 updates:
 - package-ecosystem: cargo
   directory: "/"
   schedule:
     interval: daily
   open-pull-requests-limit: 10
+- package-ecosystem: "github-actions" # See documentation for possible values
+  directory: "/" # Location of package manifests
+  schedule:
+    interval: "daily"


@@ -3,8 +3,6 @@ name: CI
 on:
   pull_request:
   push:
-    branches:
-      - master
 
 env:
   RUSTFLAGS: -Dwarnings
@@ -16,7 +14,7 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest, macOS-latest, windows-latest]
-        rust: [stable, 1.59.0]
+        rust: [stable, 1.66.0]
 
     steps:
     - uses: actions/checkout@master
@@ -27,23 +25,29 @@ jobs:
         toolchain: ${{ matrix.rust }}
         override: true
-    - name: check
+    - name: Check
       uses: actions-rs/cargo@v1
       with:
         command: check
         args: --all --bins --examples
-    - name: tests
+    - name: Tests
       uses: actions-rs/cargo@v1
       with:
         command: test
         args: --all
+    - name: Tests (no features)
+      uses: actions-rs/cargo@v1
+      with:
+        command: test
+        args: --all --no-default-features
 
   clippy:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v3
     - uses: actions-rs/toolchain@v1
       with:
@@ -73,13 +77,13 @@ jobs:
       run: cargo fmt --all -- --check
     - name: Docs
-      run: cargo doc
+      run: cargo doc --no-deps
 
-  fuzz:
+  fuzz_read:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v3
     - uses: actions-rs/toolchain@v1
       with:
         profile: minimal
@@ -90,3 +94,65 @@ jobs:
     - name: compile fuzz
       run: |
         cargo fuzz build fuzz_read
- name: run fuzz
run: |
cargo fuzz run fuzz_read -- -timeout=1s -jobs=100 -workers=2 -runs=1000000 -max_len=5000000000
- name: Upload any failure inputs
if: always()
uses: actions/upload-artifact@v3
with:
name: fuzz_read_bad_inputs
path: fuzz/artifacts/fuzz_read/crash-*
if-no-files-found: ignore
fuzz_write:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: nightly
override: true
- run: cargo install cargo-fuzz
- name: compile fuzz
run: |
cargo fuzz build fuzz_write
- name: run fuzz
run: |
cargo fuzz run fuzz_write -- -timeout=5s -jobs=100 -workers=2 -runs=10000 -max_len=5000000000
- name: Upload any failure inputs
if: always()
uses: actions/upload-artifact@v3
with:
name: fuzz_write_bad_inputs
path: fuzz/artifacts/fuzz_write/crash-*
if-no-files-found: ignore
fuzz_write_with_no_features:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: nightly
override: true
- run: cargo install cargo-fuzz
- name: compile fuzz
run: |
cargo fuzz build --no-default-features fuzz_write
- name: run fuzz
run: |
cargo fuzz run fuzz_write -- -timeout=5s -jobs=100 -workers=2 -runs=10000 -max_len=5000000000
- name: Upload any failure inputs
if: always()
uses: actions/upload-artifact@v3
with:
name: fuzz_write_bad_inputs
path: fuzz/artifacts/fuzz_write/crash-*
if-no-files-found: ignore


@ -0,0 +1,27 @@
name: Dependabot auto-approve and auto-merge
on: pull_request
permissions:
contents: write
pull-requests: write
jobs:
dependabot:
runs-on: ubuntu-latest
if: ${{ github.actor == 'dependabot[bot]' }}
steps:
- name: Dependabot metadata
id: metadata
uses: dependabot/fetch-metadata@v1.6.0
with:
github-token: "${{ secrets.GITHUB_TOKEN }}"
- name: Approve
run: gh pr review --approve "$PR_URL"
env:
PR_URL: ${{github.event.pull_request.html_url}}
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
- name: Enable auto-merge
run: gh pr merge --auto --merge "$PR_URL"
env:
PR_URL: ${{github.event.pull_request.html_url}}
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}

.whitesource (new file)

@ -0,0 +1,14 @@
{
"scanSettings": {
"baseBranches": []
},
"checkRunSettings": {
"vulnerableCheckRunConclusionLevel": "failure",
"displayMode": "diff",
"useMendCheckNames": true
},
"issueSettings": {
"minSeverityLevel": "LOW",
"issueType": "DEPENDENCY"
}
}


@@ -1,19 +1,230 @@
 # Changelog
-## [0.6.6]
-### Changed
-- Updated `aes` dependency to `0.8.2` (https://github.com/zip-rs/zip/pull/354)
-## [0.6.5]
-### Changed
-- Added experimental [`zip::unstable::write::FileOptions::with_deprecated_encryption`] API to enable encrypting files with PKWARE encryption.
 ## [0.6.4]
 ### Changed
 - [#333](https://github.com/zip-rs/zip/pull/333): disabled the default features of the `time` dependency, and also `formatting` and `macros`, as they were enabled by mistake.
 - Deprecated [`DateTime::from_time`](https://docs.rs/zip/0.6/zip/struct.DateTime.html#method.from_time) in favor of [`DateTime::try_from`](https://docs.rs/zip/0.6/zip/struct.DateTime.html#impl-TryFrom-for-DateTime)
## [0.6.5]
### Added
- `shallow_copy_file` method: copy a file from within the ZipWriter
## [0.6.6]
### Fixed
- Unused flag `#![feature(read_buf)]` was breaking compatibility with stable compiler.
### Changed
- Updated dependency versions.
## [0.6.7]
### Added
- `deep_copy_file` method: more standards-compliant way to copy a file from within the ZipWriter
## [0.6.8]
### Added
- Detects duplicate filenames.
### Fixed
- `deep_copy_file` could set incorrect Unix permissions.
- `deep_copy_file` could handle files incorrectly if their compressed size was u32::MAX bytes or less but their
uncompressed size was not.
- Documented that `deep_copy_file` does not copy a directory's contents.
### Changed
- Improved performance of `deep_copy_file` by using a HashMap and eliminating a redundant search.
## [0.6.9]
### Fixed
- Fixed an issue that prevented `ZipWriter` from implementing `Send`.
## [0.6.10]
### Changed
- Updated dependency versions.
## [0.6.11]
### Fixed
- Fixed a bug that could cause later writes to fail after a `deep_copy_file` call.
## [0.6.12]
### Fixed
- Fixed a Clippy warning that was missed during the last release.
## [0.6.13]
### Fixed
- Fixed a possible bug in deep_copy_file.
## [0.7.0]
### Fixed
- Calling `start_file` with invalid parameters no longer closes the `ZipWriter`.
- Attempting to write a 4GiB file without calling `FileOptions::large_file(true)` now removes the file from the archive
but does not close the `ZipWriter`.
- Attempting to write a file with an unrepresentable or invalid last-modified date will instead add it with a date of
1980-01-01 00:00:00.
### Added
- Method `is_writing_file` - indicates whether a file is open for writing.
## [0.7.1]
### Changed
- Bumped the version number in order to upload an updated README to crates.io.
## [0.7.2]
### Added
- Method `abort_file` - removes the current or most recently-finished file from the archive.
### Fixed
- Fixed a bug where a file could remain open for writing after validations failed.
## [0.7.3]
### Fixed
- Fixed a bug that occurs when a filename in a ZIP32 file includes the ZIP64 magic bytes.
## [0.7.4]
### Merged from upstream
- Added experimental [`zip_next::unstable::write::FileOptions::with_deprecated_encryption`] API to enable encrypting
files with PKWARE encryption.
## [0.7.5]
### Fixed
- Fixed a bug that occurs when ZIP64 magic bytes occur twice in a filename or across two filenames.
## [0.8.0]
### Deleted
- Methods `start_file_aligned`, `start_file_with_extra_data`, `end_local_start_central_extra_data` and
`end_extra_data` (see below).
### Changed
- Alignment and extra-data fields are now attributes of [`zip_next::unstable::write::FileOptions`], allowing them to be
specified for `add_directory` and `add_symlink`.
- Extra-data fields are now formatted by the `FileOptions` method `add_extra_data`.
- Improved performance, especially for `shallow_copy_file` and `deep_copy_file` on files with extra data.
### Fixed
- Fixes a rare bug where the size of the extra-data field could overflow when `large_file` was set.
- Fixes more cases of a bug when ZIP64 magic bytes occur in filenames.
## [0.8.1]
### Fixed
- `ZipWriter` now once again implements `Send` if the underlying writer does.
## [0.8.2]
### Fixed
- Fixed an issue where code might spuriously fail during write fuzzing.
### Added
- New method `with_alignment` on `FileOptions`.
## [0.8.3]
### Merged from upstream
- Uses the `aes::cipher::KeyInit` trait from `aes` 0.8.2 where appropriate.
### Fixed
- Calling `abort_file()` no longer corrupts the archive if called on a
shallow copy of a remaining file, or on an archive whose CDR entries are out
of sequence. However, it may leave an unused entry in the archive.
- Calling `abort_file()` while writing a ZipCrypto-encrypted file no longer
causes a crash.
- Calling `abort_file()` on the last file before `finish()` no longer produces
an invalid ZIP file or garbage in the comment.
### Added
- `ZipWriter` methods `get_comment()` and `get_raw_comment()`.
## [0.9.0]
### Added
- `flush_on_finish_file` parameter for `ZipWriter`.
## [0.9.1]
### Added
- Zopfli for aggressive Deflate compression.
## [0.9.2]
### Added
- `zlib-ng` for fast Deflate compression. This is now the default for compression levels 0-9.
- `chrono` to convert zip_next::DateTime to and from chrono::NaiveDateTime
## [0.10.0]
### Changed
- Replaces the `flush_on_finish_file` parameter of `ZipWriter::new` and `ZipWriter::Append` with
a `set_flush_on_finish_file` method.
### Fixed
- Fixes build errors that occur when all default features are disabled.
- Fixes more cases of a bug when ZIP64 magic bytes occur in filenames.
## [0.10.1]
### Changed
- Date and time conversion methods now return `DateTimeRangeError` rather than `()` on error.
## [0.10.2]
### Changed
- Where possible, methods are now `const`. This improves performance, especially when reading.
## [0.10.3]
### Changed
- Updated dependencies.
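
The `ZipWriter` methods introduced across these releases (copying, aborting, and raw-comment handling) can be combined roughly as in the following sketch. This is a hedged illustration based only on the method names and call shapes visible elsewhere in this commit (notably the fuzz_write target later in the diff), not on upstream documentation; the file names are made up.

```rust
use std::io::{Cursor, Write};
use zip_next::write::FileOptions;
use zip_next::ZipWriter;

fn demo() -> zip_next::result::ZipResult<()> {
    // In-memory archive; any Read + Write + Seek target works the same way.
    let mut writer = ZipWriter::new(Cursor::new(Vec::new()));
    writer.set_raw_comment(b"demo archive".to_vec());

    writer.start_file("original.txt", FileOptions::default())?;
    writer.write_all(b"hello")?;

    // shallow_copy_file adds a second central-directory entry pointing at the
    // same data; deep_copy_file writes a fresh copy of the bytes.
    writer.shallow_copy_file("original.txt", "alias.txt")?;
    writer.deep_copy_file("original.txt", "copy.txt")?;

    // abort_file drops the most recently started file without closing the writer.
    writer.start_file("mistake.txt", FileOptions::default())?;
    writer.abort_file()?;

    let _bytes = writer.finish()?; // Cursor containing the finished archive
    Ok(())
}
```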


@@ -1,46 +1,55 @@
 [package]
-name = "zip"
+name = "zip_next"
-version = "0.6.6"
+version = "0.10.3"
-authors = ["Mathijs van de Nes <git@mathijs.vd-nes.nl>", "Marli Frost <marli@frost.red>", "Ryan Levick <ryan.levick@gmail.com>"]
+authors = ["Mathijs van de Nes <git@mathijs.vd-nes.nl>", "Marli Frost <marli@frost.red>", "Ryan Levick <ryan.levick@gmail.com>",
+"Chris Hennick <hennickc@amazon.com>"]
 license = "MIT"
-repository = "https://github.com/zip-rs/zip.git"
+repository = "https://github.com/Pr0methean/zip-next.git"
 keywords = ["zip", "archive"]
 description = """
+rust-version = "1.66.0"
 Library to support the reading and writing of zip files.
 """
 edition = "2021"
-rust-version = "1.59.0"
 
 [dependencies]
-aes = { version = "0.8.2", optional = true }
+aes = { version = "0.8.3", optional = true }
 byteorder = "1.4.3"
-bzip2 = { version = "0.4.3", optional = true }
+bzip2 = { version = "0.4.4", optional = true }
-constant_time_eq = { version = "0.1.5", optional = true }
+chrono = { version = "0.4.26", optional = true }
+constant_time_eq = { version = "0.3.0", optional = true }
 crc32fast = "1.3.2"
-flate2 = { version = "1.0.23", default-features = false, optional = true }
+flate2 = { version = "1.0.26", default-features = false, optional = true }
 hmac = { version = "0.12.1", optional = true, features = ["reset"] }
-pbkdf2 = {version = "0.11.0", optional = true }
+pbkdf2 = {version = "0.12.1", optional = true }
-sha1 = {version = "0.10.1", optional = true }
+sha1 = {version = "0.10.5", optional = true }
-time = { version = "0.3.7", optional = true, default-features = false, features = ["std"] }
+time = { version = "0.3.22", optional = true, default-features = false, features = ["std"] }
-zstd = { version = "0.11.2", optional = true }
+zstd = { version = "0.12.3", optional = true, default-features = false }
-deflate64 = { version = "0.1.4", optional = true }
+zopfli = { version = "0.7.4", optional = true }
+deflate64 = { version = "0.1.5", optional = true }
 
 [target.'cfg(any(all(target_arch = "arm", target_pointer_width = "32"), target_arch = "mips", target_arch = "powerpc"))'.dependencies]
-crossbeam-utils = "0.8.8"
+crossbeam-utils = "0.8.16"
 
+[target.'cfg(fuzzing)'.dependencies]
+arbitrary = { version = "1.3.0", features = ["derive"] }
 
 [dev-dependencies]
 bencher = "0.1.5"
-getrandom = "0.2.5"
+getrandom = { version = "0.2.10", features = ["js"] }
-walkdir = "2.3.2"
+walkdir = "2.3.3"
-time = { version = "0.3.7", features = ["formatting", "macros"] }
+time = { version = "0.3.22", features = ["formatting", "macros"] }
 
 [features]
 aes-crypto = [ "aes", "constant_time_eq", "hmac", "pbkdf2", "sha1" ]
+chrono = ["chrono/default"]
 deflate = ["flate2/rust_backend"]
 deflate-miniz = ["flate2/default"]
 deflate-zlib = ["flate2/zlib"]
+deflate-zlib-ng = ["flate2/zlib-ng"]
+deflate-zopfli = ["zopfli"]
 unreserved = []
-default = ["aes-crypto", "bzip2", "deflate", "time", "zstd"]
+default = ["aes-crypto", "bzip2", "deflate", "deflate-zlib-ng", "deflate-zopfli", "time", "zstd"]
 
 [[bench]]
 name = "read_entry"


@@ -1,17 +1,17 @@
-zip-rs
+zip_next
-======
+========
-[![Build Status](https://img.shields.io/github/workflow/status/zip-rs/zip/CI)](https://github.com/zip-rs/zip/actions?query=branch%3Amaster+workflow%3ACI)
+[![Build Status](https://github.com/Pr0methean/zip-next/actions/workflows/ci.yaml/badge.svg)](https://github.com/Pr0methean/zip-next/actions?query=branch%3Amaster+workflow%3ACI)
-[![Crates.io version](https://img.shields.io/crates/v/zip.svg)](https://crates.io/crates/zip)
+[![Crates.io version](https://img.shields.io/crates/v/zip_next.svg)](https://crates.io/crates/zip_next)
-[![Discord](https://badgen.net/badge/icon/discord?icon=discord&label)](https://discord.gg/rQ7H9cSsF4)
 
-[Documentation](https://docs.rs/zip/0.6.3/zip/)
+[Documentation](https://docs.rs/zip_next/0.10.1/zip_next/)
 
 Info
 ----
-A zip library for rust which supports reading and writing of simple ZIP files.
+A zip library for rust which supports reading and writing of simple ZIP files. Forked from https://crates.io/crates/zip
+to add more features and improve test coverage.
 
 Supported compression formats:

@@ -33,31 +33,38 @@ With all default features:
 ```toml
 [dependencies]
-zip = "0.6"
+zip_next = "0.10.3"
 ```
 
 Without the default features:
 
 ```toml
 [dependencies]
-zip = { version = "0.6.6", default-features = false }
+zip_next = { version = "0.10.3", default-features = false }
 ```
 
 The features available are:
 
 * `aes-crypto`: Enables decryption of files which were encrypted with AES. Supports AE-1 and AE-2 methods.
-* `deflate`: Enables the deflate compression algorithm, which is the default for zip files.
+* `deflate`: Enables decompressing the deflate compression algorithm, which is the default for zip files.
+* `deflate-miniz`: Enables deflating files with the `miniz_oxide` library (used when compression quality is 0..=9).
+* `deflate-zlib`: Enables deflating files with the `zlib` library (used when compression quality is 0..=9).
+* `deflate-zlib-ng`: Enables deflating files with the `zlib-ng` library (used when compression quality is 0..=9).
+  This is the fastest `deflate` implementation available.
+* `deflate-zopfli`: Enables deflating files with the `zopfli` library (used when compression quality is 10..=264). This
+  is the most effective `deflate` implementation available.
 * `deflate64`: Enables the deflate64 compression algorithm. Decompression is only supported.
 * `bzip2`: Enables the BZip2 compression algorithm.
 * `time`: Enables features using the [time](https://github.com/rust-lang-deprecated/time) crate.
+* `chrono`: Enables converting last-modified `zip_next::DateTime` to and from `chrono::NaiveDateTime`.
 * `zstd`: Enables the Zstandard compression algorithm.
 
-All of these are enabled by default.
+By default `aes-crypto`, `deflate`, `deflate-zlib-ng`, `deflate-zopfli`, `bzip2`, `time` and `zstd` are enabled.
 
 MSRV
 ----
-Our current Minimum Supported Rust Version is **1.59.0**. When adding features,
+Our current Minimum Supported Rust Version is **1.66.0**. When adding features,
 we will follow these guidelines:
 
 - We will always support the latest four minor Rust versions. This gives you a 6

@@ -95,3 +102,9 @@ To start fuzzing zip extraction:
 ```bash
 cargo +nightly fuzz run fuzz_read
 ```
+
+To start fuzzing zip creation:
+```bash
+cargo +nightly fuzz run fuzz_write
+```
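
Given the feature list above, the compression quality selects the Deflate backend at run time (0..=9 goes to zlib-ng/zlib/miniz, 10..=264 goes to zopfli). The sketch below is a hedged illustration of what that looks like from the API side; it assumes a `FileOptions::compression_level` builder method (present in upstream `zip` 0.6 and implied by the "compression quality" ranges quoted above), and the file names are made up.

```rust
use std::io::{Seek, Write};
use zip_next::write::FileOptions;
use zip_next::{CompressionMethod, ZipWriter};

fn write_with_levels(sink: impl Write + Seek) -> zip_next::result::ZipResult<()> {
    let mut zip = ZipWriter::new(sink);

    // Quality 0..=9 is handled by zlib-ng (or miniz/zlib) when those features are on.
    let fast = FileOptions::default()
        .compression_method(CompressionMethod::Deflated)
        .compression_level(Some(6));
    zip.start_file("fast.txt", fast)?;
    zip.write_all(b"compressed quickly")?;

    // Quality 10..=264 is handed to zopfli when `deflate-zopfli` is enabled.
    let small = FileOptions::default()
        .compression_method(CompressionMethod::Deflated)
        .compression_level(Some(24));
    zip.start_file("small.txt", small)?;
    zip.write_all(b"compressed harder")?;

    zip.finish()?;
    Ok(())
}
```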


@@ -4,13 +4,13 @@ use std::io::{Cursor, Read, Write};
 use bencher::Bencher;
 use getrandom::getrandom;
-use zip::{ZipArchive, ZipWriter};
+use zip_next::{ZipArchive, ZipWriter};
 
 fn generate_random_archive(size: usize) -> Vec<u8> {
     let data = Vec::new();
     let mut writer = ZipWriter::new(Cursor::new(data));
-    let options =
-        zip::write::FileOptions::default().compression_method(zip::CompressionMethod::Stored);
+    let options = zip_next::write::FileOptions::default()
+        .compression_method(zip_next::CompressionMethod::Stored);
 
     writer.start_file("random.dat", options).unwrap();
     let mut bytes = vec![0u8; size];


@@ -3,7 +3,8 @@ use bencher::{benchmark_group, benchmark_main};
 use std::io::{Cursor, Write};
 
 use bencher::Bencher;
-use zip::{ZipArchive, ZipWriter};
+use zip_next::write::FileOptions;
+use zip_next::{CompressionMethod, ZipArchive, ZipWriter};
 
 const FILE_COUNT: usize = 15_000;
 const FILE_SIZE: usize = 1024;

@@ -11,14 +12,13 @@ const FILE_SIZE: usize = 1024;
 fn generate_random_archive(count_files: usize, file_size: usize) -> Vec<u8> {
     let data = Vec::new();
     let mut writer = ZipWriter::new(Cursor::new(data));
-    let options =
-        zip::write::FileOptions::default().compression_method(zip::CompressionMethod::Stored);
+    let options = FileOptions::default().compression_method(CompressionMethod::Stored);
 
     let bytes = vec![0u8; file_size];
 
     for i in 0..count_files {
         let name = format!("file_deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef_{i}.dat");
-        writer.start_file(name, options).unwrap();
+        writer.start_file(name, options.clone()).unwrap();
         writer.write_all(&bytes).unwrap();
     }


@@ -14,7 +14,7 @@ fn real_main() -> i32 {
     let fname = std::path::Path::new(&*args[1]);
     let file = fs::File::open(fname).unwrap();
 
-    let mut archive = zip::ZipArchive::new(file).unwrap();
+    let mut archive = zip_next::ZipArchive::new(file).unwrap();
 
     for i in 0..archive.len() {
         let mut file = archive.by_index(i).unwrap();


@@ -13,7 +13,7 @@ fn real_main() -> i32 {
     let fname = std::path::Path::new(&*args[1]);
     let zipfile = std::fs::File::open(fname).unwrap();
 
-    let mut archive = zip::ZipArchive::new(zipfile).unwrap();
+    let mut archive = zip_next::ZipArchive::new(zipfile).unwrap();
 
     let mut file = match archive.by_name("test/lorem_ipsum.txt") {
         Ok(file) => file,


@@ -15,7 +15,7 @@ fn real_main() -> i32 {
     let file = fs::File::open(fname).unwrap();
     let reader = BufReader::new(file);
 
-    let mut archive = zip::ZipArchive::new(reader).unwrap();
+    let mut archive = zip_next::ZipArchive::new(reader).unwrap();
 
     for i in 0..archive.len() {
         let file = archive.by_index(i).unwrap();


@@ -10,7 +10,7 @@ fn real_main() -> i32 {
     let mut buf = [0u8; 16];
 
     loop {
-        match zip::read::read_zipfile_from_stream(&mut stdin_handle) {
+        match zip_next::read::read_zipfile_from_stream(&mut stdin_handle) {
             Ok(Some(mut file)) => {
                 println!(
                     "{}: {} bytes ({} bytes packed)",


@@ -1,8 +1,8 @@
 use std::io::prelude::*;
 use std::io::{Seek, Write};
 use std::iter::Iterator;
-use zip::result::ZipError;
-use zip::write::FileOptions;
+use zip_next::result::ZipError;
+use zip_next::write::FileOptions;
 
 use std::fs::File;
 use std::path::Path;
@ -12,30 +12,35 @@ fn main() {
std::process::exit(real_main()); std::process::exit(real_main());
} }
const METHOD_STORED: Option<zip::CompressionMethod> = Some(zip::CompressionMethod::Stored); const METHOD_STORED: Option<zip_next::CompressionMethod> =
Some(zip_next::CompressionMethod::Stored);
#[cfg(any( #[cfg(any(
feature = "deflate", feature = "deflate",
feature = "deflate-miniz", feature = "deflate-miniz",
feature = "deflate-zlib" feature = "deflate-zlib",
feature = "deflate-zlib-ng"
))] ))]
const METHOD_DEFLATED: Option<zip::CompressionMethod> = Some(zip::CompressionMethod::Deflated); const METHOD_DEFLATED: Option<zip_next::CompressionMethod> =
Some(zip_next::CompressionMethod::Deflated);
#[cfg(not(any( #[cfg(not(any(
feature = "deflate", feature = "deflate",
feature = "deflate-miniz", feature = "deflate-miniz",
feature = "deflate-zlib" feature = "deflate-zlib",
feature = "deflate-zlib-ng",
feature = "deflate-zopfli"
)))] )))]
const METHOD_DEFLATED: Option<zip::CompressionMethod> = None; const METHOD_DEFLATED: Option<zip_next::CompressionMethod> = None;
#[cfg(feature = "bzip2")] #[cfg(feature = "bzip2")]
const METHOD_BZIP2: Option<zip::CompressionMethod> = Some(zip::CompressionMethod::Bzip2); const METHOD_BZIP2: Option<zip_next::CompressionMethod> = Some(zip_next::CompressionMethod::Bzip2);
#[cfg(not(feature = "bzip2"))] #[cfg(not(feature = "bzip2"))]
const METHOD_BZIP2: Option<zip::CompressionMethod> = None; const METHOD_BZIP2: Option<zip_next::CompressionMethod> = None;
#[cfg(feature = "zstd")] #[cfg(feature = "zstd")]
const METHOD_ZSTD: Option<zip::CompressionMethod> = Some(zip::CompressionMethod::Zstd); const METHOD_ZSTD: Option<zip_next::CompressionMethod> = Some(zip_next::CompressionMethod::Zstd);
#[cfg(not(feature = "zstd"))] #[cfg(not(feature = "zstd"))]
const METHOD_ZSTD: Option<zip::CompressionMethod> = None; const METHOD_ZSTD: Option<zip_next::CompressionMethod> = None;
fn real_main() -> i32 { fn real_main() -> i32 {
let args: Vec<_> = std::env::args().collect(); let args: Vec<_> = std::env::args().collect();
@ -66,12 +71,12 @@ fn zip_dir<T>(
it: &mut dyn Iterator<Item = DirEntry>, it: &mut dyn Iterator<Item = DirEntry>,
prefix: &str, prefix: &str,
writer: T, writer: T,
method: zip::CompressionMethod, method: zip_next::CompressionMethod,
) -> zip::result::ZipResult<()> ) -> zip_next::result::ZipResult<()>
where where
T: Write + Seek, T: Write + Seek,
{ {
let mut zip = zip::ZipWriter::new(writer); let mut zip = zip_next::ZipWriter::new(writer);
let options = FileOptions::default() let options = FileOptions::default()
.compression_method(method) .compression_method(method)
.unix_permissions(0o755); .unix_permissions(0o755);
@ -86,7 +91,7 @@ where
if path.is_file() { if path.is_file() {
println!("adding file {path:?} as {name:?} ..."); println!("adding file {path:?} as {name:?} ...");
#[allow(deprecated)] #[allow(deprecated)]
zip.start_file_from_path(name, options)?; zip.start_file_from_path(name, options.clone())?;
let mut f = File::open(path)?; let mut f = File::open(path)?;
f.read_to_end(&mut buffer)?; f.read_to_end(&mut buffer)?;
@ -97,18 +102,18 @@ where
// and mapname conversion failed error on unzip // and mapname conversion failed error on unzip
println!("adding dir {path:?} as {name:?} ..."); println!("adding dir {path:?} as {name:?} ...");
#[allow(deprecated)] #[allow(deprecated)]
zip.add_directory_from_path(name, options)?; zip.add_directory_from_path(name, options.clone())?;
} }
} }
zip.finish()?; zip.finish()?;
Result::Ok(()) Ok(())
} }
fn doit( fn doit(
src_dir: &str, src_dir: &str,
dst_file: &str, dst_file: &str,
method: zip::CompressionMethod, method: zip_next::CompressionMethod,
) -> zip::result::ZipResult<()> { ) -> zip_next::result::ZipResult<()> {
if !Path::new(src_dir).is_dir() { if !Path::new(src_dir).is_dir() {
return Err(ZipError::FileNotFound); return Err(ZipError::FileNotFound);
} }


@@ -1,5 +1,5 @@
 use std::io::prelude::*;
-use zip::write::FileOptions;
+use zip_next::write::FileOptions;
 
 fn main() {
     std::process::exit(real_main());

@@ -21,16 +21,16 @@ fn real_main() -> i32 {
     0
 }
 
-fn doit(filename: &str) -> zip::result::ZipResult<()> {
+fn doit(filename: &str) -> zip_next::result::ZipResult<()> {
     let path = std::path::Path::new(filename);
     let file = std::fs::File::create(path).unwrap();
 
-    let mut zip = zip::ZipWriter::new(file);
+    let mut zip = zip_next::ZipWriter::new(file);
 
     zip.add_directory("test/", Default::default())?;
 
     let options = FileOptions::default()
-        .compression_method(zip::CompressionMethod::Stored)
+        .compression_method(zip_next::CompressionMethod::Stored)
         .unix_permissions(0o755);
     zip.start_file("test/☃.txt", options)?;
     zip.write_all(b"Hello, World!\n")?;


@@ -10,8 +10,9 @@ cargo-fuzz = true
 [dependencies]
 libfuzzer-sys = "0.4"
+arbitrary = { version = "1.3.0", features = ["derive"] }
 
-[dependencies.zip]
+[dependencies.zip_next]
 path = ".."
 
 # Prevent this from interfering with workspaces

@@ -23,3 +24,9 @@ name = "fuzz_read"
 path = "fuzz_targets/fuzz_read.rs"
 test = false
 doc = false
+
+[[bin]]
+name = "fuzz_write"
+path = "fuzz_targets/fuzz_write.rs"
+test = false
+doc = false


@@ -3,7 +3,7 @@ use libfuzzer_sys::fuzz_target;
 fn decompress_all(data: &[u8]) -> Result<(), Box<dyn std::error::Error>> {
     let reader = std::io::Cursor::new(data);
 
-    let mut zip = zip::ZipArchive::new(reader)?;
+    let mut zip = zip_next::ZipArchive::new(reader)?;
 
     for i in 0..zip.len() {
         let mut file = zip.by_index(i)?;


@ -0,0 +1,104 @@
#![no_main]
use std::cell::RefCell;
use libfuzzer_sys::fuzz_target;
use arbitrary::Arbitrary;
use std::io::{Cursor, Read, Seek, Write};
use std::path::{PathBuf};
#[derive(Arbitrary,Clone,Debug)]
pub enum BasicFileOperation {
WriteNormalFile {
contents: Vec<Vec<u8>>,
options: zip_next::write::FileOptions,
},
WriteDirectory(zip_next::write::FileOptions),
WriteSymlinkWithTarget {
target: Box<PathBuf>,
options: zip_next::write::FileOptions,
},
ShallowCopy(Box<FileOperation>),
DeepCopy(Box<FileOperation>),
}
#[derive(Arbitrary,Clone,Debug)]
pub struct FileOperation {
basic: BasicFileOperation,
name: String,
reopen: bool,
// 'abort' flag is separate, to prevent trying to copy an aborted file
}
#[derive(Arbitrary,Clone,Debug)]
pub struct FuzzTestCase {
comment: Vec<u8>,
operations: Vec<(FileOperation, bool)>,
flush_on_finish_file: bool,
}
impl FileOperation {
fn referenceable_name(&self) -> String {
if let BasicFileOperation::WriteDirectory(_) = self.basic {
if !self.name.ends_with('\\') && !self.name.ends_with('/') {
return self.name.to_owned() + "/";
}
}
self.name.to_owned()
}
}
fn do_operation<T>(writer: &mut RefCell<zip_next::ZipWriter<T>>,
operation: FileOperation,
abort: bool, flush_on_finish_file: bool) -> Result<(), Box<dyn std::error::Error>>
where T: Read + Write + Seek {
writer.borrow_mut().set_flush_on_finish_file(flush_on_finish_file);
let name = operation.name;
match operation.basic {
BasicFileOperation::WriteNormalFile {contents, mut options, ..} => {
let uncompressed_size = contents.iter().map(Vec::len).sum::<usize>();
if uncompressed_size >= u32::MAX as usize {
options = options.large_file(true);
}
writer.borrow_mut().start_file(name, options)?;
for chunk in contents {
writer.borrow_mut().write_all(chunk.as_slice())?;
}
}
BasicFileOperation::WriteDirectory(options) => {
writer.borrow_mut().add_directory(name, options)?;
}
BasicFileOperation::WriteSymlinkWithTarget {target, options} => {
writer.borrow_mut().add_symlink(name, target.to_string_lossy(), options)?;
}
BasicFileOperation::ShallowCopy(base) => {
let base_name = base.referenceable_name();
do_operation(writer, *base, false, flush_on_finish_file)?;
writer.borrow_mut().shallow_copy_file(&base_name, &name)?;
}
BasicFileOperation::DeepCopy(base) => {
let base_name = base.referenceable_name();
do_operation(writer, *base, false, flush_on_finish_file)?;
writer.borrow_mut().deep_copy_file(&base_name, &name)?;
}
}
if abort {
writer.borrow_mut().abort_file().unwrap();
}
if operation.reopen {
let old_comment = writer.borrow().get_raw_comment().to_owned();
let new_writer = zip_next::ZipWriter::new_append(
writer.borrow_mut().finish().unwrap()).unwrap();
assert_eq!(&old_comment, new_writer.get_raw_comment());
*writer = new_writer.into();
}
Ok(())
}
fuzz_target!(|test_case: FuzzTestCase| {
let mut writer = RefCell::new(zip_next::ZipWriter::new(Cursor::new(Vec::new())));
writer.borrow_mut().set_raw_comment(test_case.comment);
for (operation, abort) in test_case.operations {
let _ = do_operation(&mut writer, operation, abort, test_case.flush_on_finish_file);
}
let _ = zip_next::ZipArchive::new(writer.borrow_mut().finish().unwrap());
});
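
Outside the fuzz harness, the `reopen` branch above corresponds to the append round-trip sketched below. This is a hedged standalone illustration that reuses only calls appearing in this target (`finish`, `new_append`, and the raw-comment accessors); the file names and comment bytes are made up.

```rust
use std::io::Cursor;
use zip_next::write::FileOptions;
use zip_next::ZipWriter;

fn reopen_for_append() -> zip_next::result::ZipResult<()> {
    let mut writer = ZipWriter::new(Cursor::new(Vec::new()));
    writer.set_raw_comment(b"keep me".to_vec());
    writer.start_file("first.txt", FileOptions::default())?;

    // finish() hands back the underlying Cursor; new_append reopens it in place.
    let cursor = writer.finish()?;
    let mut writer = ZipWriter::new_append(cursor)?;
    assert_eq!(writer.get_raw_comment(), b"keep me");

    writer.start_file("second.txt", FileOptions::default())?;
    writer.finish()?;
    Ok(())
}
```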


@@ -9,7 +9,7 @@ use crate::types::AesMode;
 use constant_time_eq::constant_time_eq;
 use hmac::{Hmac, Mac};
 use sha1::Sha1;
-use std::io::{self, Read};
+use std::io::{self, Error, ErrorKind, Read};
 
 /// The length of the password verifcation value in bytes
 const PWD_VERIFY_LENGTH: usize = 2;

@@ -45,7 +45,7 @@ pub struct AesReader<R> {
 }
 
 impl<R: Read> AesReader<R> {
-    pub fn new(reader: R, aes_mode: AesMode, compressed_size: u64) -> AesReader<R> {
+    pub const fn new(reader: R, aes_mode: AesMode, compressed_size: u64) -> AesReader<R> {
         let data_length = compressed_size
             - (PWD_VERIFY_LENGTH + AUTH_CODE_LENGTH + aes_mode.salt_length()) as u64;

@@ -84,7 +84,8 @@ impl<R: Read> AesReader<R> {
         let mut derived_key: Vec<u8> = vec![0; derived_key_len];
 
         // use PBKDF2 with HMAC-Sha1 to derive the key
-        pbkdf2::pbkdf2::<Hmac<Sha1>>(password, &salt, ITERATION_COUNT, &mut derived_key);
+        pbkdf2::pbkdf2::<Hmac<Sha1>>(password, &salt, ITERATION_COUNT, &mut derived_key)
+            .map_err(|e| Error::new(ErrorKind::InvalidInput, e))?;
         let decrypt_key = &derived_key[0..key_length];
         let hmac_key = &derived_key[key_length..key_length * 2];
         let pwd_verify = &derived_key[derived_key_len - 2..];

@@ -165,8 +166,8 @@ impl<R: Read> Read for AesReaderValid<R> {
         // use constant time comparison to mitigate timing attacks
         if !constant_time_eq(computed_auth_code, &read_auth_code) {
             return Err(
-                io::Error::new(
-                    io::ErrorKind::InvalidData,
+                Error::new(
+                    ErrorKind::InvalidData,
                     "Invalid authentication code, this could be due to an invalid password or errors in the data"
                 )
             );


@@ -5,7 +5,6 @@
 //! See [AesCtrZipKeyStream] for more information.
 
 use aes::cipher::generic_array::GenericArray;
-// use aes::{BlockEncrypt, NewBlockCipher};
 use aes::cipher::{BlockEncrypt, KeyInit};
 use byteorder::WriteBytesExt;
 use std::{any, fmt};

@@ -28,7 +27,7 @@ pub trait AesKind {
     /// Key type.
     type Key: AsRef<[u8]>;
     /// Cipher used to decrypt.
-    type Cipher;
+    type Cipher: KeyInit;
 }
 
 impl AesKind for Aes128 {


@ -11,6 +11,7 @@ use std::fmt;
/// When creating ZIP files, you may choose the method to use with /// When creating ZIP files, you may choose the method to use with
/// [`crate::write::FileOptions::compression_method`] /// [`crate::write::FileOptions::compression_method`]
#[derive(Copy, Clone, PartialEq, Eq, Debug)] #[derive(Copy, Clone, PartialEq, Eq, Debug)]
#[cfg_attr(fuzzing, derive(arbitrary::Arbitrary))]
#[non_exhaustive] #[non_exhaustive]
pub enum CompressionMethod { pub enum CompressionMethod {
/// Store the file as is /// Store the file as is
@ -19,7 +20,9 @@ pub enum CompressionMethod {
#[cfg(any( #[cfg(any(
feature = "deflate", feature = "deflate",
feature = "deflate-miniz", feature = "deflate-miniz",
feature = "deflate-zlib" feature = "deflate-zlib",
feature = "deflate-zlib-ng",
feature = "deflate-zopfli"
))] ))]
Deflated, Deflated,
/// Compress the file using Deflate64. /// Compress the file using Deflate64.
@ -39,7 +42,10 @@ pub enum CompressionMethod {
#[cfg(feature = "zstd")] #[cfg(feature = "zstd")]
Zstd, Zstd,
/// Unsupported compression method /// Unsupported compression method
#[deprecated(since = "0.5.7", note = "use the constants instead")] #[cfg_attr(
not(fuzzing),
deprecated(since = "0.5.7", note = "use the constants instead")
)]
Unsupported(u16), Unsupported(u16),
} }
#[allow(deprecated, missing_docs)] #[allow(deprecated, missing_docs)]
@ -55,13 +61,17 @@ impl CompressionMethod {
#[cfg(any( #[cfg(any(
feature = "deflate", feature = "deflate",
feature = "deflate-miniz", feature = "deflate-miniz",
feature = "deflate-zlib" feature = "deflate-zlib",
feature = "deflate-zlib-ng",
feature = "deflate-zopfli"
))] ))]
pub const DEFLATE: Self = CompressionMethod::Deflated; pub const DEFLATE: Self = CompressionMethod::Deflated;
#[cfg(not(any( #[cfg(not(any(
feature = "deflate", feature = "deflate",
feature = "deflate-miniz", feature = "deflate-miniz",
feature = "deflate-zlib" feature = "deflate-zlib",
feature = "deflate-zlib-ng",
feature = "deflate-zopfli"
)))] )))]
pub const DEFLATE: Self = CompressionMethod::Unsupported(8); pub const DEFLATE: Self = CompressionMethod::Unsupported(8);
#[cfg(feature = "deflate64")] #[cfg(feature = "deflate64")]
@ -97,14 +107,16 @@ impl CompressionMethod {
since = "0.5.7", since = "0.5.7",
note = "use a constant to construct a compression method" note = "use a constant to construct a compression method"
)] )]
pub fn from_u16(val: u16) -> CompressionMethod { pub const fn from_u16(val: u16) -> CompressionMethod {
#[allow(deprecated)] #[allow(deprecated)]
match val { match val {
0 => CompressionMethod::Stored, 0 => CompressionMethod::Stored,
#[cfg(any( #[cfg(any(
feature = "deflate", feature = "deflate",
feature = "deflate-miniz", feature = "deflate-miniz",
feature = "deflate-zlib" feature = "deflate-zlib",
feature = "deflate-zlib-ng",
feature = "deflate-zopfli"
))] ))]
8 => CompressionMethod::Deflated, 8 => CompressionMethod::Deflated,
#[cfg(feature = "deflate64")] #[cfg(feature = "deflate64")]
@ -125,14 +137,16 @@ impl CompressionMethod {
since = "0.5.7", since = "0.5.7",
note = "to match on other compression methods, use a constant" note = "to match on other compression methods, use a constant"
)] )]
pub fn to_u16(self) -> u16 { pub const fn to_u16(self) -> u16 {
#[allow(deprecated)] #[allow(deprecated)]
match self { match self {
CompressionMethod::Stored => 0, CompressionMethod::Stored => 0,
#[cfg(any( #[cfg(any(
feature = "deflate", feature = "deflate",
feature = "deflate-miniz", feature = "deflate-miniz",
feature = "deflate-zlib" feature = "deflate-zlib",
feature = "deflate-zlib-ng",
feature = "deflate-zopfli"
))] ))]
CompressionMethod::Deflated => 8, CompressionMethod::Deflated => 8,
#[cfg(feature = "deflate64")] #[cfg(feature = "deflate64")]
@ -149,6 +163,55 @@ impl CompressionMethod {
} }
} }
impl Default for CompressionMethod {
fn default() -> Self {
#[cfg(any(
feature = "deflate",
feature = "deflate-miniz",
feature = "deflate-zlib",
feature = "deflate-zlib-ng",
feature = "deflate-zopfli"
))]
return CompressionMethod::Deflated;
#[cfg(all(
not(any(
feature = "deflate",
feature = "deflate-miniz",
feature = "deflate-zlib",
feature = "deflate-zlib-ng",
feature = "deflate-zopfli"
)),
feature = "bzip2"
))]
return CompressionMethod::Bzip2;
#[cfg(all(
not(any(
feature = "deflate",
feature = "deflate-miniz",
feature = "deflate-zlib",
feature = "deflate-zlib-ng",
feature = "deflate-zopfli",
feature = "bzip2"
)),
feature = "zstd"
))]
return CompressionMethod::Zstd;
#[cfg(not(any(
feature = "deflate",
feature = "deflate-miniz",
feature = "deflate-zlib",
feature = "deflate-zlib-ng",
feature = "deflate-zopfli",
feature = "bzip2",
feature = "zstd"
)))]
return CompressionMethod::Stored;
}
}
impl fmt::Display for CompressionMethod { impl fmt::Display for CompressionMethod {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Just duplicate what the Debug format looks like, i.e, the enum key: // Just duplicate what the Debug format looks like, i.e, the enum key:
@ -162,7 +225,9 @@ pub const SUPPORTED_COMPRESSION_METHODS: &[CompressionMethod] = &[
#[cfg(any( #[cfg(any(
feature = "deflate", feature = "deflate",
feature = "deflate-miniz", feature = "deflate-miniz",
feature = "deflate-zlib" feature = "deflate-zlib",
feature = "deflate-zlib-ng",
feature = "deflate-zopfli"
))] ))]
CompressionMethod::Deflated, CompressionMethod::Deflated,
#[cfg(feature = "deflate64")] #[cfg(feature = "deflate64")]
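
The new `Default` impl above picks the strongest enabled method at compile time, falling back from Deflated through Bzip2 and Zstd to Stored. The following is a hedged sketch of how calling code can rely on it together with `SUPPORTED_COMPRESSION_METHODS`, both of which are re-exported from the crate root (see the `lib.rs` diff below).

```rust
use zip_next::{CompressionMethod, SUPPORTED_COMPRESSION_METHODS};

fn main() {
    // With the new default feature set this prints "Deflated"; with
    // --no-default-features it falls back to "Stored".
    println!("default method: {}", CompressionMethod::default());

    // Enumerate every method compiled into this build.
    for method in SUPPORTED_COMPRESSION_METHODS {
        println!("supported: {method}");
    }
}
```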


@@ -21,7 +21,6 @@
 //!
 //!
 //!
 
 #![warn(missing_docs)]
 
 pub use crate::compression::{CompressionMethod, SUPPORTED_COMPRESSION_METHODS};

@@ -50,6 +49,6 @@ mod zipcrypto;
 ///
 /// ```toml
 /// [dependencies]
-/// zip = "=0.6.6"
+/// zip_next = "=0.10.3"
 /// ```
 pub mod unstable;


@ -19,7 +19,8 @@ use std::sync::Arc;
#[cfg(any( #[cfg(any(
feature = "deflate", feature = "deflate",
feature = "deflate-miniz", feature = "deflate-miniz",
feature = "deflate-zlib" feature = "deflate-zlib",
feature = "deflate-zlib-ng"
))] ))]
use flate2::read::DeflateDecoder; use flate2::read::DeflateDecoder;
@ -54,13 +55,13 @@ pub(crate) mod zip_archive {
/// ///
/// ```no_run /// ```no_run
/// use std::io::prelude::*; /// use std::io::prelude::*;
/// fn list_zip_contents(reader: impl Read + Seek) -> zip::result::ZipResult<()> { /// fn list_zip_contents(reader: impl Read + Seek) -> zip_next::result::ZipResult<()> {
/// let mut zip = zip::ZipArchive::new(reader)?; /// let mut zip = zip_next::ZipArchive::new(reader)?;
/// ///
/// for i in 0..zip.len() { /// for i in 0..zip.len() {
/// let mut file = zip.by_index(i)?; /// let mut file = zip.by_index(i)?;
/// println!("Filename: {}", file.name()); /// println!("Filename: {}", file.name());
/// std::io::copy(&mut file, &mut std::io::stdout()); /// std::io::copy(&mut file, &mut std::io::stdout())?;
/// } /// }
/// ///
/// Ok(()) /// Ok(())
@ -74,8 +75,9 @@ pub(crate) mod zip_archive {
} }
pub use zip_archive::ZipArchive; pub use zip_archive::ZipArchive;
#[allow(clippy::large_enum_variant)] #[allow(clippy::large_enum_variant)]
enum CryptoReader<'a> { pub(crate) enum CryptoReader<'a> {
Plaintext(io::Take<&'a mut dyn Read>), Plaintext(io::Take<&'a mut dyn Read>),
ZipCrypto(ZipCryptoReaderValid<io::Take<&'a mut dyn Read>>), ZipCrypto(ZipCryptoReaderValid<io::Take<&'a mut dyn Read>>),
#[cfg(feature = "aes-crypto")] #[cfg(feature = "aes-crypto")]
@ -108,7 +110,7 @@ impl<'a> CryptoReader<'a> {
} }
/// Returns `true` if the data is encrypted using AE2. /// Returns `true` if the data is encrypted using AE2.
pub fn is_ae2_encrypted(&self) -> bool { pub const fn is_ae2_encrypted(&self) -> bool {
#[cfg(feature = "aes-crypto")] #[cfg(feature = "aes-crypto")]
return matches!( return matches!(
self, self,
@ -122,16 +124,17 @@ impl<'a> CryptoReader<'a> {
} }
} }
enum ZipFileReader<'a> { pub(crate) enum ZipFileReader<'a> {
NoReader, NoReader,
Raw(io::Take<&'a mut dyn io::Read>), Raw(io::Take<&'a mut dyn Read>),
Stored(Crc32Reader<CryptoReader<'a>>), Stored(Crc32Reader<CryptoReader<'a>>),
#[cfg(any( #[cfg(any(
feature = "deflate", feature = "deflate",
feature = "deflate-miniz", feature = "deflate-miniz",
feature = "deflate-zlib" feature = "deflate-zlib",
feature = "deflate-zlib-ng"
))] ))]
Deflated(Crc32Reader<flate2::read::DeflateDecoder<CryptoReader<'a>>>), Deflated(Crc32Reader<DeflateDecoder<CryptoReader<'a>>>),
#[cfg(feature = "deflate64")] #[cfg(feature = "deflate64")]
Deflate64(Crc32Reader<Deflate64Decoder<io::BufReader<CryptoReader<'a>>>>), Deflate64(Crc32Reader<Deflate64Decoder<io::BufReader<CryptoReader<'a>>>>),
#[cfg(feature = "bzip2")] #[cfg(feature = "bzip2")]
@ -149,7 +152,8 @@ impl<'a> Read for ZipFileReader<'a> {
#[cfg(any( #[cfg(any(
feature = "deflate", feature = "deflate",
feature = "deflate-miniz", feature = "deflate-miniz",
feature = "deflate-zlib" feature = "deflate-zlib",
feature = "deflate-zlib-ng"
))] ))]
ZipFileReader::Deflated(r) => r.read(buf), ZipFileReader::Deflated(r) => r.read(buf),
#[cfg(feature = "deflate64")] #[cfg(feature = "deflate64")]
@ -172,7 +176,8 @@ impl<'a> ZipFileReader<'a> {
#[cfg(any( #[cfg(any(
feature = "deflate", feature = "deflate",
feature = "deflate-miniz", feature = "deflate-miniz",
feature = "deflate-zlib" feature = "deflate-zlib",
feature = "deflate-zlib-ng"
))] ))]
ZipFileReader::Deflated(r) => r.into_inner().into_inner().into_inner(), ZipFileReader::Deflated(r) => r.into_inner().into_inner().into_inner(),
#[cfg(feature = "deflate64")] #[cfg(feature = "deflate64")]
@ -187,12 +192,12 @@ impl<'a> ZipFileReader<'a> {
/// A struct for reading a zip file /// A struct for reading a zip file
pub struct ZipFile<'a> { pub struct ZipFile<'a> {
data: Cow<'a, ZipFileData>, pub(crate) data: Cow<'a, ZipFileData>,
crypto_reader: Option<CryptoReader<'a>>, pub(crate) crypto_reader: Option<CryptoReader<'a>>,
reader: ZipFileReader<'a>, pub(crate) reader: ZipFileReader<'a>,
} }
fn find_content<'a>( pub(crate) fn find_content<'a>(
data: &ZipFileData, data: &ZipFileData,
reader: &'a mut (impl Read + Seek), reader: &'a mut (impl Read + Seek),
) -> ZipResult<io::Take<&'a mut dyn Read>> { ) -> ZipResult<io::Take<&'a mut dyn Read>> {
@ -215,12 +220,12 @@ fn find_content<'a>(
} }
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
fn make_crypto_reader<'a>( pub(crate) fn make_crypto_reader<'a>(
compression_method: crate::compression::CompressionMethod, compression_method: CompressionMethod,
crc32: u32, crc32: u32,
last_modified_time: DateTime, last_modified_time: DateTime,
using_data_descriptor: bool, using_data_descriptor: bool,
reader: io::Take<&'a mut dyn io::Read>, reader: io::Take<&'a mut dyn Read>,
password: Option<&[u8]>, password: Option<&[u8]>,
aes_info: Option<(AesMode, AesVendorVersion)>, aes_info: Option<(AesMode, AesVendorVersion)>,
#[cfg(feature = "aes-crypto")] compressed_size: u64, #[cfg(feature = "aes-crypto")] compressed_size: u64,
@ -266,7 +271,7 @@ fn make_crypto_reader<'a>(
Ok(Ok(reader)) Ok(Ok(reader))
} }
fn make_reader( pub(crate) fn make_reader(
compression_method: CompressionMethod, compression_method: CompressionMethod,
crc32: u32, crc32: u32,
reader: CryptoReader, reader: CryptoReader,
@ -280,7 +285,8 @@ fn make_reader(
#[cfg(any( #[cfg(any(
feature = "deflate", feature = "deflate",
feature = "deflate-miniz", feature = "deflate-miniz",
feature = "deflate-zlib" feature = "deflate-zlib",
feature = "deflate-zlib-ng"
))] ))]
CompressionMethod::Deflated => { CompressionMethod::Deflated => {
let deflate_reader = DeflateDecoder::new(reader); let deflate_reader = DeflateDecoder::new(reader);
@ -305,107 +311,153 @@ fn make_reader(
} }
} }
impl<R: Read + io::Seek> ZipArchive<R> { pub(crate) struct DirectoryCounts {
pub(crate) archive_offset: u64,
pub(crate) directory_start: u64,
pub(crate) number_of_files: usize,
pub(crate) disk_number: u32,
pub(crate) disk_with_central_directory: u32,
}
impl<R: Read + Seek> ZipArchive<R> {
fn get_directory_counts_zip32(
footer: &spec::CentralDirectoryEnd,
cde_start_pos: u64,
) -> ZipResult<DirectoryCounts> {
// Some zip files have data prepended to them, resulting in the
// offsets all being too small. Get the amount of error by comparing
// the actual file position we found the CDE at with the offset
// recorded in the CDE.
let archive_offset = cde_start_pos
.checked_sub(footer.central_directory_size as u64)
.and_then(|x| x.checked_sub(footer.central_directory_offset as u64))
.ok_or(ZipError::InvalidArchive(
"Invalid central directory size or offset",
))?;
let directory_start = footer.central_directory_offset as u64 + archive_offset;
let number_of_files = footer.number_of_files_on_this_disk as usize;
Ok(DirectoryCounts {
archive_offset,
directory_start,
number_of_files,
disk_number: footer.disk_number as u32,
disk_with_central_directory: footer.disk_with_central_directory as u32,
})
}
fn get_directory_counts_zip64(
reader: &mut R,
footer: &spec::CentralDirectoryEnd,
cde_start_pos: u64,
) -> ZipResult<DirectoryCounts> {
// See if there's a ZIP64 footer. The ZIP64 locator if present will
// have its signature 20 bytes in front of the standard footer. The
// standard footer, in turn, is 22+N bytes large, where N is the
// comment length. Therefore:
reader.seek(io::SeekFrom::End(
-(20 + 22 + footer.zip_file_comment.len() as i64),
))?;
let locator64 = spec::Zip64CentralDirectoryEndLocator::parse(reader)?;
// We need to reassess `archive_offset`. We know where the ZIP64
// central-directory-end structure *should* be, but unfortunately we
// don't know how to precisely relate that location to our current
// actual offset in the file, since there may be junk at its
// beginning. Therefore we need to perform another search, as in
// read::CentralDirectoryEnd::find_and_parse, except now we search
// forward.
let search_upper_bound = cde_start_pos
.checked_sub(60) // minimum size of Zip64CentralDirectoryEnd + Zip64CentralDirectoryEndLocator
.ok_or(ZipError::InvalidArchive(
"File cannot contain ZIP64 central directory end",
))?;
let (footer64, archive_offset) = spec::Zip64CentralDirectoryEnd::find_and_parse(
reader,
locator64.end_of_central_directory_offset,
search_upper_bound,
)?;
let directory_start = footer64
.central_directory_offset
.checked_add(archive_offset)
.ok_or(ZipError::InvalidArchive(
"Invalid central directory size or offset",
))?;
if directory_start > search_upper_bound {
return Err(ZipError::InvalidArchive(
"Invalid central directory size or offset",
));
}
if footer64.number_of_files_on_this_disk > footer64.number_of_files {
return Err(ZipError::InvalidArchive(
"ZIP64 footer indicates more files on this disk than in the whole archive",
));
}
if footer64.version_needed_to_extract > footer64.version_made_by {
return Err(ZipError::InvalidArchive(
"ZIP64 footer indicates a new version is needed to extract this archive than the \
version that wrote it",
));
}
Ok(DirectoryCounts {
archive_offset,
directory_start,
number_of_files: footer64.number_of_files as usize,
disk_number: footer64.disk_number,
disk_with_central_directory: footer64.disk_with_central_directory,
})
}
/// Get the directory start offset and number of files. This is done in a /// Get the directory start offset and number of files. This is done in a
/// separate function to ease the control flow design. /// separate function to ease the control flow design.
pub(crate) fn get_directory_counts( pub(crate) fn get_directory_counts(
reader: &mut R, reader: &mut R,
footer: &spec::CentralDirectoryEnd, footer: &spec::CentralDirectoryEnd,
cde_start_pos: u64, cde_start_pos: u64,
) -> ZipResult<(u64, u64, usize)> { ) -> ZipResult<DirectoryCounts> {
// See if there's a ZIP64 footer. The ZIP64 locator if present will // Check if file has a zip64 footer
// have its signature 20 bytes in front of the standard footer. The let counts_64 = Self::get_directory_counts_zip64(reader, footer, cde_start_pos);
// standard footer, in turn, is 22+N bytes large, where N is the let counts_32 = Self::get_directory_counts_zip32(footer, cde_start_pos);
// comment length. Therefore: match counts_64 {
let zip64locator = if reader Err(_) => match counts_32 {
.seek(io::SeekFrom::End( Err(e) => Err(e),
-(20 + 22 + footer.zip_file_comment.len() as i64), Ok(counts) => {
)) if counts.disk_number != counts.disk_with_central_directory {
.is_ok() return unsupported_zip_error(
{ "Support for multi-disk files is not implemented",
match spec::Zip64CentralDirectoryEndLocator::parse(reader) { );
Ok(loc) => Some(loc), }
Err(ZipError::InvalidArchive(_)) => { Ok(counts)
// No ZIP64 header; that's actually fine. We're done here.
None
} }
Err(e) => { },
// Yikes, a real problem Ok(counts_64) => {
return Err(e); match counts_32 {
Err(_) => Ok(counts_64),
Ok(counts_32) => {
// Both zip32 and zip64 footers exist, so check if the zip64 footer is valid; if not, try zip32
if counts_64.number_of_files != counts_32.number_of_files
&& counts_32.number_of_files != u16::MAX as usize
{
return Ok(counts_32);
}
if counts_64.disk_number != counts_32.disk_number
&& counts_32.disk_number != u16::MAX as u32
{
return Ok(counts_32);
}
if counts_64.disk_with_central_directory
!= counts_32.disk_with_central_directory
&& counts_32.disk_with_central_directory != u16::MAX as u32
{
return Ok(counts_32);
}
Ok(counts_64)
}
} }
} }
} else {
// Empty Zip files will have nothing else so this error might be fine. If
// not, we'll find out soon.
None
};
match zip64locator {
None => {
// Some zip files have data prepended to them, resulting in the
// offsets all being too small. Get the amount of error by comparing
// the actual file position we found the CDE at with the offset
// recorded in the CDE.
let archive_offset = cde_start_pos
.checked_sub(footer.central_directory_size as u64)
.and_then(|x| x.checked_sub(footer.central_directory_offset as u64))
.ok_or(ZipError::InvalidArchive(
"Invalid central directory size or offset",
))?;
let directory_start = footer.central_directory_offset as u64 + archive_offset;
let number_of_files = footer.number_of_files_on_this_disk as usize;
Ok((archive_offset, directory_start, number_of_files))
}
Some(locator64) => {
// If we got here, this is indeed a ZIP64 file.
if !footer.record_too_small()
&& footer.disk_number as u32 != locator64.disk_with_central_directory
{
return unsupported_zip_error(
"Support for multi-disk files is not implemented",
);
}
// We need to reassess `archive_offset`. We know where the ZIP64
// central-directory-end structure *should* be, but unfortunately we
// don't know how to precisely relate that location to our current
// actual offset in the file, since there may be junk at its
// beginning. Therefore we need to perform another search, as in
// read::CentralDirectoryEnd::find_and_parse, except now we search
// forward.
let search_upper_bound = cde_start_pos
.checked_sub(60) // minimum size of Zip64CentralDirectoryEnd + Zip64CentralDirectoryEndLocator
.ok_or(ZipError::InvalidArchive(
"File cannot contain ZIP64 central directory end",
))?;
let (footer, archive_offset) = spec::Zip64CentralDirectoryEnd::find_and_parse(
reader,
locator64.end_of_central_directory_offset,
search_upper_bound,
)?;
if footer.disk_number != footer.disk_with_central_directory {
return unsupported_zip_error(
"Support for multi-disk files is not implemented",
);
}
let directory_start = footer
.central_directory_offset
.checked_add(archive_offset)
.ok_or({
ZipError::InvalidArchive("Invalid central directory size or offset")
})?;
Ok((
archive_offset,
directory_start,
footer.number_of_files as usize,
))
}
} }
} }
@ -415,32 +467,34 @@ impl<R: Read + io::Seek> ZipArchive<R> {
pub fn new(mut reader: R) -> ZipResult<ZipArchive<R>> { pub fn new(mut reader: R) -> ZipResult<ZipArchive<R>> {
let (footer, cde_start_pos) = spec::CentralDirectoryEnd::find_and_parse(&mut reader)?; let (footer, cde_start_pos) = spec::CentralDirectoryEnd::find_and_parse(&mut reader)?;
if !footer.record_too_small() && footer.disk_number != footer.disk_with_central_directory { let counts = Self::get_directory_counts(&mut reader, &footer, cde_start_pos)?;
if counts.disk_number != counts.disk_with_central_directory {
return unsupported_zip_error("Support for multi-disk files is not implemented"); return unsupported_zip_error("Support for multi-disk files is not implemented");
} }
let (archive_offset, directory_start, number_of_files) =
Self::get_directory_counts(&mut reader, &footer, cde_start_pos)?;
// If the parsed number of files is greater than the offset then // If the parsed number of files is greater than the offset then
// something fishy is going on and we shouldn't trust number_of_files. // something fishy is going on and we shouldn't trust number_of_files.
let file_capacity = if number_of_files > cde_start_pos as usize { let file_capacity = if counts.number_of_files > cde_start_pos as usize {
0 0
} else { } else {
number_of_files counts.number_of_files
}; };
let mut files = Vec::with_capacity(file_capacity); let mut files = Vec::with_capacity(file_capacity);
let mut names_map = HashMap::with_capacity(file_capacity); let mut names_map = HashMap::with_capacity(file_capacity);
if reader.seek(io::SeekFrom::Start(directory_start)).is_err() { if reader
.seek(io::SeekFrom::Start(counts.directory_start))
.is_err()
{
return Err(ZipError::InvalidArchive( return Err(ZipError::InvalidArchive(
"Could not seek to start of central directory", "Could not seek to start of central directory",
)); ));
} }
for _ in 0..number_of_files { for _ in 0..counts.number_of_files {
let file = central_header_to_zip_file(&mut reader, archive_offset)?; let file = central_header_to_zip_file(&mut reader, counts.archive_offset)?;
names_map.insert(file.file_name.clone(), files.len()); names_map.insert(file.file_name.clone(), files.len());
files.push(file); files.push(file);
} }
@ -448,7 +502,7 @@ impl<R: Read + io::Seek> ZipArchive<R> {
let shared = Arc::new(zip_archive::Shared { let shared = Arc::new(zip_archive::Shared {
files, files,
names_map, names_map,
offset: archive_offset, offset: counts.archive_offset,
comment: footer.zip_file_comment, comment: footer.zip_file_comment,
}); });
@ -543,7 +597,7 @@ impl<R: Read + io::Seek> ZipArchive<R> {
} }
/// Search for a file entry by name /// Search for a file entry by name
pub fn by_name<'a>(&'a mut self, name: &str) -> ZipResult<ZipFile<'a>> { pub fn by_name(&mut self, name: &str) -> ZipResult<ZipFile> {
Ok(self.by_name_with_optional_password(name, None)?.unwrap()) Ok(self.by_name_with_optional_password(name, None)?.unwrap())
} }
@ -574,11 +628,11 @@ impl<R: Read + io::Seek> ZipArchive<R> {
/// There are many passwords out there that will also pass the validity checks /// There are many passwords out there that will also pass the validity checks
/// we are able to perform. This is a weakness of the ZipCrypto algorithm, /// we are able to perform. This is a weakness of the ZipCrypto algorithm,
/// due to its fairly primitive approach to cryptography. /// due to its fairly primitive approach to cryptography.
pub fn by_index_decrypt<'a>( pub fn by_index_decrypt(
&'a mut self, &mut self,
file_number: usize, file_number: usize,
password: &[u8], password: &[u8],
) -> ZipResult<Result<ZipFile<'a>, InvalidPassword>> { ) -> ZipResult<Result<ZipFile, InvalidPassword>> {
self.by_index_with_optional_password(file_number, Some(password)) self.by_index_with_optional_password(file_number, Some(password))
} }
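Because `by_index_decrypt` returns a nested result, a caller has to handle two failure layers. A short usage sketch (placeholder bytes and password; crate name `zip_next` as used by this repository's tests):

    use std::io::{Cursor, Read};
    use zip_next::ZipArchive;

    // Illustrative only: the archive bytes and password are placeholders.
    fn read_first_encrypted_entry(bytes: Vec<u8>) -> zip_next::result::ZipResult<Option<Vec<u8>>> {
        let mut archive = ZipArchive::new(Cursor::new(bytes))?;
        // Outer Err: I/O or archive-format failure. Inner Err: the password failed the
        // (weak) ZipCrypto validity check; per the warning above, a wrong password can
        // still slip through it, in which case the CRC check fails while reading instead.
        match archive.by_index_decrypt(0, b"password")? {
            Ok(mut file) => {
                let mut contents = Vec::new();
                file.read_to_end(&mut contents)?;
                Ok(Some(contents))
            }
            Err(_invalid_password) => Ok(None),
        }
    }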
@ -605,11 +659,11 @@ impl<R: Read + io::Seek> ZipArchive<R> {
}) })
} }
fn by_index_with_optional_password<'a>( fn by_index_with_optional_password(
&'a mut self, &mut self,
file_number: usize, file_number: usize,
mut password: Option<&[u8]>, mut password: Option<&[u8]>,
) -> ZipResult<Result<ZipFile<'a>, InvalidPassword>> { ) -> ZipResult<Result<ZipFile, InvalidPassword>> {
let data = self let data = self
.shared .shared
.files .files
@ -652,12 +706,12 @@ impl<R: Read + io::Seek> ZipArchive<R> {
} }
} }
fn unsupported_zip_error<T>(detail: &'static str) -> ZipResult<T> { const fn unsupported_zip_error<T>(detail: &'static str) -> ZipResult<T> {
Err(ZipError::UnsupportedArchive(detail)) Err(ZipError::UnsupportedArchive(detail))
} }
/// Parse a central directory entry to collect the information for the file. /// Parse a central directory entry to collect the information for the file.
pub(crate) fn central_header_to_zip_file<R: Read + io::Seek>( pub(crate) fn central_header_to_zip_file<R: Read + Seek>(
reader: &mut R, reader: &mut R,
archive_offset: u64, archive_offset: u64,
) -> ZipResult<ZipFileData> { ) -> ZipResult<ZipFileData> {
@ -730,7 +784,8 @@ fn central_header_to_zip_file_inner<R: Read>(
uncompressed_size: uncompressed_size as u64, uncompressed_size: uncompressed_size as u64,
file_name, file_name,
file_name_raw, file_name_raw,
extra_field, extra_field: Arc::new(extra_field),
central_extra_field: Arc::new(vec![]),
file_comment, file_comment,
header_start: offset, header_start: offset,
central_header_start, central_header_start,
@ -762,7 +817,7 @@ fn central_header_to_zip_file_inner<R: Read>(
} }
fn parse_extra_field(file: &mut ZipFileData) -> ZipResult<()> { fn parse_extra_field(file: &mut ZipFileData) -> ZipResult<()> {
let mut reader = io::Cursor::new(&file.extra_field); let mut reader = io::Cursor::new(file.extra_field.as_ref());
while (reader.position() as usize) < file.extra_field.len() { while (reader.position() as usize) < file.extra_field.len() {
let kind = reader.read_u16::<LittleEndian>()?; let kind = reader.read_u16::<LittleEndian>()?;
@ -887,7 +942,7 @@ impl<'a> ZipFile<'a> {
note = "by stripping `..`s from the path, the meaning of paths can change. note = "by stripping `..`s from the path, the meaning of paths can change.
`mangled_name` can be used if this behaviour is desirable" `mangled_name` can be used if this behaviour is desirable"
)] )]
pub fn sanitized_name(&self) -> ::std::path::PathBuf { pub fn sanitized_name(&self) -> std::path::PathBuf {
self.mangled_name() self.mangled_name()
} }
@ -903,7 +958,7 @@ impl<'a> ZipFile<'a> {
/// [`ZipFile::enclosed_name`] is the better option in most scenarios. /// [`ZipFile::enclosed_name`] is the better option in most scenarios.
/// ///
/// [`ParentDir`]: `Component::ParentDir` /// [`ParentDir`]: `Component::ParentDir`
pub fn mangled_name(&self) -> ::std::path::PathBuf { pub fn mangled_name(&self) -> std::path::PathBuf {
self.data.file_name_sanitized() self.data.file_name_sanitized()
} }
@ -949,8 +1004,7 @@ impl<'a> ZipFile<'a> {
pub fn is_dir(&self) -> bool { pub fn is_dir(&self) -> bool {
self.name() self.name()
.chars() .chars()
.rev() .next_back()
.next()
.map_or(false, |c| c == '/' || c == '\\') .map_or(false, |c| c == '/' || c == '\\')
} }
@ -1003,13 +1057,13 @@ impl<'a> Drop for ZipFile<'a> {
let mut buffer = [0; 1 << 16]; let mut buffer = [0; 1 << 16];
// Get the inner `Take` reader so all decryption, decompression and CRC calculation is skipped. // Get the inner `Take` reader so all decryption, decompression and CRC calculation is skipped.
let mut reader: std::io::Take<&mut dyn std::io::Read> = match &mut self.reader { let mut reader: io::Take<&mut dyn Read> = match &mut self.reader {
ZipFileReader::NoReader => { ZipFileReader::NoReader => {
let innerreader = ::std::mem::replace(&mut self.crypto_reader, None); let innerreader = self.crypto_reader.take();
innerreader.expect("Invalid reader state").into_inner() innerreader.expect("Invalid reader state").into_inner()
} }
reader => { reader => {
let innerreader = ::std::mem::replace(reader, ZipFileReader::NoReader); let innerreader = std::mem::replace(reader, ZipFileReader::NoReader);
innerreader.into_inner() innerreader.into_inner()
} }
}; };
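The drop handler then drains whatever is left of the entry through that raw `Take` reader, so the parent reader ends up positioned at the next header without paying for decryption, decompression or CRC work. A sketch of that drain step under those assumptions (names are illustrative, not the crate's internals):

    use std::io::Read;

    // Read and discard the remaining raw bytes of the current entry.
    fn drain_remaining(mut reader: impl Read) -> std::io::Result<()> {
        let mut buffer = [0u8; 1 << 16];
        // Keep reading into the scratch buffer until EOF of the `Take` limit.
        while reader.read(&mut buffer)? != 0 {}
        Ok(())
    }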
@ -1043,9 +1097,7 @@ impl<'a> Drop for ZipFile<'a> {
/// * `comment`: set to an empty string /// * `comment`: set to an empty string
/// * `data_start`: set to 0 /// * `data_start`: set to 0
/// * `external_attributes`: `unix_mode()`: will return None /// * `external_attributes`: `unix_mode()`: will return None
pub fn read_zipfile_from_stream<'a, R: io::Read>( pub fn read_zipfile_from_stream<'a, R: Read>(reader: &'a mut R) -> ZipResult<Option<ZipFile<'_>>> {
reader: &'a mut R,
) -> ZipResult<Option<ZipFile<'_>>> {
let signature = reader.read_u32::<LittleEndian>()?; let signature = reader.read_u32::<LittleEndian>()?;
match signature { match signature {
@ -1092,7 +1144,8 @@ pub fn read_zipfile_from_stream<'a, R: io::Read>(
uncompressed_size: uncompressed_size as u64, uncompressed_size: uncompressed_size as u64,
file_name, file_name,
file_name_raw, file_name_raw,
extra_field, extra_field: Arc::new(extra_field),
central_extra_field: Arc::new(vec![]),
file_comment: String::new(), // file comment is only available in the central directory file_comment: String::new(), // file comment is only available in the central directory
// header_start and data start are not available, but also don't matter, since seeking is // header_start and data start are not available, but also don't matter, since seeking is
// not available. // not available.
@ -1119,7 +1172,7 @@ pub fn read_zipfile_from_stream<'a, R: io::Read>(
return unsupported_zip_error("The file length is not available in the local header"); return unsupported_zip_error("The file length is not available in the local header");
} }
let limit_reader = (reader as &'a mut dyn io::Read).take(result.compressed_size); let limit_reader = (reader as &'a mut dyn Read).take(result.compressed_size);
let result_crc32 = result.crc32; let result_crc32 = result.crc32;
let result_compression_method = result.compression_method; let result_compression_method = result.compression_method;
@ -1145,47 +1198,46 @@ pub fn read_zipfile_from_stream<'a, R: io::Read>(
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use crate::ZipArchive;
use std::io::Cursor;
#[test] #[test]
fn invalid_offset() { fn invalid_offset() {
use super::ZipArchive; use super::ZipArchive;
use std::io;
let mut v = Vec::new(); let mut v = Vec::new();
v.extend_from_slice(include_bytes!("../tests/data/invalid_offset.zip")); v.extend_from_slice(include_bytes!("../tests/data/invalid_offset.zip"));
let reader = ZipArchive::new(io::Cursor::new(v)); let reader = ZipArchive::new(Cursor::new(v));
assert!(reader.is_err()); assert!(reader.is_err());
} }
#[test] #[test]
fn invalid_offset2() { fn invalid_offset2() {
use super::ZipArchive; use super::ZipArchive;
use std::io;
let mut v = Vec::new(); let mut v = Vec::new();
v.extend_from_slice(include_bytes!("../tests/data/invalid_offset2.zip")); v.extend_from_slice(include_bytes!("../tests/data/invalid_offset2.zip"));
let reader = ZipArchive::new(io::Cursor::new(v)); let reader = ZipArchive::new(Cursor::new(v));
assert!(reader.is_err()); assert!(reader.is_err());
} }
#[test] #[test]
fn zip64_with_leading_junk() { fn zip64_with_leading_junk() {
use super::ZipArchive; use super::ZipArchive;
use std::io;
let mut v = Vec::new(); let mut v = Vec::new();
v.extend_from_slice(include_bytes!("../tests/data/zip64_demo.zip")); v.extend_from_slice(include_bytes!("../tests/data/zip64_demo.zip"));
let reader = ZipArchive::new(io::Cursor::new(v)).unwrap(); let reader = ZipArchive::new(Cursor::new(v)).unwrap();
assert_eq!(reader.len(), 1); assert_eq!(reader.len(), 1);
} }
#[test] #[test]
fn zip_contents() { fn zip_contents() {
use super::ZipArchive; use super::ZipArchive;
use std::io;
let mut v = Vec::new(); let mut v = Vec::new();
v.extend_from_slice(include_bytes!("../tests/data/mimetype.zip")); v.extend_from_slice(include_bytes!("../tests/data/mimetype.zip"));
let mut reader = ZipArchive::new(io::Cursor::new(v)).unwrap(); let mut reader = ZipArchive::new(Cursor::new(v)).unwrap();
assert_eq!(reader.comment(), b""); assert_eq!(reader.comment(), b"");
assert_eq!(reader.by_index(0).unwrap().central_header_start(), 77); assert_eq!(reader.by_index(0).unwrap().central_header_start(), 77);
} }
@ -1193,11 +1245,10 @@ mod test {
#[test] #[test]
fn zip_read_streaming() { fn zip_read_streaming() {
use super::read_zipfile_from_stream; use super::read_zipfile_from_stream;
use std::io;
let mut v = Vec::new(); let mut v = Vec::new();
v.extend_from_slice(include_bytes!("../tests/data/mimetype.zip")); v.extend_from_slice(include_bytes!("../tests/data/mimetype.zip"));
let mut reader = io::Cursor::new(v); let mut reader = Cursor::new(v);
loop { loop {
if read_zipfile_from_stream(&mut reader).unwrap().is_none() { if read_zipfile_from_stream(&mut reader).unwrap().is_none() {
break; break;
@ -1208,11 +1259,11 @@ mod test {
#[test] #[test]
fn zip_clone() { fn zip_clone() {
use super::ZipArchive; use super::ZipArchive;
use std::io::{self, Read}; use std::io::Read;
let mut v = Vec::new(); let mut v = Vec::new();
v.extend_from_slice(include_bytes!("../tests/data/mimetype.zip")); v.extend_from_slice(include_bytes!("../tests/data/mimetype.zip"));
let mut reader1 = ZipArchive::new(io::Cursor::new(v)).unwrap(); let mut reader1 = ZipArchive::new(Cursor::new(v)).unwrap();
let mut reader2 = reader1.clone(); let mut reader2 = reader1.clone();
let mut file1 = reader1.by_index(0).unwrap(); let mut file1 = reader1.by_index(0).unwrap();
@ -1249,11 +1300,10 @@ mod test {
#[test] #[test]
fn file_and_dir_predicates() { fn file_and_dir_predicates() {
use super::ZipArchive; use super::ZipArchive;
use std::io;
let mut v = Vec::new(); let mut v = Vec::new();
v.extend_from_slice(include_bytes!("../tests/data/files_and_dirs.zip")); v.extend_from_slice(include_bytes!("../tests/data/files_and_dirs.zip"));
let mut zip = ZipArchive::new(io::Cursor::new(v)).unwrap(); let mut zip = ZipArchive::new(Cursor::new(v)).unwrap();
for i in 0..zip.len() { for i in 0..zip.len() {
let zip_file = zip.by_index(i).unwrap(); let zip_file = zip.by_index(i).unwrap();
@ -1266,20 +1316,35 @@ mod test {
} }
} }
#[test]
fn zip64_magic_in_filenames() {
let files = vec![
include_bytes!("../tests/data/zip64_magic_in_filename_1.zip").to_vec(),
include_bytes!("../tests/data/zip64_magic_in_filename_2.zip").to_vec(),
include_bytes!("../tests/data/zip64_magic_in_filename_3.zip").to_vec(),
include_bytes!("../tests/data/zip64_magic_in_filename_4.zip").to_vec(),
include_bytes!("../tests/data/zip64_magic_in_filename_5.zip").to_vec(),
];
// Although we don't allow adding files whose names contain the ZIP64 CDB-end or
// CDB-end-locator signatures, we still read them when they aren't genuinely ambiguous.
for file in files {
ZipArchive::new(Cursor::new(file)).unwrap();
}
}
/// test case to ensure we don't preemptively over allocate based on the /// test case to ensure we don't preemptively over allocate based on the
/// declared number of files in the CDE of an invalid zip when the number of /// declared number of files in the CDE of an invalid zip when the number of
/// files declared is more than the alleged offset in the CDE /// files declared is more than the alleged offset in the CDE
#[test] #[test]
fn invalid_cde_number_of_files_allocation_smaller_offset() { fn invalid_cde_number_of_files_allocation_smaller_offset() {
use super::ZipArchive; use super::ZipArchive;
use std::io;
let mut v = Vec::new(); let mut v = Vec::new();
v.extend_from_slice(include_bytes!( v.extend_from_slice(include_bytes!(
"../tests/data/invalid_cde_number_of_files_allocation_smaller_offset.zip" "../tests/data/invalid_cde_number_of_files_allocation_smaller_offset.zip"
)); ));
let reader = ZipArchive::new(io::Cursor::new(v)); let reader = ZipArchive::new(Cursor::new(v));
assert!(reader.is_err()); assert!(reader.is_err() || reader.unwrap().is_empty());
} }
/// test case to ensure we don't preemptively over allocate based on the /// test case to ensure we don't preemptively over allocate based on the
@ -1288,13 +1353,12 @@ mod test {
#[test] #[test]
fn invalid_cde_number_of_files_allocation_greater_offset() { fn invalid_cde_number_of_files_allocation_greater_offset() {
use super::ZipArchive; use super::ZipArchive;
use std::io;
let mut v = Vec::new(); let mut v = Vec::new();
v.extend_from_slice(include_bytes!( v.extend_from_slice(include_bytes!(
"../tests/data/invalid_cde_number_of_files_allocation_greater_offset.zip" "../tests/data/invalid_cde_number_of_files_allocation_greater_offset.zip"
)); ));
let reader = ZipArchive::new(io::Cursor::new(v)); let reader = ZipArchive::new(Cursor::new(v));
assert!(reader.is_err()); assert!(reader.is_err());
} }
} }

View file

@ -1,6 +1,6 @@
use std::fs; use std::fs;
use std::io::{self, Read}; use std::io::{self, Read};
use std::path::Path; use std::path::{Path, PathBuf};
use super::{ use super::{
central_header_to_zip_file_inner, read_zipfile_from_stream, spec, ZipError, ZipFile, central_header_to_zip_file_inner, read_zipfile_from_stream, spec, ZipError, ZipFile,
@ -15,7 +15,7 @@ pub struct ZipStreamReader<R>(R);
impl<R> ZipStreamReader<R> { impl<R> ZipStreamReader<R> {
/// Create a new ZipStreamReader /// Create a new ZipStreamReader
pub fn new(reader: R) -> Self { pub const fn new(reader: R) -> Self {
Self(reader) Self(reader)
} }
} }
@ -162,7 +162,7 @@ impl ZipStreamFileMetadata {
/// [`ZipFile::enclosed_name`] is the better option in most scenarios. /// [`ZipFile::enclosed_name`] is the better option in most scenarios.
/// ///
/// [`ParentDir`]: `Component::ParentDir` /// [`ParentDir`]: `Component::ParentDir`
pub fn mangled_name(&self) -> ::std::path::PathBuf { pub fn mangled_name(&self) -> PathBuf {
self.0.file_name_sanitized() self.0.file_name_sanitized()
} }
@ -184,8 +184,7 @@ impl ZipStreamFileMetadata {
pub fn is_dir(&self) -> bool { pub fn is_dir(&self) -> bool {
self.name() self.name()
.chars() .chars()
.rev() .next_back()
.next()
.map_or(false, |c| c == '/' || c == '\\') .map_or(false, |c| c == '/' || c == '\\')
} }
@ -205,7 +204,7 @@ impl ZipStreamFileMetadata {
} }
/// Get unix mode for the file /// Get unix mode for the file
pub fn unix_mode(&self) -> Option<u32> { pub const fn unix_mode(&self) -> Option<u32> {
self.0.unix_mode() self.0.unix_mode()
} }
} }

View file

@ -3,6 +3,8 @@
use std::error::Error; use std::error::Error;
use std::fmt; use std::fmt;
use std::io; use std::io;
use std::io::IntoInnerError;
use std::num::TryFromIntError;
/// Generic result type with ZipError as its error variant /// Generic result type with ZipError as its error variant
pub type ZipResult<T> = Result<T, ZipError>; pub type ZipResult<T> = Result<T, ZipError>;
@ -41,6 +43,12 @@ impl From<io::Error> for ZipError {
} }
} }
impl<W> From<IntoInnerError<W>> for ZipError {
fn from(value: IntoInnerError<W>) -> Self {
ZipError::Io(value.into_error())
}
}
impl fmt::Display for ZipError { impl fmt::Display for ZipError {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match self { match self {
@ -65,8 +73,8 @@ impl ZipError {
/// The text used as an error when a password is required and not supplied /// The text used as an error when a password is required and not supplied
/// ///
/// ```rust,no_run /// ```rust,no_run
/// # use zip::result::ZipError; /// # use zip_next::result::ZipError;
/// # let mut archive = zip::ZipArchive::new(std::io::Cursor::new(&[])).unwrap(); /// # let mut archive = zip_next::ZipArchive::new(std::io::Cursor::new(&[])).unwrap();
/// match archive.by_index(1) { /// match archive.by_index(1) {
/// Err(ZipError::UnsupportedArchive(ZipError::PASSWORD_REQUIRED)) => eprintln!("a password is needed to unzip this file"), /// Err(ZipError::UnsupportedArchive(ZipError::PASSWORD_REQUIRED)) => eprintln!("a password is needed to unzip this file"),
/// _ => (), /// _ => (),
@ -86,6 +94,13 @@ impl From<ZipError> for io::Error {
#[derive(Debug)] #[derive(Debug)]
pub struct DateTimeRangeError; pub struct DateTimeRangeError;
// TryFromIntError is also an out-of-range error.
impl From<TryFromIntError> for DateTimeRangeError {
fn from(_value: TryFromIntError) -> Self {
DateTimeRangeError
}
}
impl fmt::Display for DateTimeRangeError { impl fmt::Display for DateTimeRangeError {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!( write!(

View file

@ -5,9 +5,9 @@ use std::io::prelude::*;
pub const LOCAL_FILE_HEADER_SIGNATURE: u32 = 0x04034b50; pub const LOCAL_FILE_HEADER_SIGNATURE: u32 = 0x04034b50;
pub const CENTRAL_DIRECTORY_HEADER_SIGNATURE: u32 = 0x02014b50; pub const CENTRAL_DIRECTORY_HEADER_SIGNATURE: u32 = 0x02014b50;
const CENTRAL_DIRECTORY_END_SIGNATURE: u32 = 0x06054b50; pub(crate) const CENTRAL_DIRECTORY_END_SIGNATURE: u32 = 0x06054b50;
pub const ZIP64_CENTRAL_DIRECTORY_END_SIGNATURE: u32 = 0x06064b50; pub const ZIP64_CENTRAL_DIRECTORY_END_SIGNATURE: u32 = 0x06064b50;
const ZIP64_CENTRAL_DIRECTORY_END_LOCATOR_SIGNATURE: u32 = 0x07064b50; pub(crate) const ZIP64_CENTRAL_DIRECTORY_END_LOCATOR_SIGNATURE: u32 = 0x07064b50;
pub const ZIP64_BYTES_THR: u64 = u32::MAX as u64; pub const ZIP64_BYTES_THR: u64 = u32::MAX as u64;
pub const ZIP64_ENTRY_THR: usize = u16::MAX as usize; pub const ZIP64_ENTRY_THR: usize = u16::MAX as usize;
@ -23,18 +23,6 @@ pub struct CentralDirectoryEnd {
} }
impl CentralDirectoryEnd { impl CentralDirectoryEnd {
// Per spec 4.4.1.4 - a CentralDirectoryEnd field might be insufficient to hold the
// required data. In this case the file SHOULD contain a ZIP64 format record
// and the field of this record will be set to -1
pub(crate) fn record_too_small(&self) -> bool {
self.disk_number == 0xFFFF
|| self.disk_with_central_directory == 0xFFFF
|| self.number_of_files_on_this_disk == 0xFFFF
|| self.number_of_files == 0xFFFF
|| self.central_directory_size == 0xFFFFFFFF
|| self.central_directory_offset == 0xFFFFFFFF
}
pub fn parse<T: Read>(reader: &mut T) -> ZipResult<CentralDirectoryEnd> { pub fn parse<T: Read>(reader: &mut T) -> ZipResult<CentralDirectoryEnd> {
let magic = reader.read_u32::<LittleEndian>()?; let magic = reader.read_u32::<LittleEndian>()?;
if magic != CENTRAL_DIRECTORY_END_SIGNATURE { if magic != CENTRAL_DIRECTORY_END_SIGNATURE {
@ -61,14 +49,12 @@ impl CentralDirectoryEnd {
}) })
} }
pub fn find_and_parse<T: Read + io::Seek>( pub fn find_and_parse<T: Read + Seek>(reader: &mut T) -> ZipResult<(CentralDirectoryEnd, u64)> {
reader: &mut T,
) -> ZipResult<(CentralDirectoryEnd, u64)> {
const HEADER_SIZE: u64 = 22; const HEADER_SIZE: u64 = 22;
const BYTES_BETWEEN_MAGIC_AND_COMMENT_SIZE: u64 = HEADER_SIZE - 6; const BYTES_BETWEEN_MAGIC_AND_COMMENT_SIZE: u64 = HEADER_SIZE - 6;
let file_length = reader.seek(io::SeekFrom::End(0))?; let file_length = reader.seek(io::SeekFrom::End(0))?;
let search_upper_bound = file_length.saturating_sub(HEADER_SIZE + ::std::u16::MAX as u64); let search_upper_bound = file_length.saturating_sub(HEADER_SIZE + u16::MAX as u64);
if file_length < HEADER_SIZE { if file_length < HEADER_SIZE {
return Err(ZipError::InvalidArchive("Invalid zip header")); return Err(ZipError::InvalidArchive("Invalid zip header"));
@ -155,14 +141,14 @@ pub struct Zip64CentralDirectoryEnd {
} }
impl Zip64CentralDirectoryEnd { impl Zip64CentralDirectoryEnd {
pub fn find_and_parse<T: Read + io::Seek>( pub fn find_and_parse<T: Read + Seek>(
reader: &mut T, reader: &mut T,
nominal_offset: u64, nominal_offset: u64,
search_upper_bound: u64, search_upper_bound: u64,
) -> ZipResult<(Zip64CentralDirectoryEnd, u64)> { ) -> ZipResult<(Zip64CentralDirectoryEnd, u64)> {
let mut pos = nominal_offset; let mut pos = search_upper_bound;
while pos <= search_upper_bound { while pos >= nominal_offset {
reader.seek(io::SeekFrom::Start(pos))?; reader.seek(io::SeekFrom::Start(pos))?;
if reader.read_u32::<LittleEndian>()? == ZIP64_CENTRAL_DIRECTORY_END_SIGNATURE { if reader.read_u32::<LittleEndian>()? == ZIP64_CENTRAL_DIRECTORY_END_SIGNATURE {
@ -194,8 +180,11 @@ impl Zip64CentralDirectoryEnd {
archive_offset, archive_offset,
)); ));
} }
if pos > 0 {
pos += 1; pos -= 1;
} else {
break;
}
} }
Err(ZipError::InvalidArchive( Err(ZipError::InvalidArchive(
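The hunk above reverses the direction of the ZIP64 end-of-central-directory search: instead of scanning from `nominal_offset` up to `search_upper_bound`, it starts at the upper bound and walks backwards, with an explicit `pos > 0` guard against underflow. A byte-buffer sketch of the same loop shape (hypothetical helper; the crate's version works on a seekable reader and parses the full record once the signature is found):

    /// Scan backward through buf[nominal_offset..=search_upper_bound] for the
    /// ZIP64 EOCD signature (0x06064b50, stored little-endian as "PK\x06\x06").
    fn find_zip64_eocd(buf: &[u8], nominal_offset: usize, search_upper_bound: usize) -> Option<usize> {
        const SIG: [u8; 4] = 0x06064b50u32.to_le_bytes();
        let mut pos = search_upper_bound.min(buf.len().checked_sub(4)?);
        while pos >= nominal_offset {
            if buf[pos..pos + 4] == SIG {
                return Some(pos); // closest match to the end of the region wins
            }
            if pos == 0 {
                break; // mirrors the `pos > 0` guard above
            }
            pos -= 1;
        }
        None
    }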

View file

@ -1,18 +1,20 @@
//! Types that specify what is contained in a ZIP. //! Types that specify what is contained in a ZIP.
use path::{Component, Path, PathBuf};
use std::path; use std::path;
use std::sync::Arc;
#[cfg(feature = "chrono")]
use chrono::{Datelike, NaiveDate, NaiveDateTime, NaiveTime, Timelike};
#[cfg(not(any( #[cfg(not(any(
all(target_arch = "arm", target_pointer_width = "32"), all(target_arch = "arm", target_pointer_width = "32"),
target_arch = "mips", target_arch = "mips",
target_arch = "powerpc" target_arch = "powerpc"
)))] )))]
use std::sync::atomic; use std::sync::atomic;
#[cfg(not(feature = "time"))]
use std::time::SystemTime;
#[cfg(doc)] #[cfg(doc)]
use {crate::read::ZipFile, crate::write::FileOptions}; use {crate::read::ZipFile, crate::write::FileOptions};
mod ffi { pub(crate) mod ffi {
pub const S_IFDIR: u32 = 0o0040000; pub const S_IFDIR: u32 = 0o0040000;
pub const S_IFREG: u32 = 0o0100000; pub const S_IFREG: u32 = 0o0100000;
} }
@ -49,7 +51,6 @@ mod atomic {
} }
} }
#[cfg(feature = "time")]
use crate::result::DateTimeRangeError; use crate::result::DateTimeRangeError;
#[cfg(feature = "time")] #[cfg(feature = "time")]
use time::{error::ComponentRange, Date, Month, OffsetDateTime, PrimitiveDateTime, Time}; use time::{error::ComponentRange, Date, Month, OffsetDateTime, PrimitiveDateTime, Time};
@ -62,7 +63,7 @@ pub enum System {
} }
impl System { impl System {
pub fn from_u8(system: u8) -> System { pub const fn from_u8(system: u8) -> System {
use self::System::*; use self::System::*;
match system { match system {
@ -100,7 +101,51 @@ pub struct DateTime {
second: u8, second: u8,
} }
impl ::std::default::Default for DateTime { #[cfg(fuzzing)]
impl arbitrary::Arbitrary<'_> for DateTime {
fn arbitrary(u: &mut arbitrary::Unstructured) -> arbitrary::Result<Self> {
Ok(DateTime {
year: u.int_in_range(1980..=2107)?,
month: u.int_in_range(1..=12)?,
day: u.int_in_range(1..=31)?,
hour: u.int_in_range(0..=23)?,
minute: u.int_in_range(0..=59)?,
second: u.int_in_range(0..=60)?,
})
}
}
#[cfg(feature = "chrono")]
impl TryFrom<NaiveDateTime> for DateTime {
type Error = DateTimeRangeError;
fn try_from(value: NaiveDateTime) -> Result<Self, Self::Error> {
DateTime::from_date_and_time(
value.year().try_into()?,
value.month().try_into()?,
value.day().try_into()?,
value.hour().try_into()?,
value.minute().try_into()?,
value.second().try_into()?,
)
}
}
#[cfg(feature = "chrono")]
impl TryInto<NaiveDateTime> for DateTime {
type Error = DateTimeRangeError;
fn try_into(self) -> Result<NaiveDateTime, Self::Error> {
let date = NaiveDate::from_ymd_opt(self.year.into(), self.month.into(), self.day.into())
.ok_or(DateTimeRangeError)?;
let time =
NaiveTime::from_hms_opt(self.hour.into(), self.minute.into(), self.second.into())
.ok_or(DateTimeRangeError)?;
Ok(NaiveDateTime::new(date, time))
}
}
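The chrono conversions above are fallible because the MS-DOS representation only covers years 1980 through 2107 and each component must fit its bit field. A hypothetical round-trip sketch, assuming the crate is built with the `chrono` feature and that `DateTime` and `DateTimeRangeError` are reachable at their usual paths (`zip_next::DateTime`, `zip_next::result::DateTimeRangeError`):

    use chrono::NaiveDate;
    use zip_next::result::DateTimeRangeError;
    use zip_next::DateTime;

    fn chrono_round_trip() -> Result<(), DateTimeRangeError> {
        let naive = NaiveDate::from_ymd_opt(2023, 10, 13)
            .and_then(|d| d.and_hms_opt(19, 58, 44))
            .ok_or(DateTimeRangeError)?;
        let dos: DateTime = naive.try_into()?;              // chrono -> zip DateTime
        let back: chrono::NaiveDateTime = dos.try_into()?;  // zip DateTime -> chrono
        assert_eq!(naive, back);                            // components are preserved
        Ok(())
    }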
impl Default for DateTime {
/// Constructs an 'default' datetime of 1980-01-01 00:00:00 /// Constructs an 'default' datetime of 1980-01-01 00:00:00
fn default() -> DateTime { fn default() -> DateTime {
DateTime { DateTime {
@ -116,7 +161,7 @@ impl ::std::default::Default for DateTime {
impl DateTime { impl DateTime {
/// Converts an msdos (u16, u16) pair to a DateTime object /// Converts an msdos (u16, u16) pair to a DateTime object
pub fn from_msdos(datepart: u16, timepart: u16) -> DateTime { pub const fn from_msdos(datepart: u16, timepart: u16) -> DateTime {
let seconds = (timepart & 0b0000000000011111) << 1; let seconds = (timepart & 0b0000000000011111) << 1;
let minutes = (timepart & 0b0000011111100000) >> 5; let minutes = (timepart & 0b0000011111100000) >> 5;
let hours = (timepart & 0b1111100000000000) >> 11; let hours = (timepart & 0b1111100000000000) >> 11;
@ -143,7 +188,6 @@ impl DateTime {
/// * hour: [0, 23] /// * hour: [0, 23]
/// * minute: [0, 59] /// * minute: [0, 59]
/// * second: [0, 60] /// * second: [0, 60]
#[allow(clippy::result_unit_err)]
pub fn from_date_and_time( pub fn from_date_and_time(
year: u16, year: u16,
month: u8, month: u8,
@ -151,7 +195,7 @@ impl DateTime {
hour: u8, hour: u8,
minute: u8, minute: u8,
second: u8, second: u8,
) -> Result<DateTime, ()> { ) -> Result<DateTime, DateTimeRangeError> {
if (1980..=2107).contains(&year) if (1980..=2107).contains(&year)
&& (1..=12).contains(&month) && (1..=12).contains(&month)
&& (1..=31).contains(&day) && (1..=31).contains(&day)
@ -168,27 +212,39 @@ impl DateTime {
second, second,
}) })
} else { } else {
Err(()) Err(DateTimeRangeError)
} }
} }
/// Indicates whether this date and time can be written to a zip archive.
pub fn is_valid(&self) -> bool {
DateTime::from_date_and_time(
self.year,
self.month,
self.day,
self.hour,
self.minute,
self.second,
)
.is_ok()
}
#[cfg(feature = "time")] #[cfg(feature = "time")]
/// Converts a OffsetDateTime object to a DateTime /// Converts a OffsetDateTime object to a DateTime
/// ///
/// Returns `Err` when this object is out of bounds /// Returns `Err` when this object is out of bounds
#[allow(clippy::result_unit_err)]
#[deprecated(note = "use `DateTime::try_from()`")] #[deprecated(note = "use `DateTime::try_from()`")]
pub fn from_time(dt: OffsetDateTime) -> Result<DateTime, ()> { pub fn from_time(dt: OffsetDateTime) -> Result<DateTime, DateTimeRangeError> {
dt.try_into().map_err(|_err| ()) dt.try_into().map_err(|_err| DateTimeRangeError)
} }
/// Gets the time portion of this datetime in the msdos representation /// Gets the time portion of this datetime in the msdos representation
pub fn timepart(&self) -> u16 { pub const fn timepart(&self) -> u16 {
((self.second as u16) >> 1) | ((self.minute as u16) << 5) | ((self.hour as u16) << 11) ((self.second as u16) >> 1) | ((self.minute as u16) << 5) | ((self.hour as u16) << 11)
} }
/// Gets the date portion of this datetime in the msdos representation /// Gets the date portion of this datetime in the msdos representation
pub fn datepart(&self) -> u16 { pub const fn datepart(&self) -> u16 {
(self.day as u16) | ((self.month as u16) << 5) | ((self.year - 1980) << 9) (self.day as u16) | ((self.month as u16) << 5) | ((self.year - 1980) << 9)
} }
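For reference, the MS-DOS encoding used by `from_msdos`, `timepart` and `datepart` packs each component into fixed bit fields: seconds/2 in bits 0-4 of the time word, minutes in bits 5-10, hours in bits 11-15; day in bits 0-4 of the date word, month in bits 5-8, and years-since-1980 in bits 9-15. A small standalone sketch of the decoding direction, mirroring the shifts above (plain functions, not the crate's API):

    // Decode an MS-DOS time word into (hour, minute, second).
    fn decode_time(timepart: u16) -> (u8, u8, u8) {
        let second = ((timepart & 0b0000000000011111) << 1) as u8; // stored as seconds / 2
        let minute = ((timepart & 0b0000011111100000) >> 5) as u8;
        let hour = ((timepart & 0b1111100000000000) >> 11) as u8;
        (hour, minute, second)
    }

    // Decode an MS-DOS date word into (year, month, day).
    fn decode_date(datepart: u16) -> (u16, u8, u8) {
        let day = (datepart & 0b0000000000011111) as u8;
        let month = ((datepart & 0b0000000111100000) >> 5) as u8;
        let year = 1980 + ((datepart & 0b1111111000000000) >> 9);
        (year, month, day)
    }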
@ -202,7 +258,7 @@ impl DateTime {
} }
/// Get the year. There is no epoch, i.e. 2018 will be returned as 2018. /// Get the year. There is no epoch, i.e. 2018 will be returned as 2018.
pub fn year(&self) -> u16 { pub const fn year(&self) -> u16 {
self.year self.year
} }
@ -211,7 +267,7 @@ impl DateTime {
/// # Warning /// # Warning
/// ///
/// When read from a zip file, this may not be a reasonable value /// When read from a zip file, this may not be a reasonable value
pub fn month(&self) -> u8 { pub const fn month(&self) -> u8 {
self.month self.month
} }
@ -220,7 +276,7 @@ impl DateTime {
/// # Warning /// # Warning
/// ///
/// When read from a zip file, this may not be a reasonable value /// When read from a zip file, this may not be a reasonable value
pub fn day(&self) -> u8 { pub const fn day(&self) -> u8 {
self.day self.day
} }
@ -229,7 +285,7 @@ impl DateTime {
/// # Warning /// # Warning
/// ///
/// When read from a zip file, this may not be a reasonable value /// When read from a zip file, this may not be a reasonable value
pub fn hour(&self) -> u8 { pub const fn hour(&self) -> u8 {
self.hour self.hour
} }
@ -238,7 +294,7 @@ impl DateTime {
/// # Warning /// # Warning
/// ///
/// When read from a zip file, this may not be a reasonable value /// When read from a zip file, this may not be a reasonable value
pub fn minute(&self) -> u8 { pub const fn minute(&self) -> u8 {
self.minute self.minute
} }
@ -247,7 +303,7 @@ impl DateTime {
/// # Warning /// # Warning
/// ///
/// When read from a zip file, this may not be a reasonable value /// When read from a zip file, this may not be a reasonable value
pub fn second(&self) -> u8 { pub const fn second(&self) -> u8 {
self.second self.second
} }
} }
@ -259,8 +315,8 @@ impl TryFrom<OffsetDateTime> for DateTime {
fn try_from(dt: OffsetDateTime) -> Result<Self, Self::Error> { fn try_from(dt: OffsetDateTime) -> Result<Self, Self::Error> {
if dt.year() >= 1980 && dt.year() <= 2107 { if dt.year() >= 1980 && dt.year() <= 2107 {
Ok(DateTime { Ok(DateTime {
year: (dt.year()) as u16, year: (dt.year()).try_into()?,
month: (dt.month()) as u8, month: dt.month().into(),
day: dt.day(), day: dt.day(),
hour: dt.hour(), hour: dt.hour(),
minute: dt.minute(), minute: dt.minute(),
@ -282,7 +338,7 @@ pub const DEFAULT_VERSION: u8 = 46;
pub struct AtomicU64(atomic::AtomicU64); pub struct AtomicU64(atomic::AtomicU64);
impl AtomicU64 { impl AtomicU64 {
pub fn new(v: u64) -> Self { pub const fn new(v: u64) -> Self {
Self(atomic::AtomicU64::new(v)) Self(atomic::AtomicU64::new(v))
} }
@ -333,7 +389,9 @@ pub struct ZipFileData {
/// Raw file name. To be used when file_name was incorrectly decoded. /// Raw file name. To be used when file_name was incorrectly decoded.
pub file_name_raw: Vec<u8>, pub file_name_raw: Vec<u8>,
/// Extra field usually used for storage expansion /// Extra field usually used for storage expansion
pub extra_field: Vec<u8>, pub extra_field: Arc<Vec<u8>>,
/// Extra field only written to central directory
pub central_extra_field: Arc<Vec<u8>>,
/// File comment /// File comment
pub file_comment: String, pub file_comment: String,
/// Specifies where the local header of the file starts /// Specifies where the local header of the file starts
@ -353,7 +411,7 @@ pub struct ZipFileData {
} }
impl ZipFileData { impl ZipFileData {
pub fn file_name_sanitized(&self) -> ::std::path::PathBuf { pub fn file_name_sanitized(&self) -> PathBuf {
let no_null_filename = match self.file_name.find('\0') { let no_null_filename = match self.file_name.find('\0') {
Some(index) => &self.file_name[0..index], Some(index) => &self.file_name[0..index],
None => &self.file_name, None => &self.file_name,
@ -363,7 +421,7 @@ impl ZipFileData {
// zip files can contain both / and \ as separators regardless of the OS // zip files can contain both / and \ as separators regardless of the OS
// and as we want to return a sanitized PathBuf that only supports the // and as we want to return a sanitized PathBuf that only supports the
// OS separator let's convert incompatible separators to compatible ones // OS separator let's convert incompatible separators to compatible ones
let separator = ::std::path::MAIN_SEPARATOR; let separator = path::MAIN_SEPARATOR;
let opposite_separator = match separator { let opposite_separator = match separator {
'/' => '\\', '/' => '\\',
_ => '/', _ => '/',
@ -371,34 +429,34 @@ impl ZipFileData {
let filename = let filename =
no_null_filename.replace(&opposite_separator.to_string(), &separator.to_string()); no_null_filename.replace(&opposite_separator.to_string(), &separator.to_string());
::std::path::Path::new(&filename) Path::new(&filename)
.components() .components()
.filter(|component| matches!(*component, ::std::path::Component::Normal(..))) .filter(|component| matches!(*component, path::Component::Normal(..)))
.fold(::std::path::PathBuf::new(), |mut path, ref cur| { .fold(PathBuf::new(), |mut path, ref cur| {
path.push(cur.as_os_str()); path.push(cur.as_os_str());
path path
}) })
} }
pub(crate) fn enclosed_name(&self) -> Option<&path::Path> { pub(crate) fn enclosed_name(&self) -> Option<&Path> {
if self.file_name.contains('\0') { if self.file_name.contains('\0') {
return None; return None;
} }
let path = path::Path::new(&self.file_name); let path = Path::new(&self.file_name);
let mut depth = 0usize; let mut depth = 0usize;
for component in path.components() { for component in path.components() {
match component { match component {
path::Component::Prefix(_) | path::Component::RootDir => return None, Component::Prefix(_) | Component::RootDir => return None,
path::Component::ParentDir => depth = depth.checked_sub(1)?, Component::ParentDir => depth = depth.checked_sub(1)?,
path::Component::Normal(_) => depth += 1, Component::Normal(_) => depth += 1,
path::Component::CurDir => (), Component::CurDir => (),
} }
} }
Some(path) Some(path)
} }
/// Get unix mode for the file /// Get unix mode for the file
pub(crate) fn unix_mode(&self) -> Option<u32> { pub(crate) const fn unix_mode(&self) -> Option<u32> {
if self.external_attributes == 0 { if self.external_attributes == 0 {
return None; return None;
} }
@ -422,13 +480,13 @@ impl ZipFileData {
} }
} }
pub fn zip64_extension(&self) -> bool { pub const fn zip64_extension(&self) -> bool {
self.uncompressed_size > 0xFFFFFFFF self.uncompressed_size > 0xFFFFFFFF
|| self.compressed_size > 0xFFFFFFFF || self.compressed_size > 0xFFFFFFFF
|| self.header_start > 0xFFFFFFFF || self.header_start > 0xFFFFFFFF
} }
pub fn version_needed(&self) -> u16 { pub const fn version_needed(&self) -> u16 {
// higher versions matched first // higher versions matched first
match (self.zip64_extension(), self.compression_method) { match (self.zip64_extension(), self.compression_method) {
#[cfg(feature = "bzip2")] #[cfg(feature = "bzip2")]
@ -459,11 +517,11 @@ pub enum AesMode {
#[cfg(feature = "aes-crypto")] #[cfg(feature = "aes-crypto")]
impl AesMode { impl AesMode {
pub fn salt_length(&self) -> usize { pub const fn salt_length(&self) -> usize {
self.key_length() / 2 self.key_length() / 2
} }
pub fn key_length(&self) -> usize { pub const fn key_length(&self) -> usize {
match self { match self {
Self::Aes128 => 16, Self::Aes128 => 16,
Self::Aes192 => 24, Self::Aes192 => 24,
@ -500,7 +558,8 @@ mod test {
uncompressed_size: 0, uncompressed_size: 0,
file_name: file_name.clone(), file_name: file_name.clone(),
file_name_raw: file_name.into_bytes(), file_name_raw: file_name.into_bytes(),
extra_field: Vec::new(), extra_field: Arc::new(vec![]),
central_extra_field: Arc::new(vec![]),
file_comment: String::new(), file_comment: String::new(),
header_start: 0, header_start: 0,
data_start: AtomicU64::new(0), data_start: AtomicU64::new(0),
@ -509,10 +568,7 @@ mod test {
large_file: false, large_file: false,
aes_mode: None, aes_mode: None,
}; };
assert_eq!( assert_eq!(data.file_name_sanitized(), PathBuf::from("path/etc/passwd"));
data.file_name_sanitized(),
::std::path::PathBuf::from("path/etc/passwd")
);
} }
#[test] #[test]

View file

@ -8,7 +8,7 @@ pub mod write {
/// Unstable methods for [`FileOptions`]. /// Unstable methods for [`FileOptions`].
pub trait FileOptionsExt { pub trait FileOptionsExt {
/// Write the file with the given password using the deprecated ZipCrypto algorithm. /// Write the file with the given password using the deprecated ZipCrypto algorithm.
/// ///
/// This is not recommended for new archives, as ZipCrypto is not secure. /// This is not recommended for new archives, as ZipCrypto is not secure.
fn with_deprecated_encryption(self, password: &[u8]) -> Self; fn with_deprecated_encryption(self, password: &[u8]) -> Self;
} }
@ -17,4 +17,4 @@ pub mod write {
self.with_deprecated_encryption(password) self.with_deprecated_encryption(password)
} }
} }
} }

File diff suppressed because it is too large


View file

@ -3,18 +3,40 @@
//! The following paper was used to implement the ZipCrypto algorithm: //! The following paper was used to implement the ZipCrypto algorithm:
//! [https://courses.cs.ut.ee/MTAT.07.022/2015_fall/uploads/Main/dmitri-report-f15-16.pdf](https://courses.cs.ut.ee/MTAT.07.022/2015_fall/uploads/Main/dmitri-report-f15-16.pdf) //! [https://courses.cs.ut.ee/MTAT.07.022/2015_fall/uploads/Main/dmitri-report-f15-16.pdf](https://courses.cs.ut.ee/MTAT.07.022/2015_fall/uploads/Main/dmitri-report-f15-16.pdf)
use std::fmt::{Debug, Formatter};
use std::hash::Hash;
use std::num::Wrapping; use std::num::Wrapping;
/// A container to hold the current key state /// A container to hold the current key state
#[derive(Clone, Copy)] #[cfg_attr(fuzzing, derive(arbitrary::Arbitrary))]
#[derive(Clone, Copy, Hash, Ord, PartialOrd, Eq, PartialEq)]
pub(crate) struct ZipCryptoKeys { pub(crate) struct ZipCryptoKeys {
key_0: Wrapping<u32>, key_0: Wrapping<u32>,
key_1: Wrapping<u32>, key_1: Wrapping<u32>,
key_2: Wrapping<u32>, key_2: Wrapping<u32>,
} }
impl Debug for ZipCryptoKeys {
#[allow(unreachable_code)]
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
#[cfg(not(any(test, fuzzing)))]
{
use std::collections::hash_map::DefaultHasher;
use std::hash::Hasher;
let mut t = DefaultHasher::new();
self.hash(&mut t);
return f.write_fmt(format_args!("ZipCryptoKeys(hash {})", t.finish()));
}
#[cfg(any(test, fuzzing))]
return f.write_fmt(format_args!(
"ZipCryptoKeys({:#10x},{:#10x},{:#10x})",
self.key_0, self.key_1, self.key_2
));
}
}
impl ZipCryptoKeys { impl ZipCryptoKeys {
fn new() -> ZipCryptoKeys { const fn new() -> ZipCryptoKeys {
ZipCryptoKeys { ZipCryptoKeys {
key_0: Wrapping(0x12345678), key_0: Wrapping(0x12345678),
key_1: Wrapping(0x23456789), key_1: Wrapping(0x23456789),
@ -123,12 +145,14 @@ impl<R: std::io::Read> ZipCryptoReader<R> {
Ok(Some(ZipCryptoReaderValid { reader: self })) Ok(Some(ZipCryptoReaderValid { reader: self }))
} }
} }
#[allow(unused)]
pub(crate) struct ZipCryptoWriter<W> { pub(crate) struct ZipCryptoWriter<W> {
pub(crate) writer: W, pub(crate) writer: W,
pub(crate) buffer: Vec<u8>, pub(crate) buffer: Vec<u8>,
pub(crate) keys: ZipCryptoKeys, pub(crate) keys: ZipCryptoKeys,
} }
impl<W: std::io::Write> ZipCryptoWriter<W> { impl<W: std::io::Write> ZipCryptoWriter<W> {
#[allow(unused)]
pub(crate) fn finish(mut self, crc32: u32) -> std::io::Result<W> { pub(crate) fn finish(mut self, crc32: u32) -> std::io::Result<W> {
self.buffer[11] = (crc32 >> 24) as u8; self.buffer[11] = (crc32 >> 24) as u8;
for byte in self.buffer.iter_mut() { for byte in self.buffer.iter_mut() {

View file

@ -1,7 +1,7 @@
#![cfg(feature = "aes-crypto")] #![cfg(feature = "aes-crypto")]
use std::io::{self, Read}; use std::io::{self, Read};
use zip::ZipArchive; use zip_next::ZipArchive;
const SECRET_CONTENT: &str = "Lorem ipsum dolor sit amet"; const SECRET_CONTENT: &str = "Lorem ipsum dolor sit amet";

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View file

@ -3,8 +3,9 @@ use std::collections::HashSet;
use std::io::prelude::*; use std::io::prelude::*;
use std::io::{Cursor, Seek}; use std::io::{Cursor, Seek};
use std::iter::FromIterator; use std::iter::FromIterator;
use zip::write::FileOptions; use zip_next::result::ZipResult;
use zip::{CompressionMethod, SUPPORTED_COMPRESSION_METHODS}; use zip_next::write::FileOptions;
use zip_next::{CompressionMethod, ZipWriter, SUPPORTED_COMPRESSION_METHODS};
// This test asserts that after creating a zip file, then reading its contents back out, // This test asserts that after creating a zip file, then reading its contents back out,
// the extracted data will *always* be exactly the same as the original data. // the extracted data will *always* be exactly the same as the original data.
@ -17,10 +18,11 @@ fn end_to_end() {
let file = &mut Cursor::new(Vec::new()); let file = &mut Cursor::new(Vec::new());
println!("Writing file with {method} compression"); println!("Writing file with {method} compression");
write_test_archive(file, method).expect("Couldn't write test zip archive"); write_test_archive(file, method, true);
println!("Checking file contents"); println!("Checking file contents");
check_archive_file(file, ENTRY_NAME, Some(method), LOREM_IPSUM); check_archive_file(file, ENTRY_NAME, Some(method), LOREM_IPSUM);
check_archive_file(file, INTERNAL_COPY_ENTRY_NAME, Some(method), LOREM_IPSUM);
} }
} }
@ -33,13 +35,13 @@ fn copy() {
continue continue
} }
let src_file = &mut Cursor::new(Vec::new()); let src_file = &mut Cursor::new(Vec::new());
write_test_archive(src_file, method).expect("Couldn't write to test file"); write_test_archive(src_file, method, false);
let mut tgt_file = &mut Cursor::new(Vec::new()); let mut tgt_file = &mut Cursor::new(Vec::new());
{ {
let mut src_archive = zip::ZipArchive::new(src_file).unwrap(); let mut src_archive = zip_next::ZipArchive::new(src_file).unwrap();
let mut zip = zip::ZipWriter::new(&mut tgt_file); let mut zip = ZipWriter::new(&mut tgt_file);
{ {
let file = src_archive let file = src_archive
@ -59,7 +61,7 @@ fn copy() {
} }
} }
let mut tgt_archive = zip::ZipArchive::new(tgt_file).unwrap(); let mut tgt_archive = zip_next::ZipArchive::new(tgt_file).unwrap();
check_archive_file_contents(&mut tgt_archive, ENTRY_NAME, LOREM_IPSUM); check_archive_file_contents(&mut tgt_archive, ENTRY_NAME, LOREM_IPSUM);
check_archive_file_contents(&mut tgt_archive, COPY_ENTRY_NAME, LOREM_IPSUM); check_archive_file_contents(&mut tgt_archive, COPY_ENTRY_NAME, LOREM_IPSUM);
@ -74,59 +76,68 @@ fn append() {
if method == CompressionMethod::DEFLATE64 { if method == CompressionMethod::DEFLATE64 {
continue continue
} }
let mut file = &mut Cursor::new(Vec::new()); for shallow_copy in &[false, true] {
write_test_archive(file, method).expect("Couldn't write to test file"); println!("Writing file with {method} compression, shallow_copy {shallow_copy}");
let mut file = &mut Cursor::new(Vec::new());
write_test_archive(file, method, *shallow_copy);
{ {
let mut zip = zip::ZipWriter::new_append(&mut file).unwrap(); let mut zip = ZipWriter::new_append(&mut file).unwrap();
zip.start_file( zip.start_file(
COPY_ENTRY_NAME, COPY_ENTRY_NAME,
FileOptions::default().compression_method(method), FileOptions::default()
) .compression_method(method)
.unwrap(); .unix_permissions(0o755),
zip.write_all(LOREM_IPSUM).unwrap(); )
zip.finish().unwrap(); .unwrap();
zip.write_all(LOREM_IPSUM).unwrap();
zip.finish().unwrap();
}
let mut zip = zip_next::ZipArchive::new(&mut file).unwrap();
check_archive_file_contents(&mut zip, ENTRY_NAME, LOREM_IPSUM);
check_archive_file_contents(&mut zip, COPY_ENTRY_NAME, LOREM_IPSUM);
check_archive_file_contents(&mut zip, INTERNAL_COPY_ENTRY_NAME, LOREM_IPSUM);
} }
let mut zip = zip::ZipArchive::new(&mut file).unwrap();
check_archive_file_contents(&mut zip, ENTRY_NAME, LOREM_IPSUM);
check_archive_file_contents(&mut zip, COPY_ENTRY_NAME, LOREM_IPSUM);
} }
} }
// Write a test zip archive to buffer. // Write a test zip archive to buffer.
fn write_test_archive( fn write_test_archive(file: &mut Cursor<Vec<u8>>, method: CompressionMethod, shallow_copy: bool) {
file: &mut Cursor<Vec<u8>>, let mut zip = ZipWriter::new(file);
method: CompressionMethod,
) -> zip::result::ZipResult<()> {
let mut zip = zip::ZipWriter::new(file);
zip.add_directory("test/", Default::default())?; zip.add_directory("test/", Default::default()).unwrap();
let options = FileOptions::default() let mut options = FileOptions::default()
.compression_method(method) .compression_method(method)
.unix_permissions(0o755); .unix_permissions(0o755);
zip.start_file("test/☃.txt", options)?; zip.start_file(ENTRY_NAME, options.clone()).unwrap();
zip.write_all(b"Hello, World!\n")?; zip.write_all(LOREM_IPSUM).unwrap();
zip.start_file_with_extra_data("test_with_extra_data/🐢.txt", options)?; if shallow_copy {
zip.write_u16::<LittleEndian>(0xbeef)?; zip.shallow_copy_file(ENTRY_NAME, INTERNAL_COPY_ENTRY_NAME)
zip.write_u16::<LittleEndian>(EXTRA_DATA.len() as u16)?; .unwrap();
zip.write_all(EXTRA_DATA)?; } else {
zip.end_extra_data()?; zip.deep_copy_file(ENTRY_NAME, INTERNAL_COPY_ENTRY_NAME)
zip.write_all(b"Hello, World! Again.\n")?; .unwrap();
}
zip.start_file(ENTRY_NAME, options)?; zip.start_file("test/☃.txt", options.clone()).unwrap();
zip.write_all(LOREM_IPSUM)?; zip.write_all(b"Hello, World!\n").unwrap();
zip.finish()?; options.add_extra_data(0xbeef, EXTRA_DATA, false).unwrap();
Ok(())
zip.start_file("test_with_extra_data/🐢.txt", options)
.unwrap();
zip.write_all(b"Hello, World! Again.\n").unwrap();
zip.finish().unwrap();
} }
// Load an archive from buffer and check for test data. // Load an archive from buffer and check for test data.
fn check_test_archive<R: Read + Seek>(zip_file: R) -> zip::result::ZipResult<zip::ZipArchive<R>> { fn check_test_archive<R: Read + Seek>(zip_file: R) -> ZipResult<zip_next::ZipArchive<R>> {
let mut archive = zip::ZipArchive::new(zip_file).unwrap(); let mut archive = zip_next::ZipArchive::new(zip_file).unwrap();
// Check archive contains expected file names. // Check archive contains expected file names.
{ {
@ -135,6 +146,7 @@ fn check_test_archive<R: Read + Seek>(zip_file: R) -> zip::result::ZipResult<zip
"test/☃.txt", "test/☃.txt",
"test_with_extra_data/🐢.txt", "test_with_extra_data/🐢.txt",
ENTRY_NAME, ENTRY_NAME,
INTERNAL_COPY_ENTRY_NAME,
]; ];
let expected_file_names = HashSet::from_iter(expected_file_names.iter().copied()); let expected_file_names = HashSet::from_iter(expected_file_names.iter().copied());
let file_names = archive.file_names().collect::<HashSet<_>>(); let file_names = archive.file_names().collect::<HashSet<_>>();
@ -156,9 +168,9 @@ fn check_test_archive<R: Read + Seek>(zip_file: R) -> zip::result::ZipResult<zip
// Read a file in the archive as a string. // Read a file in the archive as a string.
fn read_archive_file<R: Read + Seek>( fn read_archive_file<R: Read + Seek>(
archive: &mut zip::ZipArchive<R>, archive: &mut zip_next::ZipArchive<R>,
name: &str, name: &str,
) -> zip::result::ZipResult<String> { ) -> ZipResult<String> {
let mut file = archive.by_name(name)?; let mut file = archive.by_name(name)?;
let mut contents = String::new(); let mut contents = String::new();
@ -192,10 +204,13 @@ fn check_archive_file(
// Check a file in the archive contains the given data. // Check a file in the archive contains the given data.
fn check_archive_file_contents<R: Read + Seek>( fn check_archive_file_contents<R: Read + Seek>(
archive: &mut zip::ZipArchive<R>, archive: &mut zip_next::ZipArchive<R>,
name: &str, name: &str,
expected: &[u8], expected: &[u8],
) { ) {
let file_permissions: u32 = archive.by_name(name).unwrap().unix_mode().unwrap();
assert_eq!(file_permissions, 0o100755);
let file_contents: String = read_archive_file(archive, name).unwrap(); let file_contents: String = read_archive_file(archive, name).unwrap();
assert_eq!(file_contents.as_bytes(), expected); assert_eq!(file_contents.as_bytes(), expected);
} }
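A note on the `0o100755` expected in this check: `unix_mode()` reports the full Unix `st_mode` stored in the external attributes, i.e. the file-type bits plus the permission bits passed to `unix_permissions(0o755)`. A tiny sketch of the arithmetic, with constants copied from the `ffi` module shown earlier in this diff:

    const S_IFREG: u32 = 0o0100000; // regular-file type bits
    const S_IFDIR: u32 = 0o0040000; // directory type bits

    fn expected_unix_modes() {
        // A regular file written with permissions 0o755 reads back as 0o100755.
        assert_eq!(S_IFREG | 0o755, 0o100755);
        // A directory entry with the same permissions would read back as 0o040755.
        assert_eq!(S_IFDIR | 0o755, 0o040755);
    }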
@ -212,3 +227,5 @@ const EXTRA_DATA: &[u8] = b"Extra Data";
const ENTRY_NAME: &str = "test/lorem_ipsum.txt"; const ENTRY_NAME: &str = "test/lorem_ipsum.txt";
const COPY_ENTRY_NAME: &str = "test/lorem_ipsum_renamed.txt"; const COPY_ENTRY_NAME: &str = "test/lorem_ipsum_renamed.txt";
const INTERNAL_COPY_ENTRY_NAME: &str = "test/lorem_ipsum_copied.txt";

View file

@ -1,5 +1,5 @@
use std::io::Cursor; use std::io::Cursor;
use zip::read::ZipArchive; use zip_next::read::ZipArchive;
const BUF: &[u8] = &[ const BUF: &[u8] = &[
0x50, 0x4b, 0x03, 0x04, 0x0a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x50, 0x4b, 0x03, 0x04, 0x0a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

View file

@ -1,4 +1,4 @@
use zip::result::ZipError; use zip_next::result::ZipError;
const BUF: &[u8] = &[ const BUF: &[u8] = &[
0, 80, 75, 1, 2, 127, 120, 0, 3, 3, 75, 80, 232, 3, 0, 0, 0, 0, 0, 0, 3, 0, 1, 0, 7, 0, 0, 0, 0, 80, 75, 1, 2, 127, 120, 0, 3, 3, 75, 80, 232, 3, 0, 0, 0, 0, 0, 0, 3, 0, 1, 0, 7, 0, 0, 0,
@ -23,7 +23,7 @@ const BUF: &[u8] = &[
#[test] #[test]
fn invalid_header() { fn invalid_header() {
let reader = std::io::Cursor::new(&BUF); let reader = std::io::Cursor::new(&BUF);
let archive = zip::ZipArchive::new(reader); let archive = zip_next::ZipArchive::new(reader);
match archive { match archive {
Err(ZipError::InvalidArchive(_)) => {} Err(ZipError::InvalidArchive(_)) => {}
value => panic!("Unexpected value: {value:?}"), value => panic!("Unexpected value: {value:?}"),

View file

@ -190,7 +190,7 @@ impl Read for Zip64File {
#[test] #[test]
fn zip64_large() { fn zip64_large() {
let zipfile = Zip64File::new(); let zipfile = Zip64File::new();
let mut archive = zip::ZipArchive::new(zipfile).unwrap(); let mut archive = zip_next::ZipArchive::new(zipfile).unwrap();
let mut buf = [0u8; 32]; let mut buf = [0u8; 32];
for i in 0..archive.len() { for i in 0..archive.len() {

View file

@ -18,7 +18,7 @@
// 0000002e // 0000002e
use std::io; use std::io;
use zip::ZipArchive; use zip_next::ZipArchive;
#[test] #[test]
fn correctly_handle_zip_with_garbage_after_comment() { fn correctly_handle_zip_with_garbage_after_comment() {

View file

@ -22,20 +22,24 @@ use std::io::Read;
#[test] #[test]
fn encrypting_file() { fn encrypting_file() {
use zip::unstable::write::FileOptionsExt;
use std::io::{Read, Write}; use std::io::{Read, Write};
use zip_next::unstable::write::FileOptionsExt;
let mut buf = vec![0; 2048]; let mut buf = vec![0; 2048];
let mut archive = zip::write::ZipWriter::new(std::io::Cursor::new(&mut buf)); let mut archive = zip_next::write::ZipWriter::new(Cursor::new(&mut buf));
archive.start_file("name", zip::write::FileOptions::default().with_deprecated_encryption(b"password")).unwrap(); archive
.start_file(
"name",
zip_next::write::FileOptions::default().with_deprecated_encryption(b"password"),
)
.unwrap();
archive.write_all(b"test").unwrap(); archive.write_all(b"test").unwrap();
archive.finish().unwrap(); archive.finish().unwrap();
drop(archive); drop(archive);
let mut archive = zip::ZipArchive::new(std::io::Cursor::new(&mut buf)).unwrap(); let mut archive = zip_next::ZipArchive::new(Cursor::new(&mut buf)).unwrap();
let mut file = archive.by_index_decrypt(0, b"password").unwrap().unwrap(); let mut file = archive.by_index_decrypt(0, b"password").unwrap().unwrap();
let mut buf = Vec::new(); let mut buf = Vec::new();
file.read_to_end(&mut buf).unwrap(); file.read_to_end(&mut buf).unwrap();
assert_eq!(buf, b"test"); assert_eq!(buf, b"test");
} }
#[test] #[test]
fn encrypted_file() { fn encrypted_file() {
@ -56,7 +60,7 @@ fn encrypted_file() {
0x00, 0x00, 0x00, 0x00,
]); ]);
let mut archive = zip::ZipArchive::new(zip_file_bytes).unwrap(); let mut archive = zip_next::ZipArchive::new(zip_file_bytes).unwrap();
assert_eq!(archive.len(), 1); //Only one file inside archive: `test.txt` assert_eq!(archive.len(), 1); //Only one file inside archive: `test.txt`
@ -64,8 +68,8 @@ fn encrypted_file() {
// No password // No password
let file = archive.by_index(0); let file = archive.by_index(0);
match file { match file {
Err(zip::result::ZipError::UnsupportedArchive( Err(zip_next::result::ZipError::UnsupportedArchive(
zip::result::ZipError::PASSWORD_REQUIRED, zip_next::result::ZipError::PASSWORD_REQUIRED,
)) => (), )) => (),
Err(_) => panic!( Err(_) => panic!(
"Expected PasswordRequired error when opening encrypted file without password" "Expected PasswordRequired error when opening encrypted file without password"
@ -78,7 +82,7 @@ fn encrypted_file() {
// Wrong password // Wrong password
let file = archive.by_index_decrypt(0, b"wrong password"); let file = archive.by_index_decrypt(0, b"wrong password");
match file { match file {
Ok(Err(zip::result::InvalidPassword)) => (), Ok(Err(zip_next::result::InvalidPassword)) => (),
Err(_) => panic!( Err(_) => panic!(
"Expected InvalidPassword error when opening encrypted file with wrong password" "Expected InvalidPassword error when opening encrypted file with wrong password"
), ),