chore: Fix build errors

Chris Hennick 2024-07-06 14:26:37 -07:00
parent 1d551ff23c
commit 57f01ba946
No known key found for this signature in database
GPG key ID: DA47AABA4961C509
4 changed files with 120 additions and 109 deletions

View file

@@ -2,9 +2,9 @@
use arbitrary::Arbitrary;
use core::fmt::{Debug, Formatter};
use std::borrow::Cow;
use libfuzzer_sys::fuzz_target;
use replace_with::replace_with_or_abort;
use std::borrow::Cow;
use std::io::{Cursor, Read, Seek, Write};
use std::path::PathBuf;
use tikv_jemallocator::Jemalloc;
@@ -27,16 +27,16 @@ pub enum BasicFileOperation<'k> {
ShallowCopy(Box<FileOperation<'k>>),
DeepCopy(Box<FileOperation<'k>>),
MergeWithOtherFile {
operations: Box<[(FileOperation<'k>, bool)]>
operations: Box<[(FileOperation<'k>, bool)]>,
},
SetArchiveComment(Box<[u8]>)
SetArchiveComment(Box<[u8]>),
}
#[derive(Arbitrary, Clone, Debug, Eq, PartialEq)]
pub enum ReopenOption {
DoNotReopen,
ViaFinish,
ViaFinishIntoReadable
ViaFinishIntoReadable,
}
#[derive(Arbitrary, Clone)]
@@ -47,78 +47,105 @@ pub struct FileOperation<'k> {
// 'abort' flag is separate, to prevent trying to copy an aborted file
}
impl <'k> FileOperation<'k> {
impl<'k> FileOperation<'k> {
fn get_path(&self) -> Option<Cow<PathBuf>> {
match &self.basic {
BasicFileOperation::SetArchiveComment(_) => None,
BasicFileOperation::WriteDirectory(_) => Some(Cow::Owned(self.path.join("/"))),
BasicFileOperation::MergeWithOtherFile { operations } =>
operations.iter().flat_map(|(op, abort)| if !abort { op.get_path() } else { None }).next(),
_ => Some(Cow::Borrowed(&self.path))
BasicFileOperation::MergeWithOtherFile { operations } => operations
.iter()
.flat_map(|(op, abort)| if !abort { op.get_path() } else { None })
.next(),
_ => Some(Cow::Borrowed(&self.path)),
}
}
}
impl <'k> Debug for FileOperation<'k> {
impl<'k> Debug for FileOperation<'k> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match &self.basic {
BasicFileOperation::WriteNormalFile {contents, options} => {
f.write_fmt(format_args!("let options = {:?};\n\
writer.start_file_from_path({:?}, options)?;\n", options, self.path))?;
BasicFileOperation::WriteNormalFile { contents, options } => {
f.write_fmt(format_args!(
"let options = {:?};\n\
writer.start_file_from_path({:?}, options)?;\n",
options, self.path
))?;
for content_slice in contents {
f.write_fmt(format_args!("writer.write_all(&({:?}))?;\n", content_slice))?;
}
},
}
BasicFileOperation::WriteDirectory(options) => {
f.write_fmt(format_args!("let options = {:?};\n\
f.write_fmt(format_args!(
"let options = {:?};\n\
writer.add_directory_from_path({:?}, options)?;\n",
options, self.path))?;
},
BasicFileOperation::WriteSymlinkWithTarget {target, options} => {
f.write_fmt(format_args!("let options = {:?};\n\
options, self.path
))?;
}
BasicFileOperation::WriteSymlinkWithTarget { target, options } => {
f.write_fmt(format_args!(
"let options = {:?};\n\
writer.add_symlink_from_path({:?}, {:?}, options)?;\n",
options, self.path, target.to_owned()))?;
},
options,
self.path,
target.to_owned()
))?;
}
BasicFileOperation::ShallowCopy(base) => {
let Some(base_path) = base.get_path() else {
return Ok(())
return Ok(());
};
f.write_fmt(format_args!("{:?}writer.shallow_copy_file_from_path({:?}, {:?})?;\n", base, base_path, self.path))?;
},
f.write_fmt(format_args!(
"{:?}writer.shallow_copy_file_from_path({:?}, {:?})?;\n",
base, base_path, self.path
))?;
}
BasicFileOperation::DeepCopy(base) => {
let Some(base_path) = base.get_path() else {
return Ok(())
return Ok(());
};
f.write_fmt(format_args!("{:?}writer.deep_copy_file_from_path({:?}, {:?})?;\n", base, base_path, self.path))?;
},
BasicFileOperation::MergeWithOtherFile {operations} => {
f.write_str("let sub_writer = {\n\
f.write_fmt(format_args!(
"{:?}writer.deep_copy_file_from_path({:?}, {:?})?;\n",
base, base_path, self.path
))?;
}
BasicFileOperation::MergeWithOtherFile { operations } => {
f.write_str(
"let sub_writer = {\n\
let mut writer = ZipWriter::new(Cursor::new(Vec::new()));\n\
writer.set_flush_on_finish_file(false);\n")?;
operations.iter().map(|op| {
f.write_fmt(format_args!("{:?}", op.0))?;
if op.1 {
f.write_str("writer.abort_file()?;\n")
} else {
Ok(())
}
}).collect::<Result<(), _>>()?;
f.write_str("writer\n\
writer.set_flush_on_finish_file(false);\n",
)?;
operations
.iter()
.map(|op| {
f.write_fmt(format_args!("{:?}", op.0))?;
if op.1 {
f.write_str("writer.abort_file()?;\n")
} else {
Ok(())
}
})
.collect::<Result<(), _>>()?;
f.write_str(
"writer\n\
};\n\
writer.merge_archive(sub_writer.finish_into_readable()?)?;\n")?;
},
writer.merge_archive(sub_writer.finish_into_readable()?)?;\n",
)?;
}
BasicFileOperation::SetArchiveComment(comment) => {
f.write_fmt(format_args!("writer.set_raw_comment({:?}.into());\n", comment))?;
f.write_fmt(format_args!(
"writer.set_raw_comment({:?}.into());\n",
comment
))?;
}
}
match &self.reopen {
ReopenOption::DoNotReopen => Ok(()),
ReopenOption::ViaFinish => {
f.write_str("writer = ZipWriter::new_append(writer.finish()?)?;\n")
},
ReopenOption::ViaFinishIntoReadable => {
f.write_str("writer = ZipWriter::new_append(writer.finish_into_readable()?.into_inner())?;\n")
}
ReopenOption::ViaFinishIntoReadable => f.write_str(
"writer = ZipWriter::new_append(writer.finish_into_readable()?.into_inner())?;\n",
),
}
}
}
@@ -129,19 +156,23 @@ pub struct FuzzTestCase<'k> {
flush_on_finish_file: bool,
}
impl <'k> Debug for FuzzTestCase<'k> {
impl<'k> Debug for FuzzTestCase<'k> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.write_fmt(format_args!(
"let mut writer = ZipWriter::new(Cursor::new(Vec::new()));\n\
writer.set_flush_on_finish_file({:?});\n", self.flush_on_finish_file))?;
self.operations.iter().map(|op| {
f.write_fmt(format_args!("{:?}", op.0))?;
if op.1 {
f.write_str("writer.abort_file()?;\n")
} else {
Ok(())
}
})
writer.set_flush_on_finish_file({:?});\n",
self.flush_on_finish_file
))?;
self.operations
.iter()
.map(|op| {
f.write_fmt(format_args!("{:?}", op.0))?;
if op.1 {
f.write_str("writer.abort_file()?;\n")
} else {
Ok(())
}
})
.collect::<Result<(), _>>()?;
f.write_str("writer\n")
}
@@ -154,8 +185,8 @@ fn deduplicate_paths(copy: &mut Cow<PathBuf>, original: &PathBuf) {
let mut new_name = name.to_owned();
new_name.push("_copy");
copy.with_file_name(new_name)
},
None => copy.with_file_name("copy")
}
None => copy.with_file_name("copy"),
};
*copy = Cow::Owned(new_path);
}
@@ -166,7 +197,7 @@ fn do_operation<'k, T>(
operation: &FileOperation<'k>,
abort: bool,
flush_on_finish_file: bool,
files_added: &mut usize
files_added: &mut usize,
) -> Result<(), Box<dyn std::error::Error>>
where
T: Read + Write + Seek,
@@ -175,9 +206,7 @@ where
let mut path = Cow::Borrowed(&operation.path);
match &operation.basic {
BasicFileOperation::WriteNormalFile {
contents,
options,
..
contents, options, ..
} => {
let uncompressed_size = contents.iter().map(|chunk| chunk.len()).sum::<usize>();
let mut options = (*options).to_owned();
@@ -225,12 +254,12 @@ where
&operation,
*abort,
false,
&mut inner_files_added
&mut inner_files_added,
);
});
writer.merge_archive(other_writer.finish_into_readable()?)?;
*files_added += inner_files_added;
},
}
BasicFileOperation::SetArchiveComment(comment) => {
writer.set_raw_comment(comment.clone());
}
@@ -250,14 +279,15 @@ where
zip::ZipWriter::new_append(old_writer.finish().unwrap()).unwrap()
});
assert!(writer.get_raw_comment().starts_with(&old_comment));
},
}
ReopenOption::ViaFinishIntoReadable => {
let old_comment = writer.get_raw_comment().to_owned();
replace_with_or_abort(writer, |old_writer: zip::ZipWriter<T>| {
zip::ZipWriter::new_append(old_writer.finish_into_readable().unwrap().into_inner()).unwrap()
zip::ZipWriter::new_append(old_writer.finish_into_readable().unwrap().into_inner())
.unwrap()
});
assert!(writer.get_raw_comment().starts_with(&old_comment));
},
}
}
Ok(())
}
@@ -279,7 +309,7 @@ fuzz_target!(|test_case: FuzzTestCase| {
&operation,
*abort,
test_case.flush_on_finish_file,
&mut files_added
&mut files_added,
);
}
if final_reopen {

View file

@@ -343,8 +343,11 @@ fn find_content_seek<'a, R: Read + Seek>(
reader: &'a mut R,
) -> ZipResult<SeekableTake<'a, R>> {
// Parse local header
reader.seek(io::SeekFrom::Start(find_data_start(data, reader)?))?;
SeekableTake::new(reader, data.compressed_size)
let data_start = find_data_start(data, reader)?;
reader.seek(io::SeekFrom::Start(data_start))?;
// Explicit Ok and ? are needed to convert io::Error to ZipError
Ok(SeekableTake::new(reader, data.compressed_size)?)
}
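For reference, the explicit Ok(...) with ? in find_content_seek works because ZipError can be built from an io::Error, so ? converts the inner io::Error into the function's ZipResult error type, as the comment above notes. A minimal standalone sketch of the same pattern, using a made-up MyError type in place of the crate's ZipError:

use std::io::{self, Cursor, Read};

// Hypothetical error type standing in for ZipError; the key piece is the
// From<io::Error> impl that `?` uses to convert the error automatically.
#[derive(Debug)]
enum MyError {
    Io(io::Error),
}

impl From<io::Error> for MyError {
    fn from(e: io::Error) -> Self {
        MyError::Io(e)
    }
}

// Inner helper that fails with a plain io::Error.
fn read_first_byte(r: &mut impl Read) -> io::Result<u8> {
    let mut buf = [0u8; 1];
    r.read_exact(&mut buf)?;
    Ok(buf[0])
}

// Mirrors the pattern above: the explicit Ok(...?) changes the error type
// from io::Error to MyError through the From impl.
fn outer(r: &mut impl Read) -> Result<u8, MyError> {
    Ok(read_first_byte(r)?)
}

fn main() {
    let mut cur = Cursor::new(vec![42u8]);
    println!("{:?}", outer(&mut cur));
}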
fn find_data_start(
@@ -1584,7 +1587,7 @@ impl<'a> ZipFile<'a> {
}
/// Get the version of the file
fn version_made_by(&self) -> (u8, u8) {
pub fn version_made_by(&self) -> (u8, u8) {
(
self.get_metadata().version_made_by / 10,
self.get_metadata().version_made_by % 10,
@@ -1603,14 +1606,14 @@ impl<'a> ZipFile<'a> {
///
/// You can use the [`ZipFile::enclosed_name`] method to validate the name
/// as a safe path.
fn name(&self) -> &str {
pub fn name(&self) -> &str {
&self.get_metadata().file_name
}
/// Get the name of the file, in the raw (internal) byte representation.
///
/// The encoding of this data is currently undefined.
fn name_raw(&self) -> &[u8] {
pub fn name_raw(&self) -> &[u8] {
&self.get_metadata().file_name_raw
}
@@ -1621,7 +1624,7 @@ impl<'a> ZipFile<'a> {
note = "by stripping `..`s from the path, the meaning of paths can change.
`mangled_name` can be used if this behaviour is desirable"
)]
fn sanitized_name(&self) -> PathBuf {
pub fn sanitized_name(&self) -> PathBuf {
self.mangled_name()
}
@@ -1637,7 +1640,7 @@ impl<'a> ZipFile<'a> {
/// [`ZipFile::enclosed_name`] is the better option in most scenarios.
///
/// [`ParentDir`]: `Component::ParentDir`
fn mangled_name(&self) -> PathBuf {
pub fn mangled_name(&self) -> PathBuf {
self.get_metadata().file_name_sanitized()
}
@@ -1651,27 +1654,27 @@ impl<'a> ZipFile<'a> {
/// This will read well-formed ZIP files correctly, and is resistant
/// to path-based exploits. It is recommended over
/// [`ZipFile::mangled_name`].
fn enclosed_name(&self) -> Option<PathBuf> {
pub fn enclosed_name(&self) -> Option<PathBuf> {
self.get_metadata().enclosed_name()
}
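As the doc comments above emphasize, enclosed_name() is the recommended way to turn an entry's stored name into a safe extraction path, while mangled_name() only strips ParentDir components. A small usage sketch, assuming the zip crate's public ZipArchive API; the archive path and the listing logic are illustrative, not part of this commit:

use std::fs::File;
use std::path::Path;
use zip::result::ZipResult;
use zip::ZipArchive;

// Walks an archive and reports which entry names are safe to extract.
// enclosed_name() returns None for names that would escape the extraction
// root (absolute paths or ".." components), so those entries are skipped.
fn list_safe_paths(archive_path: &Path) -> ZipResult<()> {
    let mut archive = ZipArchive::new(File::open(archive_path)?)?;
    for i in 0..archive.len() {
        let entry = archive.by_index(i)?;
        match entry.enclosed_name() {
            Some(safe) => println!("{} -> {}", entry.name(), safe.display()),
            None => eprintln!("skipping unsafe entry name: {}", entry.name()),
        }
    }
    Ok(())
}

fn main() -> ZipResult<()> {
    // "example.zip" is a placeholder path; point this at a real archive to try it.
    list_safe_paths(Path::new("example.zip"))
}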
/// Get the comment of the file
fn comment(&self) -> &str {
pub fn comment(&self) -> &str {
&self.get_metadata().file_comment
}
/// Get the compression method used to store the file
fn compression(&self) -> CompressionMethod {
pub fn compression(&self) -> CompressionMethod {
self.get_metadata().compression_method
}
/// Get the size of the file, in bytes, in the archive
fn compressed_size(&self) -> u64 {
pub fn compressed_size(&self) -> u64 {
self.get_metadata().compressed_size
}
/// Get the size of the file, in bytes, when uncompressed
fn size(&self) -> u64 {
pub fn size(&self) -> u64 {
self.get_metadata().uncompressed_size
}
@@ -1696,17 +1699,17 @@ impl<'a> ZipFile<'a> {
}
/// Get unix mode for the file
fn unix_mode(&self) -> Option<u32> {
pub fn unix_mode(&self) -> Option<u32> {
self.get_metadata().unix_mode()
}
/// Get the CRC32 hash of the original file
fn crc32(&self) -> u32 {
pub fn crc32(&self) -> u32 {
self.get_metadata().crc32
}
/// Get the extra data of the zip header for this file
fn extra_data(&self) -> Option<&[u8]> {
pub fn extra_data(&self) -> Option<&[u8]> {
self.get_metadata()
.extra_field
.as_ref()
@@ -1719,34 +1722,17 @@ impl<'a> ZipFile<'a> {
}
/// Get the starting offset of the zip header for this file
fn header_start(&self) -> u64 {
pub fn header_start(&self) -> u64 {
self.get_metadata().header_start
}
/// Get the starting offset of the zip header in the central directory for this file
fn central_header_start(&self) -> u64 {
pub fn central_header_start(&self) -> u64 {
self.get_metadata().central_header_start
}
}
/// Methods for retrieving information on zip files
impl<'a> ZipFile<'a> {
fn get_reader(&mut self) -> &mut ZipFileReader<'a> {
if let ZipFileReader::NoReader = self.reader {
let data = &self.data;
let crypto_reader = self.crypto_reader.take().expect("Invalid reader state");
self.reader = make_reader(data.compression_method, data.crc32, crypto_reader)
}
&mut self.reader
}
pub(crate) fn get_raw_reader(&mut self) -> &mut dyn Read {
if let ZipFileReader::NoReader = self.reader {
let crypto_reader = self.crypto_reader.take().expect("Invalid reader state");
self.reader = ZipFileReader::Raw(crypto_reader.into_inner())
}
&mut self.reader
}
/// iterate through all extra fields
pub fn extra_data_fields(&self) -> impl Iterator<Item = &ExtraField> {
self.data.extra_fields.iter()
@@ -1916,7 +1902,6 @@ mod test {
#[test]
fn zip_contents() {
use super::HasZipMetadata;
use super::ZipArchive;
let mut v = Vec::new();
@@ -1942,7 +1927,6 @@ mod test {
#[test]
fn zip_clone() {
use super::HasZipMetadata;
use super::ZipArchive;
use std::io::Read;
@@ -1984,7 +1968,6 @@ mod test {
#[test]
fn file_and_dir_predicates() {
use super::HasZipMetadata;
use super::ZipArchive;
let mut v = Vec::new();

View file

@@ -3,8 +3,8 @@ use std::io::{self, Read};
use std::path::{Path, PathBuf};
use super::{
central_header_to_zip_file_inner, read_zipfile_from_stream, spec, HasZipMetadata, ZipCentralEntryBlock,
ZipError, ZipFile, ZipFileData, ZipResult,
central_header_to_zip_file_inner, read_zipfile_from_stream, ZipCentralEntryBlock, ZipError,
ZipFile, ZipFileData, ZipResult,
};
use crate::spec::FixedSizeBlock;

View file

@@ -7,7 +7,7 @@ use crate::read::{
find_content, parse_single_extra_field, Config, ZipArchive, ZipFile, ZipFileReader,
};
use crate::result::{ZipError, ZipResult};
use crate::spec::{self, FixedSizeBlock, HasZipMetadata, Zip32CDEBlock};
use crate::spec::{self, FixedSizeBlock, Zip32CDEBlock};
#[cfg(feature = "aes-crypto")]
use crate::types::AesMode;
use crate::types::{
@@ -545,8 +545,6 @@ impl<'k> FileOptions<'k, ExtendedFileOptions> {
impl<'k, T: FileOptionExtension> Default for FileOptions<'k, T> {
/// Construct a new FileOptions object
fn default() -> Self {
#[cfg(feature = "time")]
use core::convert::TryInto;
Self {
compression_method: Default::default(),
compression_level: None,
@@ -1978,7 +1976,7 @@ mod test {
use crate::write::SimpleFileOptions;
use crate::zipcrypto::ZipCryptoKeys;
use crate::CompressionMethod::Stored;
use crate::{HasZipMetadata, ZipArchive};
use crate::ZipArchive;
use std::io;
use std::io::{Cursor, Read, Write};
use std::marker::PhantomData;