fix: We now keep searching for a real CDE header after reading an invalid one from the file comment
This commit is contained in:
parent
5231469734
commit
cb2d7abde7
4 changed files with 59 additions and 30 deletions
|
@ -125,7 +125,8 @@ impl <'k> Debug for FuzzTestCase<'k> {
|
|||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_fmt(format_args!(
|
||||
"let mut writer = ZipWriter::new(Cursor::new(Vec::new()));\n\
|
||||
writer.set_flush_on_finish_file({:?});\n", self.flush_on_finish_file))?;
|
||||
writer.set_flush_on_finish_file({:?});\n\
|
||||
writer.set_raw_comment(Box::<[u8]>::from({:?}));\n", self.flush_on_finish_file, self.comment))?;
|
||||
self.operations.iter().map(|op| {
|
||||
f.write_fmt(format_args!("{:?}", op.0))?;
|
||||
if op.1 {
|
||||
|
@ -135,9 +136,6 @@ impl <'k> Debug for FuzzTestCase<'k> {
|
|||
}
|
||||
})
|
||||
.collect::<Result<(), _>>()?;
|
||||
if self.comment.len() > 0 {
|
||||
f.write_fmt(format_args!("writer.set_raw_comment(Box::<[u8]>::from({:?}));\n", self.comment))?;
|
||||
}
|
||||
f.write_str("writer\n")
|
||||
}
|
||||
}
|
||||
|
|
18
src/read.rs
18
src/read.rs
|
@ -810,13 +810,17 @@ impl<R: Read + Seek> ZipArchive<R> {
|
|||
///
|
||||
/// This uses the central directory record of the ZIP file, and ignores local file headers.
|
||||
pub fn with_config(config: Config, mut reader: R) -> ZipResult<ZipArchive<R>> {
|
||||
let (footer, cde_start_pos) = spec::Zip32CentralDirectoryEnd::find_and_parse(&mut reader)?;
|
||||
let shared = Self::get_metadata(config, &mut reader, &footer, cde_start_pos)?;
|
||||
Ok(ZipArchive {
|
||||
reader,
|
||||
shared: shared.into(),
|
||||
comment: footer.zip_file_comment.into(),
|
||||
})
|
||||
let results = spec::Zip32CentralDirectoryEnd::find_and_parse(&mut reader)?;
|
||||
for (footer, cde_start_pos) in results {
|
||||
if let Ok(shared) = Self::get_metadata(config, &mut reader, &footer, cde_start_pos) {
|
||||
return Ok(ZipArchive {
|
||||
reader,
|
||||
shared: shared.into(),
|
||||
comment: footer.zip_file_comment.into(),
|
||||
});
|
||||
}
|
||||
}
|
||||
Err(InvalidArchive("No valid central directory found"))
|
||||
}
|
||||
|
||||
/// Extract a Zip archive into a directory, overwriting files if they
|
||||
|
|
16
src/spec.rs
16
src/spec.rs
|
@ -298,7 +298,8 @@ impl Zip32CentralDirectoryEnd {
|
|||
|
||||
pub fn find_and_parse<T: Read + Seek>(
|
||||
reader: &mut T,
|
||||
) -> ZipResult<(Zip32CentralDirectoryEnd, u64)> {
|
||||
) -> ZipResult<Box<[(Zip32CentralDirectoryEnd, u64)]>> {
|
||||
let mut results = vec![];
|
||||
let file_length = reader.seek(io::SeekFrom::End(0))?;
|
||||
|
||||
if file_length < mem::size_of::<Zip32CDEBlock>() as u64 {
|
||||
|
@ -337,7 +338,7 @@ impl Zip32CentralDirectoryEnd {
|
|||
reader.seek(io::SeekFrom::Start(cde_start_pos))?;
|
||||
/* Drop any headers that don't parse. */
|
||||
if let Ok(cde) = Self::parse(reader) {
|
||||
return Ok((cde, cde_start_pos));
|
||||
results.push((cde, cde_start_pos));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -365,10 +366,13 @@ impl Zip32CentralDirectoryEnd {
|
|||
* `if window_start == search_lower_bound` check above. */
|
||||
.max(search_lower_bound);
|
||||
}
|
||||
|
||||
Err(ZipError::InvalidArchive(
|
||||
"Could not find central directory end",
|
||||
))
|
||||
if results.is_empty() {
|
||||
Err(ZipError::InvalidArchive(
|
||||
"Could not find central directory end",
|
||||
))
|
||||
} else {
|
||||
Ok(results.into_boxed_slice())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn write<T: Write>(self, writer: &mut T) -> ZipResult<()> {
|
||||
|
|
49
src/write.rs
49
src/write.rs
|
@ -625,19 +625,23 @@ impl<A: Read + Write + Seek> ZipWriter<A> {
|
|||
///
|
||||
/// This uses the given read configuration to initially read the archive.
|
||||
pub fn new_append_with_config(config: Config, mut readwriter: A) -> ZipResult<ZipWriter<A>> {
|
||||
let (footer, cde_start_pos) =
|
||||
spec::Zip32CentralDirectoryEnd::find_and_parse(&mut readwriter)?;
|
||||
let metadata = ZipArchive::get_metadata(config, &mut readwriter, &footer, cde_start_pos)?;
|
||||
|
||||
Ok(ZipWriter {
|
||||
inner: Storer(MaybeEncrypted::Unencrypted(readwriter)),
|
||||
files: metadata.files,
|
||||
stats: Default::default(),
|
||||
writing_to_file: false,
|
||||
comment: footer.zip_file_comment,
|
||||
writing_raw: true, // avoid recomputing the last file's header
|
||||
flush_on_finish_file: false,
|
||||
})
|
||||
let results = spec::Zip32CentralDirectoryEnd::find_and_parse(&mut readwriter)?;
|
||||
for (footer, cde_start_pos) in results {
|
||||
if let Ok(metadata) =
|
||||
ZipArchive::get_metadata(config, &mut readwriter, &footer, cde_start_pos)
|
||||
{
|
||||
return Ok(ZipWriter {
|
||||
inner: Storer(MaybeEncrypted::Unencrypted(readwriter)),
|
||||
files: metadata.files,
|
||||
stats: Default::default(),
|
||||
writing_to_file: false,
|
||||
comment: footer.zip_file_comment,
|
||||
writing_raw: true, // avoid recomputing the last file's header
|
||||
flush_on_finish_file: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
Err(InvalidArchive("No central-directory end header found"))
|
||||
}
|
||||
|
||||
/// `flush_on_finish_file` is designed to support a streaming `inner` that may unload flushed
|
||||
|
@ -3277,4 +3281,23 @@ mod test {
|
|||
let _ = writer.finish_into_readable()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fuzz_crash_2024_06_18() -> ZipResult<()> {
|
||||
let mut writer = ZipWriter::new(Cursor::new(Vec::new()));
|
||||
writer.set_raw_comment(Box::<[u8]>::from([
|
||||
80, 75, 5, 6, 255, 255, 255, 255, 255, 255, 80, 75, 5, 6, 255, 255, 255, 255, 255, 255,
|
||||
13, 0, 13, 13, 13, 13, 13, 255, 255, 255, 255, 255, 255, 255, 255,
|
||||
]));
|
||||
let sub_writer = {
|
||||
let mut writer = ZipWriter::new(Cursor::new(Vec::new()));
|
||||
writer.set_flush_on_finish_file(false);
|
||||
writer.set_raw_comment(Box::new([]));
|
||||
writer
|
||||
};
|
||||
writer.merge_archive(sub_writer.finish_into_readable()?)?;
|
||||
writer = ZipWriter::new_append(writer.finish()?)?;
|
||||
let _ = writer.finish_into_readable()?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Add table
Reference in a new issue