Truncate huge files during read fuzz rather than skipping them
parent e787187cdd
commit 026a49ffa7
1 changed file with 3 additions and 4 deletions
@@ -1,15 +1,14 @@
 #![no_main]
 use libfuzzer_sys::fuzz_target;
+use std::io::Read;
 
 fn decompress_all(data: &[u8]) -> Result<(), Box<dyn std::error::Error>> {
     let reader = std::io::Cursor::new(data);
     let mut zip = zip_next::ZipArchive::new(reader)?;
 
     for i in 0..zip.len() {
-        let mut file = zip.by_index(i)?;
-        if file.size() <= 1 << 24 {
-            let _ = std::io::copy(&mut file, &mut std::io::sink());
-        }
+        let file = zip.by_index(i)?;
+        std::io::copy(&mut file.take(1 << 24), &mut std::io::sink())?;
     }
 
     Ok(())
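
For readers unfamiliar with the standard-library call that makes this work: Read::take(n) wraps a reader so that at most n bytes can be read from it, which is why the fuzz target can now visit every archive entry while bounding the bytes decompressed per entry, instead of skipping large entries outright. Below is a minimal, self-contained sketch of the same truncation pattern against an in-memory buffer; the buffer and the 16-byte cap are illustrative stand-ins for a ZipFile and the 1 << 24 limit used in the fuzz target.

use std::io::Read;

fn main() -> std::io::Result<()> {
    // Stand-in for a potentially huge archive entry.
    let data = vec![0u8; 64];
    let reader = std::io::Cursor::new(data);

    // Mirror the fuzz target's `file.take(1 << 24)`: read at most 16 bytes
    // and discard them, no matter how large the underlying reader is.
    let copied = std::io::copy(&mut reader.take(16), &mut std::io::sink())?;
    assert_eq!(copied, 16);
    Ok(())
}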