Mirror of https://github.com/lune-org/lune.git

Finish up serde compression, implement tests

Commit bcdc5f14a5, parent 4d5bf6c2ae
13 changed files with 281 additions and 33 deletions
.luaurc (4 changed lines)

@@ -5,5 +5,7 @@
     },
     "lintErrors": false,
     "typeErrors": true,
-    "globals": []
+    "globals": [
+        "warn"
+    ]
 }
@@ -1,10 +1,14 @@
-use async_compression::tokio::write::{
-    BrotliDecoder, BrotliEncoder, GzipDecoder, GzipEncoder, ZlibDecoder, ZlibEncoder,
-};
 use blocking::unblock;
 use lz4_flex::{compress_prepend_size, decompress_size_prepended};
 use mlua::prelude::*;
-use tokio::io::AsyncWriteExt;
+use tokio::io::{copy, BufReader};
+
+use async_compression::{
+    tokio::bufread::{
+        BrotliDecoder, BrotliEncoder, GzipDecoder, GzipEncoder, ZlibDecoder, ZlibEncoder,
+    },
+    Level::Best as CompressionQuality,
+};

 #[derive(Debug, Clone, Copy)]
 pub enum CompressDecompressFormat {
@@ -92,28 +96,30 @@ pub async fn compress<'lua>(
     format: CompressDecompressFormat,
     source: impl AsRef<[u8]>,
 ) -> LuaResult<Vec<u8>> {
+    if let CompressDecompressFormat::LZ4 = format {
+        let source = source.as_ref().to_vec();
+        return Ok(unblock(move || compress_prepend_size(&source)).await);
+    }
+
     let mut bytes = Vec::new();
+    let reader = BufReader::new(source.as_ref());

     match format {
         CompressDecompressFormat::Brotli => {
-            BrotliEncoder::new(&mut bytes)
-                .write_all(source.as_ref())
-                .await?
+            let mut encoder = BrotliEncoder::with_quality(reader, CompressionQuality);
+            copy(&mut encoder, &mut bytes).await?;
         }
         CompressDecompressFormat::GZip => {
-            GzipEncoder::new(&mut bytes)
-                .write_all(source.as_ref())
-                .await?
+            let mut encoder = GzipEncoder::with_quality(reader, CompressionQuality);
+            copy(&mut encoder, &mut bytes).await?;
         }
         CompressDecompressFormat::ZLib => {
-            ZlibEncoder::new(&mut bytes)
-                .write_all(source.as_ref())
-                .await?
-        }
-        CompressDecompressFormat::LZ4 => {
-            let source = source.as_ref().to_vec();
-            bytes = unblock(move || compress_prepend_size(&source)).await;
+            let mut encoder = ZlibEncoder::with_quality(reader, CompressionQuality);
+            copy(&mut encoder, &mut bytes).await?;
         }
+        CompressDecompressFormat::LZ4 => unreachable!(),
     }

     Ok(bytes)
 }
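Every non-LZ4 arm now follows the same pull-based pattern: wrap the input in a BufReader, build a bufread encoder at Level::Best, and tokio::io::copy it into the output until the encoder reaches EOF, at which point the format's trailer has been written (the old write-based encoders were never shut down after write_all, so they could leave the stream unfinished). Below is a minimal standalone sketch of that pattern, assuming the async-compression crate with its tokio and gzip features plus a Tokio runtime; gzip_best is an illustrative helper, not part of Lune.

// Sketch of the bufread + copy pattern used above (not Lune's API).
// Assumes async-compression = { features = ["tokio", "gzip"] } and a Tokio runtime.
use async_compression::{tokio::bufread::GzipEncoder, Level};
use tokio::io::{copy, BufReader};

async fn gzip_best(source: &[u8]) -> std::io::Result<Vec<u8>> {
    let mut bytes = Vec::new();
    // The encoder pulls plain bytes from the BufReader and yields gzip bytes;
    // copy drains it to EOF, so the gzip trailer is always emitted.
    let mut encoder = GzipEncoder::with_quality(BufReader::new(source), Level::Best);
    copy(&mut encoder, &mut bytes).await?;
    Ok(bytes)
}

The Brotli and ZLib arms above differ only in the encoder type.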
@@ -121,29 +127,31 @@ pub async fn decompress<'lua>(
     format: CompressDecompressFormat,
     source: impl AsRef<[u8]>,
 ) -> LuaResult<Vec<u8>> {
+    if let CompressDecompressFormat::LZ4 = format {
+        let source = source.as_ref().to_vec();
+        return unblock(move || decompress_size_prepended(&source))
+            .await
+            .map_err(LuaError::external);
+    }
+
     let mut bytes = Vec::new();
+    let reader = BufReader::new(source.as_ref());

     match format {
         CompressDecompressFormat::Brotli => {
-            BrotliDecoder::new(&mut bytes)
-                .write_all(source.as_ref())
-                .await?
+            let mut decoder = BrotliDecoder::new(reader);
+            copy(&mut decoder, &mut bytes).await?;
         }
         CompressDecompressFormat::GZip => {
-            GzipDecoder::new(&mut bytes)
-                .write_all(source.as_ref())
-                .await?
+            let mut decoder = GzipDecoder::new(reader);
+            copy(&mut decoder, &mut bytes).await?;
         }
         CompressDecompressFormat::ZLib => {
-            ZlibDecoder::new(&mut bytes)
-                .write_all(source.as_ref())
-                .await?
-        }
-        CompressDecompressFormat::LZ4 => {
-            let source = source.as_ref().to_vec();
-            bytes = unblock(move || decompress_size_prepended(&source))
-                .await
-                .map_err(LuaError::external)?;
+            let mut decoder = ZlibDecoder::new(reader);
+            copy(&mut decoder, &mut bytes).await?;
         }
+        CompressDecompressFormat::LZ4 => unreachable!(),
     }

     Ok(bytes)
 }
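LZ4 is the one format not routed through async-compression here: both functions short-circuit it before the match and run lz4_flex's size-prepended helpers on the blocking thread pool via blocking::unblock, since lz4_flex is synchronous. A standalone sketch of that round trip follows; lz4_roundtrip is a hypothetical helper, not part of Lune, using the same blocking and lz4_flex crates as above.

// Hypothetical helper (not Lune's API): compress and then decompress a buffer
// with lz4_flex's block format, keeping the synchronous work off the async executor.
use blocking::unblock;
use lz4_flex::{compress_prepend_size, decompress_size_prepended};

async fn lz4_roundtrip(source: Vec<u8>) -> Result<Vec<u8>, lz4_flex::block::DecompressError> {
    // compress_prepend_size stores the uncompressed length in a small prefix...
    let compressed = unblock(move || compress_prepend_size(&source)).await;
    // ...which decompress_size_prepended reads back to size its output buffer.
    unblock(move || decompress_size_prepended(&compressed)).await
}

The roundtrip.luau test added below exercises the same property through the Lua-facing serde.compress and serde.decompress functions.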
@@ -47,6 +47,7 @@ create_tests! {
     fs_files: "fs/files",
     fs_dirs: "fs/dirs",
     fs_move: "fs/move",
+
     net_request_codes: "net/request/codes",
     net_request_methods: "net/request/methods",
     net_request_query: "net/request/query",

@@ -55,11 +56,13 @@ create_tests! {
     net_url_decode: "net/url/decode",
     net_serve_requests: "net/serve/requests",
     net_serve_websockets: "net/serve/websockets",
+
     process_args: "process/args",
     process_cwd: "process/cwd",
     process_env: "process/env",
     process_exit: "process/exit",
     process_spawn: "process/spawn",
+
     require_async: "require/tests/async",
     require_async_concurrent: "require/tests/async_concurrent",
     require_async_sequential: "require/tests/async_sequential",

@@ -69,6 +72,7 @@ create_tests! {
     require_nested: "require/tests/nested",
     require_parents: "require/tests/parents",
     require_siblings: "require/tests/siblings",
+
     global_g_table: "globals/_G",
     // TODO: Uncomment this test, it is commented out right
     // now to let CI pass so that we can make a new release

@@ -77,15 +81,20 @@ create_tests! {
     global_type: "globals/type",
     global_typeof: "globals/typeof",
     global_version: "globals/version",
+
+    serde_compression_files: "serde/compression/files",
+    serde_compression_roundtrip: "serde/compression/roundtrip",
     serde_json_decode: "serde/json/decode",
     serde_json_encode: "serde/json/encode",
     serde_toml_decode: "serde/toml/decode",
     serde_toml_encode: "serde/toml/encode",
+
     stdio_format: "stdio/format",
     stdio_color: "stdio/color",
     stdio_style: "stdio/style",
     stdio_write: "stdio/write",
     stdio_ewrite: "stdio/ewrite",
+
     task_cancel: "task/cancel",
     task_defer: "task/defer",
     task_delay: "task/delay",
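The two new entries, serde_compression_files and serde_compression_roundtrip, register the Luau scripts added below with the Rust test harness. The create_tests! macro itself is outside this diff; what follows is only a rough, hypothetical sketch of how a name-to-script-path registration macro of this shape can be written. run_test_script is a stand-in, not Lune's actual harness.

use std::path::Path;

// Stand-in harness: the real one loads and runs the Luau script in a Lune runtime.
async fn run_test_script(path: &str) -> Result<(), String> {
    let full = format!("tests/{path}.luau");
    if Path::new(&full).exists() {
        Ok(())
    } else {
        Err(format!("missing test script: {full}"))
    }
}

// Hypothetical shape of a name -> script-path registration macro.
macro_rules! create_tests {
    ($($name:ident: $path:expr,)*) => {
        $(
            #[tokio::test]
            async fn $name() {
                run_test_script($path).await.unwrap();
            }
        )*
    };
}

create_tests! {
    serde_compression_files: "serde/compression/files",
    serde_compression_roundtrip: "serde/compression/roundtrip",
}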
tests/serde/compression/files.luau (new file, 103 lines)

local fs = require("@lune/fs")
local process = require("@lune/process")
local serde = require("@lune/serde")
local stdio = require("@lune/stdio")

type Test = {
    Format: serde.CompressDecompressFormat,
    Source: string,
    Target: string,
}

local TESTS: { Test } = {
    {
        Format = "brotli",
        Source = "tests/serde/test-files/loremipsum.txt",
        Target = "tests/serde/test-files/loremipsum.txt.br",
    },
    {
        Format = "gzip",
        Source = "tests/serde/test-files/loremipsum.txt",
        Target = "tests/serde/test-files/loremipsum.txt.gz",
    },
    {
        Format = "lz4",
        Source = "tests/serde/test-files/loremipsum.txt",
        Target = "tests/serde/test-files/loremipsum.txt.lz4",
    },
    {
        Format = "zlib",
        Source = "tests/serde/test-files/loremipsum.txt",
        Target = "tests/serde/test-files/loremipsum.txt.z",
    },
}

local failed = false
for _, test in TESTS do
    local source = fs.readFile(test.Source)
    local target = fs.readFile(test.Target)

    local success, compressed = pcall(serde.compress, test.Format, source)
    if not success then
        stdio.ewrite(
            string.format(
                "Compressing source using '%s' format threw an error!\n%s",
                tostring(test.Format),
                tostring(compressed)
            )
        )
        failed = true
        continue
    elseif compressed ~= target then
        stdio.ewrite(
            string.format(
                "Compressing source using '%s' format did not produce target!\n",
                tostring(test.Format)
            )
        )
        stdio.ewrite(
            string.format(
                "Compressed (%d chars long):\n%s\nTarget (%d chars long):\n%s\n\n",
                #compressed,
                tostring(compressed),
                #target,
                tostring(target)
            )
        )
        failed = true
        continue
    end

    local success2, decompressed = pcall(serde.decompress, test.Format, target)
    if not success2 then
        stdio.ewrite(
            string.format(
                "Decompressing source using '%s' format threw an error!\n%s",
                tostring(test.Format),
                tostring(decompressed)
            )
        )
        failed = true
        continue
    elseif decompressed ~= source then
        stdio.ewrite(
            string.format(
                "Decompressing target using '%s' format did not produce source!\n",
                tostring(test.Format)
            )
        )
        stdio.ewrite(
            string.format(
                "Decompressed (%d chars long):\n%s\n\n",
                #decompressed,
                tostring(decompressed)
            )
        )
        failed = true
        continue
    end
end

if failed then
    process.exit(1)
end
tests/serde/compression/roundtrip.luau (new file, 80 lines)

local fs = require("@lune/fs")
local process = require("@lune/process")
local serde = require("@lune/serde")
local stdio = require("@lune/stdio")

local FORMATS: { serde.CompressDecompressFormat } = { "brotli", "gzip", "lz4", "zlib" }
local FILES: { string } = {
    "tests/serde/test-files/loremipsum.txt",
    "tests/serde/test-files/uncompressed.csv",
    "tests/serde/test-files/uncompressed.json",
    "tests/serde/test-files/uncompressed.yaml",
}

local failed = false
for _, filePath in FILES do
    local source = fs.readFile(filePath)
    for _, format: serde.CompressDecompressFormat in FORMATS do
        local compressed = serde.compress(format, source)
        local decompressed = serde.decompress(format, compressed)

        -- Compressing something should return something else
        if #compressed <= 0 then
            stdio.ewrite(
                string.format(
                    "Compressing source using '%s' returned an empty string!\n",
                    tostring(format)
                )
            )
            stdio.ewrite(string.format("Source (%d chars long):\n%s\n", #source, tostring(source)))
            failed = true
            continue
        end
        if compressed == source then
            stdio.ewrite(
                string.format(
                    "Compressing source using '%s' format did not change contents!\n",
                    tostring(format)
                )
            )
            stdio.ewrite(
                string.format(
                    "Source (%d chars long):\n%s\nCompressed (%d chars long):\n%s\n",
                    #source,
                    tostring(source),
                    #compressed,
                    tostring(compressed)
                )
            )
            failed = true
            continue
        end

        -- Decompressing that something else should return the original source
        if decompressed ~= source then
            stdio.ewrite(
                string.format(
                    "Decompressing using '%s' format did not return the source!\n",
                    tostring(format)
                )
            )
            stdio.ewrite(
                string.format(
                    "Source (%d chars long):\n%s\nCompressed (%d chars long):\n%s\nDecompressed (%d chars long):\n%s\n",
                    #source,
                    tostring(source),
                    #compressed,
                    tostring(compressed),
                    #decompressed,
                    tostring(decompressed)
                )
            )
            failed = true
            continue
        end
    end
end

if failed then
    process.exit(1)
end
tests/serde/test-files/loremipsum.txt (new file, 4 lines)

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed euismod, sapien ut efficitur tempor, nulla dolor bibendum eros, in faucibus leo quam sit amet purus.
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed euismod, sapien ut efficitur tempor, nulla dolor bibendum eros, in faucibus leo quam sit amet purus.
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed euismod, sapien ut efficitur tempor, nulla dolor bibendum eros, in faucibus leo quam sit amet purus.
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed euismod, sapien ut efficitur tempor, nulla dolor bibendum eros, in faucibus leo quam sit amet purus.
tests/serde/test-files/loremipsum.txt.br (new binary file, not shown)
tests/serde/test-files/loremipsum.txt.gz (new binary file, not shown)
tests/serde/test-files/loremipsum.txt.lz4 (new binary file, not shown)
tests/serde/test-files/loremipsum.txt.z (new file, 1 line)

(zlib-compressed binary data, not reproduced here)
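The compressed fixtures above are loremipsum.txt run through each supported format and are committed as binary files. As a rough sketch (a hypothetical helper script, not part of this commit), equivalent fixtures could be regenerated with the same crates and the same Level::Best quality that the runtime uses:

// Hypothetical fixture generator, not part of this commit.
// Assumes async-compression features = ["tokio", "brotli", "gzip", "zlib"]
// and tokio features = ["full"].
use async_compression::tokio::bufread::{BrotliEncoder, GzipEncoder, ZlibEncoder};
use async_compression::Level;
use lz4_flex::compress_prepend_size;
use tokio::io::{copy, AsyncRead, BufReader};

// Drain any async encoder into a Vec<u8>.
async fn collect(mut encoder: impl AsyncRead + Unpin) -> std::io::Result<Vec<u8>> {
    let mut out = Vec::new();
    copy(&mut encoder, &mut out).await?;
    Ok(out)
}

#[tokio::main]
async fn main() -> std::io::Result<()> {
    let dir = "tests/serde/test-files";
    let source = tokio::fs::read(format!("{dir}/loremipsum.txt")).await?;
    let input = || BufReader::new(source.as_slice());

    tokio::fs::write(
        format!("{dir}/loremipsum.txt.br"),
        collect(BrotliEncoder::with_quality(input(), Level::Best)).await?,
    )
    .await?;
    tokio::fs::write(
        format!("{dir}/loremipsum.txt.gz"),
        collect(GzipEncoder::with_quality(input(), Level::Best)).await?,
    )
    .await?;
    tokio::fs::write(
        format!("{dir}/loremipsum.txt.z"),
        collect(ZlibEncoder::with_quality(input(), Level::Best)).await?,
    )
    .await?;
    // LZ4 uses lz4_flex's size-prepended block format, matching the runtime path.
    tokio::fs::write(
        format!("{dir}/loremipsum.txt.lz4"),
        compress_prepend_size(&source),
    )
    .await?;
    Ok(())
}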
tests/serde/test-files/uncompressed.csv (new file, 4 lines)

name,age,hobbies,friends
John,30,"reading, writing, coding, 👽","Ξθής, Bob"
Ξθής,28,"painting, hiking, 🦛",""
Bob,35,"fishing, gardening, 🌿",""
tests/serde/test-files/uncompressed.json (new file, 17 lines)

{
    "name": "John",
    "age": 30,
    "hobbies": ["reading", "writing", "coding", "👽"],
    "friends": [
        {
            "name": "Ξθής",
            "age": 28,
            "hobbies": ["painting", "hiking", "🦛"]
        },
        {
            "name": "Bob",
            "age": 35,
            "hobbies": ["fishing", "gardening", "🌿"]
        }
    ]
}
tests/serde/test-files/uncompressed.yaml (new file, 20 lines)

- name: John
  age: 30
  hobbies:
    - reading
    - writing
    - coding
    - 👽
  friends:
    - name: Ξθής
      age: 28
      hobbies:
        - painting
        - hiking
        - 🦛
    - name: Bob
      age: 35
      hobbies:
        - fishing
        - gardening
        - 🌿