Add rudimentary zip bombing protection

This commit is contained in:
Mikko Ahlroth 2022-10-26 11:13:32 +03:00
parent 3d52ba9734
commit e76d91a484
2 changed files with 78 additions and 22 deletions

View file

@ -20,7 +20,7 @@
<div id="length">Waiting...</div>
<div id="info">
v0.0.4 | © Nicd 2022 | <a href="https://gitlab.com/Nicd/tahnaroskakori" target="_blank">Source</a> | <a
v1.0.0 | © Nicd 2022 | <a href="https://gitlab.com/Nicd/tahnaroskakori" target="_blank">Source</a> | <a
href="./licenses.txt" target="_blank">Licenses</a>
</div>
</footer>

View file

@ -1,21 +1,21 @@
import brotliInit, { compress, decompress } from "./vendor/brotli_wasm.js";
import brotliInit, { compress, decompress, DecompressStream, BrotliStreamResult } from "./vendor/brotli_wasm.js";
import base from "./vendor/base-x.js";
// Debounce delay (ms) after an input event before re-compressing the text.
const COMPRESS_WAIT = 500;
const ENCODER = new TextEncoder();
const DECODER = new TextDecoder();
// URL-safe alphabet (the RFC 3986 unreserved characters) used for base-66 encoding.
const BASE66 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~";
const BASE66CODEC = base(BASE66);
// Maximum Brotli quality (11) for the best compression ratio.
const QUALITY = 11;
// Assumed maximum safe total URL length; hash budget is derived from this.
const MAX_URL = 2048;
// Max decompressed output bytes emitted per decompress step, and the pause (ms)
// between output chunks — together these throttle decompression speed as
// rudimentary zip-bomb protection.
const DECOMPRESS_CHUNK_SIZE = 1000;
const DECOMPRESS_CHUNK_TIMEOUT = 100;
// Initialized brotli-wasm module handle (set in init).
let brotli;
// Pending debounce timer id for compression, or null when none is scheduled.
let compressTimeout = null;
// Length of the page URL without the hash; used by maxHashLength().
let rootURLSize = 0;
// Status line element ("#length") and the text area element ("#code").
let statusEl = null;
let codeEl = null;
/**
* Returns a promise that is resolved when page is loaded enough to run JavaScripts.
*/
function waitForLoad() {
return new Promise(resolve => {
// If already loaded, fire immediately
@ -38,11 +38,78 @@ function maxHashLength() {
return MAX_URL - rootURLSize - 1;
}
/**
 * Compresses the given text, base-66 encodes it, and stores it in the URL
 * hash, updating the status line with the size breakdown. When the encoded
 * form exceeds the available hash budget, the URL is left unchanged and the
 * status line is marked as too big.
 *
 * @param {string} data - the plain text to compress and persist in the URL
 */
async function syncCompress(data) {
  const rawBytes = ENCODER.encode(data);
  const compressed = compress(rawBytes, { quality: QUALITY });
  const encoded = BASE66CODEC.encode(compressed);
  const budget = maxHashLength();
  statusEl.textContent = `Length: ${data.length} chars -> ${compressed.length} bytes -> ${encoded.length}/${budget} chars`;
  if (encoded.length > budget) {
    statusEl.textContent += " (TOO BIG!)";
  } else {
    history.replaceState(null, "", `#${encoded}`);
  }
}
/**
 * Streams decompression of the URL-hash payload into the code element.
 * Output is produced in chunks of at most DECOMPRESS_CHUNK_SIZE bytes with a
 * DECOMPRESS_CHUNK_TIMEOUT pause between writes, so a tiny compressed input
 * cannot expand at unbounded speed — rudimentary zip-bomb protection.
 * Afterwards the restored text is re-compressed to refresh the status line.
 *
 * @param {Uint8Array} data - compressed bytes decoded from the URL hash
 */
async function streamDecompress(data) {
statusEl.textContent = "Initializing decompress...";
// Source stream that delivers the whole compressed payload as one chunk.
const inputStream = new ReadableStream({
start(controller) {
controller.enqueue(data);
controller.close();
}
});
const decompressStream = new DecompressStream();
const decompressionRunner = new TransformStream({
start() { },
transform(chunk, controller) {
// Emit at most DECOMPRESS_CHUNK_SIZE bytes of output per decompress call.
controller.enqueue(decompressStream.decompress(chunk, DECOMPRESS_CHUNK_SIZE));
let slice = chunk;
// While the decompressor still has buffered output for this input, feed it
// the unconsumed remainder so every output chunk stays size-capped.
// NOTE(review): slicing `slice` (not `chunk`) assumes last_input_offset()
// is relative to the most recent input passed — confirm against the
// brotli-wasm streaming API docs.
while (decompressStream.result() === BrotliStreamResult.NeedsMoreOutput) {
slice = slice.slice(decompressStream.last_input_offset());
controller.enqueue(decompressStream.decompress(slice, DECOMPRESS_CHUNK_SIZE));
}
},
flush(controller) {
// Input is exhausted; drain whatever the decompressor can still produce
// without further input before closing the stream.
if (decompressStream.result() === BrotliStreamResult.NeedsMoreInput) {
controller.enqueue(decompressStream.decompress(undefined, DECOMPRESS_CHUNK_SIZE));
}
controller.terminate();
}
});
let decompressedChunks = 0;
const textDecoderStream = new TextDecoderStream();
const outputStream = new WritableStream({
write(chunk) {
codeEl.value += chunk;
++decompressedChunks;
statusEl.textContent = `Decompressing: ${decompressedChunks} chunks...`;
// Delay stream between every chunk to avoid zip bombing
return new Promise(resolve => setTimeout(resolve, DECOMPRESS_CHUNK_TIMEOUT));
}
});
await inputStream
.pipeThrough(decompressionRunner)
.pipeThrough(textDecoderStream)
.pipeTo(outputStream);
// Re-compress the restored text so the status line and URL hash reflect it.
await syncCompress(codeEl.value);
}
async function init() {
brotli = await brotliInit();
const codeEl = document.getElementById("code");
const lengthEl = document.getElementById("length");
codeEl = document.getElementById("code");
statusEl = document.getElementById("length");
codeEl.addEventListener("input", () => {
if (compressTimeout) {
@ -51,31 +118,20 @@ async function init() {
if (codeEl.value === "") {
history.replaceState(null, "", "#");
lengthEl.textContent = "Waiting...";
statusEl.textContent = "Waiting...";
return;
}
compressTimeout = setTimeout(async () => {
const content = codeEl.value;
const compressed = compress(ENCODER.encode(content), { quality: QUALITY });
const encoded = BASE66CODEC.encode(compressed);
lengthEl.textContent = `Length: ${content.length} chars -> ${compressed.length} bytes -> ${encoded.length}/${maxHashLength()} chars`;
if (encoded.length <= maxHashLength()) {
history.replaceState(null, "", `#${encoded}`);
} else {
lengthEl.textContent += " (TOO BIG!)";
}
await syncCompress(content);
}, COMPRESS_WAIT);
});
if (window.location.hash.length > 1) {
try {
const bytes = BASE66CODEC.decode(window.location.hash.substring(1));
const decompressed = decompress(bytes);
const content = DECODER.decode(decompressed);
codeEl.textContent = content;
await streamDecompress(bytes);
} catch (e) {
codeEl.textContent = e.stack;
}