Respond to hash change, improve error reporting

Mikko Ahlroth 2022-10-29 00:06:19 +03:00
parent 7c809b189e
commit 90aaa894c8
2 changed files with 71 additions and 71 deletions

@@ -35,7 +35,7 @@
     <div id="length">Waiting...</div>
     <div id="info">
-      v2.0.1 | © Nicd 2022 | <a href="https://gitlab.com/Nicd/tahnaroskakori" target="_blank">Source</a> | <a
+      v2.0.2 | © Nicd 2022 | <a href="https://gitlab.com/Nicd/tahnaroskakori" target="_blank">Source</a> | <a
         href="./licenses.txt" target="_blank">Licenses</a>
     </div>
   </footer>

@@ -154,83 +154,90 @@ async function syncCompress() {
 }
 
 /**
- * Decompress the given data in a streaming way, with pauses between every chunk, to avoid
+ * Decompress the hash data in a streaming way, with pauses between every chunk, to avoid
  * the user being zip bombed with a short hash that generates gigabytes of output.
- * @param {Uint8Array} data
  */
-async function streamDecompress(data) {
-  statusEl.textContent = "Initializing decompress...";
-  currentCode = "";
+async function streamDecompress() {
+  try {
+    const data = BASECODEC.decode(window.location.hash.substring(1));
+    let decompressedChunks = 0;
+
+    statusEl.textContent = "Initializing decompress...";
+    currentCode = "";
 
-  const inputStream = new ReadableStream({
-    start(controller) {
-      controller.enqueue(data);
-      controller.close();
-    }
-  });
+    const inputStream = new ReadableStream({
+      start(controller) {
+        controller.enqueue(data);
+        controller.close();
+      }
+    });
 
-  let decompressedChunks = 0;
-  const decompressStream = new DecompressStream();
-  const decompressionRunner = new TransformStream({
-    start() { },
-    transform(chunk, controller) {
-      controller.enqueue(decompressStream.decompress(chunk, DECOMPRESS_CHUNK_SIZE));
-      let slice = chunk;
-      while (decompressStream.result() === BrotliStreamResult.NeedsMoreOutput) {
-        slice = slice.slice(decompressStream.last_input_offset());
-        controller.enqueue(decompressStream.decompress(slice, DECOMPRESS_CHUNK_SIZE));
-      }
-    },
-    flush(controller) {
-      if (decompressStream.result() === BrotliStreamResult.NeedsMoreInput) {
-        controller.enqueue(decompressStream.decompress(undefined, DECOMPRESS_CHUNK_SIZE));
-      }
-      controller.terminate();
-    }
-  });
+    const decompressStream = new DecompressStream();
+    const decompressionRunner = new TransformStream({
+      start() { },
+      transform(chunk, controller) {
+        controller.enqueue(decompressStream.decompress(chunk, DECOMPRESS_CHUNK_SIZE));
+        let slice = chunk;
+        while (decompressStream.result() === BrotliStreamResult.NeedsMoreOutput) {
+          slice = slice.slice(decompressStream.last_input_offset());
+          controller.enqueue(decompressStream.decompress(slice, DECOMPRESS_CHUNK_SIZE));
+        }
+      },
+      flush(controller) {
+        if (decompressStream.result() === BrotliStreamResult.NeedsMoreInput) {
+          controller.enqueue(decompressStream.decompress(undefined, DECOMPRESS_CHUNK_SIZE));
+        }
+        controller.terminate();
+      }
+    });
 
-  const optionsPickerStream = new TransformStream({
-    firstChunk: false,
-    start() { },
-    transform(chunk, controller) {
-      if (!this.firstChunk) {
-        const rest = dataOptions.parseFrom(chunk);
+    const optionsPickerStream = new TransformStream({
+      firstChunk: false,
+      start() { },
+      transform(chunk, controller) {
+        if (!this.firstChunk) {
+          const rest = dataOptions.parseFrom(chunk);
 
-        languageSelectEl.value = dataOptions.language;
+          languageSelectEl.value = dataOptions.language;
 
-        controller.enqueue(rest);
-        this.firstChunk = true;
-      } else {
-        controller.enqueue(chunk);
-      }
-    }
-  });
+          controller.enqueue(rest);
+          this.firstChunk = true;
+        } else {
+          controller.enqueue(chunk);
+        }
+      }
+    });
 
-  const textDecoderStream = new TextDecoderStream();
-  const outputStream = new WritableStream({
-    write(chunk) {
-      currentCode += chunk;
-      ++decompressedChunks;
+    const textDecoderStream = new TextDecoderStream();
+    const outputStream = new WritableStream({
+      write(chunk) {
+        currentCode += chunk;
+        ++decompressedChunks;
 
-      statusEl.textContent = `Decompressing: ${decompressedChunks} chunks...`;
+        statusEl.textContent = `Decompressing: ${decompressedChunks} chunks...`;
 
-      renderCode();
+        renderCode();
 
-      // Delay stream between every chunk to avoid zip bombing
-      return new Promise(resolve => setTimeout(resolve, DECOMPRESS_CHUNK_TIMEOUT));
-    }
-  });
+        // Delay stream between every chunk to avoid zip bombing
+        return new Promise(resolve => setTimeout(resolve, DECOMPRESS_CHUNK_TIMEOUT));
+      }
+    });
 
-  await inputStream
-    .pipeThrough(decompressionRunner)
-    .pipeThrough(optionsPickerStream)
-    .pipeThrough(textDecoderStream)
-    .pipeTo(outputStream);
+    await inputStream
+      .pipeThrough(decompressionRunner)
+      .pipeThrough(optionsPickerStream)
+      .pipeThrough(textDecoderStream)
+      .pipeTo(outputStream);
 
-  await syncCompress();
+    await syncCompress();
+  } catch (e) {
+    currentCode = `Unable to open the paste. Perhaps the URL is mistyped?\n\n${e}`;
+    console.error(e);
+    renderCode();
+  }
 }
 
 /**
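
The pauses the doc comment describes come from Web Streams backpressure: when a WritableStream's write() returns a promise, pipeTo() waits for it to resolve before delivering the next chunk. A minimal standalone sketch of that pattern follows; the sink, the byte source, and CHUNK_DELAY_MS are illustrative stand-ins, not code from this commit (run as a module for top-level await):

// Minimal sketch of the throttling pattern: a pending promise returned
// from write() stalls the pipe, so even a short input that inflates to
// a huge output is rendered gradually rather than all at once.
const CHUNK_DELAY_MS = 50; // placeholder delay, not the real constant

const throttledSink = new WritableStream({
  write(chunk) {
    console.log(`received ${chunk.length} bytes`);
    // The pipe requests no further chunks until this promise resolves.
    return new Promise(resolve => setTimeout(resolve, CHUNK_DELAY_MS));
  }
});

// Response is just a convenient way to get a ReadableStream of bytes.
await new Response(new Uint8Array(4096)).body.pipeTo(throttledSink);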
@@ -299,16 +306,9 @@ async function init() {
   codeEditEl.addEventListener("input", codeEdited);
   viewModeSwitcherEl.addEventListener("click", switchMode);
   languageSelectEl.addEventListener("change", languageSelected);
+  window.addEventListener("hashchange", streamDecompress);
 
-  if (window.location.hash.length > 1) {
-    try {
-      const bytes = BASECODEC.decode(window.location.hash.substring(1));
-      await streamDecompress(bytes);
-    } catch (e) {
-      currentCode = `Unable to open the paste. Perhaps the URL is mistyped?\n\n${e.stack}`;
-      renderCode();
-    }
-  }
+  await streamDecompress();
 }
 
 await waitForLoad();
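
Because streamDecompress now takes no arguments and reads window.location.hash itself, the same function can be registered as the hashchange handler and awaited once at startup. A sketch of that wiring, with a hypothetical loadFromHash standing in for the real function (run as a module for top-level await):

// Zero-argument handler pattern: the function pulls its input from the
// URL fragment, so one reference serves both the event and the first load.
// loadFromHash is hypothetical; the real code uses streamDecompress.
async function loadFromHash() {
  try {
    const encoded = window.location.hash.substring(1);
    console.log("would decode and stream-decompress:", encoded);
  } catch (e) {
    // Report failures to the console as well as the page, as this
    // commit's error-reporting change does.
    console.error(e);
  }
}

window.addEventListener("hashchange", loadFromHash);
await loadFromHash();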