Avoid Packer.makeDecoder choking on lengthy output.
lifthrasiir committed Aug 19, 2021
1 parent 052faef · commit df2f67f
Showing 2 changed files with 18 additions and 1 deletion.
index.js: 6 additions & 1 deletion
@@ -835,7 +835,12 @@ export class Packer {
         // 0.
         // _: rANS output encoded in lowest 6 bits (higher bits are chosen to avoid backslash)
         // this should be isolated from other code for the best DEFLATE result
-        const firstLine = `_='${String.fromCharCode(...buf.map(c => c === 0x1c || c === 0x3f ? c : c | 0x40))}'`;
+        let firstLine = `_='`;
+        const CHUNK_SIZE = 8192;
+        for (let i = 0; i < buf.length; i += CHUNK_SIZE) {
+            firstLine += String.fromCharCode(...buf.slice(i, i + CHUNK_SIZE).map(c => c === 0x1c || c === 0x3f ? c : c | 0x40));
+        }
+        firstLine += `'`;
 
         let secondLine =
             // 1. initialize other variables
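For context on the change above: spreading the whole rANS output into a single String.fromCharCode call passes one argument per byte, and JavaScript engines cap how many arguments one call may receive, so a long enough output presumably made makeDecoder throw a RangeError. Converting a bounded slice at a time keeps every call under that cap. Below is a minimal standalone sketch of the same pattern; codesToString is an illustrative name, not part of the repository, and the 8192 chunk size simply mirrors the patch.

// Illustrative sketch only: chunked code-unit-to-string conversion.
const CHUNK_SIZE = 8192;

function codesToString(buf) {
    // buf may be a plain array or a typed array; both provide slice().
    let out = '';
    for (let i = 0; i < buf.length; i += CHUNK_SIZE) {
        out += String.fromCharCode(...buf.slice(i, i + CHUNK_SIZE));
    }
    return out;
}

// 1 MiB of 'A's, converted without ever spreading a megabyte-wide argument list.
console.log(codesToString(new Uint8Array(1 << 20).fill(0x41)).length); // 1048576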
test.js: 12 additions & 0 deletions
@@ -1,4 +1,5 @@
 import test from 'ava';
+import * as crypto from 'crypto';
 import {
     AnsEncoder, AnsDecoder, DefaultModel, Packer,
     compressWithModel, decompressWithModel
@@ -279,3 +280,14 @@ test('Packer with very high order context', t => {
     t.is(packAndEval('3 + 4 * 5', { sparseSelectors: [512] }), 23);
 });
+
+test('Packer with high entropy', t => {
+    let data = '';
+    for (let i = 0; i < (1 << 8); ++i) {
+        data += String.fromCharCode(...crypto.randomBytes(1 << 12));
+    }
+    // we've got 1 MB of random data, which can't be really compressed
+    const packer = new Packer([{ type: 'text', action: 'write', data }], { maxMemoryMB: 10, sparseSelectors: [0] });
+    packer.makeDecoder();
+    t.pass();
+});
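A side observation on the new test, not something stated in the commit: the input is assembled in (1 << 12)-byte slices, presumably because spreading a single 1 MiB Buffer into String.fromCharCode would hit the same per-call argument limit inside the test itself, and 256 slices of 4 KiB give exactly 1 MiB of effectively incompressible data, so the encoded output stays long enough to exercise the chunked path above. The sketch below shows the one-shot variant the test avoids; actual behavior varies by engine.

import * as crypto from 'crypto';

// One-shot variant of the test's loop: spreading ~2^20 arguments into a single
// call is likely to throw a RangeError on common engines.
try {
    String.fromCharCode(...crypto.randomBytes(1 << 20));
    console.log('this engine accepted a 1 MiB spread');
} catch (e) {
    console.log('1 MiB spread rejected:', e.name);
}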
