
It's over, peesisters...

pull/46/head
coomdev committed 2 years ago
commit df84bc33aa
1. README.md (2)
2. chrome/dist/main.js (1278)
3. chrome/manifest.json (2)
4. dist/main.js (1278)
5. efdb47d2f0e04144bbaa-0.281.xpi (BIN)
6. firefox/dist/main.js (1289)
7. firefox/manifest.json (2)
8. firefox_update.json (2)
9. main.meta.js (2)
10. main.user.js (1280)
11. package-lock.json (27)
12. package.json (1)
13. pngextraembedder-0.281.xpi (BIN)
14. src/Components/App.svelte (2)
15. src/bitstream.ts (59)
16. src/dh-deflate.ts (557)
17. src/png.ts (19)
18. src/pngv3.ts (270)
19. src/stores.ts (2)
20. src/utils.ts (4)

README.md (2)

@@ -25,7 +25,7 @@ Please report any issue you have with those (only for mainstream browsers)
 Also, use this if you plan to use b4k's archive.
 - [Install 4chanX (recommended)](https://www.4chan-x.net/builds/4chan-X.user.js)
-- Install the correct WebExtension for your Browser ([Firefox](https://git.coom.tech/fuckjannies/lolipiss/raw/branch/%E4%B8%AD%E5%87%BA%E3%81%97/pngextraembedder-0.279.xpi) or [Chrome-based](https://chrome.google.com/webstore/detail/pngextraembedder/bfhpobiikighljcapcfmfganodihbicj))
+- Install the correct WebExtension for your Browser ([Firefox](https://git.coom.tech/fuckjannies/lolipiss/raw/branch/%E4%B8%AD%E5%87%BA%E3%81%97/pngextraembedder-0.281.xpi) or [Chrome-based](https://chrome.google.com/webstore/detail/pngextraembedder/bfhpobiikighljcapcfmfganodihbicj))
 For FF users, the extension is signed so you can just drag and drop it on your about:addons tab.

chrome/dist/main.js (1278)

File diff suppressed because it is too large

chrome/manifest.json (2)

@@ -2,7 +2,7 @@
   "manifest_version": 3,
   "name": "PngExtraEmbedder",
   "description": "Discover embedded files on 4chan and archives!",
-  "version": "0.280",
+  "version": "0.281",
   "icons": {
     "64": "1449696017588.png"
   },

dist/main.js (1278)

File diff suppressed because it is too large

efdb47d2f0e04144bbaa-0.281.xpi (BIN)

Binary file not shown.

firefox/dist/main.js (1289)

File diff suppressed because it is too large

firefox/manifest.json (2)

@@ -7,7 +7,7 @@
   },
   "name": "PngExtraEmbedder",
   "description": "Discover embedded files on 4chan and archives!",
-  "version": "0.279",
+  "version": "0.281",
   "icons": {
     "64": "1449696017588.png"
   },

firefox_update.json (2)

@@ -1 +1 @@
-{"addons":{"{34ac4994-07f2-44d2-8599-682516a6c6a6}":{"updates":[{"version":"0.279","update_link":"https://git.coom.tech/fuckjannies/lolipiss/raw/branch/%E4%B8%AD%E5%87%BA%E3%81%97/pngextraembedder-0.279.xpi"}]}}}
+{"addons":{"{34ac4994-07f2-44d2-8599-682516a6c6a6}":{"updates":[{"version":"0.281","update_link":"https://git.coom.tech/fuckjannies/lolipiss/raw/branch/%E4%B8%AD%E5%87%BA%E3%81%97/pngextraembedder-0.281.xpi"}]}}}

main.meta.js (2)

@@ -1,7 +1,7 @@
 // ==UserScript==
 // @name PNGExtraEmbed
 // @namespace https://coom.tech/
-// @version 0.280
+// @version 0.281
 // @description uhh
 // @author You
 // @match https://boards.4channel.org/*

main.user.js (1280)

File diff suppressed because it is too large

package-lock.json (27)

@@ -9,6 +9,7 @@
   "version": "1.0.0",
   "license": "ISC",
   "dependencies": {
+    "@astronautlabs/bitstream": "^4.1.2",
     "base58": "^2.0.1",
     "blockhash": "^0.2.0",
     "bs58": "^5.0.0",
@@ -48,6 +49,14 @@
       "web-ext-types": "^3.2.1"
     }
   },
+  "node_modules/@astronautlabs/bitstream": {
+    "version": "4.1.2",
+    "resolved": "https://registry.npmjs.org/@astronautlabs/bitstream/-/bitstream-4.1.2.tgz",
+    "integrity": "sha512-4mkxvaM9O1SLEKAoPaGifEwIGrzArzGNu9FFVv0JPf/KA2u029ufqLVVlGp6GBxiKdb4Ulk5+d3HgW81MjJdxQ==",
+    "peerDependencies": {
+      "reflect-metadata": "^0.1.13"
+    }
+  },
   "node_modules/@babel/code-frame": {
     "version": "7.16.7",
     "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.16.7.tgz",
@@ -7359,6 +7368,12 @@
       "url": "https://github.com/sponsors/sindresorhus"
     }
   },
+  "node_modules/reflect-metadata": {
+    "version": "0.1.13",
+    "resolved": "https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.1.13.tgz",
+    "integrity": "sha512-Ts1Y/anZELhSsjMcU605fU9RE4Oi3p5ORujwbIKXfWa+0Zxs510Qrmrce5/Jowq3cHSZSJqBjypxmHarc+vEWg==",
+    "peer": true
+  },
   "node_modules/regenerator-runtime": {
     "version": "0.13.9",
     "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz",
@@ -9306,6 +9321,12 @@
     }
   },
   "dependencies": {
+    "@astronautlabs/bitstream": {
+      "version": "4.1.2",
+      "resolved": "https://registry.npmjs.org/@astronautlabs/bitstream/-/bitstream-4.1.2.tgz",
+      "integrity": "sha512-4mkxvaM9O1SLEKAoPaGifEwIGrzArzGNu9FFVv0JPf/KA2u029ufqLVVlGp6GBxiKdb4Ulk5+d3HgW81MjJdxQ==",
+      "requires": {}
+    },
     "@babel/code-frame": {
       "version": "7.16.7",
       "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.16.7.tgz",
@@ -14735,6 +14756,12 @@
       "strip-indent": "^4.0.0"
     }
   },
+  "reflect-metadata": {
+    "version": "0.1.13",
+    "resolved": "https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.1.13.tgz",
+    "integrity": "sha512-Ts1Y/anZELhSsjMcU605fU9RE4Oi3p5ORujwbIKXfWa+0Zxs510Qrmrce5/Jowq3cHSZSJqBjypxmHarc+vEWg==",
+    "peer": true
+  },
   "regenerator-runtime": {
     "version": "0.13.9",
     "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz",

package.json (1)

@@ -15,6 +15,7 @@
   "author": "",
   "license": "ISC",
   "dependencies": {
+    "@astronautlabs/bitstream": "^4.1.2",
     "base58": "^2.0.1",
     "blockhash": "^0.2.0",
     "bs58": "^5.0.0",

pngextraembedder-0.281.xpi (BIN)

Binary file not shown.

src/Components/App.svelte (2)

@@ -413,7 +413,7 @@
 <label>
   PNG Embedding method
   <select bind:value={$settings.pmeth}>
-    {#each [0, 1, 2, 3, 4] as m}
+    {#each [0, 1, 2, 3, 4, 5] as m}
       <option value={m}>Method {m}</option>
     {/each}
   </select>

src/bitstream.ts (59, new file)

@@ -0,0 +1,59 @@
import { BitstreamReader as br, BitstreamWriter as bw } from '@astronautlabs/bitstream';
export const revbyte = (n: number, len = 8) => {
let acc = 0;
let n2 = n;
let len2 = len;
while (len2) {
// can't use bit shifts or binary ops here, or else the value becomes negative
acc = (acc * 2) + (n2 & 1);
n2 >>= 1;
len2--;
}
return acc;
};
export class BitstreamReader extends br {
rtotal = 0;
addBuffer(hidden: Buffer) {
const inp = Buffer.from(hidden);
for (let i = 0; i < inp.byteLength; ++i)
inp[i] = revbyte(inp[i]);
super.addBuffer(inp);
}
readSync(len: number) {
const v = super.readSync(len);
this.rtotal += len;
return revbyte(v, len);
}
}
export type Writable = {
write: (chunk: Buffer) => void;
};
export class BitstreamWriter extends bw {
wtotal = 0;
constructor(private w: Writable) {
super({
write: (c) => {
const inp = Buffer.from(c);
for (let i = 0; i < inp.byteLength; ++i)
inp[i] = revbyte(inp[i]);
this.w.write(inp);
}
});
}
write(length: number, value: number): void {
this.wtotal += length;
if (length) {
//tconsole.log(length, value)
value = revbyte(value, length);
}
super.write(length, value);
}
}
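A note on the bit order, plus a quick sanity check: DEFLATE packs bits least-significant-bit-first within each byte, while the @astronautlabs/bitstream base classes appear to operate most-significant-bit-first, so the wrappers above reverse each byte on input and each value on read/write to bridge the two. A minimal round-trip sketch of the wrappers, assuming Node Buffers and that the base writer flushes each completed byte:

```ts
import { BitstreamReader, BitstreamWriter, revbyte } from './bitstream';

// revbyte reverses the low `len` bits: 0b1011 over 4 bits -> 0b1101.
console.log(revbyte(0b1011, 4).toString(2)); // "1101"

// Round trip: whatever the writer emits, the reader recovers.
const parts: Buffer[] = [];
const w = new BitstreamWriter({ write: c => { parts.push(Buffer.from(c)); } });
w.write(3, 0b110);   // a 3-bit value...
w.write(5, 0b10011); // ...plus a 5-bit value completes one byte
const r = new BitstreamReader();
r.addBuffer(Buffer.concat(parts));
console.log(r.readSync(3) === 0b110, r.readSync(5) === 0b10011); // true true
```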

src/dh-deflate.ts (557, new file)

@@ -0,0 +1,557 @@
import { BitstreamReader, BitstreamWriter, revbyte } from './bitstream';
const TINF_OK = 0;
const TINF_DATA_ERROR = -3;
class Tree {
table = new Uint16Array(16); /* table of code length counts */
trans = new Uint16Array(288); /* code -> symbol translation table */
}
type HCtree = [number | HCtree, (number | HCtree)?];
// these two functions are a big bottleneck because I'm not clever enough to figure out how to encode
// something directly by using the sorted code length/value tables, haha
const getPathTo = (tree: HCtree, value: number): string | undefined => {
if (tree[0] === value)
return '0';
if (tree[1] === value)
return '1';
let p: string | undefined;
if (typeof tree[0] != "number")
p = getPathTo(tree[0], value);
let b = '0';
if (!p) {
if (tree[1] && typeof tree[1] != "number")
p = getPathTo(tree[1], value);
b = '1';
}
if (p)
return b + p;
};
// from jpeg-js; in turn this means that jpeg-js decoding could be faster
// if they decoded directly from the symbol tables instead of building a tree
function buildHuffmanTable(codeLengths: ArrayLike<number>, values: ArrayLike<number>) {
// eslint-disable-next-line prefer-const
let k = 0, code: any = [], i, j, length = 16;
while (length > 0 && !codeLengths[length - 1])
length--;
code.push({ children: [], index: 0 });
let p = code[0], q;
for (i = 0; i < length; i++) {
for (j = 0; j < codeLengths[i]; j++) {
p = code.pop();
p.children[p.index] = values[k];
while (p.index > 0) {
if (code.length === 0)
throw new Error('Could not recreate Huffman Table');
p = code.pop();
}
p.index++;
code.push(p);
while (code.length <= i) {
code.push(q = { children: [], index: 0 });
p.children[p.index] = q.children;
p = q;
}
k++;
}
if (i + 1 < length) {
// p here points to last code
code.push(q = { children: [], index: 0 });
p.children[p.index] = q.children;
p = q;
}
}
return code[0].children as HCtree;
}
class Data {
computeReverse() {
this.rltree = buildHuffmanTable(this.ltree.table, this.ltree.trans)[0]! as any; // unneeded, but maybe sometime i'll throw symbol reduction into the mix
this.rdtree = buildHuffmanTable(this.dtree.table, this.dtree.trans)[0]! as any;
}
ltree: Tree;
dtree: Tree;
rltree!: HCtree;
rdtree!: HCtree;
dest: number[] = [];
constructor(public source: BitstreamReader, public dests: BitstreamWriter, public to_hide: BitstreamReader | BitstreamWriter, public hidden: BitstreamWriter) {
this.ltree = new Tree(); /* dynamic length/symbol tree */
this.dtree = new Tree(); /* dynamic distance tree */
}
}
/* --------------------------------------------------- *
* -- uninitialized global data (static structures) -- *
* --------------------------------------------------- */
const sltree = new Tree();
const sdtree = new Tree();
// eslint-disable-next-line prefer-const
let rltree: HCtree;
// eslint-disable-next-line prefer-const
let rdtree: HCtree;
/* extra bits and base tables for length codes */
const length_bits = new Uint8Array(30);
const length_base = new Uint16Array(30);
/* extra bits and base tables for distance codes */
const dist_bits = new Uint8Array(30);
const dist_base = new Uint16Array(30);
/* special ordering of code length codes */
const clcidx = new Uint8Array([
16, 17, 18, 0, 8, 7, 9, 6,
10, 5, 11, 4, 12, 3, 13, 2,
14, 1, 15
]);
/* used by tinf_decode_trees, avoids allocations every call */
const code_tree = new Tree();
const lengths = new Uint8Array(288 + 32);
/* ----------------------- *
* -- utility functions -- *
* ----------------------- */
/* build extra bits and base tables */
function tinf_build_bits_base(bits: Uint8Array, base: Uint16Array, delta: number, first: number) {
let i, sum;
/* build bits table */
for (i = 0; i < delta; ++i) bits[i] = 0;
for (i = 0; i < 30 - delta; ++i) bits[i + delta] = i / delta | 0;
/* build base table */
for (sum = first, i = 0; i < 30; ++i) {
base[i] = sum;
sum += 1 << bits[i];
}
}
/* build the fixed huffman trees */
function tinf_build_fixed_trees(lt: Tree, dt: Tree) {
let i;
/* build fixed length tree */
for (i = 0; i < 7; ++i) lt.table[i] = 0;
lt.table[7] = 24;
lt.table[8] = 152;
lt.table[9] = 112;
for (i = 0; i < 24; ++i) lt.trans[i] = 256 + i;
for (i = 0; i < 144; ++i) lt.trans[24 + i] = i;
for (i = 0; i < 8; ++i) lt.trans[24 + 144 + i] = 280 + i;
for (i = 0; i < 112; ++i) lt.trans[24 + 144 + 8 + i] = 144 + i;
/* build fixed distance tree */
for (i = 0; i < 5; ++i) dt.table[i] = 0;
dt.table[5] = 32;
for (i = 0; i < 32; ++i) dt.trans[i] = i;
}
/* given an array of code lengths, build a tree */
const offs = new Uint16Array(16);
function tinf_build_tree(t: Tree, lengths: Uint8Array, off: number, num: number) {
let i, sum;
/* clear code length count table */
for (i = 0; i < 16; ++i) t.table[i] = 0;
/* scan symbol lengths, and sum code length counts */
for (i = 0; i < num; ++i) t.table[lengths[off + i]]++;
t.table[0] = 0;
/* compute offset table for distribution sort */
for (sum = 0, i = 0; i < 16; ++i) {
offs[i] = sum;
sum += t.table[i];
}
/* create code->symbol translation table (symbols sorted by code) */
for (i = 0; i < num; ++i) {
if (lengths[off + i]) t.trans[offs[lengths[off + i]]++] = i;
}
}
/* ---------------------- *
* -- decode functions -- *
* ---------------------- */
/* get one bit from source stream */
function tinf_getbit(d: Data) {
return d.source.readSync(1);
}
/* read a num bit value from a stream and add base */
function tinf_read_bits(d: Data, num: number, base: number) {
if (!num)
return base;
const v = d.source.readSync(num) + base;
return v;
}
/* given a data stream and a tree, decode a symbol */
function tinf_decode_symbol(d: Data, t: Tree, copy = true, ext: any = {}) {
let sum = 0, cur = 0, len = 0;
/* get more bits while code value is above sum */
let s = 0;
do {
const b = d.source.readSync(1);
copy && d.hidden.write(1, b);
s = (s << 1) | b;
cur = 2 * cur + b;
++len;
sum += t.table[len];
cur -= t.table[len];
} while (cur >= 0);
ext.length = len;
ext.sym = s;
return t.trans[sum + cur];
}
/* given a data stream, decode dynamic trees from it */
function tinf_decode_trees(d: Data, lt: Tree, dt: Tree) {
let i, num, length;
/* get 5 bits HLIT (257-286) */
const hlit = tinf_read_bits(d, 5, 257);
d.hidden?.write(5, hlit - 257);
/* get 5 bits HDIST (1-32) */
const hdist = tinf_read_bits(d, 5, 1);
d.hidden?.write(5, hdist - 1);
/* get 4 bits HCLEN (4-19) */
const hclen = tinf_read_bits(d, 4, 4);
d.hidden?.write(4, hclen - 4);
for (i = 0; i < 19; ++i) lengths[i] = 0;
/* read code lengths for code length alphabet */
for (i = 0; i < hclen; ++i) {
/* get 3 bits code length (0-7) */
const clen = tinf_read_bits(d, 3, 0);
d.hidden?.write(3, clen);
lengths[clcidx[i]] = clen;
}
/* build code length tree */
tinf_build_tree(code_tree, lengths, 0, 19);
/* decode code lengths for the dynamic trees */
for (num = 0; num < hlit + hdist;) {
const sym = tinf_decode_symbol(d, code_tree);
let prev: number;
switch (sym) {
case 16:
/* copy previous code length 3-6 times (read 2 bits) */
prev = lengths[num - 1];
length = tinf_read_bits(d, 2, 3);
d.hidden?.write(2, length - 3);
for (; length; --length) {
lengths[num++] = prev;
}
break;
case 17:
/* repeat code length 0 for 3-10 times (read 3 bits) */
length = tinf_read_bits(d, 3, 3);
d.hidden?.write(3, length - 3);
for (; length; --length) {
lengths[num++] = 0;
}
break;
case 18:
/* repeat code length 0 for 11-138 times (read 7 bits) */
length = tinf_read_bits(d, 7, 11);
d.hidden?.write(7, length - 11);
for (; length; --length) {
lengths[num++] = 0;
}
break;
default:
/* values 0-15 represent the actual code lengths */
lengths[num++] = sym;
break;
}
}
/* build dynamic trees */
tinf_build_tree(lt, lengths, 0, hlit);
tinf_build_tree(dt, lengths, hlit, hdist);
}
const bufferEq = (a: Uint8Array, b: Uint8Array) => {
// this is assumed
// if (a.byteLength != b.byteLength)
// return false;
for (let i = 0; i < a.byteLength; ++i)
if (a[i] != b[i])
return i;
return -1;
};
const get_symbol = (value: number, bits_table: Uint8Array, base_table: Uint16Array): [number, number, number] => {
let i = 0;
for (i = 0; i < base_table.length; ++i) {
if (base_table[i] > value) {
i--;
return [i, bits_table[i], value - base_table[i]];
}
}
i--;
return [i, bits_table[i], value - base_table[i]];
};
const encode_symbol = (sym: number, tree: HCtree) => {
const code = getPathTo(tree, sym)!;
return {
length: code?.length,
val: parseInt(code, 2)
};
};
/* ----------------------------- *
* -- block inflate functions -- *
* ----------------------------- */
/* given a stream and two trees, inflate a block of data */
export let capacity = 0;
function tinf_inflate_block_data(d: Data, lt: Tree, dt: Tree) {
let finished = false;
// eslint-disable-next-line no-constant-condition
while (1) {
if (finished) {
return true;
}
let sym = tinf_decode_symbol(d, lt); // copy
/* check for end of block */
if (sym === 256) {
return TINF_OK;
}
if (sym < 256) {
d.dest.push(sym);
// same
} else {
sym -= 257;
/* possibly get more bits from length code */
const length = tinf_read_bits(d, length_bits[sym], length_base[sym]);
//d.hidden.write(length_bits[sym], length - length_base[sym]);
// length is unchanged, so copy as is
if (length_bits[sym])
d.hidden.write(length_bits[sym], length - length_base[sym]);
const ext = { length: 0, sym: 0 };
const dist = tinf_decode_symbol(d, dt, false, ext); // don't copy immediately, we may change the code
//ext.sym = revbyte(ext.sym, ext.length);
//d.hidden.write(ext.length, ext.sym);
/* possibly get more bits from distance code */
let backoffset = tinf_read_bits(d, dist_bits[dist], dist_base[dist]);
const offs = d.dest.length - backoffset;
const match = Buffer.from(d.dest.slice(offs, offs + length));
// don't consider matches that could be in the lookahead buffer
if (match.length == length) {
let begin = d.dest.length - 32768;
if (begin < 0)
begin = 0;
let matches: number[] = [];
let o = 0;
const slic = Buffer.from(d.dest.slice(begin + o, d.dest.length));
while (begin + o < d.dest.length) {
const r = slic.slice(o, d.dest.length).indexOf(match);
if (r >= 0) {
matches.push(r + begin + o);
o += r;
}
else {
break;
}
o++;
}
if (matches.length > 1) {
matches = matches.map(e => -(e - d.dest.length));
matches.reverse();
const v = Math.floor(Math.log2(matches.length));
capacity += v;
// a ""perfectly"" compressed file is like a file with only 0s embedded
//console.log('LLLL', matches.length)
if (d.to_hide instanceof BitstreamReader) {
if (d.to_hide.available) {
const s = d.to_hide.readSync(Math.min(d.to_hide.available, v));
backoffset = matches[s];
} else {
finished = true;
}
}
// extract hidden bit
else {
const idx = matches.indexOf(backoffset);
d.to_hide.write(v, idx);
}
}
}
// match length should be the same so no need to rewrite
//const [lsym, llen, loff] = get_symbol(length, length_bits, length_base)
//let enclen = encode_symbol(lsym, d.rltree);
//d.hidden.write(enclen.length, enclen.val);
//d.hidden.write(llen, loff);
const [dsym, dlen, doff] = get_symbol(backoffset, dist_bits, dist_base);
const encdist = encode_symbol(dsym, d.rdtree);
d.hidden.write(encdist.length, revbyte(encdist.val, encdist.length));
d.hidden.write(dlen, doff);
/* copy match */
for (let i = offs; i < offs + length; ++i) {
d.dest.push(d.dest[i]);
}
}
}
}
/* inflate an uncompressed block of data */
function tinf_inflate_uncompressed_block(d: Data) {
/* get length */
const length = d.source.readSync(16);
/* get one's complement of length */
const invlength = d.source.readSync(16);
/* check length */
if (length !== (~invlength & 0x0000ffff))
return -4;
for (let i = length; i; --i)
d.dest.push(d.source.readSync(8));
return TINF_OK;
}
/* inflate stream from source to dest */
export function tinf_uncompress(source: BitstreamReader,
// normal decompressed data
decompressed: (chunk: Uint8Array) => void,
// stream of data to hide
to_hide: BitstreamReader | BitstreamWriter,
// compressed stream containing hidden data
hidden: (chunk: Uint8Array) => void) {
const decomp = new BitstreamWriter({ write: (decompressed || (() => {/* */ })) });
const hid = new BitstreamWriter({ write: (hidden || (() => {/* */ })) });
const d = new Data(source, decomp, to_hide, hid);
let res: number | undefined | true;
let bfinal: number, btype: number;
do {
if (to_hide instanceof BitstreamReader) {
if (to_hide.available == 0) {
// copy until we're byte-aligned
while (source.available && source.offset & 0x7) {
hid.write(1, source.readSync(1));
}
// nothing left to embed, we are byte aligned, so we just "memcpy" the rest
return source.offset >> 3; // this is block aligned, so this doesn't tell us where the last hidden bit is, just an upper bound
}
}
// TODO: truncate to_hide if we get close to 70k?
/* read final block flag */
bfinal = tinf_getbit(d);
d.hidden.write(1, bfinal);
/* read block type (2 bits) */
btype = tinf_read_bits(d, 2, 0);
d.hidden?.write(2, btype);
/* decompress block */
//console.log(btype, capacity);
switch (btype) {
case 0:
/* decompress uncompressed block */
res = tinf_inflate_uncompressed_block(d);
break;
case 1:
/* decompress block with fixed huffman trees */
d.rdtree = rdtree;
d.rltree = rltree;
res = tinf_inflate_block_data(d, sltree, sdtree);
if (res === true) {
continue;
}
break;
case 2:
/* decompress block with dynamic huffman trees */
tinf_decode_trees(d, d.ltree, d.dtree);
d.computeReverse();
res = tinf_inflate_block_data(d, d.ltree, d.dtree);
if (res === true) {
continue;
}
break;
default:
res = -2;
}
if (res !== TINF_OK)
throw new Error('Data error ' + res);
} while (!bfinal);
decomp.end();
hid.end();
//if (d.dest.byteOffset < d.dest.length) {
// if (typeof d.dest.slice === 'function')
// return d.dest.slice(0, d.dest.byteOffset);
// else
// return d.dest.subarray(0, d.dest.byteOffset);
//}
//return d.dest;
}
/* -------------------- *
* -- initialization -- *
* -------------------- */
/* build fixed huffman trees */
tinf_build_fixed_trees(sltree, sdtree);
/* build extra bits and base tables */
tinf_build_bits_base(length_bits, length_base, 4, 3);
tinf_build_bits_base(dist_bits, dist_base, 2, 1);
rltree = buildHuffmanTable(sltree.table, sltree.trans)[0] as any;
rdtree = buildHuffmanTable(sdtree.table, sdtree.trans)[0] as any;
/* fix a special case */
length_bits[28] = 0;
length_base[28] = 258;
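The idea behind this file: tinf_uncompress re-parses a raw DEFLATE stream bit-for-bit and re-emits it. Whenever the bytes a back-reference copies occur k > 1 times in the 32 KiB window, all k distances decode to identical output, so the encoder's pick of occurrence can carry floor(log2(k)) hidden bits without changing the decompressed data (the exported `capacity` tallies these; a string found 5 times yields 2 bits, for example). Passing a BitstreamReader as `to_hide` embeds a payload; passing a BitstreamWriter recovers it. A minimal driver sketch under those assumptions (helper names are hypothetical; this mirrors embedInRawDeflate/extractFromRawDeflate in src/pngv3.ts):

```ts
import { BitstreamReader, BitstreamWriter } from './bitstream';
import { tinf_uncompress } from './dh-deflate';

// Rewrite a raw DEFLATE stream so the choice of LZ77 match distances
// encodes `secret`; the decompressed bytes are unchanged.
const hideInDeflate = (deflated: Buffer, secret: Buffer): Buffer => {
  const src = new BitstreamReader();
  src.addBuffer(deflated);
  const toHide = new BitstreamReader();
  toHide.addBuffer(secret);
  const out: Uint8Array[] = [];
  const endo = tinf_uncompress(src, () => { /* discard plaintext */ }, toHide, c => out.push(c));
  if (endo) // secret exhausted early: the rest of the stream is copied verbatim
    out.push(deflated.slice(endo));
  return Buffer.concat(out);
};

// Extraction is the mirror image: feed a BitstreamWriter as `to_hide`.
const revealFromDeflate = (deflated: Buffer): Buffer => {
  const src = new BitstreamReader();
  src.addBuffer(deflated);
  const bits: Buffer[] = [];
  const toReveal = new BitstreamWriter({ write: c => { bits.push(Buffer.from(c)); } });
  tinf_uncompress(src, () => { /* discard plaintext */ }, toReveal, () => { /* ignore copy */ });
  return Buffer.concat(bits);
};
```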

src/png.ts (19)

@@ -9,13 +9,15 @@ export type PNGChunk = [
   number];// offset
 export class PNGDecoder {
+  stopped = false;
   repr: Buffer;
   req = 8;
   ptr = 8;
-  constructor(private reader: ReadableStreamDefaultReader<Uint8Array>) {
+  constructor(private reader: ReadableStreamDefaultReader<Uint8Array>, private strict = true) {
     this.repr = Buffer.from([]);
   }
@@ -23,7 +25,10 @@ export class PNGDecoder {
     while (this.repr.byteLength < this.req) {
       const chunk = await this.reader.read();
       if (chunk.done) {
-        throw new Error(`Unexpected EOF, got ${this.repr.byteLength}, required ${this.req}, ${chunk.value}`);
+        this.stopped = true;
+        if (this.strict)
+          throw new Error(`Unexpected EOF, got ${this.repr.byteLength}, required ${this.req}, ${chunk.value}`);
+        return;
       }
       this.repr = Buffer.concat([this.repr, chunk.value]);
     }
@@ -33,23 +38,27 @@ export class PNGDecoder {
     while (true) {
       this.req += 8; // req length and name
       await this.catchup();
+      if (this.stopped)
+        break;
       const length = this.repr.readUInt32BE(this.ptr);
       const name = this.repr.slice(this.ptr + 4, this.ptr + 8).toString();
       this.ptr += 4;
       this.req += length + 4; // crc
       //await this.catchup();
       const pos = this.ptr;
       yield [name,
         async () => {
           await this.catchup();
           return this.repr.slice(pos, pos + length + 4);
         },
         async () => {
           await this.catchup();
           return this.repr.readUInt32BE(this.ptr + length + 4);
         },
         this.ptr] as PNGChunk;
       this.ptr += length + 8;
+      if (this.stopped)
+        break;
       if (name == 'IEND')
         break;
     }
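With `strict = false`, `catchup` now records EOF in `stopped` instead of throwing, and `chunks()` simply stops iterating, so a truncated PNG can be scanned as far as its data goes. A minimal usage sketch (hypothetical helper; BufferReadStream is the same stream wrapper pngv3.ts uses):

```ts
// List chunk names of a possibly-truncated PNG without throwing on EOF.
const listChunks = async (png: Buffer): Promise<string[]> => {
  const reader = BufferReadStream(png).getReader();
  const dec = new PNGDecoder(reader, false); // strict = false
  const names: string[] = [];
  try {
    for await (const [name] of dec.chunks())
      names.push(name); // ends at IEND, or silently at a premature EOF
  } finally {
    reader.releaseLock();
  }
  return names;
};
```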

src/pngv3.ts (270)

@@ -5,6 +5,8 @@ import { decodeCoom3Payload } from "./utils";
 import { settings } from "./stores";
 import { filehosts } from "./filehosts";
 import * as bs58 from 'bs58';
+import { BitstreamReader, BitstreamWriter } from "./bitstream";
+import { tinf_uncompress } from "./dh-deflate";
 export let csettings: Parameters<typeof settings['set']>[0];
@@ -52,15 +54,104 @@ const rprefs: any = {
   'z': 'z.zz.fo',
 };
+const extractFromRawDeflate = (b: Buffer) => {
+  const src = new BitstreamReader();
+  src.addBuffer(b);
+  const chnks: number[] = [];
+  const hidden = new BitstreamWriter({
+    write(chunk) {
+      for (const i of chunk) {
+        if (i)
+          chnks.push(i);
+        else
+          throw "Finish";
/* NOOOOOO YOU CANT JUST THROW DURING NORMAL OPERATION THATS A CODE SMEL-ACK!
%@@@@@(////////@
@&/////(@&/////@.
#@@@@@&&(, %@/////////@@@@@
#@@& (@@@@&. &%/////@@#/////%@
&@@ .&@@% ,* (@@@@ @(/%@&/////////(@
*@@ ,@@@@, @@@@. @@.*@%@@ @@@(///////////(@
@@# .@*@@@@@@@. &@& *@@# *@@ %@@* @@%/////#@@&////#@
%@/ , %@@@@@@. @@/ (@@* @% &@@ @(///////////%@@@#
,@@ @@ @@@# .@@, ,@@,.@* @@ @(////////@@///@%
@@, *@& @@@ @@@ .@@@ @& @(/////%@///////@
@@ #@* ,@ @@@ *@@@@% @(///&@/////////@
@@ #@ *, @@( /@@@@@@@@( @@ *@@% @(/(@(//////////@,
#@ (@* %@% @@ @@ #@ %@@ %@@@@@#/////////@,
(@. *@@ %@ @@* @& @@@@@@@@, .@, %@( @//////#@@#////@
@@ @@ @( @@. @@ @@@@@@% .@* @@/ @#///////////%@@@
@* (@ @@ @ @@@% %@. %@@@ @////////@@&///@%
@. @@ @( (@ @@/ @@ (@@ .@/////@@(///////@
@, @@@@@.@@ @@@& .@@ @@ (@//(@#//////////@@
@% @@ .@@@@ #@@ #@, @&&@(////////////@@
#@ @@@ *@@@@@ @& @% /@@& &@@, ,@. @@(//////%@@%////%@
@, @@@@* @@ *((#&@@@@@ @@ @%///////////%@@@@@@
#@ ,@@@@ &@@@@@@@@# ,@@@@@@@@@@@& @ /@////////%@@///////@
@@ @@@ @@/ @@@@@@ @ .@@@@/ @ (@/////@@///////////@
@@ @@.(@ &@,*@@@ @@# @@ @@@@& @@ *&@@@@%, @. @///@%/////////////@
@ @@ @@@@# ,@ @# %@ @# @ (@@@( @& @/@@//////////////#@
&@ @@ @@@@ @@@, @ /@ .@@@ @@ @#/////////////(@(
@& #@@ @@@@ &@@@@@@@@@@@ @@@, #@@@ @@@ @@@///////////(@@,
@@ @@ @@ @@ @% @ @# *@@ @@@@@@@@@@@&///////////(@@
@& /, @@ @@ (/, @@ @* @/ ,((. .@@@@@@@@@@@@@. *@/ *@@//////////////////////@,
@* @.#@ @% %@@@@@@# @@ @% @@. @@ @@&/@@@@ @@ *@/@@@ .@# @@@///////////////////@@
@# @& @# @@@@@. /@. %@ @& @@ #@@((@& @@ #@@@@////@& @@@@@@@@@@@@@@@@@@@@(
@@@. @, @@ .@* @& @@@@* @@@@. %@@/ @@@///////////@@. @@&%///////////////@@
/@@ @@,@. *@* #@* ,@@(/@* @@@@@@#/////////////(@@ @* @@@@@//////////////@@
@@ @( &@. @. #@@@ %@& @@@@@@@@@@@@/////////////&@* @@ .&@@@@&////////////@&
,@. @@ @@ @. @@/(@. @@@@@@@@@@@@@@@@@@@@@(/////////@ (@ @@@@@@@@@@@@@@@
*@@ %&*@, /@# @. &@/(@@@@, .@@@@@@@@%,,,,,,,,,,,,,,,@@@%/////@@ @ %@,&#@@&////////////@*
@@@& %@@* ,@@ @. @(//@@ ,@#@@@@@@%(@*,,,,,,,,,,,,,,,,,,@@@@@///&@ #@ &@ (@@@#//////////@
.@&@@ #@@@@@@@@@@& @, @@@*%@@@@*@@@@@@@,,,,,@@,,,,,,,,,,,,,,,,,,@(. /@//#@ #@ %@ @&/////////(@*
@@@@& @@ @, ,@, %@@, %@@@@@@@,,,,,,,,@@,,,,,,,,,,,,,,,,*@ @@///@/ /@ %@@&%@ @@@@&@@@@@(
%@@@@/@% @ .@.%@( .@@@@@@@@,,,,,,,,,,@@,,,,,,,,,,,,,,,,@@@ @@/@@ *@, @@@@@# @#///%@@
@@#@@@ @ @@( @@///@@@@@/,,,,,,,,,,&@,,,,,,,,,,,,,,,,%@&.,, *@/@@ %@*@@@@@@@@@@@@@%////@,
&@@@@ @ &@////////@@@,,,,,,,,,,,#@,,,,,,,,,,,,,,,,(@/%@@@@@@@(&@ &@ @@ @#///////@#
.@( @/ ,@/////////@*,,,,,,,,,,#@,,,,,,,,,,,,,,,,/@///&@. .@/@@ &@ @@@ @@ @&////////@
/@# &@ @@///////@*,,,,,,,,,,&@,,,,,,,,,,,,,,,,/@/////@@, ,@/@# %@@ @@@@@@@@///////@#
@@ .@* @@/////@/,,,,,,,,,,@@,,,,,,,,,,,,,,,,/@/////&@,..@@/@ *@@@* *@@@@//////@/
(@/ @@ @@///@&,,,,,,,,,,@*,,,,,,,,,,,,,,,&@//////@@. @//@* #@@@@#/#@@
@@@@@ @@ @@/@@,,,,,,,,,/@,,,,,,,,,,,,,,*@@@@/////@@@@@@//@* /@@@@@@@@@*
@@/////(@@ @@ @@@,,,,,,,,,#@,,,,,,,,,,,,,%@///////@@@@&,@#/@& ,&@@@/@@@@@@@////@# %@@%
@/////////%@@ %@ @(,,,,,,,,%@,,,,,,,,,,,,@@//////@@@# (@(/@@../%& (@@@((@&//(@ .@@#
@(///////////@@( ( @@,,,,,,,,/@,,,,,,,,,,*@@@@@@@ @@.*@@#//@@ @@@& #@@@@//////@, /@@@/
.(&@@@@@@@@(////////////(@@# @*,,,,,,,,,,,,,,,,,,@@@@@. @@#@@(//(@@@@ @@,,@ .@@@@///////@@ @@@#
(@@@@@&. @@&//////////&@#/&@@* @*,,,,,,,,,,,,,,,,@@//////////(@@& @@ @@@@& @@@@@@@@@@@@ #@@,
%@@@, %@@@@@&(//////////(@@@/ (. @@,,,,,,,,,,,,,@@ *#%&@@@@@.@% %@ ,* .% *@@@@//////#@ .@@%
/@@@ /@@////////////////@@&@@@( @.@@ @@@@%##&@@@@ , %@ @@@ (@# / @@@@@@////@* @@@
&@@( *@@&/////////(@@///////&@@@@@@@@@/ &@@@.@@ %@@@. .. @@@*@(@.%@@( #@%,@@@@&//(//@, ,
@@@. ,@@@@@%/////////////(@(@@@@@@ @@@ #@ *#./@@@@ & &*.@#@ @#@ @@@&*@@@ (@@@@@@@@@@
@@@. ,@&////////////////@@#//@@@@@@@@@ @@@ @@ @@@@@@ @@@ ,@ @@@@@ &@@@@@@#/////@@
.@@/ @@@%(/////#@@@%////////%@/(##@%/@@@@@@@@@@@@@@@@@@@ @@@ /@ @@@@@@@@@@(#@////@.
@@#//////////%@/////////@(///@@/////&@//#@@@/%@@/#@/@@//(@/@@/////@%
/@@@@@&(////////////@@//////////@&/////////#//#///@@@##%@@@(
.@@#////////////%@&////////@@////////////////#@#
#@@@@@@@@@( /@@&(/////&@@@@@@@@@@@@@,
*/
+      }
+    },
+  });
+  try {
+    tinf_uncompress(src, () => {/** */ }, hidden, () => {/** */ });
+  } catch (e) {
+    if (e == "Finish")
+      return Buffer.from(chnks);
+  }
+  return false; // possibly incorrect?
+};
 const extract = async (png: Buffer) => {
   const reader = BufferReadStream(png).getReader();
-  const sneed = new PNGDecoder(reader);
+  const sneed = new PNGDecoder(reader, false);
   const ret: EmbeddedFile[] = [];
   let w: Buffer | undefined;
   if (!csettings)
     throw new Error("Settings uninit");
   try {
+    let complete = false;
+    const idats: Buffer[] = [];
     for await (const [name, chunk, crc, offset] of sneed.chunks()) {
       let buff: Buffer;
       switch (name) {
@@ -113,14 +204,34 @@ const extract = async (png: Buffer) => {
         }
         break;
       case 'IDAT':
+        if (ret.length)
+          return ret;
+        buff = await chunk();
+        idats.push(buff.slice(4));
         // eslint-disable-next-line no-fallthrough
       case 'IEND':
-        return ret.slice(0, csettings.maxe);
+        complete = true;
         // eslint-disable-next-line no-fallthrough
       default:
         break;
       }
     }
+    if (idats.length) {
+      let decoded: Buffer | false;
+      if ((decoded = extractFromRawDeflate(Buffer.concat(idats).slice(2))) === false)
+        return false;
+      const dec = decoded
+        .toString()
+        .split(' ')
+        .map(e => {
+          if (!(e[0] in rprefs))
+            throw "Uhh";
+          // should also check if the id has a len of 6-8 or ends in .pee
+          return `https://${rprefs[e[0]]}/${e.slice(1)}`;
+        }).join(' ');
+      return decodeCoom3Payload(Buffer.from(dec));
+    }
   } catch (e) {
     console.error(e);
   } finally {
@@ -145,47 +256,86 @@ export const BufferWriteStream = () => {
   return [ret, () => b] as [WritableStream<Buffer>, () => Buffer];
 };
+const embedInRawDeflate = (b: Buffer, h: Buffer) => {
+  const src = new BitstreamReader();
+  const hid = new BitstreamReader();
+  hid.addBuffer(h);
+  src.addBuffer(b);
+  const chnks: Uint8Array[] = [];
+  const endo = tinf_uncompress(src, () => { /** */ }, hid, c => chnks.push(c));
+  if (endo)
+    chnks.push(b.slice(endo));
+  return Buffer.concat(chnks);
+};
 export const inject_data = async (container: File, injb: Buffer) => {
   if (!csettings)
     throw new Error("Settings uninit");
+  if (csettings.pmeth < 5) {
     let magic = false;
     const [writestream, extract] = BufferWriteStream();
     const encoder = new PNGEncoder(writestream);
     const decoder = new PNGDecoder(container.stream().getReader());
     for await (const [name, chunk, crc, offset] of decoder.chunks()) {
       if (magic && name != "IDAT")
         break;
       if (!magic && name == "IDAT") {
         const passed = Buffer.from(injb);
         switch (csettings.pmeth) {
           case 0:
             await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", Buffer.concat([CUM3, passed])), () => Promise.resolve(0), 0]);
             break;
           case 1:
             xor(passed, password);
             await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", Buffer.concat([CUM4, Buffer.from(Buffer.from(passed).toString("base64"))])), () => Promise.resolve(0), 0]);
             break;
           case 2:
             await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", Buffer.concat([CUM5, Buffer.from(Buffer.from(passed).toString("base64"))])), () => Promise.resolve(0), 0]);
             break;
           case 3:
             await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", Buffer.concat([CUM6, Buffer.from(Buffer.from(passed).toString("base64"))])), () => Promise.resolve(0), 0]);
             break;
           case 4:
             await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", Buffer.concat([CUM7, Buffer.from(bs58.encode(passed))])), () => Promise.resolve(0), 0]);
             break;
         }
         magic = true;
       }
       await encoder.insertchunk([name, chunk, crc, offset]);
     }
     await encoder.insertchunk(["IEND",
       async () => Promise.resolve(buildChunk("IEND", Buffer.from([]))),
       async () => Promise.resolve(0),
       0]);
     return extract();
+  }
+  let pdec = new PNGDecoder(container.stream().getReader());
+  const concat: Buffer[] = [];
+  for await (const chk of pdec.chunks())
+    if (chk[0] == "IDAT")
+      concat.push((await chk[1]()).slice(4));
+  const comp = Buffer.concat(concat);
+  const head = comp.slice(0, 2); // keep the header the same
+  const chksum = comp.slice(-4); // checksum is over the uncompressed data, so no need to recalculate
+  const idatblk = embedInRawDeflate(comp.slice(2, -4), injb);
+  const bws = BufferWriteStream();
+  const [writestream, extract] = BufferWriteStream();
+  const penc = new PNGEncoder(writestream);
+  pdec = new PNGDecoder(container.stream().getReader()); // restart again
+  let ins = false;
+  for await (const chk of pdec.chunks()) {
+    if (chk[0] != "IDAT") {
+      await penc.insertchunk(chk);
+    } else {
+      if (!ins) {
+        await penc.insertchunk(["IDAT", async () => Buffer.concat([Buffer.from('IDAT'), head, idatblk, chksum]), () => Promise.resolve(0), 0]);
+        ins = true;
+      }
+    }
+  }
+  return extract();
 };
 const inject = async (container: File, links: string[]) => {
@@ -203,54 +353,8 @@ const inject = async (container: File, links: string[]) => {
 };
 const has_embed = async (png: Buffer) => {
-  const reader = BufferReadStream(png).getReader();
-  const sneed = new PNGDecoder(reader);
-  try {
-    for await (const [name, chunk, crc, offset] of sneed.chunks()) {
-      let buff: Buffer;
-      switch (name) {
-        // should exist at the beginning of file to signal decoders if the file indeed has an embedded chunk
-        case 'tEXt':
-          buff = await chunk();
-          if (buff.slice(4, 4 + CUM3.length).equals(CUM3))
-            return true;
-          if (buff.slice(4, 4 + CUM4.length).equals(CUM4))
-            return true;
-          if (buff.slice(4, 4 + CUM5.length).equals(CUM5))
-            return true;
-          if (buff.slice(4, 4 + CUM6.length).equals(CUM6)) {
-            const passed = buff.slice(4 + CUM6.length).toString();
-            if (passed.match(/^[0-9a-zA-Z+/=]+$/g)) {
-              if (Buffer.from(passed, "base64").toString().split(" ").every(l => l[0] in rprefs))
-                return true;
-            }
-          }
-          if (buff.slice(4, 4 + CUM7.length).equals(CUM7)) {
-            const passed = buff.slice(4 + CUM7.length).toString();
-            if (passed.match(/^[0-9a-zA-Z+/=]+$/g)) {
-              // base 58 is a subset of base64 so test that first
-              if (Buffer.from(passed, "base64").toString().split(" ").every(l => l[0] in rprefs))
-                return true;
-              if (Buffer.from(bs58.decode(passed)).toString().split(" ").every(l => l[0] in rprefs))
-                return true;
-            }
-          }
-          break;
-        case 'IDAT':
-          // eslint-disable-next-line no-fallthrough
-        case 'IEND':
-          return false; // Didn't find tExt Chunk; Definite no
-          // eslint-disable-next-line no-fallthrough
-        default:
-          break;
-      }
-    }
-    // stream ended on chunk boundary, so no unexpected EOF was fired, need more data anyway
-  } catch (e) {
-    return; // possibly unexpected EOF, need more data to decide
-  } finally {
-    reader.releaseLock();
-  }
+  const r = await extract(png);
+  return !!r;
 };
 export default {
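How method 5 fits together: `inject_data` concatenates the IDAT payloads, strips the zlib container, rewrites the raw DEFLATE body through `embedInRawDeflate`, and re-emits a single IDAT as header + rewritten body + original trailer, while `extract` does the reverse, falling back to `extractFromRawDeflate` on the concatenated IDATs when no tEXt marker is found. The 2-byte/4-byte slicing relies on standard zlib framing (RFC 1950), sketched below with a hypothetical helper; since the hidden bits never change the decompressed pixels, the Adler-32 trailer stays valid and is copied verbatim.

```ts
// zlib framing (RFC 1950): 2-byte header, raw DEFLATE body, 4-byte
// Adler-32 of the *decompressed* data. Hypothetical helper mirroring
// the head/chksum slicing in inject_data above.
const splitZlib = (z: Buffer) => ({
  header: z.slice(0, 2),    // CMF/FLG, e.g. 0x78 0x9c
  deflate: z.slice(2, -4),  // the bit-stream embedInRawDeflate rewrites
  adler32: z.slice(-4),     // checksum over the decompressed bytes
});
```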

src/stores.ts (2)

@@ -40,7 +40,7 @@ const localSet = (key: string, value: any) => {
 export const initial_settings = localLoad('settingsv2', {
   loop: true,
   dh: false,
-  pmeth: 4,
+  pmeth: 5,
   xpv: false,
   xpi: false,
   hyd: false,

src/utils.ts (4)

@@ -155,12 +155,16 @@ threadDataCache.subscribe(newval => {
 });
 export const refreshThreadDataCache = async (board: string, op: number) => {
+  console.log('refreshing thread info cache...');
   threadDataCache.set(await getThreadInfo(board, op));
 };
 export const getThreadDataCache = async (board: string, op: number) => {
   if (!cthreadDataCache)
     await refreshThreadDataCache(board, op);
+  else {
+    console.log('skipped thread cache refresh...');
+  }
   return threadDataCache;
 };
