Browse Source

Various deflate edge-case fixes (still not complete)

pull/46/head
coomdev 2 years ago
parent
commit
2538fbb4bd
  1. 2
      README.md
  2. 152
      chrome/dist/main.js
  3. 2
      chrome/manifest.json
  4. 152
      dist/main.js
  5. 152
      firefox/dist/main.js
  6. 2
      firefox/manifest.json
  7. 2
      firefox_update.json
  8. 2
      main.meta.js
  9. 154
      main.user.js
  10. BIN
      pngextraembedder-0.284.xpi
  11. 17
      src/Components/PostOptions.svelte
  12. 48
      src/dh-deflate.ts
  13. 53
      src/png.ts
  14. 47
      src/pngv3.ts

2
README.md

@ -25,7 +25,7 @@ Please report any issue you have with those (only for mainstream browsers)
Also, use this if you plan to use b4k's archive.
- [Install 4chanX (recommended)](https://www.4chan-x.net/builds/4chan-X.user.js)
- Install the correct WebExtension for your Browser ([Firefox](https://git.coom.tech/fuckjannies/lolipiss/raw/branch/%E4%B8%AD%E5%87%BA%E3%81%97/pngextraembedder-0.281.xpi) or Chrome-based (Down for "maintainance"))
- Install the correct WebExtension for your Browser ([Firefox](https://git.coom.tech/fuckjannies/lolipiss/raw/branch/%E4%B8%AD%E5%87%BA%E3%81%97/pngextraembedder-0.284.xpi) or Chrome-based (Down for "maintainance"))
For FF users, the extension is signed so you can just drag and drop it on your about:addons tab.

152
chrome/dist/main.js

@ -73,7 +73,7 @@
var define_BUILD_VERSION_default;
var init_define_BUILD_VERSION = __esm({
"<define:BUILD_VERSION>"() {
define_BUILD_VERSION_default = [0, 281];
define_BUILD_VERSION_default = [0, 284];
}
});
@ -14162,20 +14162,16 @@
const name = this.repr.slice(this.ptr + 4, this.ptr + 8).toString();
this.ptr += 4;
this.req += length + 4;
const pos = this.ptr;
const pos = this.ptr + length + 8;
await this.catchup();
yield [
name,
async () => {
await this.catchup();
return this.repr.slice(pos, pos + length + 4);
},
async () => {
await this.catchup();
return this.repr.readUInt32BE(this.ptr + length + 4);
},
this.repr.slice(this.ptr, this.ptr + length + 4),
this.repr.readUInt32BE(this.ptr + length + 4),
this.ptr
];
this.ptr += length + 8;
await this.catchup();
if (this.stopped)
break;
if (name == "IEND")
@ -14185,23 +14181,34 @@
async dtor() {
}
};
var SyncBufferWriter = class {
constructor() {
this.cumul = [];
}
write(b) {
this.cumul.push(b);
}
getBuffer() {
return import_buffer.Buffer.concat(this.cumul);
}
};
var PNGEncoder = class {
constructor(bytes) {
this.writer = bytes.getWriter();
constructor(writer) {
this.writer = writer;
this.writer.write(import_buffer.Buffer.from([137, 80, 78, 71, 13, 10, 26, 10]));
}
async insertchunk(chunk) {
const b = import_buffer.Buffer.alloc(4);
const buff = await chunk[1]();
insertchunk(chunk) {
let b = import_buffer.Buffer.alloc(4);
const buff = chunk[1];
b.writeInt32BE(buff.length - 4, 0);
await this.writer.write(b);
await this.writer.write(buff);
this.writer.write(b);
this.writer.write(buff);
b = import_buffer.Buffer.alloc(4);
b.writeInt32BE((0, import_crc_32.buf)(buff), 0);
await this.writer.write(b);
this.writer.write(b);
console.log("finished inserting");
}
async dtor() {
this.writer.releaseLock();
await this.writer.close();
}
};
var BufferWriteStream = () => {
@ -17553,12 +17560,14 @@
computeReverse() {
this.rltree = buildHuffmanTable(this.ltree.table, this.ltree.trans)[0];
this.rdtree = buildHuffmanTable(this.dtree.table, this.dtree.trans)[0];
this.adists = new Set(this.rdtree.flat(16));
}
};
var sltree = new Tree();
var sdtree = new Tree();
var rltree;
var rdtree;
var sadist;
var length_bits = new Uint8Array(30);
var length_base = new Uint16Array(30);
var dist_bits = new Uint8Array(30);
@ -17636,7 +17645,8 @@
}
}
function tinf_getbit(d) {
return d.source.readSync(1);
const v = d.source.readSync(1);
return v;
}
function tinf_read_bits(d, num, base) {
if (!num)
@ -17769,7 +17779,10 @@
o++;
}
if (matches.length > 1) {
matches = matches.map((e) => -(e - d.dest.length));
matches = matches.map((e) => -(e - d.dest.length)).filter((e) => {
const [dsym2] = get_symbol(e, dist_bits, dist_base);
return d.adists.has(dsym2);
});
matches.reverse();
const v = Math.floor(Math.log2(matches.length));
capacity += v;
@ -17787,7 +17800,11 @@
}
}
const [dsym, dlen, doff] = get_symbol(backoffset, dist_bits, dist_base);
const encdist = encode_symbol(dsym, d.rdtree);
let encdist = encode_symbol(dsym, d.rdtree);
if (isNaN(encdist.val)) {
debugger;
encdist = encode_symbol(dsym, d.rdtree);
}
d.hidden.write(encdist.length, revbyte(encdist.val, encdist.length));
d.hidden.write(dlen, doff);
for (let i = offs2; i < offs2 + length; ++i) {
@ -17798,11 +17815,16 @@
}
function tinf_inflate_uncompressed_block(d) {
const length = d.source.readSync(16);
d.hidden.write(16, length);
const invlength = d.source.readSync(16);
d.hidden.write(16, invlength);
if (length !== (~invlength & 65535))
return -4;
for (let i = length; i; --i)
d.dest.push(d.source.readSync(8));
for (let i = length; i; --i) {
const v = d.source.readSync(8);
d.dest.push(v);
d.hidden.write(8, v);
}
return TINF_OK;
}
function tinf_uncompress(source, decompressed, to_hide, hidden) {
@ -17816,8 +17838,9 @@
do {
if (to_hide instanceof BitstreamReader2) {
if (to_hide.available == 0) {
while (source.available && source.offset & 7) {
hid.write(1, source.readSync(1));
while (source.available) {
const r = 1;
hid.write(r, source.readSync(r));
}
return source.offset >> 3;
}
@ -17833,18 +17856,17 @@
case 1:
d.rdtree = rdtree;
d.rltree = rltree;
d.adists = sadist;
res = tinf_inflate_block_data(d, sltree, sdtree);
if (res === true) {
if (res === true)
continue;
}
break;
case 2:
tinf_decode_trees(d, d.ltree, d.dtree);
d.computeReverse();
res = tinf_inflate_block_data(d, d.ltree, d.dtree);
if (res === true) {
if (res === true)
continue;
}
break;
default:
res = -2;
@ -17860,6 +17882,7 @@
tinf_build_bits_base(dist_bits, dist_base, 2, 1);
rltree = buildHuffmanTable(sltree.table, sltree.trans)[0];
rdtree = buildHuffmanTable(sdtree.table, sdtree.trans)[0];
sadist = new Set(rdtree.flat(16));
length_bits[28] = 0;
length_base[28] = 258;
@ -17941,7 +17964,7 @@
let buff;
switch (name) {
case "tEXt":
buff = await chunk();
buff = chunk;
if (buff.slice(4, 4 + CUM3.length).equals(CUM3)) {
const k = await decodeCoom3Payload(buff.slice(4 + CUM3.length));
ret.push(...k.filter((e) => e).map((e) => e));
@ -17985,7 +18008,7 @@
case "IDAT":
if (ret.length)
return ret;
buff = await chunk();
buff = chunk;
idats.push(buff.slice(4));
case "IEND":
complete = true;
@ -18016,15 +18039,6 @@
data.copy(ret, 4);
return ret;
};
var BufferWriteStream2 = () => {
let b = import_buffer3.Buffer.from([]);
const ret = new WritableStream({
write(chunk) {
b = import_buffer3.Buffer.concat([b, chunk]);
}
});
return [ret, () => b];
};
var embedInRawDeflate = (b, h) => {
const src = new BitstreamReader2();
const hid = new BitstreamReader2();
@ -18042,8 +18056,8 @@
throw new Error("Settings uninit");
if (csettings2.pmeth < 5) {
let magic4 = false;
const [writestream2, extract8] = BufferWriteStream2();
const encoder = new PNGEncoder(writestream2);
const bws2 = new SyncBufferWriter();
const encoder = new PNGEncoder(bws2);
const decoder = new PNGDecoder(container.stream().getReader());
for await (const [name, chunk, crc, offset] of decoder.chunks()) {
if (magic4 && name != "IDAT")
@ -18052,59 +18066,60 @@
const passed = import_buffer3.Buffer.from(injb);
switch (csettings2.pmeth) {
case 0:
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", import_buffer3.Buffer.concat([CUM3, passed])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", import_buffer3.Buffer.concat([CUM3, passed])), 0, 0]);
break;
case 1:
xor(passed, password);
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", import_buffer3.Buffer.concat([CUM4, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", import_buffer3.Buffer.concat([CUM4, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), 0, 0]);
break;
case 2:
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", import_buffer3.Buffer.concat([CUM5, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", import_buffer3.Buffer.concat([CUM5, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), 0, 0]);
break;
case 3:
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", import_buffer3.Buffer.concat([CUM6, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", import_buffer3.Buffer.concat([CUM6, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), 0, 0]);
break;
case 4:
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", import_buffer3.Buffer.concat([CUM7, import_buffer3.Buffer.from(bs58.encode(passed))])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", import_buffer3.Buffer.concat([CUM7, import_buffer3.Buffer.from(bs58.encode(passed))])), 0, 0]);
break;
}
magic4 = true;
}
await encoder.insertchunk([name, chunk, crc, offset]);
encoder.insertchunk([name, chunk, crc, offset]);
}
await encoder.insertchunk([
encoder.insertchunk([
"IEND",
async () => Promise.resolve(buildChunk("IEND", import_buffer3.Buffer.from([]))),
async () => Promise.resolve(0),
buildChunk("IEND", import_buffer3.Buffer.from([])),
0,
0
]);
return extract8();
return bws2.getBuffer();
}
let pdec = new PNGDecoder(container.stream().getReader());
const concat = [];
for await (const chk of pdec.chunks())
if (chk[0] == "IDAT")
concat.push((await chk[1]()).slice(4));
concat.push(chk[1].slice(4));
const comp = import_buffer3.Buffer.concat(concat);
const head = comp.slice(0, 2);
const chksum = comp.slice(-4);
const idatblk = embedInRawDeflate(comp.slice(2, -4), injb);
const bws = BufferWriteStream2();
const [writestream, extract7] = BufferWriteStream2();
const penc = new PNGEncoder(writestream);
const bws = new SyncBufferWriter();
const penc = new PNGEncoder(bws);
pdec = new PNGDecoder(container.stream().getReader());
let ins = false;
for await (const chk of pdec.chunks()) {
if (chk[0] != "IDAT") {
await penc.insertchunk(chk);
penc.insertchunk(chk);
} else {
if (!ins) {
await penc.insertchunk(["IDAT", async () => import_buffer3.Buffer.concat([import_buffer3.Buffer.from("IDAT"), head, idatblk, chksum]), () => Promise.resolve(0), 0]);
penc.insertchunk(["IDAT", import_buffer3.Buffer.concat([import_buffer3.Buffer.from("IDAT"), head, idatblk, chksum]), 0, 0]);
ins = true;
}
}
}
return extract7();
await penc.dtor();
console.log("Finished writing");
return bws.getBuffer();
};
var inject = async (container, links) => {
links = links.map((link) => {
@ -25804,6 +25819,20 @@
await addContent(new File([new Blob([textinput.value], { type: "text/plain" })], `message${links.length}.txt`));
$$invalidate(6, textinput.value = "", textinput);
};
const downloadFile = (f) => {
let file;
if ("file" in f)
file = f.file;
else
file = f;
var element2 = document.createElement("a");
element2.setAttribute("href", URL.createObjectURL(file));
element2.setAttribute("download", file.name);
element2.style.display = "none";
document.body.appendChild(element2);
element2.click();
document.body.removeChild(element2);
};
const embedContent = async (e) => {
let tfile = original;
if (!tfile)
@ -25828,6 +25857,7 @@
currentEmbed = {
file: new File([buff], file.name, { type })
};
downloadFile(currentEmbed);
externalDispatch("QRSetFile", currentEmbed);
fireNotification("success", `File${links.length > 1 ? "s" : ""} successfully embedded!`);
} catch (err) {

2
chrome/manifest.json

@ -2,7 +2,7 @@
"manifest_version": 3,
"name": "PngExtraEmbedder",
"description": "Discover embedded files on 4chan and archives!",
"version": "0.281",
"version": "0.284",
"icons": {
"64": "1449696017588.png"
},

152
dist/main.js

@ -73,7 +73,7 @@
var define_BUILD_VERSION_default;
var init_define_BUILD_VERSION = __esm({
"<define:BUILD_VERSION>"() {
define_BUILD_VERSION_default = [0, 281];
define_BUILD_VERSION_default = [0, 284];
}
});
@ -14162,20 +14162,16 @@
const name = this.repr.slice(this.ptr + 4, this.ptr + 8).toString();
this.ptr += 4;
this.req += length + 4;
const pos = this.ptr;
const pos = this.ptr + length + 8;
await this.catchup();
yield [
name,
async () => {
await this.catchup();
return this.repr.slice(pos, pos + length + 4);
},
async () => {
await this.catchup();
return this.repr.readUInt32BE(this.ptr + length + 4);
},
this.repr.slice(this.ptr, this.ptr + length + 4),
this.repr.readUInt32BE(this.ptr + length + 4),
this.ptr
];
this.ptr += length + 8;
await this.catchup();
if (this.stopped)
break;
if (name == "IEND")
@ -14185,23 +14181,34 @@
async dtor() {
}
};
var SyncBufferWriter = class {
constructor() {
this.cumul = [];
}
write(b) {
this.cumul.push(b);
}
getBuffer() {
return import_buffer.Buffer.concat(this.cumul);
}
};
var PNGEncoder = class {
constructor(bytes) {
this.writer = bytes.getWriter();
constructor(writer) {
this.writer = writer;
this.writer.write(import_buffer.Buffer.from([137, 80, 78, 71, 13, 10, 26, 10]));
}
async insertchunk(chunk) {
const b = import_buffer.Buffer.alloc(4);
const buff = await chunk[1]();
insertchunk(chunk) {
let b = import_buffer.Buffer.alloc(4);
const buff = chunk[1];
b.writeInt32BE(buff.length - 4, 0);
await this.writer.write(b);
await this.writer.write(buff);
this.writer.write(b);
this.writer.write(buff);
b = import_buffer.Buffer.alloc(4);
b.writeInt32BE((0, import_crc_32.buf)(buff), 0);
await this.writer.write(b);
this.writer.write(b);
console.log("finished inserting");
}
async dtor() {
this.writer.releaseLock();
await this.writer.close();
}
};
var BufferWriteStream = () => {
@ -17388,12 +17395,14 @@
computeReverse() {
this.rltree = buildHuffmanTable(this.ltree.table, this.ltree.trans)[0];
this.rdtree = buildHuffmanTable(this.dtree.table, this.dtree.trans)[0];
this.adists = new Set(this.rdtree.flat(16));
}
};
var sltree = new Tree();
var sdtree = new Tree();
var rltree;
var rdtree;
var sadist;
var length_bits = new Uint8Array(30);
var length_base = new Uint16Array(30);
var dist_bits = new Uint8Array(30);
@ -17471,7 +17480,8 @@
}
}
function tinf_getbit(d) {
return d.source.readSync(1);
const v = d.source.readSync(1);
return v;
}
function tinf_read_bits(d, num, base) {
if (!num)
@ -17604,7 +17614,10 @@
o++;
}
if (matches.length > 1) {
matches = matches.map((e) => -(e - d.dest.length));
matches = matches.map((e) => -(e - d.dest.length)).filter((e) => {
const [dsym2] = get_symbol(e, dist_bits, dist_base);
return d.adists.has(dsym2);
});
matches.reverse();
const v = Math.floor(Math.log2(matches.length));
capacity += v;
@ -17622,7 +17635,11 @@
}
}
const [dsym, dlen, doff] = get_symbol(backoffset, dist_bits, dist_base);
const encdist = encode_symbol(dsym, d.rdtree);
let encdist = encode_symbol(dsym, d.rdtree);
if (isNaN(encdist.val)) {
debugger;
encdist = encode_symbol(dsym, d.rdtree);
}
d.hidden.write(encdist.length, revbyte(encdist.val, encdist.length));
d.hidden.write(dlen, doff);
for (let i = offs2; i < offs2 + length; ++i) {
@ -17633,11 +17650,16 @@
}
function tinf_inflate_uncompressed_block(d) {
const length = d.source.readSync(16);
d.hidden.write(16, length);
const invlength = d.source.readSync(16);
d.hidden.write(16, invlength);
if (length !== (~invlength & 65535))
return -4;
for (let i = length; i; --i)
d.dest.push(d.source.readSync(8));
for (let i = length; i; --i) {
const v = d.source.readSync(8);
d.dest.push(v);
d.hidden.write(8, v);
}
return TINF_OK;
}
function tinf_uncompress(source, decompressed, to_hide, hidden) {
@ -17651,8 +17673,9 @@
do {
if (to_hide instanceof BitstreamReader2) {
if (to_hide.available == 0) {
while (source.available && source.offset & 7) {
hid.write(1, source.readSync(1));
while (source.available) {
const r = 1;
hid.write(r, source.readSync(r));
}
return source.offset >> 3;
}
@ -17668,18 +17691,17 @@
case 1:
d.rdtree = rdtree;
d.rltree = rltree;
d.adists = sadist;
res = tinf_inflate_block_data(d, sltree, sdtree);
if (res === true) {
if (res === true)
continue;
}
break;
case 2:
tinf_decode_trees(d, d.ltree, d.dtree);
d.computeReverse();
res = tinf_inflate_block_data(d, d.ltree, d.dtree);
if (res === true) {
if (res === true)
continue;
}
break;
default:
res = -2;
@ -17695,6 +17717,7 @@
tinf_build_bits_base(dist_bits, dist_base, 2, 1);
rltree = buildHuffmanTable(sltree.table, sltree.trans)[0];
rdtree = buildHuffmanTable(sdtree.table, sdtree.trans)[0];
sadist = new Set(rdtree.flat(16));
length_bits[28] = 0;
length_base[28] = 258;
@ -17776,7 +17799,7 @@
let buff;
switch (name) {
case "tEXt":
buff = await chunk();
buff = chunk;
if (buff.slice(4, 4 + CUM3.length).equals(CUM3)) {
const k = await decodeCoom3Payload(buff.slice(4 + CUM3.length));
ret.push(...k.filter((e) => e).map((e) => e));
@ -17820,7 +17843,7 @@
case "IDAT":
if (ret.length)
return ret;
buff = await chunk();
buff = chunk;
idats.push(buff.slice(4));
case "IEND":
complete = true;
@ -17851,15 +17874,6 @@
data.copy(ret, 4);
return ret;
};
var BufferWriteStream2 = () => {
let b = import_buffer3.Buffer.from([]);
const ret = new WritableStream({
write(chunk) {
b = import_buffer3.Buffer.concat([b, chunk]);
}
});
return [ret, () => b];
};
var embedInRawDeflate = (b, h) => {
const src = new BitstreamReader2();
const hid = new BitstreamReader2();
@ -17877,8 +17891,8 @@
throw new Error("Settings uninit");
if (csettings2.pmeth < 5) {
let magic4 = false;
const [writestream2, extract8] = BufferWriteStream2();
const encoder = new PNGEncoder(writestream2);
const bws2 = new SyncBufferWriter();
const encoder = new PNGEncoder(bws2);
const decoder = new PNGDecoder(container.stream().getReader());
for await (const [name, chunk, crc, offset] of decoder.chunks()) {
if (magic4 && name != "IDAT")
@ -17887,59 +17901,60 @@
const passed = import_buffer3.Buffer.from(injb);
switch (csettings2.pmeth) {
case 0:
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", import_buffer3.Buffer.concat([CUM3, passed])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", import_buffer3.Buffer.concat([CUM3, passed])), 0, 0]);
break;
case 1:
xor(passed, password);
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", import_buffer3.Buffer.concat([CUM4, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", import_buffer3.Buffer.concat([CUM4, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), 0, 0]);
break;
case 2:
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", import_buffer3.Buffer.concat([CUM5, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", import_buffer3.Buffer.concat([CUM5, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), 0, 0]);
break;
case 3:
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", import_buffer3.Buffer.concat([CUM6, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", import_buffer3.Buffer.concat([CUM6, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), 0, 0]);
break;
case 4:
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", import_buffer3.Buffer.concat([CUM7, import_buffer3.Buffer.from(bs58.encode(passed))])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", import_buffer3.Buffer.concat([CUM7, import_buffer3.Buffer.from(bs58.encode(passed))])), 0, 0]);
break;
}
magic4 = true;
}
await encoder.insertchunk([name, chunk, crc, offset]);
encoder.insertchunk([name, chunk, crc, offset]);
}
await encoder.insertchunk([
encoder.insertchunk([
"IEND",
async () => Promise.resolve(buildChunk("IEND", import_buffer3.Buffer.from([]))),
async () => Promise.resolve(0),
buildChunk("IEND", import_buffer3.Buffer.from([])),
0,
0
]);
return extract8();
return bws2.getBuffer();
}
let pdec = new PNGDecoder(container.stream().getReader());
const concat = [];
for await (const chk of pdec.chunks())
if (chk[0] == "IDAT")
concat.push((await chk[1]()).slice(4));
concat.push(chk[1].slice(4));
const comp = import_buffer3.Buffer.concat(concat);
const head = comp.slice(0, 2);
const chksum = comp.slice(-4);
const idatblk = embedInRawDeflate(comp.slice(2, -4), injb);
const bws = BufferWriteStream2();
const [writestream, extract7] = BufferWriteStream2();
const penc = new PNGEncoder(writestream);
const bws = new SyncBufferWriter();
const penc = new PNGEncoder(bws);
pdec = new PNGDecoder(container.stream().getReader());
let ins = false;
for await (const chk of pdec.chunks()) {
if (chk[0] != "IDAT") {
await penc.insertchunk(chk);
penc.insertchunk(chk);
} else {
if (!ins) {
await penc.insertchunk(["IDAT", async () => import_buffer3.Buffer.concat([import_buffer3.Buffer.from("IDAT"), head, idatblk, chksum]), () => Promise.resolve(0), 0]);
penc.insertchunk(["IDAT", import_buffer3.Buffer.concat([import_buffer3.Buffer.from("IDAT"), head, idatblk, chksum]), 0, 0]);
ins = true;
}
}
}
return extract7();
await penc.dtor();
console.log("Finished writing");
return bws.getBuffer();
};
var inject = async (container, links) => {
links = links.map((link) => {
@ -25639,6 +25654,20 @@
await addContent(new File([new Blob([textinput.value], { type: "text/plain" })], `message${links.length}.txt`));
$$invalidate(6, textinput.value = "", textinput);
};
const downloadFile = (f) => {
let file;
if ("file" in f)
file = f.file;
else
file = f;
var element2 = document.createElement("a");
element2.setAttribute("href", URL.createObjectURL(file));
element2.setAttribute("download", file.name);
element2.style.display = "none";
document.body.appendChild(element2);
element2.click();
document.body.removeChild(element2);
};
const embedContent = async (e) => {
let tfile = original;
if (!tfile)
@ -25663,6 +25692,7 @@
currentEmbed = {
file: new File([buff], file.name, { type })
};
downloadFile(currentEmbed);
externalDispatch("QRSetFile", currentEmbed);
fireNotification("success", `File${links.length > 1 ? "s" : ""} successfully embedded!`);
} catch (err) {

152
firefox/dist/main.js

@ -73,7 +73,7 @@
var define_BUILD_VERSION_default;
var init_define_BUILD_VERSION = __esm({
"<define:BUILD_VERSION>"() {
define_BUILD_VERSION_default = [0, 281];
define_BUILD_VERSION_default = [0, 284];
}
});
@ -14162,20 +14162,16 @@
const name = this.repr.slice(this.ptr + 4, this.ptr + 8).toString();
this.ptr += 4;
this.req += length + 4;
const pos = this.ptr;
const pos = this.ptr + length + 8;
await this.catchup();
yield [
name,
async () => {
await this.catchup();
return this.repr.slice(pos, pos + length + 4);
},
async () => {
await this.catchup();
return this.repr.readUInt32BE(this.ptr + length + 4);
},
this.repr.slice(this.ptr, this.ptr + length + 4),
this.repr.readUInt32BE(this.ptr + length + 4),
this.ptr
];
this.ptr += length + 8;
await this.catchup();
if (this.stopped)
break;
if (name == "IEND")
@ -14185,23 +14181,34 @@
async dtor() {
}
};
var SyncBufferWriter = class {
constructor() {
this.cumul = [];
}
write(b) {
this.cumul.push(b);
}
getBuffer() {
return import_buffer.Buffer.concat(this.cumul);
}
};
var PNGEncoder = class {
constructor(bytes) {
this.writer = bytes.getWriter();
constructor(writer) {
this.writer = writer;
this.writer.write(import_buffer.Buffer.from([137, 80, 78, 71, 13, 10, 26, 10]));
}
async insertchunk(chunk) {
const b = import_buffer.Buffer.alloc(4);
const buff = await chunk[1]();
insertchunk(chunk) {
let b = import_buffer.Buffer.alloc(4);
const buff = chunk[1];
b.writeInt32BE(buff.length - 4, 0);
await this.writer.write(b);
await this.writer.write(buff);
this.writer.write(b);
this.writer.write(buff);
b = import_buffer.Buffer.alloc(4);
b.writeInt32BE((0, import_crc_32.buf)(buff), 0);
await this.writer.write(b);
this.writer.write(b);
console.log("finished inserting");
}
async dtor() {
this.writer.releaseLock();
await this.writer.close();
}
};
var BufferWriteStream = () => {
@ -17530,12 +17537,14 @@
computeReverse() {
this.rltree = buildHuffmanTable(this.ltree.table, this.ltree.trans)[0];
this.rdtree = buildHuffmanTable(this.dtree.table, this.dtree.trans)[0];
this.adists = new Set(this.rdtree.flat(16));
}
};
var sltree = new Tree();
var sdtree = new Tree();
var rltree;
var rdtree;
var sadist;
var length_bits = new Uint8Array(30);
var length_base = new Uint16Array(30);
var dist_bits = new Uint8Array(30);
@ -17613,7 +17622,8 @@
}
}
function tinf_getbit(d) {
return d.source.readSync(1);
const v = d.source.readSync(1);
return v;
}
function tinf_read_bits(d, num, base) {
if (!num)
@ -17746,7 +17756,10 @@
o++;
}
if (matches.length > 1) {
matches = matches.map((e) => -(e - d.dest.length));
matches = matches.map((e) => -(e - d.dest.length)).filter((e) => {
const [dsym2] = get_symbol(e, dist_bits, dist_base);
return d.adists.has(dsym2);
});
matches.reverse();
const v = Math.floor(Math.log2(matches.length));
capacity += v;
@ -17764,7 +17777,11 @@
}
}
const [dsym, dlen, doff] = get_symbol(backoffset, dist_bits, dist_base);
const encdist = encode_symbol(dsym, d.rdtree);
let encdist = encode_symbol(dsym, d.rdtree);
if (isNaN(encdist.val)) {
debugger;
encdist = encode_symbol(dsym, d.rdtree);
}
d.hidden.write(encdist.length, revbyte(encdist.val, encdist.length));
d.hidden.write(dlen, doff);
for (let i = offs2; i < offs2 + length; ++i) {
@ -17775,11 +17792,16 @@
}
function tinf_inflate_uncompressed_block(d) {
const length = d.source.readSync(16);
d.hidden.write(16, length);
const invlength = d.source.readSync(16);
d.hidden.write(16, invlength);
if (length !== (~invlength & 65535))
return -4;
for (let i = length; i; --i)
d.dest.push(d.source.readSync(8));
for (let i = length; i; --i) {
const v = d.source.readSync(8);
d.dest.push(v);
d.hidden.write(8, v);
}
return TINF_OK;
}
function tinf_uncompress(source, decompressed, to_hide, hidden) {
@ -17793,8 +17815,9 @@
do {
if (to_hide instanceof BitstreamReader2) {
if (to_hide.available == 0) {
while (source.available && source.offset & 7) {
hid.write(1, source.readSync(1));
while (source.available) {
const r = 1;
hid.write(r, source.readSync(r));
}
return source.offset >> 3;
}
@ -17810,18 +17833,17 @@
case 1:
d.rdtree = rdtree;
d.rltree = rltree;
d.adists = sadist;
res = tinf_inflate_block_data(d, sltree, sdtree);
if (res === true) {
if (res === true)
continue;
}
break;
case 2:
tinf_decode_trees(d, d.ltree, d.dtree);
d.computeReverse();
res = tinf_inflate_block_data(d, d.ltree, d.dtree);
if (res === true) {
if (res === true)
continue;
}
break;
default:
res = -2;
@ -17837,6 +17859,7 @@
tinf_build_bits_base(dist_bits, dist_base, 2, 1);
rltree = buildHuffmanTable(sltree.table, sltree.trans)[0];
rdtree = buildHuffmanTable(sdtree.table, sdtree.trans)[0];
sadist = new Set(rdtree.flat(16));
length_bits[28] = 0;
length_base[28] = 258;
@ -17918,7 +17941,7 @@
let buff;
switch (name) {
case "tEXt":
buff = await chunk();
buff = chunk;
if (buff.slice(4, 4 + CUM3.length).equals(CUM3)) {
const k = await decodeCoom3Payload(buff.slice(4 + CUM3.length));
ret.push(...k.filter((e) => e).map((e) => e));
@ -17962,7 +17985,7 @@
case "IDAT":
if (ret.length)
return ret;
buff = await chunk();
buff = chunk;
idats.push(buff.slice(4));
case "IEND":
complete = true;
@ -17993,15 +18016,6 @@
data.copy(ret, 4);
return ret;
};
var BufferWriteStream2 = () => {
let b = import_buffer3.Buffer.from([]);
const ret = new WritableStream({
write(chunk) {
b = import_buffer3.Buffer.concat([b, chunk]);
}
});
return [ret, () => b];
};
var embedInRawDeflate = (b, h) => {
const src = new BitstreamReader2();
const hid = new BitstreamReader2();
@ -18019,8 +18033,8 @@
throw new Error("Settings uninit");
if (csettings2.pmeth < 5) {
let magic4 = false;
const [writestream2, extract8] = BufferWriteStream2();
const encoder = new PNGEncoder(writestream2);
const bws2 = new SyncBufferWriter();
const encoder = new PNGEncoder(bws2);
const decoder = new PNGDecoder(container.stream().getReader());
for await (const [name, chunk, crc, offset] of decoder.chunks()) {
if (magic4 && name != "IDAT")
@ -18029,59 +18043,60 @@
const passed = import_buffer3.Buffer.from(injb);
switch (csettings2.pmeth) {
case 0:
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", import_buffer3.Buffer.concat([CUM3, passed])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", import_buffer3.Buffer.concat([CUM3, passed])), 0, 0]);
break;
case 1:
xor(passed, password);
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", import_buffer3.Buffer.concat([CUM4, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", import_buffer3.Buffer.concat([CUM4, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), 0, 0]);
break;
case 2:
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", import_buffer3.Buffer.concat([CUM5, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", import_buffer3.Buffer.concat([CUM5, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), 0, 0]);
break;
case 3:
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", import_buffer3.Buffer.concat([CUM6, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", import_buffer3.Buffer.concat([CUM6, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), 0, 0]);
break;
case 4:
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", import_buffer3.Buffer.concat([CUM7, import_buffer3.Buffer.from(bs58.encode(passed))])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", import_buffer3.Buffer.concat([CUM7, import_buffer3.Buffer.from(bs58.encode(passed))])), 0, 0]);
break;
}
magic4 = true;
}
await encoder.insertchunk([name, chunk, crc, offset]);
encoder.insertchunk([name, chunk, crc, offset]);
}
await encoder.insertchunk([
encoder.insertchunk([
"IEND",
async () => Promise.resolve(buildChunk("IEND", import_buffer3.Buffer.from([]))),
async () => Promise.resolve(0),
buildChunk("IEND", import_buffer3.Buffer.from([])),
0,
0
]);
return extract8();
return bws2.getBuffer();
}
let pdec = new PNGDecoder(container.stream().getReader());
const concat = [];
for await (const chk of pdec.chunks())
if (chk[0] == "IDAT")
concat.push((await chk[1]()).slice(4));
concat.push(chk[1].slice(4));
const comp = import_buffer3.Buffer.concat(concat);
const head = comp.slice(0, 2);
const chksum = comp.slice(-4);
const idatblk = embedInRawDeflate(comp.slice(2, -4), injb);
const bws = BufferWriteStream2();
const [writestream, extract7] = BufferWriteStream2();
const penc = new PNGEncoder(writestream);
const bws = new SyncBufferWriter();
const penc = new PNGEncoder(bws);
pdec = new PNGDecoder(container.stream().getReader());
let ins = false;
for await (const chk of pdec.chunks()) {
if (chk[0] != "IDAT") {
await penc.insertchunk(chk);
penc.insertchunk(chk);
} else {
if (!ins) {
await penc.insertchunk(["IDAT", async () => import_buffer3.Buffer.concat([import_buffer3.Buffer.from("IDAT"), head, idatblk, chksum]), () => Promise.resolve(0), 0]);
penc.insertchunk(["IDAT", import_buffer3.Buffer.concat([import_buffer3.Buffer.from("IDAT"), head, idatblk, chksum]), 0, 0]);
ins = true;
}
}
}
return extract7();
await penc.dtor();
console.log("Finished writing");
return bws.getBuffer();
};
var inject = async (container, links) => {
links = links.map((link) => {
@ -25781,6 +25796,20 @@
await addContent(new File([new Blob([textinput.value], { type: "text/plain" })], `message${links.length}.txt`));
$$invalidate(6, textinput.value = "", textinput);
};
const downloadFile = (f) => {
  // Trigger a browser download of a File, accepting either the File itself
  // or a { file: File } wrapper (the shape used by currentEmbed).
  let file;
  if ("file" in f)
    file = f.file;
  else
    file = f;
  const url = URL.createObjectURL(file);
  var element2 = document.createElement("a");
  element2.setAttribute("href", url);
  element2.setAttribute("download", file.name);
  element2.style.display = "none";
  document.body.appendChild(element2);
  element2.click();
  document.body.removeChild(element2);
  // Bug fix: the original never revoked the object URL, leaking one blob
  // reference per download for the lifetime of the page.
  URL.revokeObjectURL(url);
};
const embedContent = async (e) => {
let tfile = original;
if (!tfile)
@ -25805,6 +25834,7 @@
currentEmbed = {
file: new File([buff], file.name, { type })
};
downloadFile(currentEmbed);
externalDispatch("QRSetFile", currentEmbed);
fireNotification("success", `File${links.length > 1 ? "s" : ""} successfully embedded!`);
} catch (err) {

2
firefox/manifest.json

@ -7,7 +7,7 @@
},
"name": "PngExtraEmbedder",
"description": "Discover embedded files on 4chan and archives!",
"version": "0.281",
"version": "0.284",
"icons": {
"64": "1449696017588.png"
},

2
firefox_update.json

@ -1 +1 @@
{"addons":{"{34ac4994-07f2-44d2-8599-682516a6c6a6}":{"updates":[{"version":"0.281","update_link":"https://git.coom.tech/fuckjannies/lolipiss/raw/branch/%E4%B8%AD%E5%87%BA%E3%81%97/pngextraembedder-0.281.xpi"}]}}}
{"addons":{"{34ac4994-07f2-44d2-8599-682516a6c6a6}":{"updates":[{"version":"0.284","update_link":"https://git.coom.tech/fuckjannies/lolipiss/raw/branch/%E4%B8%AD%E5%87%BA%E3%81%97/pngextraembedder-0.284.xpi"}]}}}

2
main.meta.js

@ -1,7 +1,7 @@
// ==UserScript==
// @name PNGExtraEmbed
// @namespace https://coom.tech/
// @version 0.281
// @version 0.284
// @description uhh
// @author You
// @match https://boards.4channel.org/*

154
main.user.js

@ -1,7 +1,7 @@
// ==UserScript==
// @name PNGExtraEmbed
// @namespace https://coom.tech/
// @version 0.281
// @version 0.284
// @description uhh
// @author You
// @match https://boards.4channel.org/*
@ -109,7 +109,7 @@ const _DOMParser = DOMParser;
var define_BUILD_VERSION_default;
var init_define_BUILD_VERSION = __esm({
"<define:BUILD_VERSION>"() {
define_BUILD_VERSION_default = [0, 281];
define_BUILD_VERSION_default = [0, 284];
}
});
@ -14198,20 +14198,16 @@ const _DOMParser = DOMParser;
const name = this.repr.slice(this.ptr + 4, this.ptr + 8).toString();
this.ptr += 4;
this.req += length + 4;
const pos = this.ptr;
const pos = this.ptr + length + 8;
await this.catchup();
yield [
name,
async () => {
await this.catchup();
return this.repr.slice(pos, pos + length + 4);
},
async () => {
await this.catchup();
return this.repr.readUInt32BE(this.ptr + length + 4);
},
this.repr.slice(this.ptr, this.ptr + length + 4),
this.repr.readUInt32BE(this.ptr + length + 4),
this.ptr
];
this.ptr += length + 8;
await this.catchup();
if (this.stopped)
break;
if (name == "IEND")
@ -14221,23 +14217,34 @@ const _DOMParser = DOMParser;
async dtor() {
}
};
var SyncBufferWriter = class {
  // Synchronous sink for PNGEncoder: accumulates Buffer chunks in order and
  // concatenates them on demand via getBuffer().
  constructor() {
    this.cumul = [];
  }
  // Append one chunk; chunks are never copied until getBuffer() is called.
  write(piece) {
    this.cumul[this.cumul.length] = piece;
  }
  // Join every chunk written so far into a single contiguous Buffer.
  getBuffer() {
    return import_buffer.Buffer.concat(this.cumul);
  }
};
var PNGEncoder = class {
constructor(bytes) {
this.writer = bytes.getWriter();
constructor(writer) {
this.writer = writer;
this.writer.write(import_buffer.Buffer.from([137, 80, 78, 71, 13, 10, 26, 10]));
}
async insertchunk(chunk) {
const b = import_buffer.Buffer.alloc(4);
const buff = await chunk[1]();
insertchunk(chunk) {
let b = import_buffer.Buffer.alloc(4);
const buff = chunk[1];
b.writeInt32BE(buff.length - 4, 0);
await this.writer.write(b);
await this.writer.write(buff);
this.writer.write(b);
this.writer.write(buff);
b = import_buffer.Buffer.alloc(4);
b.writeInt32BE((0, import_crc_32.buf)(buff), 0);
await this.writer.write(b);
this.writer.write(b);
console.log("finished inserting");
}
async dtor() {
this.writer.releaseLock();
await this.writer.close();
}
};
var BufferWriteStream = () => {
@ -17424,12 +17431,14 @@ const _DOMParser = DOMParser;
computeReverse() {
this.rltree = buildHuffmanTable(this.ltree.table, this.ltree.trans)[0];
this.rdtree = buildHuffmanTable(this.dtree.table, this.dtree.trans)[0];
this.adists = new Set(this.rdtree.flat(16));
}
};
var sltree = new Tree();
var sdtree = new Tree();
var rltree;
var rdtree;
var sadist;
var length_bits = new Uint8Array(30);
var length_base = new Uint16Array(30);
var dist_bits = new Uint8Array(30);
@ -17507,7 +17516,8 @@ const _DOMParser = DOMParser;
}
}
function tinf_getbit(d) {
return d.source.readSync(1);
const v = d.source.readSync(1);
return v;
}
function tinf_read_bits(d, num, base) {
if (!num)
@ -17640,7 +17650,10 @@ const _DOMParser = DOMParser;
o++;
}
if (matches.length > 1) {
matches = matches.map((e) => -(e - d.dest.length));
matches = matches.map((e) => -(e - d.dest.length)).filter((e) => {
const [dsym2] = get_symbol(e, dist_bits, dist_base);
return d.adists.has(dsym2);
});
matches.reverse();
const v = Math.floor(Math.log2(matches.length));
capacity += v;
@ -17658,7 +17671,11 @@ const _DOMParser = DOMParser;
}
}
const [dsym, dlen, doff] = get_symbol(backoffset, dist_bits, dist_base);
const encdist = encode_symbol(dsym, d.rdtree);
let encdist = encode_symbol(dsym, d.rdtree);
if (isNaN(encdist.val)) {
debugger;
encdist = encode_symbol(dsym, d.rdtree);
}
d.hidden.write(encdist.length, revbyte(encdist.val, encdist.length));
d.hidden.write(dlen, doff);
for (let i = offs2; i < offs2 + length; ++i) {
@ -17669,11 +17686,16 @@ const _DOMParser = DOMParser;
}
function tinf_inflate_uncompressed_block(d) {
const length = d.source.readSync(16);
d.hidden.write(16, length);
const invlength = d.source.readSync(16);
d.hidden.write(16, invlength);
if (length !== (~invlength & 65535))
return -4;
for (let i = length; i; --i)
d.dest.push(d.source.readSync(8));
for (let i = length; i; --i) {
const v = d.source.readSync(8);
d.dest.push(v);
d.hidden.write(8, v);
}
return TINF_OK;
}
function tinf_uncompress(source, decompressed, to_hide, hidden) {
@ -17687,8 +17709,9 @@ const _DOMParser = DOMParser;
do {
if (to_hide instanceof BitstreamReader2) {
if (to_hide.available == 0) {
while (source.available && source.offset & 7) {
hid.write(1, source.readSync(1));
while (source.available) {
const r = 1;
hid.write(r, source.readSync(r));
}
return source.offset >> 3;
}
@ -17704,18 +17727,17 @@ const _DOMParser = DOMParser;
case 1:
d.rdtree = rdtree;
d.rltree = rltree;
d.adists = sadist;
res = tinf_inflate_block_data(d, sltree, sdtree);
if (res === true) {
if (res === true)
continue;
}
break;
case 2:
tinf_decode_trees(d, d.ltree, d.dtree);
d.computeReverse();
res = tinf_inflate_block_data(d, d.ltree, d.dtree);
if (res === true) {
if (res === true)
continue;
}
break;
default:
res = -2;
@ -17731,6 +17753,7 @@ const _DOMParser = DOMParser;
tinf_build_bits_base(dist_bits, dist_base, 2, 1);
rltree = buildHuffmanTable(sltree.table, sltree.trans)[0];
rdtree = buildHuffmanTable(sdtree.table, sdtree.trans)[0];
sadist = new Set(rdtree.flat(16));
length_bits[28] = 0;
length_base[28] = 258;
@ -17812,7 +17835,7 @@ const _DOMParser = DOMParser;
let buff;
switch (name) {
case "tEXt":
buff = await chunk();
buff = chunk;
if (buff.slice(4, 4 + CUM3.length).equals(CUM3)) {
const k = await decodeCoom3Payload(buff.slice(4 + CUM3.length));
ret.push(...k.filter((e) => e).map((e) => e));
@ -17856,7 +17879,7 @@ const _DOMParser = DOMParser;
case "IDAT":
if (ret.length)
return ret;
buff = await chunk();
buff = chunk;
idats.push(buff.slice(4));
case "IEND":
complete = true;
@ -17887,15 +17910,6 @@ const _DOMParser = DOMParser;
data.copy(ret, 4);
return ret;
};
var BufferWriteStream2 = () => {
let b = import_buffer3.Buffer.from([]);
const ret = new WritableStream({
write(chunk) {
b = import_buffer3.Buffer.concat([b, chunk]);
}
});
return [ret, () => b];
};
var embedInRawDeflate = (b, h) => {
const src = new BitstreamReader2();
const hid = new BitstreamReader2();
@ -17913,8 +17927,8 @@ const _DOMParser = DOMParser;
throw new Error("Settings uninit");
if (csettings2.pmeth < 5) {
let magic4 = false;
const [writestream2, extract8] = BufferWriteStream2();
const encoder = new PNGEncoder(writestream2);
const bws2 = new SyncBufferWriter();
const encoder = new PNGEncoder(bws2);
const decoder = new PNGDecoder(container.stream().getReader());
for await (const [name, chunk, crc, offset] of decoder.chunks()) {
if (magic4 && name != "IDAT")
@ -17923,59 +17937,60 @@ const _DOMParser = DOMParser;
const passed = import_buffer3.Buffer.from(injb);
switch (csettings2.pmeth) {
case 0:
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", import_buffer3.Buffer.concat([CUM3, passed])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", import_buffer3.Buffer.concat([CUM3, passed])), 0, 0]);
break;
case 1:
xor(passed, password);
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", import_buffer3.Buffer.concat([CUM4, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", import_buffer3.Buffer.concat([CUM4, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), 0, 0]);
break;
case 2:
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", import_buffer3.Buffer.concat([CUM5, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", import_buffer3.Buffer.concat([CUM5, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), 0, 0]);
break;
case 3:
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", import_buffer3.Buffer.concat([CUM6, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", import_buffer3.Buffer.concat([CUM6, import_buffer3.Buffer.from(import_buffer3.Buffer.from(passed).toString("base64"))])), 0, 0]);
break;
case 4:
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", import_buffer3.Buffer.concat([CUM7, import_buffer3.Buffer.from(bs58.encode(passed))])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", import_buffer3.Buffer.concat([CUM7, import_buffer3.Buffer.from(bs58.encode(passed))])), 0, 0]);
break;
}
magic4 = true;
}
await encoder.insertchunk([name, chunk, crc, offset]);
encoder.insertchunk([name, chunk, crc, offset]);
}
await encoder.insertchunk([
encoder.insertchunk([
"IEND",
async () => Promise.resolve(buildChunk("IEND", import_buffer3.Buffer.from([]))),
async () => Promise.resolve(0),
buildChunk("IEND", import_buffer3.Buffer.from([])),
0,
0
]);
return extract8();
return bws2.getBuffer();
}
let pdec = new PNGDecoder(container.stream().getReader());
const concat = [];
for await (const chk of pdec.chunks())
if (chk[0] == "IDAT")
concat.push((await chk[1]()).slice(4));
concat.push(chk[1].slice(4));
const comp = import_buffer3.Buffer.concat(concat);
const head = comp.slice(0, 2);
const chksum = comp.slice(-4);
const idatblk = embedInRawDeflate(comp.slice(2, -4), injb);
const bws = BufferWriteStream2();
const [writestream, extract7] = BufferWriteStream2();
const penc = new PNGEncoder(writestream);
const bws = new SyncBufferWriter();
const penc = new PNGEncoder(bws);
pdec = new PNGDecoder(container.stream().getReader());
let ins = false;
for await (const chk of pdec.chunks()) {
if (chk[0] != "IDAT") {
await penc.insertchunk(chk);
penc.insertchunk(chk);
} else {
if (!ins) {
await penc.insertchunk(["IDAT", async () => import_buffer3.Buffer.concat([import_buffer3.Buffer.from("IDAT"), head, idatblk, chksum]), () => Promise.resolve(0), 0]);
penc.insertchunk(["IDAT", import_buffer3.Buffer.concat([import_buffer3.Buffer.from("IDAT"), head, idatblk, chksum]), 0, 0]);
ins = true;
}
}
}
return extract7();
await penc.dtor();
console.log("Finished writing");
return bws.getBuffer();
};
var inject = async (container, links) => {
links = links.map((link) => {
@ -25675,6 +25690,20 @@ const _DOMParser = DOMParser;
await addContent(new File([new Blob([textinput.value], { type: "text/plain" })], `message${links.length}.txt`));
$$invalidate(6, textinput.value = "", textinput);
};
const downloadFile = (f) => {
  // Trigger a browser download of a File, accepting either the File itself
  // or a { file: File } wrapper (the shape used by currentEmbed).
  let file;
  if ("file" in f)
    file = f.file;
  else
    file = f;
  const url = URL.createObjectURL(file);
  var element2 = document.createElement("a");
  element2.setAttribute("href", url);
  element2.setAttribute("download", file.name);
  element2.style.display = "none";
  document.body.appendChild(element2);
  element2.click();
  document.body.removeChild(element2);
  // Bug fix: the original never revoked the object URL, leaking one blob
  // reference per download for the lifetime of the page.
  URL.revokeObjectURL(url);
};
const embedContent = async (e) => {
let tfile = original;
if (!tfile)
@ -25699,6 +25728,7 @@ const _DOMParser = DOMParser;
currentEmbed = {
file: new File([buff], file.name, { type })
};
downloadFile(currentEmbed);
externalDispatch("QRSetFile", currentEmbed);
fireNotification("success", `File${links.length > 1 ? "s" : ""} successfully embedded!`);
} catch (err) {

BIN
pngextraembedder-0.284.xpi

Binary file not shown.

17
src/Components/PostOptions.svelte

@ -133,6 +133,22 @@
textinput.value = "";
};
const downloadFile = (f: File | { file: File }) => {
  // Trigger a browser download of a File, accepting either the File itself
  // or a { file: File } wrapper (the shape used by currentEmbed).
  const file: File = "file" in f ? f.file : f;
  const url = URL.createObjectURL(file);
  const element = document.createElement("a");
  element.setAttribute("href", url);
  element.setAttribute("download", file.name);
  element.style.display = "none";
  document.body.appendChild(element);
  element.click();
  document.body.removeChild(element);
  // Bug fix: the original never revoked the object URL, leaking one blob
  // reference per download for the lifetime of the page.
  URL.revokeObjectURL(url);
};
const embedContent = async (e: Event) => {
let tfile: File | Blob | undefined = original;
if (!tfile) return;
@ -155,6 +171,7 @@
currentEmbed = {
file: new File([buff], file.name, { type }),
} as { file: File };
downloadFile(currentEmbed);
externalDispatch("QRSetFile", currentEmbed);
fireNotification(
"success",

48
src/dh-deflate.ts

@ -73,6 +73,7 @@ class Data {
computeReverse() {
this.rltree = buildHuffmanTable(this.ltree.table, this.ltree.trans)[0]! as any; // unneeded, but maybe sometime i'll throw symbol reduction into the mix
this.rdtree = buildHuffmanTable(this.dtree.table, this.dtree.trans)[0]! as any;
this.adists = new Set(this.rdtree.flat(16) as number[]);
}
ltree: Tree;
@ -83,6 +84,8 @@ class Data {
rdtree!: HCtree;
adists!: Set<number>;
dest: number[] = [];
constructor(public source: BitstreamReader, public dests: BitstreamWriter, public to_hide: BitstreamReader | BitstreamWriter, public hidden: BitstreamWriter) {
@ -102,6 +105,8 @@ const sdtree = new Tree();
let rltree: HCtree;
// eslint-disable-next-line prefer-const
let rdtree: HCtree;
// eslint-disable-next-line prefer-const
let sadist: Set<number>;
/* extra bits and base tables for length codes */
const length_bits = new Uint8Array(30);
@ -197,7 +202,8 @@ function tinf_build_tree(t: Tree, lengths: Uint8Array, off: number, num: number)
/* get one bit from source stream */
function tinf_getbit(d: Data) {
return d.source.readSync(1);
const v = d.source.readSync(1);
return v;
}
/* read a num bit value from a stream and add base */
@ -346,7 +352,6 @@ function tinf_inflate_block_data(d: Data, lt: Tree, dt: Tree) {
if (finished) {
return true;
}
let sym = tinf_decode_symbol(d, lt); // copy
/* check for end of block */
@ -358,6 +363,7 @@ function tinf_inflate_block_data(d: Data, lt: Tree, dt: Tree) {
d.dest.push(sym);
// same
} else {
sym -= 257;
/* possibly get more bits from length code */
@ -397,7 +403,10 @@ function tinf_inflate_block_data(d: Data, lt: Tree, dt: Tree) {
o++;
}
if (matches.length > 1) {
matches = matches.map(e => -(e - d.dest.length));
matches = matches.map(e => -(e - d.dest.length)).filter(e => {
const [dsym] = get_symbol(e, dist_bits, dist_base);
return d.adists.has(dsym);
});
matches.reverse();
const v = Math.floor(Math.log2(matches.length));
capacity += v;
@ -424,7 +433,11 @@ function tinf_inflate_block_data(d: Data, lt: Tree, dt: Tree) {
//d.hidden.write(enclen.length, enclen.val);
//d.hidden.write(llen, loff);
const [dsym, dlen, doff] = get_symbol(backoffset, dist_bits, dist_base);
const encdist = encode_symbol(dsym, d.rdtree);
let encdist = encode_symbol(dsym, d.rdtree);
if (isNaN(encdist.val)) {
debugger;
encdist = encode_symbol(dsym, d.rdtree);
}
d.hidden.write(encdist.length, revbyte(encdist.val, encdist.length));
d.hidden.write(dlen, doff);
@ -440,16 +453,20 @@ function tinf_inflate_block_data(d: Data, lt: Tree, dt: Tree) {
function tinf_inflate_uncompressed_block(d: Data) {
/* get length */
const length = d.source.readSync(16);
d.hidden.write(16, length);
/* get one's complement of length */
const invlength = d.source.readSync(16);
d.hidden.write(16, invlength);
/* check length */
if (length !== (~invlength & 0x0000ffff))
return -4;
for (let i = length; i; --i)
d.dest.push(d.source.readSync(8));
for (let i = length; i; --i) {
const v = d.source.readSync(8);
d.dest.push(v);
d.hidden.write(8, v);
}
return TINF_OK;
}
@ -462,8 +479,8 @@ export function tinf_uncompress(source: BitstreamReader,
// compressed stream containing hidden data
hidden: (chunk: Uint8Array) => void) {
const decomp = new BitstreamWriter({ write: (decompressed || (() => {/* */ })) });
const hid = new BitstreamWriter({ write: (hidden || (() => {/* */ })) });
const decomp = new BitstreamWriter({ write: (decompressed || (() => {/** */ })) });
const hid = new BitstreamWriter({ write: (hidden || (() => {/** */ })) });
const d = new Data(source, decomp, to_hide, hid);
let res: number | undefined | true;
@ -473,8 +490,9 @@ export function tinf_uncompress(source: BitstreamReader,
if (to_hide instanceof BitstreamReader) {
if (to_hide.available == 0) {
// copy until we're byte-aligned
while (source.available && source.offset & 0x7) {
hid.write(1, source.readSync(1));
while (source.available) {
const r = 1;
hid.write(r, source.readSync(r));
}
// nothing left to embed, we are byte aligned, so we just "memcpy" the rest
return source.offset >> 3; // this is block aligned, so this doesn't tell us where the last hidden bit is, just an upper bound
@ -502,19 +520,18 @@ export function tinf_uncompress(source: BitstreamReader,
/* decompress block with fixed huffman trees */
d.rdtree = rdtree;
d.rltree = rltree;
d.adists = sadist;
res = tinf_inflate_block_data(d, sltree, sdtree);
if (res === true) {
if (res === true)
continue;
}
break;
case 2:
/* decompress block with dynamic huffman trees */
tinf_decode_trees(d, d.ltree, d.dtree);
d.computeReverse();
res = tinf_inflate_block_data(d, d.ltree, d.dtree);
if (res === true) {
if (res === true)
continue;
}
break;
default:
res = -2;
@ -551,6 +568,7 @@ tinf_build_bits_base(dist_bits, dist_base, 2, 1);
rltree = buildHuffmanTable(sltree.table, sltree.trans)[0] as any;
rdtree = buildHuffmanTable(sdtree.table, sdtree.trans)[0] as any;
sadist = new Set(rdtree.flat(16) as number[]);
/* fix a special case */
length_bits[28] = 0;

53
src/png.ts

@ -4,8 +4,8 @@ import type { ImageProcessor } from "./main";
export type PNGChunk = [
string, // name
() => Promise<Buffer>, // data
() => Promise<number>, // crc
Buffer, // data
number, // crc
number];// offset
export class PNGDecoder {
@ -45,18 +45,14 @@ export class PNGDecoder {
this.ptr += 4;
this.req += length + 4; // crc
//await this.catchup();
const pos = this.ptr;
const pos = this.ptr + length + 8;
await this.catchup();
yield [name,
async () => {
await this.catchup();
return this.repr.slice(pos, pos + length + 4);
},
async () => {
await this.catchup();
return this.repr.readUInt32BE(this.ptr + length + 4);
},
this.repr.slice(this.ptr, this.ptr + length + 4),
this.repr.readUInt32BE(this.ptr + length + 4),
this.ptr] as PNGChunk;
this.ptr += length + 8;
await this.catchup();
if (this.stopped)
break;
if (name == 'IEND')
@ -69,27 +65,40 @@ export class PNGDecoder {
}
}
export class SyncBufferWriter {
  /** Chunks written so far, in order; concatenated lazily by getBuffer(). */
  cumul: Buffer[] = [];

  /** Queue one chunk. No copying happens until getBuffer() is called. */
  write(b: Buffer): void {
    this.cumul[this.cumul.length] = b;
  }

  /** Join every chunk written so far into a single contiguous Buffer. */
  getBuffer(): Buffer {
    return Buffer.concat(this.cumul);
  }
}
export class PNGEncoder {
writer: WritableStreamDefaultWriter<Buffer>;
// writer: WritableStreamDefaultWriter<Buffer>;
constructor(bytes: WritableStream<Buffer>) {
this.writer = bytes.getWriter();
constructor(private writer: SyncBufferWriter) {
this.writer.write(Buffer.from([0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A]));
}
async insertchunk(chunk: PNGChunk) {
const b = Buffer.alloc(4);
const buff = await chunk[1]();
insertchunk(chunk: PNGChunk) {
let b = Buffer.alloc(4);
const buff = chunk[1];
b.writeInt32BE(buff.length - 4, 0);
await this.writer.write(b); // write length
await this.writer.write(buff); // chunk includes chunkname
this.writer.write(b); // write length
this.writer.write(buff); // chunk includes chunkname
b = Buffer.alloc(4);
b.writeInt32BE(buf(buff), 0);
await this.writer.write(b);
this.writer.write(b);
console.log("finished inserting");
}
async dtor() {
this.writer.releaseLock();
await this.writer.close();
//this.writer.releaseLock();
//await this.writer.close();
}
}

47
src/pngv3.ts

@ -1,6 +1,6 @@
import { Buffer } from "buffer";
import type { EmbeddedFile, ImageProcessor } from "./main";
import { PNGDecoder, PNGEncoder } from "./png";
import { PNGDecoder, PNGEncoder, SyncBufferWriter } from "./png";
import { decodeCoom3Payload } from "./utils";
import { settings } from "./stores";
import { filehosts } from "./filehosts";
@ -157,7 +157,7 @@ const extract = async (png: Buffer) => {
switch (name) {
// should exist at the beginning of file to signal decoders if the file indeed has an embedded chunk
case 'tEXt':
buff = await chunk();
buff = chunk;
if (buff.slice(4, 4 + CUM3.length).equals(CUM3)) {
const k = await decodeCoom3Payload(buff.slice(4 + CUM3.length));
ret.push(...k.filter(e => e).map(e => e as EmbeddedFile));
@ -206,7 +206,7 @@ const extract = async (png: Buffer) => {
case 'IDAT':
if (ret.length)
return ret;
buff = await chunk();
buff = chunk;
idats.push(buff.slice(4));
// eslint-disable-next-line no-fallthrough
@ -251,6 +251,7 @@ export const BufferWriteStream = () => {
const ret = new WritableStream<Buffer>({
write(chunk) {
b = Buffer.concat([b, chunk]);
console.log("finished appending");
}
});
return [ret, () => b] as [WritableStream<Buffer>, () => Buffer];
@ -273,8 +274,9 @@ export const inject_data = async (container: File, injb: Buffer) => {
throw new Error("Settings uninit");
if (csettings.pmeth < 5) {
let magic = false;
const [writestream, extract] = BufferWriteStream();
const encoder = new PNGEncoder(writestream);
//const [writestream, extract] = BufferWriteStream();
const bws = new SyncBufferWriter();
const encoder = new PNGEncoder(bws);
const decoder = new PNGDecoder(container.stream().getReader());
for await (const [name, chunk, crc, offset] of decoder.chunks()) {
@ -284,58 +286,59 @@ export const inject_data = async (container: File, injb: Buffer) => {
const passed = Buffer.from(injb);
switch (csettings.pmeth) {
case 0:
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", Buffer.concat([CUM3, passed])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", Buffer.concat([CUM3, passed])), 0, 0]);
break;
case 1:
xor(passed, password);
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", Buffer.concat([CUM4, Buffer.from(Buffer.from(passed).toString("base64"))])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", Buffer.concat([CUM4, Buffer.from(Buffer.from(passed).toString("base64"))])), 0, 0]);
break;
case 2:
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", Buffer.concat([CUM5, Buffer.from(Buffer.from(passed).toString("base64"))])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", Buffer.concat([CUM5, Buffer.from(Buffer.from(passed).toString("base64"))])), 0, 0]);
break;
case 3:
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", Buffer.concat([CUM6, Buffer.from(Buffer.from(passed).toString("base64"))])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", Buffer.concat([CUM6, Buffer.from(Buffer.from(passed).toString("base64"))])), 0, 0]);
break;
case 4:
await encoder.insertchunk(["tEXt", async () => buildChunk("tEXt", Buffer.concat([CUM7, Buffer.from(bs58.encode(passed))])), () => Promise.resolve(0), 0]);
encoder.insertchunk(["tEXt", buildChunk("tEXt", Buffer.concat([CUM7, Buffer.from(bs58.encode(passed))])), 0, 0]);
break;
}
magic = true;
}
await encoder.insertchunk([name, chunk, crc, offset]);
encoder.insertchunk([name, chunk, crc, offset]);
}
await encoder.insertchunk(["IEND",
async () => Promise.resolve(buildChunk("IEND", Buffer.from([]))),
async () => Promise.resolve(0),
encoder.insertchunk(["IEND",
buildChunk("IEND", Buffer.from([])),
0,
0]);
return extract();
return bws.getBuffer();
}
let pdec = new PNGDecoder(container.stream().getReader());
const concat: Buffer[] = [];
for await (const chk of pdec.chunks())
if (chk[0] == "IDAT")
concat.push((await chk[1]()).slice(4));
concat.push((chk[1]).slice(4));
const comp = Buffer.concat(concat);
const head = comp.slice(0, 2); // keep the header the same
const chksum = comp.slice(-4); // checksum is over the uncompressed data, so no need to recalculate
const idatblk = embedInRawDeflate(comp.slice(2, -4), injb);
const bws = BufferWriteStream();
const [writestream, extract] = BufferWriteStream();
const penc = new PNGEncoder(writestream);
const bws = new SyncBufferWriter();
const penc = new PNGEncoder(bws);
pdec = new PNGDecoder(container.stream().getReader()); // restart again
let ins = false;
for await (const chk of pdec.chunks()) {
if (chk[0] != "IDAT") {
await penc.insertchunk(chk);
penc.insertchunk(chk);
} else {
if (!ins) {
await penc.insertchunk(["IDAT", async () => Buffer.concat([Buffer.from('IDAT'), head, idatblk, chksum]), () => Promise.resolve(0), 0]);
penc.insertchunk(["IDAT", Buffer.concat([Buffer.from('IDAT'), head, idatblk, chksum]), 0, 0]);
ins = true;
}
}
}
return extract();
await penc.dtor();
console.log("Finished writing");
return bws.getBuffer();
};
const inject = async (container: File, links: string[]) => {

Loading…
Cancel
Save