coomdev
2 years ago
20 changed files with 5626 additions and 450 deletions
File diff suppressed because it is too large
File diff suppressed because it is too large
Binary file not shown.
File diff suppressed because it is too large
@ -1 +1 @@ |
|||
{"addons":{"{34ac4994-07f2-44d2-8599-682516a6c6a6}":{"updates":[{"version":"0.279","update_link":"https://git.coom.tech/fuckjannies/lolipiss/raw/branch/%E4%B8%AD%E5%87%BA%E3%81%97/pngextraembedder-0.279.xpi"}]}}} |
|||
{"addons":{"{34ac4994-07f2-44d2-8599-682516a6c6a6}":{"updates":[{"version":"0.281","update_link":"https://git.coom.tech/fuckjannies/lolipiss/raw/branch/%E4%B8%AD%E5%87%BA%E3%81%97/pngextraembedder-0.281.xpi"}]}}} |
File diff suppressed because it is too large
Binary file not shown.
@ -0,0 +1,59 @@ |
|||
import { BitstreamReader as br, BitstreamWriter as bw } from '@astronautlabs/bitstream'; |
|||
|
|||
export const revbyte = (n: number, len = 8) => { |
|||
let acc = 0; |
|||
let n2 = n; |
|||
let len2 = len; |
|||
while (len2) { |
|||
// can't use bitshifts or binray ops or else becomes negative
|
|||
acc = (acc * 2) + (n2 & 1); |
|||
n2 >>= 1; |
|||
len2--; |
|||
} |
|||
return acc; |
|||
}; |
|||
|
|||
export class BitstreamReader extends br { |
|||
rtotal = 0; |
|||
|
|||
addBuffer(hidden: Buffer) { |
|||
const inp = Buffer.from(hidden); |
|||
for (let i = 0; i < inp.byteLength; ++i) |
|||
inp[i] = revbyte(inp[i]); |
|||
super.addBuffer(inp); |
|||
} |
|||
|
|||
readSync(len: number) { |
|||
const v = super.readSync(len); |
|||
this.rtotal += len; |
|||
return revbyte(v, len); |
|||
} |
|||
} |
|||
|
|||
/** Minimal byte-sink interface: anything that accepts raw Buffer chunks. */
export type Writable = {
  write: (chunk: Buffer) => void;
};
|||
|
|||
export class BitstreamWriter extends bw { |
|||
wtotal = 0; |
|||
|
|||
constructor(private w: Writable) { |
|||
super({ |
|||
write: (c) => { |
|||
const inp = Buffer.from(c); |
|||
for (let i = 0; i < inp.byteLength; ++i) |
|||
inp[i] = revbyte(inp[i]); |
|||
this.w.write(inp); |
|||
} |
|||
}); |
|||
} |
|||
|
|||
write(length: number, value: number): void { |
|||
this.wtotal += length; |
|||
if (length) { |
|||
//tconsole.log(length, value)
|
|||
value = revbyte(value, length); |
|||
} |
|||
super.write(length, value); |
|||
} |
|||
} |
@ -0,0 +1,557 @@ |
|||
import { BitstreamReader, BitstreamWriter, revbyte } from './bitstream'; |
|||
|
|||
/* status codes (tiny-inflate convention) */
const TINF_OK = 0;
// NOTE(review): TINF_DATA_ERROR is declared for parity with tiny-inflate but
// the error paths below return -2 / -4 literals directly — confirm intended.
const TINF_DATA_ERROR = -3;
|||
|
|||
/** Canonical Huffman tree: code-length histogram plus symbols sorted by code. */
class Tree {
  table = new Uint16Array(16); /* table of code length counts */

  trans = new Uint16Array(288); /* code -> symbol translation table */
}
|||
|
|||
/* Recursive binary Huffman tree: leaves are symbol numbers, inner nodes are pairs. */
type HCtree = [number | HCtree, (number | HCtree)?];
|||
|
|||
// these two functions are a big bottleneck because im not clever enough to figure out how to encode
|
|||
// something directly by using the sorted code length/value tables, haha
|
|||
const getPathTo = (tree: HCtree, value: number): string | undefined => { |
|||
if (tree[0] === value) |
|||
return '0'; |
|||
if (tree[1] === value) |
|||
return '1'; |
|||
let p: string | undefined; |
|||
if (typeof tree[0] != "number") |
|||
p = getPathTo(tree[0], value); |
|||
let b = '0'; |
|||
if (!p) { |
|||
if (tree[1] && typeof tree[1] != "number") |
|||
p = getPathTo(tree[1], value); |
|||
b = '1'; |
|||
} |
|||
|
|||
if (p) |
|||
return b + p; |
|||
}; |
|||
|
|||
// from jpeg-js, in turns this means that jpeg-js decoding could be faster
|
|||
// if they decoded directly from the symbol tables instead of building a tree
|
|||
/**
 * Build a binary Huffman decode tree from canonical code lengths and their
 * sorted symbol values (algorithm adapted from jpeg-js).
 *
 * `codeLengths[i]` is the number of codes of length i+1; `values` lists the
 * corresponding symbols in code order. Returns the root's children array,
 * typed as HCtree. Throws when the length counts are inconsistent.
 */
function buildHuffmanTable(codeLengths: ArrayLike<number>, values: ArrayLike<number>) {
  // eslint-disable-next-line prefer-const
  let k = 0, code: any = [], i, j, length = 16;
  // drop trailing lengths with zero codes
  while (length > 0 && !codeLengths[length - 1])
    length--;
  // `code` acts as a stack of partially-filled nodes, one per tree depth
  code.push({ children: [], index: 0 });
  let p = code[0], q;
  for (i = 0; i < length; i++) {
    for (j = 0; j < codeLengths[i]; j++) {
      // place the next symbol at the current position
      p = code.pop();
      p.children[p.index] = values[k];
      // pop back up while the current node is full (index > 0 means the
      // right slot was just written)
      while (p.index > 0) {
        if (code.length === 0)
          throw new Error('Could not recreate Huffman Table');
        p = code.pop();
      }
      p.index++;
      code.push(p);
      // descend, creating intermediate nodes, until we are back at depth i
      while (code.length <= i) {
        code.push(q = { children: [], index: 0 });
        p.children[p.index] = q.children;
        p = q;
      }
      k++;
    }
    if (i + 1 < length) {
      // p here points to last code
      code.push(q = { children: [], index: 0 });
      p.children[p.index] = q.children;
      p = q;
    }
  }
  return code[0].children as HCtree;
}
|||
|
|||
/**
 * Per-stream transcoding state: the compressed source, the decompressed
 * output, the hide/extract channel, and the re-emitted compressed stream.
 */
class Data {
  // Build encode-direction (reversed) trees from the current decode tables;
  // used after tinf_decode_trees so distance codes can be re-encoded.
  computeReverse() {
    this.rltree = buildHuffmanTable(this.ltree.table, this.ltree.trans)[0]! as any; // unneeded, but maybe sometime i'll throw symbol reduction into the mix
    this.rdtree = buildHuffmanTable(this.dtree.table, this.dtree.trans)[0]! as any;
  }

  // dynamic literal/length decode tree for the current block
  ltree: Tree;

  // dynamic distance decode tree for the current block
  dtree: Tree;

  // reversed (encode-direction) literal/length tree; assigned per block
  rltree!: HCtree;

  // reversed (encode-direction) distance tree; assigned per block
  rdtree!: HCtree;

  // decompressed output bytes; also serves as the LZ77 back-reference window
  dest: number[] = [];

  constructor(public source: BitstreamReader, public dests: BitstreamWriter, public to_hide: BitstreamReader | BitstreamWriter, public hidden: BitstreamWriter) {
    this.ltree = new Tree(); /* dynamic length/symbol tree */
    this.dtree = new Tree(); /* dynamic distance tree */
  }
}
|||
|
|||
/* --------------------------------------------------- * |
|||
* -- uninitialized global data (static structures) -- * |
|||
* --------------------------------------------------- */ |
|||
|
|||
/* fixed literal/length and distance trees (populated once at module load) */
const sltree = new Tree();
const sdtree = new Tree();

/* reversed (encode-direction) versions of the fixed trees */
// eslint-disable-next-line prefer-const
let rltree: HCtree;
// eslint-disable-next-line prefer-const
let rdtree: HCtree;

/* extra bits and base tables for length codes */
const length_bits = new Uint8Array(30);
const length_base = new Uint16Array(30);

/* extra bits and base tables for distance codes */
const dist_bits = new Uint8Array(30);
const dist_base = new Uint16Array(30);

/* special ordering of code length codes */
const clcidx = new Uint8Array([
  16, 17, 18, 0, 8, 7, 9, 6,
  10, 5, 11, 4, 12, 3, 13, 2,
  14, 1, 15
]);

/* used by tinf_decode_trees, avoids allocations every call */
const code_tree = new Tree();
const lengths = new Uint8Array(288 + 32);
|||
|
|||
/* ----------------------- * |
|||
* -- utility functions -- * |
|||
* ----------------------- */ |
|||
|
|||
/* build extra bits and base tables */ |
|||
function tinf_build_bits_base(bits: Uint8Array, base: Uint16Array, delta: number, first: number) { |
|||
let i, sum; |
|||
|
|||
/* build bits table */ |
|||
for (i = 0; i < delta; ++i) bits[i] = 0; |
|||
for (i = 0; i < 30 - delta; ++i) bits[i + delta] = i / delta | 0; |
|||
|
|||
/* build base table */ |
|||
for (sum = first, i = 0; i < 30; ++i) { |
|||
base[i] = sum; |
|||
sum += 1 << bits[i]; |
|||
} |
|||
} |
|||
|
|||
/* build the fixed huffman trees */ |
|||
function tinf_build_fixed_trees(lt: Tree, dt: Tree) { |
|||
let i; |
|||
|
|||
/* build fixed length tree */ |
|||
for (i = 0; i < 7; ++i) lt.table[i] = 0; |
|||
|
|||
lt.table[7] = 24; |
|||
lt.table[8] = 152; |
|||
lt.table[9] = 112; |
|||
|
|||
for (i = 0; i < 24; ++i) lt.trans[i] = 256 + i; |
|||
for (i = 0; i < 144; ++i) lt.trans[24 + i] = i; |
|||
for (i = 0; i < 8; ++i) lt.trans[24 + 144 + i] = 280 + i; |
|||
for (i = 0; i < 112; ++i) lt.trans[24 + 144 + 8 + i] = 144 + i; |
|||
|
|||
/* build fixed distance tree */ |
|||
for (i = 0; i < 5; ++i) dt.table[i] = 0; |
|||
|
|||
dt.table[5] = 32; |
|||
|
|||
for (i = 0; i < 32; ++i) dt.trans[i] = i; |
|||
} |
|||
|
|||
/* given an array of code lengths, build a tree */ |
|||
const offs = new Uint16Array(16); |
|||
|
|||
function tinf_build_tree(t: Tree, lengths: Uint8Array, off: number, num: number) { |
|||
let i, sum; |
|||
|
|||
/* clear code length count table */ |
|||
for (i = 0; i < 16; ++i) t.table[i] = 0; |
|||
|
|||
/* scan symbol lengths, and sum code length counts */ |
|||
for (i = 0; i < num; ++i) t.table[lengths[off + i]]++; |
|||
|
|||
t.table[0] = 0; |
|||
|
|||
/* compute offset table for distribution sort */ |
|||
for (sum = 0, i = 0; i < 16; ++i) { |
|||
offs[i] = sum; |
|||
sum += t.table[i]; |
|||
} |
|||
|
|||
/* create code->symbol translation table (symbols sorted by code) */ |
|||
for (i = 0; i < num; ++i) { |
|||
if (lengths[off + i]) t.trans[offs[lengths[off + i]]++] = i; |
|||
} |
|||
} |
|||
|
|||
/* ---------------------- * |
|||
* -- decode functions -- * |
|||
* ---------------------- */ |
|||
|
|||
/* get one bit from source stream */
// Thin wrapper; the bit is NOT echoed to d.hidden here — the caller
// (tinf_uncompress) writes it out itself.
function tinf_getbit(d: Data) {
  return d.source.readSync(1);
}
|||
|
|||
/* read a num bit value from a stream and add base */ |
|||
function tinf_read_bits(d: Data, num: number, base: number) { |
|||
if (!num) |
|||
return base; |
|||
const v = d.source.readSync(num) + base; |
|||
return v; |
|||
} |
|||
|
|||
/* given a data stream and a tree, decode a symbol */
// Canonical Huffman decode, one bit at a time.
//   copy=true : every consumed bit is echoed verbatim to d.hidden.
//   ext       : receives { length, sym } — the code's bit length and its raw
//               bit pattern in read order — so the caller can re-emit or
//               re-encode the code later.
function tinf_decode_symbol(d: Data, t: Tree, copy = true, ext: any = {}) {
  let sum = 0, cur = 0, len = 0;

  /* get more bits while code value is above sum */
  let s = 0;
  do {
    const b = d.source.readSync(1);
    // forward the bit unchanged into the re-emitted compressed stream
    copy && d.hidden.write(1, b);
    s = (s << 1) | b;
    cur = 2 * cur + b;
    ++len;

    sum += t.table[len];
    cur -= t.table[len];
  } while (cur >= 0);
  ext.length = len;
  ext.sym = s;
  return t.trans[sum + cur];
}
|||
|
|||
/* given a data stream, decode dynamic trees from it */
// Reads the dynamic-Huffman block header (RFC 1951 §3.2.7) and builds the
// literal/length and distance decode trees. Every field consumed from the
// source is echoed to d.hidden so the re-emitted stream stays a structurally
// identical deflate stream. Uses the module-level `lengths` and `code_tree`
// scratch buffers.
function tinf_decode_trees(d: Data, lt: Tree, dt: Tree) {
  let i, num, length;

  /* get 5 bits HLIT (257-286) */
  const hlit = tinf_read_bits(d, 5, 257);
  d.hidden?.write(5, hlit - 257);

  /* get 5 bits HDIST (1-32) */
  const hdist = tinf_read_bits(d, 5, 1);
  d.hidden?.write(5, hdist - 1);

  /* get 4 bits HCLEN (4-19) */
  const hclen = tinf_read_bits(d, 4, 4);
  d.hidden?.write(4, hclen - 4);

  for (i = 0; i < 19; ++i) lengths[i] = 0;

  /* read code lengths for code length alphabet */
  for (i = 0; i < hclen; ++i) {
    /* get 3 bits code length (0-7) */
    const clen = tinf_read_bits(d, 3, 0);
    d.hidden?.write(3, clen);

    /* code-length codes arrive in the special clcidx permutation */
    lengths[clcidx[i]] = clen;
  }

  /* build code length tree */
  tinf_build_tree(code_tree, lengths, 0, 19);

  /* decode code lengths for the dynamic trees */
  for (num = 0; num < hlit + hdist;) {
    // copy=true, so the symbol's code bits are echoed to d.hidden here
    const sym = tinf_decode_symbol(d, code_tree);
    let prev: number;
    switch (sym) {
      case 16:
        /* copy previous code length 3-6 times (read 2 bits) */
        prev = lengths[num - 1];
        length = tinf_read_bits(d, 2, 3);
        d.hidden?.write(2, length - 3);
        for (; length; --length) {
          lengths[num++] = prev;
        }
        break;
      case 17:
        /* repeat code length 0 for 3-10 times (read 3 bits) */
        length = tinf_read_bits(d, 3, 3);
        d.hidden?.write(3, length - 3);
        for (; length; --length) {
          lengths[num++] = 0;
        }
        break;
      case 18:
        /* repeat code length 0 for 11-138 times (read 7 bits) */
        length = tinf_read_bits(d, 7, 11);
        d.hidden?.write(7, length - 11);
        for (; length; --length) {
          lengths[num++] = 0;
        }
        break;
      default:
        /* values 0-15 represent the actual code lengths */
        lengths[num++] = sym;
        break;
    }
  }

  /* build dynamic trees */
  tinf_build_tree(lt, lengths, 0, hlit);
  tinf_build_tree(dt, lengths, hlit, hdist);
}
|||
|
|||
const bufferEq = (a: Uint8Array, b: Uint8Array) => { |
|||
// this is assumed
|
|||
// if (a.byteLength != b.byteLength)
|
|||
// return false;
|
|||
for (let i = 0; i < a.byteLength; ++i) |
|||
if (a[i] != b[i]) |
|||
return i; |
|||
return -1; |
|||
}; |
|||
|
|||
const get_symbol = (value: number, bits_table: Uint8Array, base_table: Uint16Array): [number, number, number] => { |
|||
let i = 0; |
|||
for (i = 0; i < base_table.length; ++i) { |
|||
if (base_table[i] > value) { |
|||
i--; |
|||
return [i, bits_table[i], value - base_table[i]]; |
|||
} |
|||
} |
|||
i--; |
|||
return [i, bits_table[i], value - base_table[i]]; |
|||
}; |
|||
|
|||
const encode_symbol = (sym: number, tree: HCtree) => { |
|||
const code = getPathTo(tree, sym)!; |
|||
|
|||
return { |
|||
length: code?.length, |
|||
val: parseInt(code, 2) |
|||
}; |
|||
}; |
|||
|
|||
/* ----------------------------- * |
|||
* -- block inflate functions -- * |
|||
* ----------------------------- */ |
|||
|
|||
/* given a stream and two trees, inflate a block of data */ |
|||
/* running total of hideable bits observed across all inflated blocks */
export let capacity = 0;
|||
|
|||
// Core of the steganographic transcoder: inflates one deflate block while
// re-emitting it to d.hidden. Whenever a back-reference has more than one
// candidate match position in the window, the *choice* of match encodes
// floor(log2(#matches)) hidden bits: embedding (to_hide is a reader) picks
// the match indexed by bits read from d.to_hide; extraction (to_hide is a
// writer) writes the index of the offset actually used.
// Returns TINF_OK at end-of-block, or `true` when the hide-stream ran dry
// mid-block (caller then falls through to plain copying).
function tinf_inflate_block_data(d: Data, lt: Tree, dt: Tree) {
  let finished = false;
  // eslint-disable-next-line no-constant-condition
  while (1) {
    if (finished) {
      return true;
    }

    let sym = tinf_decode_symbol(d, lt); // copy

    /* check for end of block */
    if (sym === 256) {
      return TINF_OK;
    }

    if (sym < 256) {
      // literal byte: already echoed to d.hidden by tinf_decode_symbol
      d.dest.push(sym);
      // same
    } else {
      sym -= 257;

      /* possibly get more bits from length code */
      const length = tinf_read_bits(d, length_bits[sym], length_base[sym]);
      //d.hidden.write(length_bits[sym], length - length_base[sym]);

      // length is unchanged, so copy its extra bits as is
      if (length_bits[sym])
        d.hidden.write(length_bits[sym], length - length_base[sym]);

      const ext = { length: 0, sym: 0 };
      const dist = tinf_decode_symbol(d, dt, false, ext); // don't copy immediately, we may change the code
      //ext.sym = revbyte(ext.sym, ext.length);
      //d.hidden.write(ext.length, ext.sym);
      /* possibly get more bits from distance code */
      let backoffset = tinf_read_bits(d, dist_bits[dist], dist_base[dist]);

      // the bytes this back-reference copies, as they exist so far
      const offs = d.dest.length - backoffset;
      const match = Buffer.from(d.dest.slice(offs, offs + length));
      // don't consider matches that could be in the lookahead buffer
      // (slice() clamps, so match.length < length iff the reference overlaps
      // the not-yet-produced output)
      if (match.length == length) {
        // search the whole 32 KiB deflate window for other occurrences
        let begin = d.dest.length - 32768;
        if (begin < 0)
          begin = 0;
        let matches: number[] = [];
        let o = 0;
        const slic = Buffer.from(d.dest.slice(begin + o, d.dest.length));
        while (begin + o < d.dest.length) {
          // NOTE(review): the end index d.dest.length can exceed slic's own
          // length when begin > 0 — slice clamps, but confirm the intended
          // search window.
          const r = slic.slice(o, d.dest.length).indexOf(match);
          if (r >= 0) {
            matches.push(r + begin + o);
            o += r;
          }
          else {
            break;
          }
          o++;
        }
        if (matches.length > 1) {
          // convert absolute positions to back-offsets (distance from the
          // current end), smallest offset first
          matches = matches.map(e => -(e - d.dest.length));
          matches.reverse();
          // number of hidden bits this choice can carry
          const v = Math.floor(Math.log2(matches.length));
          capacity += v;
          // a ""perfectly"" compressed file is like a file with only 0s embedded
          //console.log('LLLL', matches.length)
          if (d.to_hide instanceof BitstreamReader) {
            if (d.to_hide.available) {
              // NOTE(review): when fewer than v bits remain, a shorter read
              // indexes the same matches array — confirm embed/extract agree
              // on this tail behavior.
              const s = d.to_hide.readSync(Math.min(d.to_hide.available, v));
              backoffset = matches[s];
            } else {
              finished = true;
            }
          }
          // extract hidden bits: emit the index of the offset actually used
          else {
            const idx = matches.indexOf(backoffset);
            d.to_hide.write(v, idx);
          }
        }
      }
      // match length should be the same so no need to rewrite
      //const [lsym, llen, loff] = get_symbol(length, length_bits, length_base)
      //let enclen = encode_symbol(lsym, d.rltree);
      //d.hidden.write(enclen.length, enclen.val);
      //d.hidden.write(llen, loff);
      // re-encode the (possibly swapped) distance through the reversed tree
      const [dsym, dlen, doff] = get_symbol(backoffset, dist_bits, dist_base);
      const encdist = encode_symbol(dsym, d.rdtree);
      d.hidden.write(encdist.length, revbyte(encdist.val, encdist.length));
      d.hidden.write(dlen, doff);

      /* copy match */
      for (let i = offs; i < offs + length; ++i) {
        d.dest.push(d.dest[i]);
      }
    }
  }
}
|||
|
|||
/* inflate an uncompressed block of data */ |
|||
function tinf_inflate_uncompressed_block(d: Data) { |
|||
/* get length */ |
|||
const length = d.source.readSync(16); |
|||
|
|||
/* get one's complement of length */ |
|||
const invlength = d.source.readSync(16); |
|||
/* check length */ |
|||
if (length !== (~invlength & 0x0000ffff)) |
|||
return -4; |
|||
|
|||
for (let i = length; i; --i) |
|||
d.dest.push(d.source.readSync(8)); |
|||
|
|||
return TINF_OK; |
|||
} |
|||
|
|||
/* inflate stream from source to dest */
// Drives the block loop of the steganographic transcoder.
// Returns a byte offset (upper bound of where embedding stopped) when the
// hide-stream is exhausted mid-stream; otherwise returns undefined after
// the final block.
export function tinf_uncompress(source: BitstreamReader,
  // normal decompressed data
  decompressed: (chunk: Uint8Array) => void,
  // stream of bits to hide (reader) OR sink for extracted bits (writer)
  to_hide: BitstreamReader | BitstreamWriter,
  // compressed stream containing hidden data
  hidden: (chunk: Uint8Array) => void) {

  const decomp = new BitstreamWriter({ write: (decompressed || (() => {/* */ })) });
  const hid = new BitstreamWriter({ write: (hidden || (() => {/* */ })) });

  const d = new Data(source, decomp, to_hide, hid);
  let res: number | undefined | true;
  let bfinal: number, btype: number;

  do {
    // embedding mode: once there is nothing left to hide, stop transcoding
    if (to_hide instanceof BitstreamReader) {
      if (to_hide.available == 0) {
        // copy until we're byte-aligned
        while (source.available && source.offset & 0x7) {
          hid.write(1, source.readSync(1));
        }
        // nothing left to embed, we are byte aligned, so we just "memcpy" the rest
        return source.offset >> 3; // this is block aligned, so this doesn't tell us where the last hidden bit is, just an upper bound
      }
    }

    // TODO: truncate to_hide if we get close to 70k?

    /* read final block flag */
    bfinal = tinf_getbit(d);
    d.hidden.write(1, bfinal);

    /* read block type (2 bits) */
    btype = tinf_read_bits(d, 2, 0);
    d.hidden?.write(2, btype);

    /* decompress block */
    //console.log(btype, capacity);
    switch (btype) {
      case 0:
        /* decompress uncompressed block */
        res = tinf_inflate_uncompressed_block(d);
        break;
      case 1:
        /* decompress block with fixed huffman trees */
        d.rdtree = rdtree;
        d.rltree = rltree;
        res = tinf_inflate_block_data(d, sltree, sdtree);
        // `true` means the hide-stream ran dry mid-block: loop back so the
        // byte-align/"memcpy" path at the top takes over
        if (res === true) {
          continue;
        }
        break;
      case 2:
        /* decompress block with dynamic huffman trees */
        tinf_decode_trees(d, d.ltree, d.dtree);
        d.computeReverse();
        res = tinf_inflate_block_data(d, d.ltree, d.dtree);
        if (res === true) {
          continue;
        }
        break;
      default:
        res = -2;
    }

    if (res !== TINF_OK)
      throw new Error('Data error ' + res);

  } while (!bfinal);

  // flush any partial bytes in both writers
  decomp.end();
  hid.end();

  //if (d.dest.byteOffset < d.dest.length) {
  // if (typeof d.dest.slice === 'function')
  // return d.dest.slice(0, d.dest.byteOffset);
  // else
  // return d.dest.subarray(0, d.dest.byteOffset);
  //}

  //return d.dest;
}
|||
|
|||
/* -------------------- * |
|||
* -- initialization -- * |
|||
* -------------------- */ |
|||
|
|||
/* build fixed huffman trees */
tinf_build_fixed_trees(sltree, sdtree);

/* build extra bits and base tables */
tinf_build_bits_base(length_bits, length_base, 4, 3);
tinf_build_bits_base(dist_bits, dist_base, 2, 1);

/* reversed (encode-direction) trees for the fixed-tree block case */
rltree = buildHuffmanTable(sltree.table, sltree.trans)[0] as any;
rdtree = buildHuffmanTable(sdtree.table, sdtree.trans)[0] as any;

/* fix a special case: length code 285 is exactly 258 with no extra bits */
length_bits[28] = 0;
length_base[28] = 258;
Loading…
Reference in new issue