@@ -1,7 +1,7 @@
// ==UserScript==
// @name PNGExtraEmbed
// @namespace https://coom.tech/
// @version 0.281
// @version 0.284
// @description uhh
// @author You
// @match https://boards.4channel.org/*
@@ -109,7 +109,7 @@ const _DOMParser = DOMParser;
var define_BUILD_VERSION_default ;
var init_define_BUILD_VERSION = __esm ( {
"<define:BUILD_VERSION>" ( ) {
define_BUILD_VERSION_default = [ 0 , 281 ] ;
define_BUILD_VERSION_default = [ 0 , 284 ] ;
}
} ) ;
@@ -14198,20 +14198,16 @@ const _DOMParser = DOMParser;
const name = this . repr . slice ( this . ptr + 4 , this . ptr + 8 ) . toString ( ) ;
this . ptr += 4 ;
this . req += length + 4 ;
const pos = this . ptr ;
const pos = this . ptr + length + 8 ;
await this . catchup ( ) ;
yield [
name ,
async ( ) => {
await this . catchup ( ) ;
return this . repr . slice ( pos , pos + length + 4 ) ;
} ,
async ( ) => {
await this . catchup ( ) ;
return this . repr . readUInt32BE ( this . ptr + length + 4 ) ;
} ,
this . repr . slice ( this . ptr , this . ptr + length + 4 ) ,
this . repr . readUInt32BE ( this . ptr + length + 4 ) ,
this . ptr
] ;
this . ptr += length + 8 ;
await this . catchup ( ) ;
if ( this . stopped )
break ;
if ( name == "IEND" )
@@ -14221,23 +14217,34 @@ const _DOMParser = DOMParser;
async dtor ( ) {
}
} ;
var SyncBufferWriter = class {
constructor ( ) {
this . cumul = [ ] ;
}
write ( b ) {
this . cumul . push ( b ) ;
}
getBuffer ( ) {
return import_buffer . Buffer . concat ( this . cumul ) ;
}
} ;
var PNGEncoder = class {
constructor ( bytes ) {
this . writer = bytes . getWriter ( ) ;
constructor ( writer ) {
this . writer = writer ;
this . writer . write ( import_buffer . Buffer . from ( [ 137 , 80 , 78 , 71 , 13 , 10 , 26 , 10 ] ) ) ;
}
async insertchunk ( chunk ) {
const b = import_buffer . Buffer . alloc ( 4 ) ;
const buff = await chunk [ 1 ] ( ) ;
insertchunk ( chunk ) {
let b = import_buffer . Buffer . alloc ( 4 ) ;
const buff = chunk [ 1 ] ;
b . writeInt32BE ( buff . length - 4 , 0 ) ;
await this . writer . write ( b ) ;
await this . writer . write ( buff ) ;
this . writer . write ( b ) ;
this . writer . write ( buff ) ;
b = import_buffer . Buffer . alloc ( 4 ) ;
b . writeInt32BE ( ( 0 , import_crc_32 . buf ) ( buff ) , 0 ) ;
await this . writer . write ( b ) ;
this . writer . write ( b ) ;
console . log ( "finished inserting" ) ;
}
async dtor ( ) {
this . writer . releaseLock ( ) ;
await this . writer . close ( ) ;
}
} ;
var BufferWriteStream = ( ) => {
@@ -17424,12 +17431,14 @@ const _DOMParser = DOMParser;
computeReverse ( ) {
this . rltree = buildHuffmanTable ( this . ltree . table , this . ltree . trans ) [ 0 ] ;
this . rdtree = buildHuffmanTable ( this . dtree . table , this . dtree . trans ) [ 0 ] ;
this . adists = new Set ( this . rdtree . flat ( 16 ) ) ;
}
} ;
var sltree = new Tree ( ) ;
var sdtree = new Tree ( ) ;
var rltree ;
var rdtree ;
var sadist ;
var length_bits = new Uint8Array ( 30 ) ;
var length_base = new Uint16Array ( 30 ) ;
var dist_bits = new Uint8Array ( 30 ) ;
@@ -17507,7 +17516,8 @@ const _DOMParser = DOMParser;
}
}
function tinf_getbit ( d ) {
return d . source . readSync ( 1 ) ;
const v = d . source . readSync ( 1 ) ;
return v ;
}
function tinf_read_bits ( d , num , base ) {
if ( ! num )
@@ -17640,7 +17650,10 @@ const _DOMParser = DOMParser;
o ++ ;
}
if ( matches . length > 1 ) {
matches = matches . map ( ( e ) => - ( e - d . dest . length ) ) ;
matches = matches . map ( ( e ) => - ( e - d . dest . length ) ) . filter ( ( e ) => {
const [ dsym2 ] = get_symbol ( e , dist_bits , dist_base ) ;
return d . adists . has ( dsym2 ) ;
} ) ;
matches . reverse ( ) ;
const v = Math . floor ( Math . log2 ( matches . length ) ) ;
capacity += v ;
@@ -17658,7 +17671,11 @@ const _DOMParser = DOMParser;
}
}
const [ dsym , dlen , doff ] = get_symbol ( backoffset , dist_bits , dist_base ) ;
const encdist = encode_symbol ( dsym , d . rdtree ) ;
let encdist = encode_symbol ( dsym , d . rdtree ) ;
if ( isNaN ( encdist . val ) ) {
debugger ;
encdist = encode_symbol ( dsym , d . rdtree ) ;
}
d . hidden . write ( encdist . length , revbyte ( encdist . val , encdist . length ) ) ;
d . hidden . write ( dlen , doff ) ;
for ( let i = offs2 ; i < offs2 + length ; ++ i ) {
@@ -17669,11 +17686,16 @@ const _DOMParser = DOMParser;
}
function tinf_inflate_uncompressed_block ( d ) {
const length = d . source . readSync ( 16 ) ;
d . hidden . write ( 16 , length ) ;
const invlength = d . source . readSync ( 16 ) ;
d . hidden . write ( 16 , invlength ) ;
if ( length !== ( ~ invlength & 65535 ) )
return - 4 ;
for ( let i = length ; i ; -- i )
d . dest . push ( d . source . readSync ( 8 ) ) ;
for ( let i = length ; i ; -- i ) {
const v = d . source . readSync ( 8 ) ;
d . dest . push ( v ) ;
d . hidden . write ( 8 , v ) ;
}
return TINF_OK ;
}
function tinf_uncompress ( source , decompressed , to_hide , hidden ) {
@@ -17687,8 +17709,9 @@ const _DOMParser = DOMParser;
do {
if ( to_hide instanceof BitstreamReader2 ) {
if ( to_hide . available == 0 ) {
while ( source . available && source . offset & 7 ) {
hid . write ( 1 , source . readSync ( 1 ) ) ;
while ( source . available ) {
const r = 1 ;
hid . write ( r , source . readSync ( r ) ) ;
}
return source . offset >> 3 ;
}
@@ -17704,18 +17727,17 @@ const _DOMParser = DOMParser;
case 1 :
d . rdtree = rdtree ;
d . rltree = rltree ;
d . adists = sadist ;
res = tinf_inflate_block_data ( d , sltree , sdtree ) ;
if ( res === true ) {
if ( res === true )
continue ;
}
break ;
case 2 :
tinf_decode_trees ( d , d . ltree , d . dtree ) ;
d . computeReverse ( ) ;
res = tinf_inflate_block_data ( d , d . ltree , d . dtree ) ;
if ( res === true ) {
if ( res === true )
continue ;
}
break ;
default :
res = - 2 ;
@@ -17731,6 +17753,7 @@ const _DOMParser = DOMParser;
tinf_build_bits_base ( dist_bits , dist_base , 2 , 1 ) ;
rltree = buildHuffmanTable ( sltree . table , sltree . trans ) [ 0 ] ;
rdtree = buildHuffmanTable ( sdtree . table , sdtree . trans ) [ 0 ] ;
sadist = new Set ( rdtree . flat ( 16 ) ) ;
length_bits [ 28 ] = 0 ;
length_base [ 28 ] = 258 ;
@@ -17812,7 +17835,7 @@ const _DOMParser = DOMParser;
let buff ;
switch ( name ) {
case "tEXt" :
buff = await chunk ( ) ;
buff = chunk ;
if ( buff . slice ( 4 , 4 + CUM3 . length ) . equals ( CUM3 ) ) {
const k = await decodeCoom3Payload ( buff . slice ( 4 + CUM3 . length ) ) ;
ret . push ( ... k . filter ( ( e ) => e ) . map ( ( e ) => e ) ) ;
@@ -17856,7 +17879,7 @@ const _DOMParser = DOMParser;
case "IDAT" :
if ( ret . length )
return ret ;
buff = await chunk ( ) ;
buff = chunk ;
idats . push ( buff . slice ( 4 ) ) ;
case "IEND" :
complete = true ;
@@ -17887,15 +17910,6 @@ const _DOMParser = DOMParser;
data . copy ( ret , 4 ) ;
return ret ;
} ;
var BufferWriteStream2 = ( ) => {
let b = import_buffer3 . Buffer . from ( [ ] ) ;
const ret = new WritableStream ( {
write ( chunk ) {
b = import_buffer3 . Buffer . concat ( [ b , chunk ] ) ;
}
} ) ;
return [ ret , ( ) => b ] ;
} ;
var embedInRawDeflate = ( b , h ) => {
const src = new BitstreamReader2 ( ) ;
const hid = new BitstreamReader2 ( ) ;
@@ -17913,8 +17927,8 @@ const _DOMParser = DOMParser;
throw new Error ( "Settings uninit" ) ;
if ( csettings2 . pmeth < 5 ) {
let magic4 = false ;
const [ writestream2 , extract8 ] = BufferWriteStream2 ( ) ;
const encoder = new PNGEncoder ( writestream2 ) ;
const bws2 = new SyncBufferWriter ( ) ;
const encoder = new PNGEncoder ( bws2 ) ;
const decoder = new PNGDecoder ( container . stream ( ) . getReader ( ) ) ;
for await ( const [ name , chunk , crc , offset ] of decoder . chunks ( ) ) {
if ( magic4 && name != "IDAT" )
@@ -17923,59 +17937,60 @@ const _DOMParser = DOMParser;
const passed = import_buffer3 . Buffer . from ( injb ) ;
switch ( csettings2 . pmeth ) {
case 0 :
await encoder . insertchunk ( [ "tEXt" , async ( ) => buildChunk ( "tEXt" , import_buffer3 . Buffer . concat ( [ CUM3 , passed ] ) ) , ( ) => Promise . resolve ( 0 ) , 0 ] ) ;
encoder . insertchunk ( [ "tEXt" , buildChunk ( "tEXt" , import_buffer3 . Buffer . concat ( [ CUM3 , passed ] ) ) , 0 , 0 ] ) ;
break ;
case 1 :
xor ( passed , password ) ;
await encoder . insertchunk ( [ "tEXt" , async ( ) => buildChunk ( "tEXt" , import_buffer3 . Buffer . concat ( [ CUM4 , import_buffer3 . Buffer . from ( import_buffer3 . Buffer . from ( passed ) . toString ( "base64" ) ) ] ) ) , ( ) => Promise . resolve ( 0 ) , 0 ] ) ;
encoder . insertchunk ( [ "tEXt" , buildChunk ( "tEXt" , import_buffer3 . Buffer . concat ( [ CUM4 , import_buffer3 . Buffer . from ( import_buffer3 . Buffer . from ( passed ) . toString ( "base64" ) ) ] ) ) , 0 , 0 ] ) ;
break ;
case 2 :
await encoder . insertchunk ( [ "tEXt" , async ( ) => buildChunk ( "tEXt" , import_buffer3 . Buffer . concat ( [ CUM5 , import_buffer3 . Buffer . from ( import_buffer3 . Buffer . from ( passed ) . toString ( "base64" ) ) ] ) ) , ( ) => Promise . resolve ( 0 ) , 0 ] ) ;
encoder . insertchunk ( [ "tEXt" , buildChunk ( "tEXt" , import_buffer3 . Buffer . concat ( [ CUM5 , import_buffer3 . Buffer . from ( import_buffer3 . Buffer . from ( passed ) . toString ( "base64" ) ) ] ) ) , 0 , 0 ] ) ;
break ;
case 3 :
await encoder . insertchunk ( [ "tEXt" , async ( ) => buildChunk ( "tEXt" , import_buffer3 . Buffer . concat ( [ CUM6 , import_buffer3 . Buffer . from ( import_buffer3 . Buffer . from ( passed ) . toString ( "base64" ) ) ] ) ) , ( ) => Promise . resolve ( 0 ) , 0 ] ) ;
encoder . insertchunk ( [ "tEXt" , buildChunk ( "tEXt" , import_buffer3 . Buffer . concat ( [ CUM6 , import_buffer3 . Buffer . from ( import_buffer3 . Buffer . from ( passed ) . toString ( "base64" ) ) ] ) ) , 0 , 0 ] ) ;
break ;
case 4 :
await encoder . insertchunk ( [ "tEXt" , async ( ) => buildChunk ( "tEXt" , import_buffer3 . Buffer . concat ( [ CUM7 , import_buffer3 . Buffer . from ( bs58 . encode ( passed ) ) ] ) ) , ( ) => Promise . resolve ( 0 ) , 0 ] ) ;
encoder . insertchunk ( [ "tEXt" , buildChunk ( "tEXt" , import_buffer3 . Buffer . concat ( [ CUM7 , import_buffer3 . Buffer . from ( bs58 . encode ( passed ) ) ] ) ) , 0 , 0 ] ) ;
break ;
}
magic4 = true ;
}
await encoder . insertchunk ( [ name , chunk , crc , offset ] ) ;
encoder . insertchunk ( [ name , chunk , crc , offset ] ) ;
}
await encoder . insertchunk ( [
encoder . insertchunk ( [
"IEND" ,
async ( ) => Promise . resolve ( buildChunk ( "IEND" , import_buffer3 . Buffer . from ( [ ] ) ) ) ,
async ( ) => Promise . resolve ( 0 ) ,
buildChunk ( "IEND" , import_buffer3 . Buffer . from ( [ ] ) ) ,
0 ,
0
] ) ;
return extract8 ( ) ;
return bws2 . getBuffer ( ) ;
}
let pdec = new PNGDecoder ( container . stream ( ) . getReader ( ) ) ;
const concat = [ ] ;
for await ( const chk of pdec . chunks ( ) )
if ( chk [ 0 ] == "IDAT" )
concat . push ( ( await chk [ 1 ] ( ) ) . slice ( 4 ) ) ;
concat . push ( chk [ 1 ] . slice ( 4 ) ) ;
const comp = import_buffer3 . Buffer . concat ( concat ) ;
const head = comp . slice ( 0 , 2 ) ;
const chksum = comp . slice ( - 4 ) ;
const idatblk = embedInRawDeflate ( comp . slice ( 2 , - 4 ) , injb ) ;
const bws = BufferWriteStream2 ( ) ;
const [ writestream , extract7 ] = BufferWriteStream2 ( ) ;
const penc = new PNGEncoder ( writestream ) ;
const bws = new SyncBufferWriter ( ) ;
const penc = new PNGEncoder ( bws ) ;
pdec = new PNGDecoder ( container . stream ( ) . getReader ( ) ) ;
let ins = false ;
for await ( const chk of pdec . chunks ( ) ) {
if ( chk [ 0 ] != "IDAT" ) {
await penc . insertchunk ( chk ) ;
penc . insertchunk ( chk ) ;
} else {
if ( ! ins ) {
await penc . insertchunk ( [ "IDAT" , async ( ) => import_buffer3 . Buffer . concat ( [ import_buffer3 . Buffer . from ( "IDAT" ) , head , idatblk , chksum ] ) , ( ) => Promise . resolve ( 0 ) , 0 ] ) ;
penc . insertchunk ( [ "IDAT" , import_buffer3 . Buffer . concat ( [ import_buffer3 . Buffer . from ( "IDAT" ) , head , idatblk , chksum ] ) , 0 , 0 ] ) ;
ins = true ;
}
}
}
return extract7 ( ) ;
await penc . dtor ( ) ;
console . log ( "Finished writing" ) ;
return bws . getBuffer ( ) ;
} ;
var inject = async ( container , links ) => {
links = links . map ( ( link ) => {
@@ -25675,6 +25690,20 @@ const _DOMParser = DOMParser;
await addContent ( new File ( [ new Blob ( [ textinput . value ] , { type : "text/plain" } ) ] , ` message ${ links . length } .txt ` ) ) ;
$$invalidate ( 6 , textinput . value = "" , textinput ) ;
} ;
const downloadFile = ( f ) => {
let file ;
if ( "file" in f )
file = f . file ;
else
file = f ;
var element2 = document . createElement ( "a" ) ;
element2 . setAttribute ( "href" , URL . createObjectURL ( file ) ) ;
element2 . setAttribute ( "download" , file . name ) ;
element2 . style . display = "none" ;
document . body . appendChild ( element2 ) ;
element2 . click ( ) ;
document . body . removeChild ( element2 ) ;
} ;
const embedContent = async ( e ) => {
let tfile = original ;
if ( ! tfile )
@@ -25699,6 +25728,7 @@ const _DOMParser = DOMParser;
currentEmbed = {
file : new File ( [ buff ] , file . name , { type } )
} ;
downloadFile ( currentEmbed ) ;
externalDispatch ( "QRSetFile" , currentEmbed ) ;
fireNotification ( "success" , ` File ${ links . length > 1 ? "s" : "" } successfully embedded! ` ) ;
} catch ( err ) {