added streaming bullshit
performance still isn't good enough for long af tracks, maybe i'll try bf decryption in wasm
parent 026edf3808
commit 3fe1482da5
File diff suppressed because one or more lines are too long
index.js (38 lines changed)
@@ -180,35 +180,51 @@ async function track(id, format, access_token, tagging) {
     let { readable, writable } = new TransformStream()
     const writer = writable.getWriter()
 
-    if (tagging) {
+    if (id3) {
         writer.write(id3.arrayBuffer)
     }
 
     const bfKey = await bf_key(id)
-    console.log(bfKey)
+    const length = Number(track.headers.get('Content-Length'))
 
+    pipeDecryptedStream(writer, track.body, length, bfKey, id3)
+
+    return new Response(readable, { status: 200, headers: { 'content-type': 'audio/mpeg' } })
+}
+
+async function pipeDecryptedStream(writer, body, length, bfKey) {
     const cipher = new Blowfish(bfKey, Blowfish.MODE.CBC, Blowfish.PADDING.NULL)
     cipher.setIv('\x00\x01\x02\x03\x04\x05\x06\x07')
 
-    const buffer = await track.arrayBuffer()
-    const length = buffer.byteLength
+    const reader = body.getReader({ mode: 'byob' })
     let byteCount = 0
     let end = false
     while (!end) {
-        let chunkEnd = byteCount + 2048
-        if (chunkEnd > length) {
-            chunkEnd = length
-            end = true
+        end = byteCount + 2048 > length
+        let chunk
+        if (!end) {
+            chunk = new Int8Array(2048)
+        } else {
+            chunk = new Int8Array(length - byteCount)
         }
-        let chunk = buffer.slice(byteCount, byteCount+2048)
+        // if read chunk isn't 2048 bytes, read until it is
+        // cause of retarded readable streams not having an option to specify min bytes
+        let tempLength = 0
+        while (tempLength !== chunk.length) {
+            let read = (await reader.read(new Int8Array(chunk.length - tempLength))).value
+            chunk.set(read, tempLength)
+            tempLength += read.length
+        }
 
         if (byteCount % 6144 === 0 && !end) {
            // encrypted chunk
            chunk = cipher.decode(chunk, Blowfish.TYPE.UINT8_ARRAY)
         }
 
        writer.write(chunk)
        byteCount += 2048
     }
 
-    return new Response(readable, { status: 200, headers: { 'content-type': 'audio/mpeg' } })
 }
 
 async function track_url(json, format) {
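Two details in the hunk above are easy to miss. The byteCount % 6144 === 0 check means only every third 2048-byte chunk is run through the Blowfish CBC cipher; the other chunks are written through untouched, so each chunk handed to writer.write() has to track the file offset in exact 2048-byte strides (only the final short chunk is smaller, and it is never decrypted). That is also why the inner while (tempLength !== chunk.length) loop exists: a BYOB reader.read(view) call may fill fewer bytes than the view can hold. A minimal standalone sketch of that read-until-full pattern, assuming a ReadableStreamBYOBReader and a hypothetical readExact helper name that is not part of this commit:

async function readExact(reader, size) {
    // keep reading until `size` bytes have been collected, since a single
    // read() may return fewer bytes than the view it was handed can hold
    const out = new Uint8Array(size)
    let filled = 0
    while (filled < size) {
        // read() detaches the passed view and returns a new view over the
        // bytes it actually wrote
        const { value, done } = await reader.read(new Uint8Array(size - filled))
        if (value) {
            out.set(value, filled)
            filled += value.length
        }
        if (done) break // body ended before the chunk was full
    }
    return filled === size ? out : out.subarray(0, filled)
}

Checking done matters here: the strict !== comparison in the committed loop assumes the body always delivers exactly Content-Length bytes, and a body that ends early would keep the loop spinning on zero-byte reads from the closed reader.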