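// BLARF: an in-browser player for a live Matroska (MKV/WebM) stream.
// Bytes arrive over a WebSocket, are parsed incrementally as EBML/Matroska, and
// each SimpleBlock is handed to a Web Worker ("blarfwork.js") for decoding;
// decoded frames are drawn onto a canvas and decoded PCM is played back through
// a ScriptProcessorNode.
//
// Expected host markup (the URL here is only a placeholder):
//   <div id="BLARF" data-target="wss://example.invalid/stream"></div>
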
(function() {
    // Frames and PCM decoded by the worker, waiting to be presented.
    var VideoQueue = []
    var AudioQueue = []

    // The element the player mounts into; the stream URL comes from its data-target attribute.
    var BlarfEl = document.getElementById("BLARF")
    BlarfEl.innerHTML = `
        <canvas width="1280" height="720"></canvas>
        <div class="MKVControls">
            <div class="MKVSpeaker"><span class="MKVSpeakerOff">🔈︎</span><span class="MKVSpeakerOn" style="display:none;">🔊︎</span></div>
            <span class="MKVCurrentTime">00:00:00</span>
            <span class="MKVStats"></span>
        </div>
    `

    var Canvus = BlarfEl.querySelector("canvas")
    var CanvCtx = Canvus.getContext("2d")

    var AudCtx
    var AudScript

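    // (Re)creates the audio output path for a track: a fresh AudioContext at the
    // stream's sample rate and a ScriptProcessorNode (deprecated, but widely
    // supported) whose callback drains AudioQueue into the output buffers.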
    function create_audio(hz, channels) {
        if(AudCtx) {
            AudCtx.close()
        }

        AudCtx = new AudioContext({sampleRate: hz})
        AudScript = AudCtx.createScriptProcessor(1024, 2, 2)
        AudScript.onaudioprocess = function(e) {
            var outL = e.outputBuffer.getChannelData(0)
            var outR = channels > 1 ? e.outputBuffer.getChannelData(1) : null

            var leftToWrite = outL.length
            var offset = 0

            // Copy as much queued PCM as fits into this output buffer.
            while(AudioQueue.length && leftToWrite) {
                var amount = Math.min(leftToWrite, AudioQueue[0].left.length)

                outL.set(AudioQueue[0].left.subarray(0, amount), offset)
                if(outR) outR.set(AudioQueue[0].right.subarray(0, amount), offset)

                AudioQueue[0].left = AudioQueue[0].left.subarray(amount)
                if(outR) AudioQueue[0].right = AudioQueue[0].right.subarray(amount)

                if(AudioQueue[0].left.length == 0) {
                    AudioQueue.shift()
                }

                leftToWrite -= amount
                offset += amount
            }

            // Underrun: playback has started but the queue ran dry.
            if(RenderStartTime && leftToWrite) {
                buffering(1000)
            }
        }
        AudScript.connect(AudCtx.destination)
    }

    // Controls auto-hide: remember when the user last interacted.
    var LastControlsInterrupt
    function interruptcontrols() {
        LastControlsInterrupt = document.timeline.currentTime
    }
    interruptcontrols()

    function togglemute() {
        if(AudCtx) {
            if(document.querySelector(".MKVSpeakerOn").style.display == "none") {
                AudCtx.resume()
            } else {
                AudCtx.suspend()
            }
        }

        // Swap the visible speaker icon.
        document.querySelectorAll(".MKVSpeaker *").forEach(function(el) { el.style.display = el.style.display == "none" ? "" : "none" })

        interruptcontrols()
    }

    document.querySelector(".MKVSpeaker").onclick = togglemute

    // Keyboard shortcut: M toggles mute.
    document.onkeypress = function(e) {
        if(e.key.toUpperCase() == "M") {
            togglemute()
        }
    }

    BlarfEl.onmousemove = function() {
        interruptcontrols()
    }

    var RenderStartTime, VideoStartTime // playback start (wall clock) and the stream timestamp it maps to

    var Statistics = {}
    var TheWorker = new Worker("blarfwork.js")
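    // Messages from the decoder worker, as consumed below:
    //   {width, height, data, t}  a decoded RGBA frame plus its presentation timestamp
    //   {samples, left, right}    decoded PCM for one packet (right may be absent for mono)
    // Every message also carries {id, taken}, accumulated into per-decoder Statistics.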
    TheWorker.onmessage = function(e) {
        if(e.data.width) {
            // A decoded video frame: wrap the RGBA pixels and queue it with its timestamp.
            var imgData = new ImageData(new Uint8ClampedArray(e.data.data.buffer), e.data.width, e.data.height, {colorSpace: "srgb"})
            VideoQueue.push({t: e.data.t, imgData: imgData})
        } else if(e.data.samples) {
            AudioQueue.push({left: e.data.left, right: e.data.right || e.data.left})

            // Audio may be loaded but it might not play because of autoplay permissions.
            // In this case the audio queue will fill up and cause ever-increasing AV desync.
            // To prevent this, manually crop the audio to the duration in the video queue.
            if(AudCtx && AudCtx.state != "running") {
                var durationInAudioQueue = AudioQueue.length ? AudioQueue.reduce((acc, el) => acc + el.left.length, 0) : 0

                // Samples in excess of the buffered video duration (assumes 48 kHz, i.e. 48 samples per ms).
                var durationToRemove = Math.max(durationInAudioQueue - (VideoQueue.length ? (VideoQueue[VideoQueue.length - 1].t - VideoQueue[0].t) : 0) * 48, 0)

                while(AudioQueue.length && durationToRemove) {
                    var amount = Math.min(durationToRemove, AudioQueue[0].left.length)

                    AudioQueue[0].left = AudioQueue[0].left.subarray(amount)
                    AudioQueue[0].right = AudioQueue[0].right.subarray(amount)

                    if(AudioQueue[0].left.length == 0) {
                        AudioQueue.shift()
                    }

                    durationToRemove -= amount
                }
            }
        }

        if(!Statistics[e.data.id]) {
            Statistics[e.data.id] = {sum: 0, count: 0}
        }

        Statistics[e.data.id].sum += e.data.taken
        Statistics[e.data.id].count++

        var stats = document.querySelector(".MKVStats")
        if(stats) {
            /* var text = ""
            for(var k in Statistics) {
                text = text + k + ":" + (Math.floor(100 * Statistics[k].sum / Statistics[k].count) / 100) + ","
            }
            stats.innerText = text*/
            // Buffered video duration (ms) and audio duration (ms, assuming 48 kHz).
            stats.innerHTML = (VideoQueue.length ? (VideoQueue[VideoQueue.length - 1].t - VideoQueue[0].t) : "0") + "v" + (AudioQueue.reduce(function(acc, obj) {return acc + obj.left.length / 48}, 0)|0) + "a"
        }
    }

    // Browsers only allow audio after a user gesture; clicking the video resumes it.
    Canvus.onclick = function() {
        if(AudCtx) AudCtx.resume()
    }

    var VideoBufferingOffset = 0
    function buffering(millis) {
        //var silence = new Float32Array(millis * 48);
        //AudioQueue.push({left: silence, right: silence})
        //VideoBufferingOffset += millis
    }

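    // Small helpers: toHex appears unused in this file; pad() left-pads numbers
    // for the HH:MM:SS time display.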
    function toHex(buffer) {
        return Array.prototype.map.call(buffer, x => ('00' + x.toString(16)).slice(-2)).join('');
    }
    function pad(str, n, z) {
        z = z || '0'
        str = str + ''
        while(str.length < n) {
            str = z + str
        }
        return str
    }

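    // Minimal incremental EBML parser. Incoming bytes are appended to Accum via
    // poosh(); parse() walks the complete elements, firing onenter()/onexit() for
    // the master elements it descends into and ondata() for leaf payloads, and
    // keeps any incomplete tail for the next call. IdStack/SizeStack track the
    // master elements that are currently open and how many of their bytes remain.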
    class EBMLParser {
        Accum = new Uint8Array([])
        I = 0

        IdStack = []
        SizeStack = []

        // Reads one EBML variable-length integer (an ID or a size) starting at I.
        // Returns its raw bytes, or null if they have not all arrived yet.
        get_varint() {
            if(this.Accum.length == 0) return null;

            // The number of leading zero bits of the first byte gives the length in bytes.
            var bytes = Math.clz32(this.Accum[this.I]) - 23

            if(this.Accum.length - this.I < bytes) return null;

            var ret = this.Accum.subarray(this.I, this.I + bytes).slice(0)

            this.I += bytes

            return ret
        }

        // Appends newly received bytes to the accumulator.
        poosh(toAdd) {
            var a = this.Accum
            this.Accum = new Uint8Array(a.length + toAdd.length)
            this.Accum.set(a)
            this.Accum.set(toAdd, a.length)
        }

        // Consumes as many complete elements from Accum as possible, then keeps the
        // unconsumed tail for the next call.
        parse() {
            do {
                var IOld = this.I

                var elID = this.get_varint()

                if(elID === null) {
                    this.I = IOld
                    break
                }

                elID = EBMLParser.vi_to_i(elID)

                var elSize = this.get_varint()

                if(elSize === null) {
                    this.I = IOld
                    break
                }

                // Sizes have their length marker stripped; IDs keep it.
                EBMLParser.parse_varint(elSize)

                elSize = EBMLParser.vi_to_i(elSize)

                // Master elements we descend into: Segment, SeekHead, Info, Cluster,
                // Tracks, Video, Audio, TrackEntry. Everything else is treated as a leaf.
                if(elID == 0x18538067 || elID == 0x114D9B74 || elID == 0x1549A966 || elID == 0x1F43B675 || elID == 0x1654AE6B || elID == 0xE0 || elID == 0xE1 || elID == 0xAE) {
                    this.IdStack.push(elID)
                    this.SizeStack.push(elSize + (this.I - IOld))

                    if(this.onenter) {
                        this.onenter(elID)
                    }
                } else {
                    // Leaf element: deliver its payload once it has fully arrived.
                    if(this.Accum.length - this.I >= elSize) {
                        if(this.ondata) {
                            this.ondata(elID, this.Accum.subarray(this.I, this.I + elSize))
                        }

                        this.I += elSize
                    } else {
                        this.I = IOld
                        break
                    }
                }

                // Account for the consumed bytes in every open master element...
                for(var i = 0; i < this.IdStack.length; i++) {
                    this.SizeStack[i] -= this.I - IOld
                }

                // ...and close the ones that are now fully consumed.
                while(this.SizeStack.length && this.SizeStack[this.SizeStack.length - 1] <= 0) {
                    if(this.SizeStack[this.SizeStack.length - 1] < 0) console.log("EBML element overran its declared size")

                    if(this.onexit) {
                        this.onexit(this.IdStack[this.IdStack.length - 1])
                    }

                    this.SizeStack.pop()
                    this.IdStack.pop()
                }
            } while(true);

            this.Accum = this.Accum.subarray(this.I)
            this.I = 0
        }

        // Strips the length-marker bit from the first byte of a size varint (in place).
        static parse_varint(vi) {
            vi[0] = vi[0] & ((1 << (31 - Math.clz32(vi[0]))) - 1)
        }

        // Interprets varint bytes as a big-endian unsigned integer.
        static vi_to_i(vi) {
            var ret = 0
            for(var i = 0; i < vi.length; i++) {
                ret = ret * 256 + vi[i]
            }
            return ret
        }
    }

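    // Interprets the Matroska elements the player cares about: track metadata from
    // TrackEntry and its Video/Audio children, then Cluster timestamps and the
    // SimpleBlocks inside them, which are forwarded to the decoder worker.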
    class MatroskaState {
        tracks = []

        onenter(elID) {
            if(elID == 0xAE) {
                // Track Entry
                this.tracks.push({})
            } else if(elID == 0xE0) {
                // Track Entry -> Track Video
                this.tracks[this.tracks.length - 1].type = "video"
            } else if(elID == 0xE1) {
                // Track Entry -> Track Audio
                this.tracks[this.tracks.length - 1].type = "audio"
            }
        }

        ondata(elID, data) {
            if(elID == 0xD7) {
                // Track Entry -> Track Number
                this.tracks[this.tracks.length - 1].id = EBMLParser.vi_to_i(data)
            } else if(elID == 0xB0) {
                // Track Entry -> Track Video -> Width
                this.tracks[this.tracks.length - 1].width = EBMLParser.vi_to_i(data)
            } else if(elID == 0xBA) {
                // Track Entry -> Track Video -> Height
                this.tracks[this.tracks.length - 1].height = EBMLParser.vi_to_i(data)
            } else if(elID == 0x9F) {
                // Track Entry -> Track Audio -> Channels
                this.tracks[this.tracks.length - 1].channels = EBMLParser.vi_to_i(data)
            } else if(elID == 0xB5) {
                // Track Entry -> Track Audio -> Sampling Frequency (a 4- or 8-byte big-endian float)
                var dv = new DataView(data.slice(0).buffer)
                this.tracks[this.tracks.length - 1].samplerate = data.length == 4 ? dv.getFloat32(0, false) : dv.getFloat64(0, false)
            } else if(elID == 0x86) {
                // Track Entry -> Codec Type
                this.tracks[this.tracks.length - 1].codec = new TextDecoder().decode(data);
            } else if(elID == 0x63A2) {
                // Track Entry -> Codec Private
                this.tracks[this.tracks.length - 1].priv = data.slice(0)
            } else if(elID == 0xE7) {
                // Cluster -> Timestamp
                this.currentClusterTime = EBMLParser.vi_to_i(data)

                // First cluster seen: schedule playback to start one second from now and
                // remember which stream timestamp that corresponds to.
                if(!RenderStartTime) {
                    RenderStartTime = document.timeline.currentTime + 1000
                }
                if(!VideoStartTime) {
                    VideoStartTime = this.currentClusterTime
                }
            } else if(elID == 0xA3) {
                // Cluster -> SimpleBlock

                // Track number (single-byte varint assumed), relative timestamp, flags.
                var trackID = data[0] & 127
                var track = this.tracks.find(function(t) {return t.id == trackID})

                var timestamp = data[1] * 256 + data[2]

                var flags = data[3]

                var kf = !!(flags & 128)

                var TotalTime = (this.currentClusterTime + timestamp) / 1000
                document.querySelector(".MKVCurrentTime").innerText = pad(Math.floor(TotalTime / 3600), 2) + ":" + pad(Math.floor(TotalTime / 60 % 60), 2) + ":" + pad(Math.floor(TotalTime % 60), 2)

                if(track) {
                    var packet = data.subarray(4)

                    TheWorker.postMessage({cmd: "decode", id: trackID, t: timestamp + this.currentClusterTime - VideoStartTime, packet: packet, kf: kf})
                }
            }
        }

        onexit(elID) {
            if(elID == 0xAE) {
                // Track Entry is complete: create a decoder for it in the worker and set
                // up the matching output (canvas size for video, audio context for audio).

                var track = this.tracks[this.tracks.length - 1]

                var codec = track.codec
                var id = track.id
                var priv = track.priv

                var channels = track.channels // undefined if not audio

                TheWorker.postMessage({cmd: "create", codec: codec, id: id, priv: priv, channels: channels})

                if(track.type == "video") {
                    Canvus.width = track.width
                    Canvus.height = track.height
                } else {
                    create_audio(track.samplerate, track.channels)
                }
            }
        }
    }

    var matr = new MatroskaState()

    var ebml = new EBMLParser()
    ebml.onenter = matr.onenter.bind(matr)
    ebml.ondata = matr.ondata.bind(matr)
    ebml.onexit = matr.onexit.bind(matr)

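    // Transport: the server pushes raw Matroska bytes over a WebSocket whose URL
    // comes from the data-target attribute; on disconnect, retry every five seconds.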
    function reconnect_ws() {
        var ws = new WebSocket(BlarfEl.getAttribute("data-target"))
        ws.binaryType = "arraybuffer"
        ws.onmessage = function(ev) {
            ebml.poosh(new Uint8Array(ev.data))
        }
        ws.onclose = function(ev) {
            setTimeout(reconnect_ws, 5000)
        }
    }
    reconnect_ws()

    // Main loop: parse whatever has arrived, fade the controls after a few seconds
    // of inactivity, and present every queued frame whose timestamp is due.
    function render(timestamp) {
        ebml.parse()

        document.querySelector(".MKVControls").style.opacity = Math.max(0, Math.min(1, 5 - (timestamp - LastControlsInterrupt) / 1000))

        while(RenderStartTime && VideoQueue.length && VideoQueue[0].t + VideoBufferingOffset <= (timestamp - RenderStartTime)) {
            CanvCtx.putImageData(VideoQueue[0].imgData, 0, 0)
            VideoQueue.shift()
        }

        requestAnimationFrame(render)
    }
    requestAnimationFrame(render)
})()