Fix memory leak

This commit is contained in:
Mid 2025-08-31 20:07:15 +03:00
parent e929b5af1e
commit 862c52f567
3 changed files with 134 additions and 57 deletions

149
blarf.js
View File

@ -2,24 +2,51 @@
var VideoQueue = []
var AudioQueue = []
/**
 * Growable typed array with amortized O(1) append — a minimal
 * std::vector-style wrapper around a TypedArray that doubles its
 * backing store whenever an append would overflow it.
 *
 * `backend` may hold stale values past `length`; only the first
 * `length` elements are meaningful.
 */
class DynamicTypedArray {
    /**
     * @param {Function} type - a TypedArray constructor (e.g. Float32Array).
     */
    constructor(type) {
        this.type = type
        this.backend = new type(1024) // current storage; grows by doubling, never shrinks
        this.length = 0               // number of elements actually in use
    }
    /**
     * Append every element of `b` to the end.
     * @param {ArrayLike<number>} b - array-like of numbers (e.g. a TypedArray).
     */
    add(b) {
        const needed = this.length + b.length
        if (needed > this.backend.length) {
            // Double until the combined data fits, then copy the old contents over.
            let newlen = this.backend.length
            while (needed > newlen) { newlen = newlen * 2 }
            const grown = new this.type(newlen)
            grown.set(this.backend, 0)
            this.backend = grown
        }
        this.backend.set(b, this.length)
        this.length += b.length
    }
}
var BlarfEl = document.getElementById("BLARF")
BlarfEl.innerHTML = `
<canvas width="1280" height="720"></canvas>
<div class="MKVControls">
<div class="MKVSpeaker"><span class="MKVSpeakerOff">🔈&#xFE0E;</span><span class="MKVSpeakerOn" style="display:none;">🔊&#xFE0E;</span></div>
<span class="MKVCurrentTime">00:00:00</span>
<span class="MKVStats"></span>
<div>
<div class="MKVSpeaker"><span class="MKVSpeakerOff">🔈&#xFE0E;</span><span class="MKVSpeakerOn" style="display:none;">🔊&#xFE0E;</span></div>
<span class="MKVCurrentTime">00:00:00</span>
<span class="MKVStats"></span>
</div>
<div>
<span class="MKVStatus"></span>
</div>
</div>
`
var Canvus = BlarfEl.querySelector("canvas")
var CanvCtx = Canvus.getContext("2d")
var CanvImageData
var LatencyMS = 1000
var AudCtx
var AudScript, AudWorklet
var AudHz
var AudDejitter
var AudMuted = true
var AudSampleIndex = 0
function create_audio(hz, channels) {
if(AudCtx) {
@ -30,9 +57,6 @@
AudHz = hz
// Fill up buffer for 1 second before playing
AudDejitter = AudHz
var DebugSine = 0
AudCtx = new AudioContext({sampleRate: hz})
@ -104,7 +128,7 @@
document.querySelector(".MKVSpeaker").onclick = togglemute
document.onkeypress = function(e) {
if(e.key.toUpperCase() == "M") {
if(document.activeElement.tagName != "TEXTAREA" && e.key.toUpperCase() == "M") {
togglemute()
}
}
@ -130,20 +154,31 @@
}
}
var BufferPool = new Set()
var Statistics = {}
var TheWorker = new Worker("blarfwork.js")
TheWorker.onmessage = function(e) {
if(e.data.width) {
var imgData = new ImageData(new Uint8ClampedArray(e.data.data.buffer), e.data.width, e.data.height, {colorSpace: "srgb"})
VideoQueue.push({t: e.data.t, imgData: imgData})
// var imgData = new ImageData(new Uint8ClampedArray(e.data.data.buffer), e.data.width, e.data.height, {colorSpace: "srgb"})
var b
if(BufferPool.size == 0) {
b = new Uint8ClampedArray(e.data.data.buffer)
} else {
for(const v of BufferPool) {
b = v
break
}
BufferPool.delete(b)
b.set(e.data.data)
}
VideoQueue.push({t: e.data.t, imgData: b, w: e.data.width, h: e.data.height})
} else if(e.data.samples) {
AudioQueue.push({left: e.data.left, right: e.data.right || e.data.left})
AudioQueue.push({t: e.data.t, left: e.data.left, right: e.data.right || e.data.left})
// Prevent the audio queue filling up and causing ever-increasing AV desync
if(AudCtx.state != "running") {
var durationInAudioQueue = AudioQueue.length ? AudioQueue.reduce((acc, el) => acc + el.left.length, 0) : 0
var durationToRemove = Math.max(durationInAudioQueue - (VideoQueue.length ? (VideoQueue[VideoQueue.length - 1].t - VideoQueue[0].t) : 0) * AudHz / 1000, 0)
crop_audio_queue(durationToRemove)
if(AudCtx.state == "running" && AudWorklet && AudioQueue.length) {
AudWorklet.port.postMessage(merge_audio_queue())
AudioQueue.length = 0
}
}
@ -333,10 +368,7 @@
this.currentClusterTime = EBMLParser.vi_to_i(data)
if(!RenderStartTime) {
RenderStartTime = document.timeline.currentTime + 600
}
if(!VideoStartTime) {
VideoStartTime = this.currentClusterTime
RenderStartTime = performance.now()
}
} else if(elID == 0xA3) {
// Cluster -> SimpleBlock
@ -353,10 +385,16 @@
var TotalTime = (this.currentClusterTime + timestamp) / 1000
document.querySelector(".MKVCurrentTime").innerText = pad(Math.floor(TotalTime / 3600), 2) + ":" + pad(Math.floor(TotalTime / 60 % 60), 2) + ":" + pad(Math.floor(TotalTime % 60), 2)
var playerTimestamp = this.currentClusterTime + timestamp
if(track) {
if(!VideoStartTime) {
VideoStartTime = playerTimestamp
}
var packet = data.subarray(4)
TheWorker.postMessage({cmd: "decode", id: trackID, t: timestamp + this.currentClusterTime - VideoStartTime, packet: packet, kf: kf})
TheWorker.postMessage({cmd: "decode", id: trackID, t: playerTimestamp - VideoStartTime, packet: packet, kf: kf})
}
}
}
@ -378,6 +416,9 @@
if(track.type == "video") {
Canvus.width = track.width
Canvus.height = track.height
CanvImageData = new ImageData(new Uint8ClampedArray(Canvus.width * Canvus.height * 4), Canvus.width, Canvus.height, {"colorSpace": "srgb"})
RenderStartTime = null
VideoStartTime = null
} else {
create_audio(track.samplerate, track.channels)
}
@ -410,24 +451,28 @@
s += AudioQueue[i].left.length
}
return {msg: "data", left: L, right: R}
var ret = {msg: "data", t: AudSampleIndex, left: L, right: R}
AudSampleIndex += L.length
return ret
}
function reconnect_ws() {
var ws = new WebSocket(BlarfEl.getAttribute("data-target"))
ws.binaryType = "arraybuffer"
ws.onmessage = function(ev) {
ebml.poosh(new Uint8Array(ev.data))
ebml.parse()
// It would make more sense for this to be in `render` but we need the guarantee that this will run when the tab is out of focus
if(AudCtx.state == "running" && AudWorklet && AudioQueue.length) {
AudWorklet.port.postMessage(merge_audio_queue())
AudioQueue.length = 0
}
if(VideoQueue.length) {
while(document.timeline.currentTime - VideoQueue[0].t > 5000) {
VideoQueue.shift()
if(typeof ev.data === "string") {
var obj = JSON.parse(ev.data)
if(obj.status) {
BlarfEl.querySelector(".MKVStatus").innerHTML = "&bull; " + obj.status.viewer_count
}
} else {
ebml.poosh(new Uint8Array(ev.data))
ebml.parse()
while(document.hidden && VideoQueue.length > 1 && VideoQueue[VideoQueue.length - 1].t - VideoQueue[0].t <= LatencyMS) {
BufferPool.add(VideoQueue.shift().imgData)
}
}
}
@ -438,19 +483,37 @@
reconnect_ws()
function render(timestamp) {
document.querySelector(".MKVControls").style.opacity = Math.max(0, Math.min(1, 5 - (timestamp - LastControlsInterrupt) / 1000))
var nextImg = null
while(RenderStartTime && VideoQueue.length && VideoQueue[0].t + VideoBufferingOffset <= (timestamp - RenderStartTime)) {
nextImg = VideoQueue[0].imgData
VideoQueue.shift()
}
if(nextImg) {
CanvCtx.putImageData(nextImg, 0, 0)
try {
document.querySelector(".MKVControls").style.opacity = Math.max(0, Math.min(1, 5 - (timestamp - LastControlsInterrupt) / 1000))
var nextImg = null
while(RenderStartTime && VideoQueue.length && VideoQueue[0].t <= (timestamp - RenderStartTime - LatencyMS)) {
if(nextImg) BufferPool.add(nextImg.imgData)
nextImg = VideoQueue[0]
VideoQueue.shift()
}
if(nextImg) {
document.querySelector(".MKVControls").style.display = null
// Prevent the audio queue filling up and causing ever-increasing AV desync
if(AudCtx && AudCtx.state != "running" && AudioQueue && AudioQueue.length) {
if(AudioQueue[0].t < nextImg.t) {
crop_audio_queue(Math.round((nextImg.t - AudioQueue[0].t) / 1000 * AudHz))
}
}
CanvImageData.data.set(nextImg.imgData)
CanvCtx.putImageData(CanvImageData, 0, 0)
BufferPool.add(nextImg.imgData)
}
} catch(e) {
console.error(e)
}
requestAnimationFrame(render)
}
requestAnimationFrame(render)
document.querySelector(".MKVControls").style.display = "none"
})()

View File

@ -42,6 +42,9 @@
font-size: 0.4cm;
background: rgb(0, 0, 0);
background: linear-gradient(0deg, rgba(0, 0, 0, 1) 0%, rgba(0, 0, 0, 0) 100%);
display: flex;
justify-content: space-between;
align-items: baseline;
}
div#BLARF .MKVControls > * {
vertical-align: middle;
@ -53,6 +56,9 @@
cursor: pointer;
font-size: 0.75cm;
}
div#BLARF .MKVStatus {
margin-right: 0.5em;
}
div#BLARF > canvas {
background: url(intermission.jpg) black;
background-position: 0 30%;
@ -71,6 +77,10 @@
display: block;
line-height: initial;
}
span.chat-msg__heading {
width: inherit !important;
margin-bottom: 0;
}
@media(max-aspect-ratio: 1) {
div.everything {
@ -122,6 +132,11 @@
<script>
document.querySelector("#BLARF").setAttribute("data-target", STREAM_SOURCE_WS)
function randomHex(size) {
return [...self.crypto.getRandomValues(new Uint8Array(size))].map(b=>b.toString(16).padStart(2, "0")).join("")
}
const un = 'lol' + randomHex(16)
if(ENABLE_CHAT) {
converse.initialize({
view_mode: 'embedded',

View File

@ -10,9 +10,8 @@ class RawPCMWorklet extends AudioWorkletProcessor {
constructor() {
super()
this.ringL = new Float32Array(65536)
this.ringR = new Float32Array(65536)
this.ringWrite = 0
this.ringL = new Float32Array(144000)
this.ringR = new Float32Array(144000)
this.ringRead = 0
this.mute = true
@ -27,6 +26,7 @@ class RawPCMWorklet extends AudioWorkletProcessor {
}
var newaudioframes = event.data
var writeIndex = newaudioframes.t
var newlen = newaudioframes.left.length
@ -35,22 +35,18 @@ class RawPCMWorklet extends AudioWorkletProcessor {
newaudioframes.right = newaudioframes.right.slice(newaudioframes.right.length - this.ringL.length)
}
if(this.ringWrite % this.ringL.length + newaudioframes.left.length <= this.ringL.length) {
this.ringL.set(newaudioframes.left, this.ringWrite % this.ringL.length)
this.ringR.set(newaudioframes.right, this.ringWrite % this.ringL.length)
if(writeIndex % this.ringL.length + newaudioframes.left.length <= this.ringL.length) {
this.ringL.set(newaudioframes.left, writeIndex % this.ringL.length)
this.ringR.set(newaudioframes.right, writeIndex % this.ringL.length)
} else {
var boundary = this.ringL.length - this.ringWrite % this.ringL.length
var boundary = this.ringL.length - writeIndex % this.ringL.length
this.ringL.set(newaudioframes.left.slice(0, boundary), this.ringWrite % this.ringL.length)
this.ringL.set(newaudioframes.left.slice(0, boundary), writeIndex % this.ringL.length)
this.ringL.set(newaudioframes.left.slice(boundary), 0)
this.ringR.set(newaudioframes.right.slice(0, boundary), this.ringWrite % this.ringL.length)
this.ringR.set(newaudioframes.right.slice(0, boundary), writeIndex % this.ringL.length)
this.ringR.set(newaudioframes.right.slice(boundary), 0)
}
this.ringWrite += newlen
console.log(this.ringWrite - this.ringRead)
}
}
@ -64,7 +60,8 @@ class RawPCMWorklet extends AudioWorkletProcessor {
return true
}*/
var available = Math.min(left.length, Math.max(0, this.ringWrite - this.ringRead))
//var available = Math.min(left.length, Math.max(0, this.ringWrite - this.ringRead))
var available = left.length
if(this.mute === false) {
if(this.ringRead % this.ringL.length + available <= this.ringL.length) {
@ -83,6 +80,8 @@ class RawPCMWorklet extends AudioWorkletProcessor {
this.ringRead += left.length
//console.log(this.ringRead / 44100)
/*for(var s = 0; s < available; s++) {
var sw = Math.sin((this.debug + s) / 48000 * 440 * 2 * 3.1415926) * 0.3
left[s] = sw