Fix memory leakage
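The player previously allocated a fresh ImageData (and its backing Uint8ClampedArray) for every decoded video frame and left the old ones to the garbage collector. The diff below instead keeps a BufferPool Set of retired pixel buffers, refills one of them for each incoming frame, and paints through a single long-lived CanvImageData. A minimal standalone sketch of that recycle pattern (BufferPool, VideoQueue and the Set-based reuse follow the diff; acquire/onDecodedFrame/drawNext are illustrative helper names, not code from the commit):

var BufferPool = new Set()
var VideoQueue = []

// Hand out a pooled buffer when one exists, otherwise allocate.
// All frames share one resolution, so any pooled buffer is the right size.
function acquire(bytes) {
	for(const v of BufferPool) {
		BufferPool.delete(v)
		return v
	}
	return new Uint8ClampedArray(bytes)
}

// Copy decoded pixels into a pooled buffer and queue the frame.
function onDecodedFrame(pixels, t, w, h) {
	var b = acquire(pixels.length)
	b.set(pixels)
	VideoQueue.push({t: t, imgData: b, w: w, h: h})
}

// Paint by refilling one long-lived ImageData in place,
// then return the frame's buffer to the pool.
function drawNext(ctx, reusedImageData) {
	var frame = VideoQueue.shift()
	if(!frame) return
	reusedImageData.data.set(frame.imgData)
	ctx.putImageData(reusedImageData, 0, 0)
	BufferPool.add(frame.imgData)
}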
This commit is contained in:
	parent e929b5af1e
	commit 862c52f567

blarf.js | 149

@@ -2,24 +2,51 @@
 	var VideoQueue = []
 	var AudioQueue = []
 	
+	class DynamicTypedArray {
+		constructor(type) {
+			this.type = type
+			this.backend = new type(1024)
+			this.length = 0
+		}
+		add(b) {
+			if(this.length + b.length > this.backend.length) {
+				var newlen = this.backend.length
+				while(this.length + b.length > newlen) { newlen = newlen * 2 }
+				var be2 = new this.type(newlen)
+				be2.set(this.backend, 0)
+				this.backend = be2
+			}
+			this.backend.set(b, this.length)
+			this.length += b.length
+		}
+	}
+	
 	var BlarfEl = document.getElementById("BLARF")
 	BlarfEl.innerHTML = `
 		<canvas width="1280" height="720"></canvas>
 		<div class="MKVControls">
-			<div class="MKVSpeaker"><span class="MKVSpeakerOff">🔈︎</span><span class="MKVSpeakerOn" style="display:none;">🔊︎</span></div>
-			<span class="MKVCurrentTime">00:00:00</span>
-			<span class="MKVStats"></span>
+			<div>
+				<div class="MKVSpeaker"><span class="MKVSpeakerOff">🔈︎</span><span class="MKVSpeakerOn" style="display:none;">🔊︎</span></div>
+				<span class="MKVCurrentTime">00:00:00</span>
+				<span class="MKVStats"></span>
+			</div>
+			<div>
+				<span class="MKVStatus"></span>
+			</div>
 		</div>
 	`
 	
 	var Canvus = BlarfEl.querySelector("canvas")
 	var CanvCtx = Canvus.getContext("2d")
+	var CanvImageData
+	
+	var LatencyMS = 1000
 	
 	var AudCtx
 	var AudScript, AudWorklet
 	var AudHz
-	var AudDejitter
 	var AudMuted = true
+	var AudSampleIndex = 0
 	
 	function create_audio(hz, channels) {
 		if(AudCtx) {
@@ -30,9 +57,6 @@
 		
 		AudHz = hz
 		
-		// Fill up buffer for 1 second before playing
-		AudDejitter = AudHz
-		
 		var DebugSine = 0
 		
 		AudCtx = new AudioContext({sampleRate: hz})
@@ -104,7 +128,7 @@
 	document.querySelector(".MKVSpeaker").onclick = togglemute
 	
 	document.onkeypress = function(e) {
-		if(e.key.toUpperCase() == "M") {
+		if(document.activeElement.tagName != "TEXTAREA" && e.key.toUpperCase() == "M") {
 			togglemute()
 		}
 	}
@@ -130,20 +154,31 @@
 		}
 	}
 	
+	var BufferPool = new Set()
+	
 	var Statistics = {}
 	var TheWorker = new Worker("blarfwork.js")
 	TheWorker.onmessage = function(e) {
 		if(e.data.width) {
-			var imgData = new ImageData(new Uint8ClampedArray(e.data.data.buffer), e.data.width, e.data.height, {colorSpace: "srgb"})
-			VideoQueue.push({t: e.data.t, imgData: imgData})
+//			var imgData = new ImageData(new Uint8ClampedArray(e.data.data.buffer), e.data.width, e.data.height, {colorSpace: "srgb"})
+			var b
+			if(BufferPool.size == 0) {
+				b = new Uint8ClampedArray(e.data.data.buffer)
+			} else {
+				for(const v of BufferPool) {
+					b = v
+					break
+				}
+				BufferPool.delete(b)
+				b.set(e.data.data)
+			}
+			VideoQueue.push({t: e.data.t, imgData: b, w: e.data.width, h: e.data.height})
 		} else if(e.data.samples) {
-			AudioQueue.push({left: e.data.left, right: e.data.right || e.data.left})
+			AudioQueue.push({t: e.data.t, left: e.data.left, right: e.data.right || e.data.left})
 			
-			// Prevent the audio queue filling up and causing ever-increasing AV desync
-			if(AudCtx.state != "running") {
-				var durationInAudioQueue = AudioQueue.length ? AudioQueue.reduce((acc, el) => acc + el.left.length, 0) : 0
-				var durationToRemove = Math.max(durationInAudioQueue - (VideoQueue.length ? (VideoQueue[VideoQueue.length - 1].t - VideoQueue[0].t) : 0) * AudHz / 1000, 0)
-				crop_audio_queue(durationToRemove)
+			if(AudCtx.state == "running" && AudWorklet && AudioQueue.length) {
+				AudWorklet.port.postMessage(merge_audio_queue())
+				AudioQueue.length = 0
 			}
 		}
 		
@@ -333,10 +368,7 @@
 				this.currentClusterTime = EBMLParser.vi_to_i(data)
 				
 				if(!RenderStartTime) {
-					RenderStartTime = document.timeline.currentTime + 600
-				}
-				if(!VideoStartTime) {
-					VideoStartTime = this.currentClusterTime
+					RenderStartTime = performance.now()
 				}
 			} else if(elID == 0xA3) {
 				// Cluster -> SimpleBlock
@@ -353,10 +385,16 @@
 				var TotalTime = (this.currentClusterTime + timestamp) / 1000
 				document.querySelector(".MKVCurrentTime").innerText = pad(Math.floor(TotalTime / 3600), 2) + ":" + pad(Math.floor(TotalTime / 60 % 60), 2) + ":" + pad(Math.floor(TotalTime % 60), 2)
 				
+				var playerTimestamp = this.currentClusterTime + timestamp
+				
 				if(track) {
+					if(!VideoStartTime) {
+						VideoStartTime = playerTimestamp
+					}
+					
 					var packet = data.subarray(4)
 					
-					TheWorker.postMessage({cmd: "decode", id: trackID, t: timestamp + this.currentClusterTime - VideoStartTime, packet: packet, kf: kf})
+					TheWorker.postMessage({cmd: "decode", id: trackID, t: playerTimestamp - VideoStartTime, packet: packet, kf: kf})
 				}
 			}
 		}
@@ -378,6 +416,9 @@
 				if(track.type == "video") {
 					Canvus.width = track.width
 					Canvus.height = track.height
+					CanvImageData = new ImageData(new Uint8ClampedArray(Canvus.width * Canvus.height * 4), Canvus.width, Canvus.height, {"colorSpace": "srgb"})
+					RenderStartTime = null
+					VideoStartTime = null
 				} else {
 					create_audio(track.samplerate, track.channels)
 				}
@@ -410,24 +451,28 @@
 			s += AudioQueue[i].left.length
 		}
 		
-		return {msg: "data", left: L, right: R}
+		var ret = {msg: "data", t: AudSampleIndex, left: L, right: R}
+		
+		AudSampleIndex += L.length
+		
+		return ret
 	}
 	
 	function reconnect_ws() {
 		var ws = new WebSocket(BlarfEl.getAttribute("data-target"))
 		ws.binaryType = "arraybuffer"
 		ws.onmessage = function(ev) {
-			ebml.poosh(new Uint8Array(ev.data))
-			ebml.parse()
-			
-			// It would make more sense for this to be in `render` but we need the guarantee that this will run when the tab is out of focus
-			if(AudCtx.state == "running" && AudWorklet && AudioQueue.length) {
-				AudWorklet.port.postMessage(merge_audio_queue())
-				AudioQueue.length = 0
-			}
-			if(VideoQueue.length) {
-				while(document.timeline.currentTime - VideoQueue[0].t > 5000) {
-					VideoQueue.shift()
+			if(typeof ev.data === "string") {
+				var obj = JSON.parse(ev.data)
+				if(obj.status) {
+					BlarfEl.querySelector(".MKVStatus").innerHTML = "• " + obj.status.viewer_count
+				}
+			} else {
+				ebml.poosh(new Uint8Array(ev.data))
+				ebml.parse()
+				
+				while(document.hidden && VideoQueue.length > 1 && VideoQueue[VideoQueue.length - 1].t - VideoQueue[0].t <= LatencyMS) {
+					BufferPool.add(VideoQueue.shift().imgData)
 				}
 			}
 		}
@@ -438,19 +483,37 @@
 	reconnect_ws()
 	
 	function render(timestamp) {
-		document.querySelector(".MKVControls").style.opacity = Math.max(0, Math.min(1, 5 - (timestamp - LastControlsInterrupt) / 1000))
-		
-		var nextImg = null
-		while(RenderStartTime && VideoQueue.length && VideoQueue[0].t + VideoBufferingOffset <= (timestamp - RenderStartTime)) {
-			nextImg = VideoQueue[0].imgData
-			VideoQueue.shift()
-		}
-		
-		if(nextImg) {
-			CanvCtx.putImageData(nextImg, 0, 0)
+		try {
+			document.querySelector(".MKVControls").style.opacity = Math.max(0, Math.min(1, 5 - (timestamp - LastControlsInterrupt) / 1000))
+			
+			var nextImg = null
+			while(RenderStartTime && VideoQueue.length && VideoQueue[0].t <= (timestamp - RenderStartTime - LatencyMS)) {
+				if(nextImg) BufferPool.add(nextImg.imgData)
+				nextImg = VideoQueue[0]
+				VideoQueue.shift()
+			}
+			
+			if(nextImg) {
+				document.querySelector(".MKVControls").style.display = null
+				
+				// Prevent the audio queue filling up and causing ever-increasing AV desync
+				if(AudCtx && AudCtx.state != "running" && AudioQueue && AudioQueue.length) {
+					if(AudioQueue[0].t < nextImg.t) {
+						crop_audio_queue(Math.round((nextImg.t - AudioQueue[0].t) / 1000 * AudHz))
+					}
+				}
+				
+				CanvImageData.data.set(nextImg.imgData)
+				CanvCtx.putImageData(CanvImageData, 0, 0)
+				BufferPool.add(nextImg.imgData)
+			}
+		} catch(e) {
+			console.error(e)
 		}
 		
 		requestAnimationFrame(render)
 	}
 	requestAnimationFrame(render)
+	
+	document.querySelector(".MKVControls").style.display = "none"
 })()
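The first hunk above also adds DynamicTypedArray, an append-only wrapper that grows its backing typed array by doubling instead of reallocating on every append. A small usage sketch (the class body is copied from the hunk; the sample data is made up):

class DynamicTypedArray {
	constructor(type) {
		this.type = type
		this.backend = new type(1024)
		this.length = 0
	}
	add(b) {
		if(this.length + b.length > this.backend.length) {
			var newlen = this.backend.length
			while(this.length + b.length > newlen) { newlen = newlen * 2 }
			var be2 = new this.type(newlen)
			be2.set(this.backend, 0)
			this.backend = be2
		}
		this.backend.set(b, this.length)
		this.length += b.length
	}
}

var samples = new DynamicTypedArray(Float32Array)
samples.add(new Float32Array([0.1, 0.2, 0.3]))
samples.add(new Float32Array(2048))   // forces the 1024-element backing store to double twice, to 4096
console.log(samples.length)           // 2051 samples are valid; the rest of the backing store is spare capacity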
index.html | 15

@@ -42,6 +42,9 @@
 				font-size: 0.4cm;
 				background: rgb(0, 0, 0);
 				background: linear-gradient(0deg, rgba(0, 0, 0, 1) 0%, rgba(0, 0, 0, 0) 100%);
+				display: flex;
+				justify-content: space-between;
+				align-items: baseline;
 			}
 			div#BLARF .MKVControls > * {
 				vertical-align: middle;
@@ -53,6 +56,9 @@
 				cursor: pointer;
 				font-size: 0.75cm;
 			}
+			div#BLARF .MKVStatus {
+				margin-right: 0.5em;
+			}
 			div#BLARF > canvas {
 				background: url(intermission.jpg) black;
 				background-position: 0 30%;
@@ -71,6 +77,10 @@
 				display: block;
 				line-height: initial;
 			}
+			span.chat-msg__heading {
+				width: inherit !important;
+				margin-bottom: 0;
+			}
 			
 			@media(max-aspect-ratio: 1) {
 				div.everything {
@@ -122,6 +132,11 @@
 		<script>
 			document.querySelector("#BLARF").setAttribute("data-target", STREAM_SOURCE_WS)
 			
+			function randomHex(size) {
+				return [...self.crypto.getRandomValues(new Uint8Array(size))].map(b=>b.toString(16).padStart(2, "0")).join("")
+			}
+			const un = 'lol' + randomHex(16)
+			
 			if(ENABLE_CHAT) {
 				converse.initialize({
 					view_mode: 'embedded',

RawPCMWorklet (AudioWorkletProcessor)

@@ -10,9 +10,8 @@ class RawPCMWorklet extends AudioWorkletProcessor {
 	constructor() {
 		super()
 		
-		this.ringL = new Float32Array(65536)
-		this.ringR = new Float32Array(65536)
-		this.ringWrite = 0
+		this.ringL = new Float32Array(144000)
+		this.ringR = new Float32Array(144000)
 		this.ringRead = 0
 		this.mute = true
 		
@@ -27,6 +26,7 @@ class RawPCMWorklet extends AudioWorkletProcessor {
 			}
 			
 			var newaudioframes = event.data
+			var writeIndex = newaudioframes.t
 			
 			var newlen = newaudioframes.left.length
 			
@@ -35,22 +35,18 @@ class RawPCMWorklet extends AudioWorkletProcessor {
 				newaudioframes.right = newaudioframes.right.slice(newaudioframes.right.length - this.ringL.length)
 			}
 			
-			if(this.ringWrite % this.ringL.length + newaudioframes.left.length <= this.ringL.length) {
-				this.ringL.set(newaudioframes.left, this.ringWrite % this.ringL.length)
-				this.ringR.set(newaudioframes.right, this.ringWrite % this.ringL.length)
+			if(writeIndex % this.ringL.length + newaudioframes.left.length <= this.ringL.length) {
+				this.ringL.set(newaudioframes.left, writeIndex % this.ringL.length)
+				this.ringR.set(newaudioframes.right, writeIndex % this.ringL.length)
 			} else {
-				var boundary = this.ringL.length - this.ringWrite % this.ringL.length
+				var boundary = this.ringL.length - writeIndex % this.ringL.length
 				
-				this.ringL.set(newaudioframes.left.slice(0, boundary), this.ringWrite % this.ringL.length)
+				this.ringL.set(newaudioframes.left.slice(0, boundary), writeIndex % this.ringL.length)
 				this.ringL.set(newaudioframes.left.slice(boundary), 0)
 				
-				this.ringR.set(newaudioframes.right.slice(0, boundary), this.ringWrite % this.ringL.length)
+				this.ringR.set(newaudioframes.right.slice(0, boundary), writeIndex % this.ringL.length)
 				this.ringR.set(newaudioframes.right.slice(boundary), 0)
 			}
-			
-			this.ringWrite += newlen
-			
-			console.log(this.ringWrite - this.ringRead)
 		}
 	}
 	
@@ -64,7 +60,8 @@ class RawPCMWorklet extends AudioWorkletProcessor {
 			return true
 		}*/
 		
-		var available = Math.min(left.length, Math.max(0, this.ringWrite - this.ringRead))
+		//var available = Math.min(left.length, Math.max(0, this.ringWrite - this.ringRead))
+		var available = left.length
 		
 		if(this.mute === false) {
 			if(this.ringRead % this.ringL.length + available <= this.ringL.length) {
@@ -83,6 +80,8 @@ class RawPCMWorklet extends AudioWorkletProcessor {
 		
 		this.ringRead += left.length
 		
+		//console.log(this.ringRead / 44100)
+		
 		/*for(var s = 0; s < available; s++) {
 			var sw = Math.sin((this.debug + s) / 48000 * 440 * 2 * 3.1415926) * 0.3
 			left[s] = sw
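With this change the worklet no longer keeps its own ringWrite cursor: merge_audio_queue() in blarf.js tags each batch with AudSampleIndex, the absolute number of samples sent so far, and the worklet writes the batch at that index modulo the ring size. The write position therefore tracks the position in the stream rather than how many messages happened to arrive, which is presumably what keeps the ring from drifting against ringRead. A reduced sketch of the wrap-around write (ring size and batch values are arbitrary; the real code first trims batches longer than the ring):

function ringWrite(ring, writeIndex, samples) {
	var start = writeIndex % ring.length
	if(start + samples.length <= ring.length) {
		ring.set(samples, start)                      // fits without wrapping
	} else {
		var boundary = ring.length - start            // room left before the end of the ring
		ring.set(samples.slice(0, boundary), start)   // fill the tail of the ring...
		ring.set(samples.slice(boundary), 0)          // ...then wrap to the front
	}
}

var ring = new Float32Array(8)
ringWrite(ring, 6, new Float32Array([1, 2, 3, 4]))    // lands in slots 6, 7, 0, 1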