Started work on video playback using a web worker

This commit is contained in:
Boofdev 2025-03-16 15:52:07 +01:00
parent fa275a2187
commit 176c22c16f
5 changed files with 133 additions and 12 deletions

View File

@ -611,7 +611,7 @@
{#each lineData as line, i (i)}
<span>
{#if line.type === 'input'}
<p class="prompt">{user}@{machine}:$&nbsp;</p>
<p class="prompt">{user}@{machine}:$</p>
<pre class="input-old">{line.command}</pre>
<br />
{:else if line.type === 'output'}

View File

@ -138,6 +138,8 @@ pre {
.input {
color: var(--color-input);
font-family: var(--font-family);
background: transparent;
@apply p-0
}
.input:focus {

View File

@ -63,4 +63,31 @@ export type LineDataEntry =
| {
output: string;
type: 'output' | 'outputHtml';
};
};
// Messages sent from the main thread to the video worker.
// - 'init': supplies the full video payload and rendering mode once, up front.
// - 'requestFrame': asks the worker to post the rendered text for frame `index`.
// - 'stats': requests buffer statistics (not yet handled by the worker's
//   visible message handler — TODO confirm intended behavior).
export type Video2WorkerMessage = {
type: 'init';
video: TextVideo2;
oneBit?: boolean;
} | {
type: 'requestFrame';
index: number;
} | {
type: 'stats';
}
// Messages posted from the video worker back to the main thread.
// - 'frame': the rendered text for frame `index`.
// - 'end': signals the end of playback.
// - 'stats': frame-buffer occupancy (`size`) versus capacity (`max`).
// NOTE(review): 'end' and 'stats' are declared but not yet emitted by the
// worker code visible in this commit — confirm before relying on them.
export type Video2WorkerResponse = {
type: 'frame';
index: number;
text: string;
} | {
type: 'end';
} | {
type: 'stats';
stats: {
buffer: {
size: number;
max: number;
}
};
}

View File

@ -1,8 +1,9 @@
import type { StdlibType, TextVideo2 } from "./types";
import type { StdlibType, TextVideo2, Video2WorkerResponse, Video2WorkerMessage } from "./types";
import * as Tone from "tone";
import axios, { type AxiosResponse } from "axios";
import { unpack, pack } from 'msgpackr';
import Pako from "pako";
import video2Worker from "./video2.worker?worker";
type PlayOptions = {
speed?: number;
@ -70,11 +71,36 @@ export async function play(
stdlib.print("Width: " + video.video_info.width);
stdlib.print("Height: " + video.video_info.height);
// Create the web worker (make sure the worker file is named "videoWorker.js")
const worker = new video2Worker();
// Receive messages from the worker.
worker.onmessage = (e) => {
const data = e.data as Video2WorkerResponse;
switch (data.type) {
case "frame":
const frame = data.text;
const lines = frame.split("\n");
stdlib.setLineData([]);
for (let i = 0; i < lines.length; i++) {
stdlib.print(lines[i]);
}
break;
case "stats":
}
};
// Send the video data and oneBit option to the worker for decoding.
worker.postMessage({ type: "init", video, oneBit: options.oneBit} as Video2WorkerMessage);
stdlib.print(
"Your video will start in 5 seconds, if the video looks weird then you might need to zoom out."
);
await new Promise((resolve) => setTimeout(resolve, 5000));
// Start audio when Tone is loaded.
Tone.loaded().then(() => {
if (options.speed) {
player.playbackRate = options.speed;
@ -123,15 +149,8 @@ export async function play(
} else {
skippedInARow = 0
}
let frameData: Uint8Array
if (video.video_info.compression === "gzip") {
frameData = Pako.inflate(video.frames[i].data)
} else {
frameData = video.frames[i].data
}
// Print the frame
stdlib.setLineData([]);
stdlib.print(pixelsToChars(frameData, video.video_info.width, video.video_info.height, lut));
worker.postMessage({ type: "requestFrame", index: i } as Video2WorkerMessage);
i++;
} else {
stdlib.showStuff;
@ -142,7 +161,6 @@ export async function play(
}, delay);
});
}
function frameToTime(fps: number, frame: number) {
let ms = (frame / fps) * 1000;
let seconds = Math.floor(ms / 1000);

View File

@ -0,0 +1,74 @@
import Pako from 'pako';
import type { Video2WorkerMessage, Video2WorkerResponse, TextVideo2 } from './types';
// Decoded text frames keyed by frame index; used as a sparse look-ahead cache.
let frameBuffer: string[] = [];
// Video payload received via the 'init' message; placeholder object until then.
let video = {} as TextVideo2;
// When true, frames render with a two-character (space/'#') ramp.
let oneBit = false;
// Build the pixel-to-char lookup table (LUT)
// Build a 256-entry lookup table mapping a pixel value (0-255) to an ASCII
// character. One-bit mode uses a space/'#' pair; otherwise a 13-step ramp
// from darkest (space) to brightest ('@').
function createLUT(oneBit: boolean): string[] {
  const ramp = oneBit ? ' #' : ' .,-:;=+*#%$@';
  const maxIndex = ramp.length - 1;
  return Array.from({ length: 256 }, (_, value) =>
    ramp[Math.floor((value / 255) * maxIndex)]
  );
}
// Convert the raw pixel data to a text frame.
// Render raw grayscale pixels as a text frame: one LUT character per pixel,
// one line per row, rows joined with newlines.
function pixelsToChars(pixels: Uint8Array, width: number, height: number, lut: string[]): string {
  const rows: string[] = [];
  for (let row = 0; row < height; row++) {
    const offset = row * width;
    const cells: string[] = [];
    for (let col = 0; col < width; col++) {
      cells.push(lut[pixels[offset + col]]);
    }
    rows.push(cells.join(''));
  }
  return rows.join('\n');
}
// Decode frame `index` into the text cache. Already-cached frames are skipped
// unless the caller explicitly forces a refresh via `addEvenIfpresent`.
function addFrameToBuffer(index: number, options: { addEvenIfpresent?: boolean } = {}) {
  // Skip work when the frame is already cached and no refresh was requested.
  if (frameBuffer[index] && !options.addEvenIfpresent) {
    return;
  }
  const frame = video.frames[index];
  // gzip-compressed frames must be inflated before rendering.
  const raw = video.video_info.compression === 'gzip'
    ? Pako.inflate(frame.data)
    : frame.data;
  const { width, height } = video.video_info;
  frameBuffer[index] = pixelsToChars(raw, width, height, createLUT(oneBit));
}
// Decode up to five seconds' worth of frames starting at `index`, clamped to
// the end of the video, so playback stays ahead of the playhead.
function add5sOfFramesToBuffer(index: number) {
  const lookahead = 5 * video.video_info.fps;
  const end = Math.min(video.frames.length, index + lookahead);
  for (let frame = index; frame < end; frame++) {
    addFrameToBuffer(frame);
  }
}
self.onmessage = function (e) {
let data = e.data as Video2WorkerMessage;
switch (data.type) {
case 'init':
video = data.video;
oneBit = data.oneBit || false;
add5sOfFramesToBuffer(0);
break;
case 'requestFrame':
let frame = frameBuffer[data.index];
if (frame) {
postMessage({ type: 'frame', index: data.index, text: frame });
}
add5sOfFramesToBuffer(data.index);
break;
}
};
export {};