Init
This commit is contained in:
57
src/hooks/useAppConfig.tsx
Normal file
57
src/hooks/useAppConfig.tsx
Normal file
@@ -0,0 +1,57 @@
|
||||
import jsYaml from "js-yaml";
|
||||
import { useEffect, useState } from "react";
|
||||
|
||||
// Raw app configuration as a YAML string (parsed by useAppConfig below).
// NOTE(review): NEXT_PUBLIC_ env vars are presumably inlined at build time — confirm deploy setup.
const APP_CONFIG = process.env.NEXT_PUBLIC_APP_CONFIG;
|
||||
|
||||
// Shape of the playground's runtime configuration, loaded from the
// NEXT_PUBLIC_APP_CONFIG YAML string (or defaultConfig when unset).
export type AppConfig = {
  title: string; // page title shown in the UI
  description: string; // short descriptive blurb
  github_link?: string; // optional link to the project repository
  theme_color?: string; // optional accent color name (e.g. "cyan")
  // Which agent output streams the UI should render.
  outputs: {
    audio: boolean;
    video: boolean;
    chat: boolean;
  };
  // Which local input devices the UI should offer.
  inputs: {
    mic: boolean;
    camera: boolean;
  };
  show_qr?: boolean; // presumably toggles a join-QR-code display — confirm with consumer
};
|
||||
|
||||
// Fallback configuration used when NEXT_PUBLIC_APP_CONFIG is not set
// (or, in useAppConfig, until the env config has been parsed).
const defaultConfig: AppConfig = {
  title: "Agent Playground",
  description: "A playground for testing LiveKit agents",
  theme_color: "cyan",
  // Enable every output stream by default.
  outputs: {
    audio: true,
    video: true,
    chat: true,
  },
  // Enable every input device by default.
  inputs: {
    mic: true,
    camera: true,
  },
  show_qr: false,
};
|
||||
|
||||
export const useAppConfig = (): AppConfig => {
|
||||
const [config, setConfig] = useState<any>(null);
|
||||
useEffect(() => {
|
||||
try {
|
||||
if (APP_CONFIG) {
|
||||
const parsedConfig = jsYaml.load(APP_CONFIG);
|
||||
setConfig(parsedConfig);
|
||||
console.log("parsedConfig:", parsedConfig);
|
||||
} else {
|
||||
setConfig(defaultConfig);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Error parsing NEXT_PUBLIC_APP_CONFIG:", error);
|
||||
}
|
||||
}, []);
|
||||
|
||||
return config;
|
||||
};
|
||||
112
src/hooks/useTrackVolume.tsx
Normal file
112
src/hooks/useTrackVolume.tsx
Normal file
@@ -0,0 +1,112 @@
|
||||
import { Track } from "livekit-client";
|
||||
import { useEffect, useState } from "react";
|
||||
|
||||
export const useTrackVolume = (track?: Track) => {
|
||||
const [volume, setVolume] = useState(0);
|
||||
useEffect(() => {
|
||||
if (!track || !track.mediaStream) {
|
||||
return;
|
||||
}
|
||||
|
||||
const ctx = new AudioContext();
|
||||
const source = ctx.createMediaStreamSource(track.mediaStream);
|
||||
const analyser = ctx.createAnalyser();
|
||||
analyser.fftSize = 32;
|
||||
analyser.smoothingTimeConstant = 0;
|
||||
source.connect(analyser);
|
||||
|
||||
const bufferLength = analyser.frequencyBinCount;
|
||||
const dataArray = new Uint8Array(bufferLength);
|
||||
|
||||
const updateVolume = () => {
|
||||
analyser.getByteFrequencyData(dataArray);
|
||||
let sum = 0;
|
||||
for (let i = 0; i < dataArray.length; i++) {
|
||||
const a = dataArray[i];
|
||||
sum += a * a;
|
||||
}
|
||||
setVolume(Math.sqrt(sum / dataArray.length) / 255);
|
||||
};
|
||||
|
||||
const interval = setInterval(updateVolume, 1000 / 30);
|
||||
|
||||
return () => {
|
||||
source.disconnect();
|
||||
clearInterval(interval);
|
||||
};
|
||||
}, [track, track?.mediaStream]);
|
||||
|
||||
return volume;
|
||||
};
|
||||
|
||||
const normalizeFrequencies = (frequencies: Float32Array) => {
|
||||
const normalizeDb = (value: number) => {
|
||||
const minDb = -100;
|
||||
const maxDb = -10;
|
||||
let db = 1 - (Math.max(minDb, Math.min(maxDb, value)) * -1) / 100;
|
||||
db = Math.sqrt(db);
|
||||
|
||||
return db;
|
||||
};
|
||||
|
||||
// Normalize all frequency values
|
||||
return frequencies.map((value) => {
|
||||
if (value === -Infinity) {
|
||||
return 0;
|
||||
}
|
||||
return normalizeDb(value);
|
||||
});
|
||||
};
|
||||
|
||||
export const useMultibandTrackVolume = (
|
||||
track?: Track,
|
||||
bands: number = 5,
|
||||
loPass: number = 100,
|
||||
hiPass: number = 600
|
||||
) => {
|
||||
const [frequencyBands, setFrequencyBands] = useState<Float32Array[]>([]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!track || !track.mediaStream) {
|
||||
return;
|
||||
}
|
||||
|
||||
const ctx = new AudioContext();
|
||||
const source = ctx.createMediaStreamSource(track.mediaStream);
|
||||
const analyser = ctx.createAnalyser();
|
||||
analyser.fftSize = 2048;
|
||||
source.connect(analyser);
|
||||
|
||||
const bufferLength = analyser.frequencyBinCount;
|
||||
const dataArray = new Float32Array(bufferLength);
|
||||
|
||||
const updateVolume = () => {
|
||||
analyser.getFloatFrequencyData(dataArray);
|
||||
let frequencies: Float32Array = new Float32Array(dataArray.length);
|
||||
for (let i = 0; i < dataArray.length; i++) {
|
||||
frequencies[i] = dataArray[i];
|
||||
}
|
||||
frequencies = frequencies.slice(loPass, hiPass);
|
||||
|
||||
const normalizedFrequencies = normalizeFrequencies(frequencies);
|
||||
const chunkSize = Math.ceil(normalizedFrequencies.length / bands);
|
||||
const chunks: Float32Array[] = [];
|
||||
for (let i = 0; i < bands; i++) {
|
||||
chunks.push(
|
||||
normalizedFrequencies.slice(i * chunkSize, (i + 1) * chunkSize)
|
||||
);
|
||||
}
|
||||
|
||||
setFrequencyBands(chunks);
|
||||
};
|
||||
|
||||
const interval = setInterval(updateVolume, 10);
|
||||
|
||||
return () => {
|
||||
source.disconnect();
|
||||
clearInterval(interval);
|
||||
};
|
||||
}, [track, track?.mediaStream, loPass, hiPass, bands]);
|
||||
|
||||
return frequencyBands;
|
||||
};
|
||||
27
src/hooks/useWindowResize.ts
Normal file
27
src/hooks/useWindowResize.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import { useEffect, useState } from "react";
|
||||
|
||||
export const useWindowResize = () => {
|
||||
const [size, setSize] = useState({
|
||||
width: 0,
|
||||
height: 0,
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
const handleResize = () => {
|
||||
setSize({
|
||||
width: window.innerWidth,
|
||||
height: window.innerHeight,
|
||||
});
|
||||
};
|
||||
|
||||
handleResize();
|
||||
|
||||
window.addEventListener("resize", handleResize);
|
||||
|
||||
return () => {
|
||||
window.removeEventListener("resize", handleResize);
|
||||
};
|
||||
}, []);
|
||||
|
||||
return size;
|
||||
};
|
||||
Reference in New Issue
Block a user