<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Neural Radio Learning Interface</title>
<style>
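/* Green-on-black terminal look; the red "Recording" indicator is only visible
   while <body> carries the .active class */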
body {
background: #000;
color: #0f0;
font-family: monospace;
padding: 20px;
margin: 0;
}
.container {
max-width: 1000px;
margin: 0 auto;
}
.radio-source {
border: 1px solid #0f0;
padding: 10px;
margin: 10px 0;
}
.visualizer {
display: grid;
grid-template-columns: 1fr 1fr;
gap: 10px;
margin: 10px 0;
}
canvas {
background: #111;
border: 1px solid #0f0;
width: 100%;
height: 100px;
}
.controls {
display: flex;
gap: 10px;
margin: 10px 0;
}
button {
background: #111;
color: #0f0;
border: 1px solid #0f0;
padding: 10px 20px;
cursor: pointer;
}
button:hover {
background: #0f0;
color: #000;
}
.status {
border: 1px solid #0f0;
padding: 10px;
margin: 10px 0;
}
.meter {
width: 100%;
height: 20px;
background: #111;
border: 1px solid #0f0;
margin: 5px 0;
}
.meter-fill {
height: 100%;
background: #0f0;
width: 0%;
transition: width 0.3s;
}
.memory-bank {
border: 1px solid #0f0;
padding: 10px;
margin: 10px 0;
height: 150px;
overflow-y: auto;
}
.neural-response {
border: 1px solid #0f0;
padding: 10px;
margin: 10px 0;
}
@keyframes pulse {
0% { opacity: 1; }
50% { opacity: 0.5; }
100% { opacity: 1; }
}
.recording {
color: #f00;
animation: pulse 1s infinite;
display: none;
}
.active .recording {
display: inline;
}
</style>
</head>
<body>
<div class="container">
<h2>Neural Radio Learning Interface</h2>
<div class="radio-source">
<h3>Radio Source</h3>
<audio id="radioStream" crossorigin="anonymous">
<source src="http://stream.live.vc.bbcmedia.co.uk/bbc_world_service" type="audio/mpeg">
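<!-- NOTE: with crossorigin="anonymous" the stream must be served with CORS headers,
     and an http:// source may be blocked as mixed content when this page is served over https -->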
</audio>
<div>Status: <span id="radioStatus">Not connected</span></div>
</div>
<div class="visualizer">
<div>
<div>Input Signal</div>
<canvas id="inputVisual"></canvas>
</div>
<div>
<div>Radio Signal</div>
<canvas id="radioVisual"></canvas>
</div>
</div>
<div class="status">
<div>Neural Learning Progress:</div>
<div class="meter">
<div class="meter-fill" id="learningMeter"></div>
</div>
<div>Pattern Recognition:</div>
<div class="meter">
<div class="meter-fill" id="patternMeter"></div>
</div>
</div>
<div class="controls">
<button id="startBtn">Start Learning</button>
<button id="stopBtn">Stop</button>
<button id="respondBtn">Force Response</button>
<span class="recording">● Recording</span>
</div>
<div class="memory-bank">
<h3>Memory Bank</h3>
<div id="memories"></div>
</div>
<div class="neural-response">
<h3>Neural Response</h3>
<div id="response">Waiting for input...</div>
</div>
</div>
<script>
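// NeuralRadioInterface drives the demo UI: it visualizes microphone and radio
// audio with Web Audio analyser nodes, while the "learning" progress, memories,
// and responses are simulated with random values rather than an actual neural network.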
class NeuralRadioInterface {
constructor() {
this.audioContext = null;
this.radioAnalyser = null;
this.micAnalyser = null;
this.isLearning = false;
this.memories = [];
this.learningProgress = 0;
this.patternStrength = 0;
this.initializeUI();
this.setupEventListeners();
}
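// Cache references to the DOM elements the class updates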
initializeUI() {
this.inputCanvas = document.getElementById('inputVisual');
this.radioCanvas = document.getElementById('radioVisual');
this.learningMeter = document.getElementById('learningMeter');
this.patternMeter = document.getElementById('patternMeter');
this.memoriesDiv = document.getElementById('memories');
this.responseDiv = document.getElementById('response');
this.radioStatus = document.getElementById('radioStatus');
}
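// Wire up the control buttons and the radio element's status reporting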
setupEventListeners() {
document.getElementById('startBtn').onclick = () => this.start();
document.getElementById('stopBtn').onclick = () => this.stop();
document.getElementById('respondBtn').onclick = () => this.generateResponse();
const radio = document.getElementById('radioStream');
radio.onplay = () => this.radioStatus.textContent = 'Connected';
radio.onerror = () => this.radioStatus.textContent = 'Error connecting';
}
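// Start learning: request the microphone, wire mic and radio into analyser nodes,
// then kick off the learn/visualize/monitor loops.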
async start() {
try {
// Build the audio graph once and re-use it on later starts:
// an <audio> element can only ever be attached to a single MediaElementSourceNode.
if(!this.audioContext) {
this.audioContext = new (window.AudioContext || window.webkitAudioContext)();
// Setup microphone input
const micStream = await navigator.mediaDevices.getUserMedia({audio: true});
const micSource = this.audioContext.createMediaStreamSource(micStream);
this.micAnalyser = this.audioContext.createAnalyser();
micSource.connect(this.micAnalyser);
// Setup radio input
const radioSource = this.audioContext.createMediaElementSource(document.getElementById('radioStream'));
this.radioAnalyser = this.audioContext.createAnalyser();
radioSource.connect(this.radioAnalyser);
radioSource.connect(this.audioContext.destination);
} else {
await this.audioContext.resume();
}
const radio = document.getElementById('radioStream');
radio.play().catch(err => {
this.radioStatus.textContent = 'Stream error: ' + err.message;
});
this.isLearning = true;
document.body.classList.add('active');
this.learn();
this.updateVisuals();
this.monitorInput();
} catch(err) {
console.error('Error:', err);
this.responseDiv.textContent = 'Error: ' + err.message;
}
}
stop() {
this.isLearning = false;
document.getElementById('radioStream').pause();
document.body.classList.remove('active');
if(this.audioContext) {
// Suspend rather than close so the same audio graph can be resumed by start()
this.audioContext.suspend();
}
}
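// Simulated learning loop: progress and "memories" are randomly generated every 100 ms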
learn() {
if(!this.isLearning) return;
// Simulate learning from radio stream
if(this.learningProgress < 100) {
this.learningProgress += Math.random() * 0.5;
this.learningMeter.style.width = this.learningProgress + '%';
}
// Store simulated memories
if(Math.random() < 0.1) {
const memory = `Learned pattern ${Math.floor(Math.random() * 1000)} at ${new Date().toLocaleTimeString()}`;
this.memories.unshift(memory);
if(this.memories.length > 10) this.memories.pop();
this.updateMemories();
}
setTimeout(() => this.learn(), 100);
}
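// Poll the microphone level (mean deviation from the 128 midpoint of the byte samples);
// sustained input raises patternStrength until it crosses 80 and triggers a response.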
monitorInput() {
if(!this.isLearning) return;
const bufferLength = this.micAnalyser.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
this.micAnalyser.getByteTimeDomainData(dataArray);
// Analyze input and trigger response
let sum = 0;
for(let i = 0; i < bufferLength; i++) {
sum += Math.abs(dataArray[i] - 128);
}
const inputLevel = sum / bufferLength;
this.patternStrength = Math.max(0, Math.min(100, this.patternStrength + (inputLevel > 10 ? 5 : -2))); // clamp to 0-100 so silence cannot drive the value far negative
this.patternMeter.style.width = this.patternStrength + '%';
if(this.patternStrength > 80) {
this.generateResponse();
this.patternStrength = 0;
}
setTimeout(() => this.monitorInput(), 100);
}
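// Show a canned status message and play a short beep as the "response"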
generateResponse() {
const responses = [
'Analyzing input pattern...',
'Pattern matched! Generating response...',
'Processing neural pathways...',
'Synthesizing radio fragments...',
'Generating response from learned patterns...'
];
this.responseDiv.textContent = responses[Math.floor(Math.random() * responses.length)];
// Simulate an audible response with a short 440 Hz beep
// Guard against Force Response being clicked before Start has created the AudioContext
if(!this.audioContext) return;
const oscillator = this.audioContext.createOscillator();
const gain = this.audioContext.createGain();
oscillator.connect(gain);
gain.connect(this.audioContext.destination);
oscillator.frequency.setValueAtTime(440, this.audioContext.currentTime);
gain.gain.setValueAtTime(0.1, this.audioContext.currentTime);
gain.gain.exponentialRampToValueAtTime(0.001, this.audioContext.currentTime + 0.5);
oscillator.start();
oscillator.stop(this.audioContext.currentTime + 0.5);
}
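// Redraw both waveform displays every animation frame while learning is active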
updateVisuals() {
if(!this.isLearning) return;
// Draw input visualization
if(this.micAnalyser) {
const inputData = new Uint8Array(this.micAnalyser.frequencyBinCount);
this.micAnalyser.getByteTimeDomainData(inputData);
this.drawWaveform(this.inputCanvas, inputData, '#0f0');
}
// Draw radio visualization
if(this.radioAnalyser) {
const radioData = new Uint8Array(this.radioAnalyser.frequencyBinCount);
this.radioAnalyser.getByteTimeDomainData(radioData);
this.drawWaveform(this.radioCanvas, radioData, '#f00');
}
requestAnimationFrame(() => this.updateVisuals());
}
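// Oscilloscope-style rendering: byte samples (0-255, centred at 128) are scaled to the canvas height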
drawWaveform(canvas, dataArray, color) {
const ctx = canvas.getContext('2d');
// Match the drawing buffer to the CSS size so the waveform is not stretched
canvas.width = canvas.clientWidth;
canvas.height = canvas.clientHeight;
const width = canvas.width;
const height = canvas.height;
ctx.fillStyle = '#111';
ctx.fillRect(0, 0, width, height);
ctx.lineWidth = 2;
ctx.strokeStyle = color;
ctx.beginPath();
const sliceWidth = width / dataArray.length;
let x = 0;
for(let i = 0; i < dataArray.length; i++) {
const v = dataArray[i] / 128.0;
const y = v * height/2;
if(i === 0) ctx.moveTo(x, y);
else ctx.lineTo(x, y);
x += sliceWidth;
}
ctx.lineTo(width, height/2);
ctx.stroke();
}
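// Re-render the memory list, newest entry first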
updateMemories() {
this.memoriesDiv.innerHTML = this.memories
.map(m => `<div>${m}</div>`)
.join('');
}
}
window.onload = () => new NeuralRadioInterface();
</script>
</body>
</html>