<!-- G.E.N.I.EmarkII / index.html — nihalaninihal, commit 76006d4 ("Update index.html") -->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>G.E.N.I.E. - GitHub Enhanced Natural Intelligence Engine</title>
<meta name="description" content="G.E.N.I.E. — a voice-controlled AI assistant for exploring GitHub repositories.">
<script src="https://cdn.tailwindcss.com"></script>
<!-- Preconnect to both font origins so the stylesheet and the font files it references start fetching earlier -->
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;600;700&display=swap" rel="stylesheet">
<style>
/* Base styles */
body {
font-family: 'Inter', sans-serif;
background: linear-gradient(135deg, #0f0a1f 0%, #1a102f 50%, #2c1c4a 100%);
color: #e0e0e0;
overflow: hidden; /* Prevent body scroll */
}
/* Custom Scrollbar */
.custom-scrollbar::-webkit-scrollbar { width: 8px; }
.custom-scrollbar::-webkit-scrollbar-track { background: rgba(26, 32, 58, 0.3); border-radius: 10px; }
.custom-scrollbar::-webkit-scrollbar-thumb { background-color: #8a2be2; border-radius: 10px; border: 2px solid transparent; background-clip: content-box; }
.custom-scrollbar::-webkit-scrollbar-thumb:hover { background-color: #9932cc; }
/* Glassmorphism effect */
.glass-panel {
background: rgba(26, 32, 58, 0.6);
backdrop-filter: blur(10px);
-webkit-backdrop-filter: blur(10px);
border: 1px solid rgba(138, 43, 226, 0.3); /* Purple border */
border-radius: 1rem;
box-shadow: 0 4px 30px rgba(0, 0, 0, 0.2);
}
/* Input field styling */
.futuristic-input, .futuristic-select {
background-color: rgba(15, 10, 31, 0.7);
border: 1px solid rgba(138, 43, 226, 0.5); /* Slightly stronger purple border */
color: #e0e0e0;
border-radius: 0.5rem;
padding: 0.75rem 1rem;
transition: border-color 0.3s, box-shadow 0.3s;
width: 100%; /* Ensure inputs take full width */
box-sizing: border-box; /* Include padding and border in element's total width and height */
}
.futuristic-input:focus, .futuristic-select:focus {
outline: none;
border-color: #ffd700; /* Gold focus */
box-shadow: 0 0 10px rgba(255, 215, 0, 0.25);
}
.futuristic-input::placeholder { color: #718096; }
.futuristic-select { appearance: none; /* Remove default arrow */ background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' fill='none' viewBox='0 0 24 24' stroke-width='1.5' stroke='%23e0e0e0' class='w-6 h-6'%3E%3Cpath stroke-linecap='round' stroke-linejoin='round' d='m19.5 8.25-7.5 7.5-7.5-7.5' /%3E%3C/svg%3E%0A"); background-repeat: no-repeat; background-position: right 0.75rem center; background-size: 1.25em; padding-right: 2.5rem; /* Space for arrow */ }
/* Button styling */
.futuristic-button {
background: linear-gradient(90deg, #0077ff, #00aaff);
color: white;
font-weight: 600;
padding: 0.75rem 1.5rem;
border-radius: 0.5rem;
border: none;
cursor: pointer;
transition: all 0.3s ease;
box-shadow: 0 4px 15px rgba(0, 127, 255, 0.3);
white-space: nowrap; /* Prevent text wrapping */
}
.futuristic-button:hover {
box-shadow: 0 6px 20px rgba(0, 170, 255, 0.5);
transform: translateY(-2px);
background: linear-gradient(90deg, #00aaff, #00ccff);
}
.futuristic-button:active {
transform: translateY(0);
box-shadow: 0 2px 10px rgba(0, 127, 255, 0.2);
}
.futuristic-button:disabled {
background: #555;
cursor: not-allowed;
box-shadow: none;
transform: none;
opacity: 0.7;
}
/* Mic button specific style */
.mic-button {
background: #1f3a6e;
color: #90ee90; /* Light green mic */
padding: 0.75rem;
width: 50px; /* Fixed width */
height: 50px; /* Fixed height */
display: flex;
align-items: center;
justify-content: center;
font-size: 1.25rem;
box-shadow: 0 2px 10px rgba(0, 0, 0, 0.3);
flex-shrink: 0; /* Prevent shrinking */
}
.mic-button:hover {
background: #2c5282;
box-shadow: 0 4px 15px rgba(144, 238, 144, 0.4);
transform: translateY(-1px);
color: #adffad;
}
.mic-button.listening {
background: linear-gradient(90deg, #3cb371, #5fbf5f); /* Green gradient */
color: white;
box-shadow: 0 4px 15px rgba(60, 179, 113, 0.5);
}
/* Interrupt button specific style */
.interrupt-button {
background: linear-gradient(90deg, #ff6b6b, #ff8e8e); /* Red gradient */
color: white;
font-weight: 600;
padding: 0.5rem 1rem;
border-radius: 0.5rem;
border: none;
cursor: pointer;
transition: all 0.3s ease;
box-shadow: 0 4px 15px rgba(255, 107, 107, 0.4);
opacity: 1;
}
.interrupt-button:hover {
box-shadow: 0 6px 20px rgba(255, 107, 107, 0.6);
transform: translateY(-2px);
background: linear-gradient(90deg, #ff8e8e, #ffa7a7);
}
.interrupt-button:active {
transform: translateY(0);
box-shadow: 0 2px 10px rgba(255, 107, 107, 0.3);
}
/* NOTE(review): the page also loads the Tailwind CDN, whose `.hidden` utility
   sets `display: none`. Since this rule never sets `display`, Tailwind's
   display:none wins and the opacity/scale fade below never animates — the
   button just pops in/out. Consider renaming the class (e.g. `.is-hidden`)
   in markup + JS if the fade is wanted. */
.interrupt-button.hidden {
opacity: 0;
pointer-events: none;
transform: scale(0.8);
transition: opacity 0.3s ease, transform 0.3s ease;
}
/* Chat bubble styling */
.chat-bubble {
padding: 0.75rem 1rem;
border-radius: 0.75rem;
max-width: 85%; /* Slightly wider max width */
word-wrap: break-word;
box-shadow: 0 2px 5px rgba(0, 0, 0, 0.2);
}
/* User bubble */
.user-bubble {
background-color: #0055aa; /* Kept blue */
margin-left: auto;
border-bottom-right-radius: 0.25rem;
color: #f0f0f0;
}
/* Bot bubble */
.bot-bubble {
background-color: #4b0082; /* Dark purple */
margin-right: auto;
border-bottom-left-radius: 0.25rem;
color: #f0f0f0;
}
.bot-bubble.thinking {
font-style: italic;
color: #a0aec0;
display: flex;
align-items: center;
background-color: #2d1f4a; /* Lighter purple for thinking */
}
.bot-bubble.interrupted {
font-style: italic;
color: #f6ad55; /* Orange tone */
background-color: #5c3d1c; /* Dark orange/brown */
}
/* Dot animation for thinking */
.dot-flashing {
position: relative;
width: 5px; height: 5px; border-radius: 5px;
background-color: #a0aec0; color: #a0aec0;
animation: dotFlashing 1s infinite linear alternate;
animation-delay: .5s;
margin-left: 8px; /* Increased spacing */
}
.dot-flashing::before, .dot-flashing::after {
content: ''; display: inline-block; position: absolute; top: 0;
width: 5px; height: 5px; border-radius: 5px;
background-color: #a0aec0; color: #a0aec0;
}
.dot-flashing::before { left: -10px; animation: dotFlashing 1s infinite alternate; animation-delay: 0s; }
.dot-flashing::after { left: 10px; animation: dotFlashing 1s infinite alternate; animation-delay: 1s; }
@keyframes dotFlashing { 0% { background-color: #a0aec0; } 50%, 100% { background-color: rgba(160, 174, 192, 0.2); } }
/* Genie Orb Styling */
.genie-orb {
width: 100px; height: 100px;
border-radius: 50%;
background: radial-gradient(circle, rgba(138, 43, 226, 0.7) 0%, rgba(75, 0, 130, 0.9) 70%);
box-shadow: 0 0 25px rgba(138, 43, 226, 0.6), inset 0 0 15px rgba(255, 255, 255, 0.2);
position: relative;
transition: all 0.5s ease;
border: 2px solid rgba(255, 215, 0, 0.3); /* Subtle gold border */
}
.genie-orb::before { /* Inner glow */
content: ''; position: absolute;
top: 10%; left: 10%; width: 80%; height: 80%;
border-radius: 50%;
background: radial-gradient(circle, rgba(255, 255, 255, 0.15) 0%, rgba(255, 255, 255, 0) 70%);
}
.genie-orb.listening {
box-shadow: 0 0 35px rgba(60, 179, 113, 0.8), inset 0 0 20px rgba(144, 238, 144, 0.4);
background: radial-gradient(circle, rgba(60, 179, 113, 0.8) 0%, rgba(46, 139, 87, 1) 70%);
border-color: rgba(144, 238, 144, 0.5);
}
.genie-orb.active {
box-shadow: 0 0 35px rgba(0, 170, 255, 0.8), inset 0 0 20px rgba(0, 204, 255, 0.4);
background: radial-gradient(circle, rgba(0, 170, 255, 0.8) 0%, rgba(0, 127, 255, 1) 70%);
border-color: rgba(0, 204, 255, 0.5);
animation: pulse 1.5s infinite ease-in-out;
}
@keyframes pulse {
0% { transform: scale(1); }
50% { transform: scale(1.05); }
100% { transform: scale(1); }
}
/* Audio visualizer */
.box-container {
display: flex;
justify-content: space-between;
height: 64px;
width: 100%;
margin-top: 1rem;
}
.box {
height: 100%;
width: 8px;
background: var(--color-accent, #6366f1);
border-radius: 8px;
transition: transform 0.05s ease;
}
/* Responsive Design */
/* Stack columns on smaller screens */
.main-container {
display: flex;
flex-direction: column; /* Default: Stack vertically */
height: 100vh; /* Full viewport height */
}
.chat-section {
flex-grow: 1; /* Takes available space */
display: flex;
flex-direction: column;
padding: 1rem; /* Padding for mobile */
overflow: hidden; /* Prevent overflow */
}
.status-section {
padding: 1rem; /* Padding for mobile */
height: auto; /* Adjust height automatically */
flex-shrink: 0; /* Prevent shrinking */
background: linear-gradient(to bottom, #1a102f, #0f0a1f); /* Gradient for status section */
}
/* Apply row layout on medium screens and up */
@media (min-width: 768px) { /* md breakpoint */
.main-container {
flex-direction: row; /* Side-by-side layout */
}
.chat-section {
width: 66.666667%; /* 2/3 width */
padding: 1.5rem; /* Larger padding */
height: 100vh; /* Full height */
}
.status-section {
width: 33.333333%; /* 1/3 width */
padding: 1.5rem; /* Larger padding */
height: 100vh; /* Full height */
display: flex;
flex-direction: column;
align-items: center;
justify-content: center; /* Center vertically */
}
.status-content {
width: 100%;
max-width: 400px; /* Max width for status content */
display: flex;
flex-direction: column;
align-items: center;
gap: 1.5rem; /* Space between items */
}
.chat-input-area {
display: flex; /* Keep input and button side-by-side */
align-items: center;
gap: 0.75rem; /* Space between mic, input, send */
}
}
/* Ensure chat history scrolls within its container */
#chatHistory {
flex-grow: 1; /* Takes up remaining space in chat-section */
overflow-y: auto; /* Enable vertical scroll */
}
/* Adjust input area layout for smaller screens */
/* NOTE(review): `.chat-input-area` is declared twice (also inside the md media
   query above). This later, unconditional rule adds wrapping and a smaller gap
   at ALL widths; the earlier rule's align-items/larger gap still apply at md+. */
.chat-input-area {
display: flex;
flex-wrap: wrap; /* Allow wrapping */
gap: 0.5rem; /* Space between elements */
}
#userQuery {
flex-grow: 1; /* Take available space */
min-width: 150px; /* Minimum width before wrapping */
}
.preferences-grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(150px, 1fr)); /* Responsive grid */
gap: 0.75rem; /* Space between grid items */
}
/* Toast notification */
.toast {
position: fixed;
top: 20px;
left: 50%;
transform: translateX(-50%);
padding: 16px 24px;
border-radius: 4px;
font-size: 14px;
z-index: 1000;
display: none;
box-shadow: 0 2px 5px rgba(0, 0, 0, 0.2);
}
.toast.error {
background-color: #f44336;
color: white;
}
.toast.warning {
background-color: #ffd700;
color: black;
}
.toast.info {
background-color: #2196F3;
color: white;
}
</style>
</head>
<body class="text-sm md:text-base">
<!-- Toast notification element. role="status"/aria-live make it a live region
     (present in the DOM before showToast() injects text) so screen readers announce it. -->
<div id="toast" class="toast" role="status" aria-live="polite"></div>
<div class="main-container">
<div class="chat-section">
<div class="text-center mb-3 md:mb-4">
<h1 class="text-2xl md:text-3xl font-bold text-purple-300">G.E.N.I.E.</h1>
<p class="text-purple-100 text-xs md:text-sm">
GitHub Enhanced Natural Intelligence Engine <br class="md:hidden">
<span class="hidden md:inline"> - </span>
A voice-controlled AI that "grants your GitHub wishes."
</p>
</div>
<div class="glass-panel p-3 md:p-4 space-y-3 mb-3 md:mb-4">
<!-- aria-label added to each control: placeholder text alone is not a reliable accessible name -->
<input type="url" id="repoUrl" placeholder="GitHub Repository URL (e.g., https://github.com/owner/repo)" aria-label="GitHub repository URL" class="futuristic-input">
<div class="preferences-grid">
<input type="password" id="githubToken" placeholder="GitHub Token (Optional)" aria-label="GitHub token (optional)" title="Enter a GitHub PAT for potentially accessing private repos (stored locally, use with caution)." class="futuristic-input">
<select id="userType" class="futuristic-select" aria-label="Your role" title="Select your role for tailored responses.">
<option value="coder">Role: Coder</option>
<option value="manager">Role: Manager</option>
<option value="researcher">Role: Researcher</option>
<option value="student">Role: Student</option>
</select>
<select id="responseDetail" class="futuristic-select" aria-label="Response detail level" title="Select the desired level of detail for G.E.N.I.E.'s responses.">
<option value="concise">Detail: Concise</option>
<option value="normal" selected>Detail: Normal</option>
<option value="detailed">Detail: Detailed</option>
</select>
</div>
<div class="chat-input-area">
<!-- type="button" on all buttons avoids implicit submit semantics;
     aria-label names the emoji-only mic button for assistive tech -->
<button type="button" id="micButton" title="Toggle Listening Mode / Interrupt" aria-label="Toggle listening mode or interrupt" class="futuristic-button mic-button">🎙️</button>
<input type="text" id="userQuery" placeholder="Make a wish about the repository..." aria-label="Your question about the repository" class="futuristic-input flex-grow">
<button type="button" id="sendButton" class="futuristic-button">Ask G.E.N.I.E.</button>
</div>
</div>
<!-- role="log" + aria-live announce bubbles appended by addMessageToChat() -->
<div id="chatHistory" role="log" aria-live="polite" class="flex-grow glass-panel p-3 md:p-4 space-y-3 overflow-y-auto custom-scrollbar">
<div class="chat-bubble bot-bubble">
Greetings! I am G.E.N.I.E. Provide a repo URL, set preferences (optional), and make your wish (ask a question). Click 🎙️ or start typing to interrupt me.
</div>
</div>
</div>
<div class="status-section">
<div class="status-content"> <!-- Wrapper for centering content -->
<h2 id="agentStatusTitle" class="text-lg md:text-xl font-semibold text-purple-400 text-center">G.E.N.I.E. Status</h2>
<!-- Orb and visualizer are purely decorative: hidden from assistive tech -->
<div id="genieOrb" class="genie-orb" aria-hidden="true"></div>
<button type="button" id="interruptButton" class="interrupt-button hidden">Interrupt G.E.N.I.E.</button>
<!-- Audio visualizer -->
<div class="glass-panel p-3 md:p-4 w-full">
<div class="box-container" id="audioVisualizer" aria-hidden="true">
<!-- Bars will be added dynamically -->
</div>
</div>
<div class="w-full glass-panel p-3 md:p-4 h-32 md:h-40 overflow-y-auto custom-scrollbar">
<h3 class="text-base md:text-lg font-medium text-purple-300 mb-2 border-b border-purple-700 pb-1">G.E.N.I.E. Output:</h3>
<p id="genieOutput" class="text-xs md:text-sm whitespace-pre-wrap">Awaiting your command...</p>
</div>
</div>
</div>
</div>
<script>
// --- DOM Elements ---
const repoUrlInput = document.getElementById('repoUrl');
const githubTokenInput = document.getElementById('githubToken');
const userTypeSelect = document.getElementById('userType');
const responseDetailSelect = document.getElementById('responseDetail');
const userQueryInput = document.getElementById('userQuery');
const sendButton = document.getElementById('sendButton');
const micButton = document.getElementById('micButton');
const interruptButton = document.getElementById('interruptButton');
const chatHistory = document.getElementById('chatHistory');
const genieOrb = document.getElementById('genieOrb');
const genieOutput = document.getElementById('genieOutput');
const agentStatusTitle = document.getElementById('agentStatusTitle');
const toast = document.getElementById('toast');
const audioVisualizer = document.getElementById('audioVisualizer');
const audioOutput = new Audio();
// --- State ---
let isListening = false;
let isBotActive = false;
let botResponseTimeoutId = null;
let thinkingMessageElement = null;
let webSocket = null;
let audioContext = null;
let audioAnalyser = null;
let microphoneStream = null;
let microphoneProcessor = null;
let audioSequence = 0;
let isConnected = false;
let isGeminiResponding = false;
// --- WebSocket and Audio Setup ---
// Open (or reopen) the WebSocket to the serving host's /ws endpoint and
// install open/message/close/error handlers. Reconnects automatically with
// a 3s backoff on close.
function setupWebSocket() {
  const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
  const wsUrl = `${protocol}//${window.location.host}/ws`;
  webSocket = new WebSocket(wsUrl);
  webSocket.onopen = () => {
    console.log("WebSocket connection established");
    isConnected = true;
    showToast("Connected to G.E.N.I.E. server", "info");
    // Push current preferences as soon as the channel is up.
    sendPreferences();
  };
  webSocket.onmessage = (event) => {
    // Fix: parse defensively — a single malformed frame must not throw
    // inside the handler and silently break all later message processing.
    let message;
    try {
      message = JSON.parse(event.data);
    } catch (err) {
      console.error("Ignoring malformed WebSocket message:", err);
      return;
    }
    if (message.type === "audio") {
      // Base64 audio chunk from the server: play it and mark the bot active.
      playAudioFromServer(message.payload);
      if (!isGeminiResponding) {
        isGeminiResponding = true;
        setBotActiveState(true, false); // Active but not thinking
      }
    } else if (message.type === "text") {
      // Text reply: append to the chat and mirror into the status panel.
      addMessageToChat(message.content, 'bot');
      updateGenieText(message.content);
      if (message.turn_complete) {
        // Server signalled end of turn — release the UI.
        isGeminiResponding = false;
        setBotActiveState(false);
      }
    } else if (message.type === "status") {
      console.log("Status update:", message.status, message.message);
      if (message.status === "interrupted") {
        isGeminiResponding = false;
        setBotActiveState(false);
        // Drop the "thinking" placeholder, if any, then show why.
        if (thinkingMessageElement) {
          thinkingMessageElement.remove();
          thinkingMessageElement = null;
        }
        addMessageToChat(message.message, 'interrupted');
      }
    }
  };
  webSocket.onclose = () => {
    console.log("WebSocket connection closed");
    isConnected = false;
    // Retry after a delay unless another attempt already reconnected.
    setTimeout(() => {
      if (!isConnected) {
        console.log("Attempting to reconnect...");
        setupWebSocket();
      }
    }, 3000);
  };
  webSocket.onerror = (error) => {
    console.error("WebSocket error:", error);
    showToast("Connection error. Please try again later.", "error");
  };
}
// Setup audio visualizer
// Rebuild the visualizer panel with a fixed number of vertical bars.
function setupAudioVisualizer() {
  const BAR_COUNT = 32;
  audioVisualizer.innerHTML = ''; // drop any bars from a previous call
  for (let barIndex = 0; barIndex < BAR_COUNT; barIndex++) {
    const barElement = document.createElement('div');
    barElement.className = 'box';
    audioVisualizer.appendChild(barElement);
  }
}
// Initialize audio context for visualization and recording
// Creates the shared AudioContext; returns true on success, false otherwise.
async function setupAudioContext() {
  const AudioContextCtor = window.AudioContext || window.webkitAudioContext;
  try {
    audioContext = new AudioContextCtor();
  } catch (e) {
    console.error("Error creating audio context:", e);
    showToast("Could not initialize audio. Please check your browser permissions.", "error");
    return false;
  }
  return true;
}
// Request microphone access and set up stream
// Acquires the microphone via getUserMedia, wires it to an AnalyserNode for
// the visualizer and to a ScriptProcessorNode that streams PCM chunks to the
// server while listening mode is on. Returns true on success, false on any
// permission/context failure.
async function setupMicrophone() {
try {
if (!audioContext) {
const success = await setupAudioContext();
if (!success) return false;
}
// Get user media
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
microphoneStream = stream;
// Create analyzer for input visualization
const source = audioContext.createMediaStreamSource(stream);
audioAnalyser = audioContext.createAnalyser();
audioAnalyser.fftSize = 256;
source.connect(audioAnalyser);
// Create processor for sending audio
// NOTE(review): createScriptProcessor is deprecated in favor of AudioWorklet;
// kept as-is since replacing it changes the whole capture pipeline.
microphoneProcessor = audioContext.createScriptProcessor(4096, 1, 1);
source.connect(microphoneProcessor);
// Presumably connected to destination to keep the node processing (the
// processor writes no output, so this should not be audible) — verify.
microphoneProcessor.connect(audioContext.destination);
microphoneProcessor.onaudioprocess = (e) => {
// Only forward audio while the user is in listening mode and connected.
if (isListening && isConnected) {
const audioData = e.inputBuffer.getChannelData(0);
sendAudioToServer(audioData);
}
};
// Start visualizing input
visualizeAudio();
return true;
} catch (e) {
console.error("Error accessing microphone:", e);
showToast("Could not access your microphone. Please check your browser permissions.", "error");
return false;
}
}
// Visualize audio input/output
// Drives the visualizer bars from the analyser's frequency data on a
// requestAnimationFrame loop.
function visualizeAudio() {
  if (!audioAnalyser) return;
  // Fix: this function is called from both setupMicrophone() and
  // playAudioFromServer(); guard so a second call cannot start a duplicate
  // rAF loop that repaints the same bars every frame.
  if (visualizeAudio._running) return;
  visualizeAudio._running = true;
  const bufferLength = audioAnalyser.frequencyBinCount;
  const dataArray = new Uint8Array(bufferLength);
  function draw() {
    if (!audioAnalyser) {
      // Analyser torn down — stop this loop and allow a later restart.
      visualizeAudio._running = false;
      return;
    }
    audioAnalyser.getByteFrequencyData(dataArray);
    const bars = audioVisualizer.querySelectorAll('.box');
    // Map consecutive slices of the spectrum onto the bars.
    let barIndex = 0;
    const barCount = bars.length;
    const step = Math.floor(bufferLength / barCount) || 1;
    for (let i = 0; i < bufferLength; i += step) {
      if (barIndex >= barCount) break;
      // Average this slice of the frequency range.
      let sum = 0;
      for (let j = 0; j < step && i + j < bufferLength; j++) {
        sum += dataArray[i + j];
      }
      const average = sum / step;
      // Scale bar height (0.1 minimum so bars never vanish entirely).
      const barHeight = Math.max(0.1, average / 255);
      bars[barIndex].style.transform = `scaleY(${barHeight})`;
      barIndex++;
    }
    // Keep animating
    requestAnimationFrame(draw);
  }
  draw();
}
// Send audio data to server
// Encodes a Float32Array of samples in [-1, 1] as little-endian 16-bit PCM,
// base64s it, and sends it with a monotonically increasing sequence number.
function sendAudioToServer(audioData) {
  if (!webSocket || webSocket.readyState !== WebSocket.OPEN) return;
  // Fix: write samples straight into the DataView — the original built an
  // intermediate Int16Array and then copied it element-by-element.
  const buffer = new ArrayBuffer(audioData.length * 2);
  const view = new DataView(buffer);
  for (let i = 0; i < audioData.length; i++) {
    const sample = Math.max(-32768, Math.min(32767, audioData[i] * 32768));
    view.setInt16(i * 2, sample, true); // little-endian
  }
  // Fix: base64-encode in chunks — spreading a large byte array into
  // String.fromCharCode(...) can exceed the engine's argument limit and
  // throw a RangeError.
  const bytes = new Uint8Array(buffer);
  let binary = '';
  const CHUNK = 0x8000;
  for (let i = 0; i < bytes.length; i += CHUNK) {
    binary += String.fromCharCode.apply(null, bytes.subarray(i, i + CHUNK));
  }
  const base64Audio = btoa(binary);
  // Send with sequence number for ordering
  webSocket.send(JSON.stringify({
    type: "audio",
    payload: base64Audio,
    seq: audioSequence++
  }));
}
// Play audio received from server
// Decodes a base64 audio chunk into a Blob, plays it via a throwaway <audio>
// element, and (once) wires playback into the visualizer analyser.
function playAudioFromServer(base64Audio) {
const audioData = atob(base64Audio);
const buffer = new ArrayBuffer(audioData.length);
const view = new Uint8Array(buffer);
for (let i = 0; i < audioData.length; i++) {
view[i] = audioData.charCodeAt(i);
}
// Create blob and play
// NOTE(review): the bytes are wrapped as 'audio/wav' verbatim — this assumes
// the server sends complete WAV data (RIFF header included), not raw PCM;
// raw PCM would fail to decode here. Confirm against the server.
const blob = new Blob([buffer], { type: 'audio/wav' });
const url = URL.createObjectURL(blob);
// Queue audio
const audio = new Audio(url);
audio.onended = () => URL.revokeObjectURL(url); // Clean up
audio.play().catch(e => console.error("Error playing audio:", e));
// If we have AudioContext, create analyzer for visualization
// Only the FIRST playback element ever gets wired to the analyser (guarded by
// !audioAnalyser); later chunks play without feeding the visualizer.
if (audioContext && !audioAnalyser) {
try {
const audioSource = audioContext.createMediaElementSource(audio);
audioAnalyser = audioContext.createAnalyser();
audioAnalyser.fftSize = 256;
audioSource.connect(audioAnalyser);
audioSource.connect(audioContext.destination);
// Start visualization if not already running
visualizeAudio();
} catch (e) {
console.warn("Could not create audio analyzer for playback:", e);
}
}
}
// Send preferences to server
// Pushes the current repo URL, token, and preference selections as an
// "init" message; silently no-ops when the socket is not open.
function sendPreferences() {
  if (!webSocket || webSocket.readyState !== WebSocket.OPEN) return;
  const payload = {
    type: "init",
    repo_url: repoUrlInput.value.trim(),
    github_token: githubTokenInput.value.trim(),
    user_type: userTypeSelect.value,
    response_detail: responseDetailSelect.value
  };
  webSocket.send(JSON.stringify(payload));
}
// --- Event Handlers ---
// Mic button: acts as an interrupt while the bot is busy, otherwise toggles
// listening mode and (re)builds the microphone capture pipeline.
async function handleMicButtonClick() {
  // If the bot is actively processing, the mic button acts as an interrupt
  if (isBotActive) {
    handleInterrupt();
    return;
  }
  // Otherwise, toggle listening mode
  const wasListening = isListening;
  await toggleListeningMode();
  // If we just entered listening mode with no response pending, force a
  // fresh microphone stream so the capture pipeline is rebuilt.
  if (!wasListening && isListening && !thinkingMessageElement) {
    // Fix: release the previous stream's tracks before discarding the
    // reference — otherwise the microphone stays captured (and the browser's
    // recording indicator stays on) with no way to stop it.
    if (microphoneStream) {
      microphoneStream.getTracks().forEach(track => track.stop());
    }
    microphoneStream = null; // Force new stream setup
    const success = await setupMicrophone();
    if (!success) {
      toggleListeningMode(); // Turn off listening if mic setup failed
    }
  }
}
// Toggle voice-listening mode on/off, managing connection/mic setup and the
// status title, orb, and tooltip UI. Refuses to START listening while the
// bot is busy; turning listening OFF is always allowed.
async function toggleListeningMode() {
if (isBotActive && !isListening) return; // Don't start listening if bot is busy (unless already listening)
isListening = !isListening;
micButton.classList.toggle('listening', isListening);
genieOrb.classList.toggle('listening', isListening);
if (isListening) {
// Ensure WebSocket is connected
if (!isConnected) {
setupWebSocket();
}
// Setup microphone if not already done
if (!microphoneStream) {
const success = await setupMicrophone();
if (!success) {
// Mic setup failed: roll back the listening state and its visuals.
isListening = false;
micButton.classList.remove('listening');
genieOrb.classList.remove('listening');
return;
}
}
setBotActiveState(false); // Ensure bot is not marked active
agentStatusTitle.textContent = "G.E.N.I.E. Status: Listening...";
updateGenieText("Listening... Speak your wish or type a question.");
micButton.title = "Stop Listening / Interrupt"; // Update tooltip
} else {
// Stop sending audio
// (the onaudioprocess callback checks isListening, so flipping the flag
// above is what actually stops outbound audio)
micButton.title = "Toggle Listening Mode / Interrupt"; // Reset tooltip
// Only reset status if the bot isn't currently active
if (!isBotActive) {
agentStatusTitle.textContent = "G.E.N.I.E. Status";
// Only reset output if it was showing the listening message
if (genieOutput.textContent.startsWith("Listening...")) {
updateGenieText("Awaiting your command...");
}
}
}
}
// Treat typing during a pending response as an interrupt request.
function handleTypingInterrupt() {
  const responsePending = isBotActive && botResponseTimeoutId;
  if (responsePending) {
    handleInterrupt();
  }
}
// Validate and dispatch the typed query: exits listening mode, checks the
// query and repo URL, and (re)connects the socket if needed before sending.
function handleSendQuery() {
  // Leaving listening mode when the user submits a typed query.
  if (isListening) {
    toggleListeningMode();
  }
  // One query at a time: ignore submissions while the bot is working.
  if (isBotActive) {
    console.log("G.E.N.I.E. is active, cannot send new query yet.");
    return;
  }
  const query = userQueryInput.value.trim();
  const repoUrl = repoUrlInput.value.trim();
  // Guard clauses for the two required fields.
  if (!query) {
    showToast('Please state your wish (enter a query).', 'warning');
    return;
  }
  if (!isValidHttpUrl(repoUrl)) {
    showToast('Please provide a valid GitHub repository URL (starting with http:// or https://).', 'warning');
    return;
  }
  if (isConnected) {
    sendTextQuery(query);
  } else {
    // Reconnect first, then give the socket a moment to open before sending.
    setupWebSocket();
    setTimeout(() => sendTextQuery(query), 500);
  }
}
// Deliver a typed query over the WebSocket: records it in the transcript,
// shows a "thinking" placeholder, re-syncs preferences, then sends.
function sendTextQuery(query) {
  addMessageToChat(query, 'user');
  userQueryInput.value = ''; // Clear input field
  thinkingMessageElement = addMessageToChat('', 'thinking');
  // Re-send preferences in case any were edited since the last query.
  sendPreferences();
  const socketReady = webSocket && webSocket.readyState === WebSocket.OPEN;
  if (!socketReady) {
    // Connection dropped between validation and send: roll back the UI.
    showToast("Connection to server lost. Please try again.", "error");
    if (thinkingMessageElement) {
      thinkingMessageElement.remove();
      thinkingMessageElement = null;
    }
    setBotActiveState(false);
    return;
  }
  webSocket.send(JSON.stringify({
    type: "text",
    content: query
  }));
}
// Abort the in-flight response: notify the server, clear pending local UI
// state, and return focus to the query input.
function handleInterrupt() {
  const socketOpen = webSocket && webSocket.readyState === WebSocket.OPEN;
  if (socketOpen) {
    webSocket.send(JSON.stringify({
      type: "interrupt"
    }));
  }
  // Cancel any pending local response timeout.
  if (botResponseTimeoutId) {
    clearTimeout(botResponseTimeoutId);
    botResponseTimeoutId = null;
  }
  // Drop the "thinking" placeholder bubble, if present.
  if (thinkingMessageElement) {
    thinkingMessageElement.remove();
    thinkingMessageElement = null;
  }
  // Reset bot state and hand control back to the user.
  isGeminiResponding = false;
  setBotActiveState(false);
  userQueryInput.focus();
}
// Append a chat bubble of the given type ('user' | 'bot' | 'thinking' |
// 'interrupted') to the history, update the related global UI state, scroll
// to the bottom, and return the created element (null for unknown types).
function addMessageToChat(message, type) {
const messageElement = document.createElement('div');
messageElement.classList.add('chat-bubble');
// Stop listening visual cues if a message is added
if (isListening && type !== 'thinking') {
genieOrb.classList.remove('listening');
micButton.classList.remove('listening');
isListening = false; // Ensure state is updated
micButton.title = "Toggle Listening Mode / Interrupt"; // Reset tooltip
}
switch (type) {
case 'user':
messageElement.classList.add('user-bubble');
messageElement.textContent = message;
break;
case 'bot':
messageElement.classList.add('bot-bubble');
messageElement.textContent = message;
updateGenieText(message); // Update status output
// Reset the thinking message
if (thinkingMessageElement) {
thinkingMessageElement.remove();
thinkingMessageElement = null;
}
// After a bot message, ensure we're not in "thinking" mode
setBotActiveState(true, false); // Active but not thinking
break;
case 'thinking':
messageElement.classList.add('bot-bubble', 'thinking');
// innerHTML is safe here: the string is a fixed literal, not user input.
messageElement.innerHTML = `Consulting the digital ether... <div class="dot-flashing"></div>`;
updateGenieText("Processing your wish..."); // Update status output
setBotActiveState(true, true); // Active and thinking
thinkingMessageElement = messageElement; // Store reference to remove later
break;
case 'interrupted':
messageElement.classList.add('bot-bubble', 'interrupted');
messageElement.textContent = message;
break;
default:
console.error("Unknown message type:", type);
return null; // Don't add unknown types
}
chatHistory.appendChild(messageElement);
// Scroll to the bottom smoothly after adding message
setTimeout(() => {
chatHistory.scrollTo({ top: chatHistory.scrollHeight, behavior: 'smooth' });
}, 50);
return messageElement; // Return the created element
}
// Mirror the given text into the status panel's output area.
function updateGenieText(text) {
  genieOutput.textContent = text;
}
// Central UI state switch. When active: orb pulses, interrupt button shows,
// and all form controls are disabled. When inactive: everything is reset.
// isThinking only affects the status-title wording while active.
function setBotActiveState(isActive, isThinking = true) {
isBotActive = isActive;
// Always update these elements
genieOrb.classList.toggle('active', isActive);
interruptButton.classList.toggle('hidden', !isActive);
sendButton.disabled = isActive;
userQueryInput.disabled = isActive;
repoUrlInput.disabled = isActive;
githubTokenInput.disabled = isActive;
userTypeSelect.disabled = isActive;
responseDetailSelect.disabled = isActive;
if (isActive) {
// If activating, ensure listening mode is off
// (toggleListeningMode is async; called fire-and-forget here)
if (isListening) {
toggleListeningMode(); // Turn off listening visuals/state
}
agentStatusTitle.textContent = isThinking ?
"G.E.N.I.E. Status: Fulfilling Wish..." :
"G.E.N.I.E. Status: Active";
micButton.title = "Interrupt G.E.N.I.E."; // Mic becomes interrupt
} else {
// Reset status title only if not currently listening
if (!isListening) {
agentStatusTitle.textContent = "G.E.N.I.E. Status";
updateGenieText("Awaiting your command..."); // Reset output text
}
micButton.title = "Toggle Listening Mode / Interrupt"; // Reset mic title
// Clear any lingering timeout or thinking message reference
if (botResponseTimeoutId) {
clearTimeout(botResponseTimeoutId);
botResponseTimeoutId = null;
}
thinkingMessageElement = null;
}
}
// True when the string parses as an absolute http: or https: URL.
function isValidHttpUrl(string) {
  let parsed;
  try {
    parsed = new URL(string);
  } catch (_) {
    return false; // Invalid URL format
  }
  return parsed.protocol === "http:" || parsed.protocol === "https:";
}
// Display a transient toast banner of the given type; auto-hides after 5s.
function showToast(message, type = 'error') {
  toast.textContent = message;
  toast.className = `toast ${type}`;
  toast.style.display = 'block';
  const HIDE_DELAY_MS = 5000;
  setTimeout(() => {
    toast.style.display = 'none';
  }, HIDE_DELAY_MS);
}
// --- Event Listeners ---
sendButton.addEventListener('click', handleSendQuery);
// Fix: 'keydown' replaces the deprecated 'keypress' event; the isComposing
// guard avoids submitting while an IME composition is confirming with Enter.
// Shift+Enter is still left alone.
userQueryInput.addEventListener('keydown', (event) => {
  if (event.key === 'Enter' && !event.shiftKey && !event.isComposing) {
    event.preventDefault(); // Prevent default newline behavior
    handleSendQuery();
  }
});
// Typing while a response is pending counts as an interrupt.
userQueryInput.addEventListener('input', handleTypingInterrupt);
micButton.addEventListener('click', handleMicButtonClick);
interruptButton.addEventListener('click', handleInterrupt);
// Listen for preference changes to update server
repoUrlInput.addEventListener('change', sendPreferences);
githubTokenInput.addEventListener('change', sendPreferences);
userTypeSelect.addEventListener('change', sendPreferences);
responseDetailSelect.addEventListener('change', sendPreferences);
// --- Initialization ---
// Wire up the connection, build the visualizer, and reset the UI to idle.
function init() {
  setupWebSocket();
  setupAudioVisualizer();
  updateGenieText("Standing by. Please provide a GitHub repo URL, set preferences, and state your wish, or click 🎙️.");
  interruptButton.classList.add('hidden');
  sendButton.disabled = false;
  setBotActiveState(false);
}
// Kick everything off as soon as the script runs.
init();
// Release network and audio resources when the page is being closed.
window.addEventListener('beforeunload', () => {
  if (webSocket) {
    webSocket.close();
  }
  if (microphoneStream) {
    // Stop every captured track so the mic is released.
    microphoneStream.getTracks().forEach(track => track.stop());
  }
  if (microphoneProcessor) {
    microphoneProcessor.disconnect();
  }
  if (audioContext) {
    audioContext.close();
  }
});
</script>
</body>
</html>