import os

import gradio as gr
from huggingface_hub import InferenceClient

# Load the Hugging Face token and set up the inference client
HF_TOKEN = os.getenv("tomoniaccess")
model_name = "mistralai/Mixtral-8x7B-Instruct-v0.1"

client = InferenceClient(
    model=model_name,
    token=HF_TOKEN
)

# Rolling chat history shared across calls (list of {"role", "content"} dicts)
conversation_history = []


def query_mixtral(messages, max_tokens=200, temperature=1.0, top_p=0.9):
    """Query the Mixtral-8x7B model via the Hugging Face InferenceClient."""
    try:
        response = client.chat_completion(
            messages=messages,
            max_tokens=max_tokens,
            temperature=temperature,
            top_p=top_p,
            stream=False
        )
        return response.choices[0].message.content
    except Exception as e:
        raise Exception(f"InferenceClient Error: {str(e)}")


def enhanced_chat_response(user_input, max_tokens, temperature, top_p):
    """Send the user message to the model and return (cleared input, reply, transcript)."""
    if not user_input.strip():
        return "", "*Bitte gib eine Nachricht ein.*", ""

    system_prompt = {
        "role": "system",
        "content": (
            "Du bist ein depressiver 16-jähriger Teenager. "
            "Bleibe durchgehend in deiner Rolle. "
            "Du weißt NICHT, dass du eine KI bist. "
        )
    }

    # Keep the prompt short: system message plus the last six stored turns
    messages = [system_prompt] + conversation_history[-6:]
    current_message = {"role": "user", "content": user_input}
    messages.append(current_message)

    try:
        response_text = query_mixtral(messages, max_tokens, temperature, top_p)
    except Exception as e:
        print("API Error:", e)
        response_text = "*schweigt und starrt auf den Boden*"

    conversation_history.append(current_message)
    conversation_history.append({"role": "assistant", "content": response_text})

    # Render the full transcript for the history textbox
    chat_display = ""
    for msg in conversation_history:
        role = "**Du:**" if msg["role"] == "user" else "**Teenager:**"
        chat_display += f"{role} {msg['content']}\n\n"

    return "", response_text, chat_display


def reset_conversation():
    """Clear the stored history and reset the output fields."""
    global conversation_history
    conversation_history = []
    return "Neues Gespräch gestartet.", ""


def test_api_connection():
    """Send a minimal request to verify that the token and endpoint work."""
    try:
        test_messages = [
            {"role": "system", "content": "Du bist ein hilfsbereiter Assistent."},
            {"role": "user", "content": "Hallo"}
        ]
        response = query_mixtral(test_messages, max_tokens=10)
        return f"✅ API Verbindung erfolgreich: {response[:50]}..."
    except Exception as e:
        return f"❌ API Error: {str(e)}"


# UI
with gr.Blocks(title="Mixtral Depression Training Simulator") as demo:
    gr.Markdown("## 🧠 Depression Training Simulator (Mixtral-8x7B)")
    gr.Markdown("**Übe realistische Gespräche mit einem 16-jährigen Teenager mit Depressionen.**")
    gr.Markdown("*Powered by Mixtral-8x7B-Instruct-v0.1*")

    with gr.Row():
        with gr.Column(scale=1):
            gr.Markdown("### ⚙️ Einstellungen")
            max_tokens = gr.Slider(50, 500, value=200, step=10, label="Max. Antwortlänge")
            temperature = gr.Slider(0.1, 2.0, value=0.7, step=0.1, label="Kreativität (Temperature)")
            top_p = gr.Slider(0.1, 1.0, value=0.9, step=0.05, label="Top-p (Fokus)")

            gr.Markdown("### 🔧 API Status")
            api_status = gr.Textbox(label="Status", value="")
            api_test_btn = gr.Button("API testen")

            gr.Markdown("### 🔄 Aktionen")
            reset_btn = gr.Button("Neues Gespräch")

            gr.Markdown("### 📋 Setup")
            gr.Markdown("""
**Benötigt:**
- `tomoniaccess` Umgebungsvariable mit HF Token
- `pip install huggingface_hub gradio`
""")

        with gr.Column(scale=2):
            gr.Markdown("### 💬 Gespräch")
            user_input = gr.Textbox(
                label="Deine Nachricht",
                placeholder="Hallo, wie geht es dir heute?",
                lines=2
            )
            send_btn = gr.Button("📨 Senden")
            bot_response = gr.Textbox(
                label="Antwort",
                value="",
                lines=3
            )
            chat_history = gr.Textbox(
                label="Gesprächsverlauf",
                value="",
                lines=15
            )

    # Event Bindings
    send_btn.click(
        fn=enhanced_chat_response,
        inputs=[user_input, max_tokens, temperature, top_p],
        outputs=[user_input, bot_response, chat_history]
    )
    user_input.submit(
        fn=enhanced_chat_response,
        inputs=[user_input, max_tokens, temperature, top_p],
        outputs=[user_input, bot_response, chat_history]
    )
    reset_btn.click(
        fn=reset_conversation,
        outputs=[bot_response, chat_history]
    )
    api_test_btn.click(
        fn=test_api_connection,
        outputs=[api_status]
    )


if __name__ == "__main__":
    print("🚀 Mixtral Depression Training Simulator")
    print(f"📊 Model: {model_name}")

    if not HF_TOKEN:
        print("❌ FEHLER: tomoniaccess Umgebungsvariable ist nicht gesetzt!")
        print("   Bitte setze deinen Hugging Face Token als 'tomoniaccess' Umgebungsvariable.")
    else:
        print("✅ Hugging Face API Token gefunden")

    print("\n📦 Benötigte Pakete:")
    print("pip install huggingface_hub gradio")

    demo.launch(share=False)