import streamlit as st
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
import torch


@st.cache_resource
def load_model():
    # BlenderBot is an encoder-decoder (seq2seq) model, so it must be loaded with
    # AutoModelForSeq2SeqLM rather than AutoModelForCausalLM.
    # Note: blenderbot-400M-distill is trained on English conversations, so replies
    # to Ukrainian input may be poor; a multilingual model may be a better fit.
    tokenizer = AutoTokenizer.from_pretrained("facebook/blenderbot-400M-distill")
    model = AutoModelForSeq2SeqLM.from_pretrained("facebook/blenderbot-400M-distill")
    return tokenizer, model


st.title("Український Чат-бот")

if "history" not in st.session_state:
    st.session_state.history = []

tokenizer, model = load_model()


def set_enter_pressed():
    # Pressing Enter in the text field marks the message as ready to send.
    st.session_state.enter_pressed = True


# A single input field; the on_change callback lets Enter act like the "Надіслати" button.
user_input = st.text_input("Ви:", key="user_input", on_change=set_enter_pressed)

if st.button("Надіслати") or st.session_state.get("enter_pressed", False):
    st.session_state.enter_pressed = False
    if user_input:
        # Join the previous turns and the new message into one prompt string;
        # truncation keeps it within the model's maximum input length.
        conversation = "\n".join(st.session_state.history + [user_input])
        inputs = tokenizer(conversation, return_tensors="pt", truncation=True)
        with torch.no_grad():
            outputs = model.generate(**inputs, max_length=100)
        response = tokenizer.decode(outputs[0], skip_special_tokens=True)
        st.session_state.history.extend([user_input, response])

# Render the conversation as alternating user/bot turns.
if st.session_state.history:
    for i in range(0, len(st.session_state.history), 2):
        st.write(f"Ви: {st.session_state.history[i]}")
        if i + 1 < len(st.session_state.history):
            st.write(f"Бот: {st.session_state.history[i + 1]}")
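
# Minimal way to run this sketch locally (assuming the file is saved as app.py,
# the filename here is just an example):
#   pip install streamlit transformers torch
#   streamlit run app.py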