# deepseek-r1 / app.py
# Source: Hugging Face Space by Mahadih534 (commit 0e399e5, "Create app.py", 775 bytes).
# NOTE(review): the original scrape left this viewer-page text as bare lines,
# which is not valid Python; it is preserved here as a comment header.
import asyncio
from ollama import AsyncClient
import streamlit as st
async def chat(prompt):
    """Stream a completion for *prompt* from a local Ollama server.

    Opens a streaming chat against the ``smollm:135m`` model and yields
    the text of each chunk as soon as it arrives.
    """
    stream = await AsyncClient().chat(
        model='smollm:135m',
        messages=[{'role': 'user', 'content': prompt}],
        stream=True,
    )
    async for chunk in stream:
        yield chunk['message']['content']
async def get_full_response(prompt):
    """Drain the streamed chat for *prompt* and return the whole reply.

    Collects every chunk yielded by :func:`chat` and concatenates them
    into a single string.
    """
    pieces = []
    async for piece in chat(prompt):
        pieces.append(piece)
    return "".join(pieces)
def main():
    """Streamlit entry point: prompt box -> full Ollama completion.

    Renders a text input and a Generate button; on click, runs the async
    pipeline to completion and writes the reply to the page.
    """
    # Fixed: the title previously read "MySQL with Llama-3", which did not
    # match the smollm:135m model this app actually queries.
    st.title("Chat with smollm:135m (Ollama)")
    prompt = st.text_input("Enter your prompt:")
    if st.button("Generate"):
        if prompt:
            with st.spinner("Generating response..."):
                # asyncio.run drives the async generator chain to completion;
                # the reply is shown all at once, not streamed to the UI.
                full_response = asyncio.run(get_full_response(prompt))
            st.write(full_response)
        else:
            # Fixed: clicking with an empty prompt previously did nothing,
            # giving the user no feedback.
            st.warning("Please enter a prompt first.")
# Run the Streamlit UI only when executed as a script, not on import.
if __name__ == '__main__':
    main()