Runtime error
Update app.py
app.py
CHANGED
@@ -67,12 +67,6 @@ from langchain_core.runnables import run_in_executor
 from transformers import AutoTokenizer, AutoModelForCausalLM
 import requests
 import os
-if os.path.exists("./llama-3-open-ko-8b-instruct-preview-q5_k_m.gguf"):
-    pass
-else:
-    req=requests.get("https://huggingface.co/peterpeter8585/Llama-3-Open-Ko-8B-Instruct-preview-Q5_K_M-GGUF/resolve/main/llama-3-open-ko-8b-instruct-preview-q5_k_m.gguf")
-    with open("./llama-3-open-ko-8b-instruct-preview-q5_k_m.gguf","wb") as f:
-        f.write(req.content)
 #from transformers import pipeline,AutoModelForCausalLM as M,AutoTokenizer as T
 #m=M.from_pretrained("peterpeter8585/syai4.3")
 #t=T.from_pretrained("peterpeter8585/syai4.3")
@@ -81,7 +75,7 @@ import multiprocessing
 
 from langchain.llms import GPT4All
 
-llm = GPT4All(model=("./
+llm = GPT4All(model=("./model.gguf"))
 from langchain.retrievers import WikipediaRetriever as Wiki
 import gradio as gr
 chatbot = gr.Chatbot(
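Note: the added line points GPT4All at "./model.gguf", so the app now assumes that file already sits next to app.py (the download logic above was removed in the same commit). A minimal usage sketch under that assumption; the prompt and print call are illustrative and not from the Space:

# Sketch, not the Space's code: load the local GGUF through LangChain's
# GPT4All wrapper and run one prompt (requires the gpt4all package).
from langchain.llms import GPT4All

llm = GPT4All(model="./model.gguf")
print(llm.invoke("Explain what a Wikipedia retriever does."))  # older LangChain versions use llm("...")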