Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
@@ -1,4 +1,5 @@
 import inspect
+from tqdm import tqdm
 from langchain_huggingface import HuggingFacePipeline,ChatHuggingFace
 path_hf=inspect.getfile(HuggingFacePipeline)
 from subprocess import Popen, PIPE as P
@@ -79,7 +80,7 @@ if os.path.exists("./llama-3-open-ko-8b-instruct-preview-q5_k_m.gguf"):
 else:
     req=requests.get("https://huggingface.co/peterpeter8585/Llama-3-Open-Ko-8B-Instruct-preview-Q5_K_M-GGUF/resolve/main/llama-3-open-ko-8b-instruct-preview-q5_k_m.gguf",stream=True)
     with open("./llama-3-open-ko-8b-instruct-preview-q5_k_m.gguf","wb") as f:
-        for i in req.iter_content(100000000000000000000):
+        for i in tqdm(req.iter_content(100000000000000000000)):
             f.write(i)

 #from transformers import pipeline,AutoModelForCausalLM as M,AutoTokenizer as T