Commit 0299ece
Parent(s): 3a28db6
Initial
app.py CHANGED
@@ -4,8 +4,8 @@ import threading
 
 # Third-party imports
 import gradio as gr
-from transformers import AutoTokenizer, AutoModelForCausalLM, TextIteratorStreamer
 from peft import PeftModel
+from transformers import AutoTokenizer, AutoModelForCausalLM, TextIteratorStreamer
 
 HF_TOKEN = os.getenv("HF_TOKEN")
 
@@ -24,7 +24,6 @@ base_model = AutoModelForCausalLM.from_pretrained(
 
 base_model.resize_token_embeddings(len(tokenizer))
 
-# 4️⃣ Load the PEFT adapter
 model = PeftModel.from_pretrained(
     base_model,
     "bunyaminergen/Qwen2.5-Coder-1.5B-Instruct-Reasoning",
@@ -87,4 +86,4 @@ demo = gr.ChatInterface(
 )
 
 if __name__ == "__main__":
-    demo.launch(
+    demo.launch()
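
For context, here is a minimal, self-contained sketch of the load-and-serve pattern app.py appears to follow after this commit. Only the imports, the adapter repo (bunyaminergen/Qwen2.5-Coder-1.5B-Instruct-Reasoning), HF_TOKEN, resize_token_embeddings, PeftModel.from_pretrained, gr.ChatInterface, and demo.launch() come from the diff; the base checkpoint ID, the respond() helper, and the generation settings are illustrative assumptions, not the file's actual code.

# Minimal sketch (assumptions noted above): base checkpoint ID, respond(), and
# generation settings are illustrative; the adapter repo and imports come from the diff.
import os
import threading

import gradio as gr
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

HF_TOKEN = os.getenv("HF_TOKEN")

BASE_ID = "Qwen/Qwen2.5-Coder-1.5B-Instruct"  # assumed base checkpoint; the diff only names the adapter
ADAPTER_ID = "bunyaminergen/Qwen2.5-Coder-1.5B-Instruct-Reasoning"

tokenizer = AutoTokenizer.from_pretrained(BASE_ID, token=HF_TOKEN)
base_model = AutoModelForCausalLM.from_pretrained(BASE_ID, token=HF_TOKEN)
base_model.resize_token_embeddings(len(tokenizer))  # keep embeddings in sync with the tokenizer

# Attach the PEFT adapter on top of the resized base model.
model = PeftModel.from_pretrained(base_model, ADAPTER_ID)


def respond(message, history):
    """Stream the model's reply back to the chat UI as it is generated."""
    inputs = tokenizer(message, return_tensors="pt")  # simplified; a chat template would normally be applied
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    thread = threading.Thread(
        target=model.generate,
        kwargs=dict(**inputs, streamer=streamer, max_new_tokens=256),
    )
    thread.start()
    partial = ""
    for chunk in streamer:
        partial += chunk
        yield partial


demo = gr.ChatInterface(respond)

if __name__ == "__main__":
    demo.launch()

Running generate() on a background thread and iterating the TextIteratorStreamer is what lets the Gradio chat show partial output instead of waiting for the full completion, which matches the threading and streamer imports visible in the diff.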