llama 3 dumbass
app.py CHANGED
@@ -12,9 +12,9 @@ from threading import Thread
 import einops
 
 
-tokenizer = AutoTokenizer.from_pretrained("NousResearch/Meta-LLaMA-70B-Instruct")
+tokenizer = AutoTokenizer.from_pretrained("NousResearch/Meta-LLaMA-3-70B-Instruct")
 quantization_config = BitsAndBytesConfig(load_in_4_bit=True)
-model = AutoModelForCausalLM.from_pretrained("NousResearch/Meta-LLaMA-70B-Instruct", quantization_config, device_map="cuda" ).eval()
+model = AutoModelForCausalLM.from_pretrained("NousResearch/Meta-LLaMA-3-70B-Instruct", quantization_config, device_map="cuda" ).eval()
 
 
 @spaces.GPU
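For context, below is a minimal sketch of how this loading step is normally written against the transformers and bitsandbytes APIs. It is an illustration, not the Space's actual app.py: the repo id is copied from the diff, while the compute dtype and the device_map="auto" placement are assumptions. Two details differ from the committed code: BitsAndBytesConfig expects the keyword load_in_4bit (the diff's load_in_4_bit would likely be ignored as an unknown kwarg), and quantization_config should be passed to from_pretrained by keyword rather than positionally.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

# Repo id taken from the diff above.
model_id = "NousResearch/Meta-LLaMA-3-70B-Instruct"

tokenizer = AutoTokenizer.from_pretrained(model_id)

# 4-bit quantization: load_in_4bit is the supported keyword name;
# the compute dtype here is an assumption, not from the Space's code.
quantization_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_compute_dtype=torch.bfloat16,
)

# quantization_config is passed by keyword; device_map="auto" lets
# accelerate place the 70B weights across the available GPU memory.
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    quantization_config=quantization_config,
    device_map="auto",
).eval()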