Update README.md
README.md CHANGED
@@ -1,11 +1,22 @@
 ---
 library_name: transformers
+license: apache-2.0
+language:
+- sv
+- da
+- 'no'
+- is
+- en
 tags:
+- merge
 - 4-bit
 - AWQ
 - text-generation
 - autotrain_compatible
 - endpoints_compatible
+base_model:
+- timpal0l/Llama-3-8B-flashback-v1
+- cognitivecomputations/dolphin-2.9-llama3-8b
 pipeline_tag: text-generation
 inference: false
 quantized_by: Suparious
@@ -15,7 +26,7 @@ quantized_by: Suparious
 - Model creator: [timpal0l](https://huggingface.co/timpal0l)
 - Original model: [dolphin-2.9-llama3-8b-flashback](https://huggingface.co/timpal0l/dolphin-2.9-llama3-8b-flashback)
 
-
+ 
 
 ## How to use
 
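For reference, since the updated metadata declares `library_name: transformers` together with the `4-bit` and `AWQ` tags, a minimal load-and-generate sketch is shown below. The quantized repository id is an assumption (it is not named in this commit), and loading AWQ weights through `transformers` requires the `autoawq` package to be installed.

```python
# Minimal sketch: load the AWQ-quantized model with transformers and generate text.
# NOTE: the repo id below is an assumption -- replace it with the actual
# quantized repository published by Suparious.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "solidrust/dolphin-2.9-llama3-8b-flashback-AWQ"  # assumed repo id

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="auto",  # place the 4-bit AWQ weights on the available GPU(s)
)

prompt = "Write a short greeting in Swedish."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```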