Update README.md

--- a/README.md
+++ b/README.md
@@ -60,10 +60,10 @@ The model can be used for text generation via its integrated `generate()` method
 ### Example: Direct Inference
 
 ```python
-from transformers import AutoModelForCausalLM, AutoTokenizer
+from transformers import AutoModelForCausalLM, AutoTokenizer, AutoModel
 
 # Load the model and tokenizer from the hub
-model =
+model = AutoModelForCausalLM.from_pretrained("codewithdark/latent-recurrent-depth-lm")
 tokenizer = AutoTokenizer.from_pretrained("codewithdark/latent-recurrent-depth-lm")
 
 prompt = "In the realm of language modeling"
@@ -87,7 +87,7 @@ print(generated_text)
 from transformers import AutoTokenizer, AutoModelForCausalLM
 
 tokenizer = AutoTokenizer.from_pretrained("codewithdark/latent-recurrent-depth-lm")
-model =
+model = AutoModelForCausalLM.from_pretrained("codewithdark/latent-recurrent-depth-lm")
 
 prompt = "In the realm of language modeling"
 input_ids = tokenizer(prompt, return_tensors="pt").input_ids
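
For reference, the patched example assembled into one runnable snippet: a minimal sketch assuming the standard Hugging Face `generate()` API. The `max_new_tokens` and `do_sample` values and the `skip_special_tokens` flag are illustrative assumptions, not values taken from the README, and if the model ships custom modeling code on the Hub, `from_pretrained` may additionally need `trust_remote_code=True`.

```python
# Minimal end-to-end sketch of the patched "Direct Inference" example.
# Assumptions (not from the README): max_new_tokens/do_sample values and
# skip_special_tokens; trust_remote_code=True may be needed for custom code.
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the model and tokenizer from the hub
model = AutoModelForCausalLM.from_pretrained("codewithdark/latent-recurrent-depth-lm")
tokenizer = AutoTokenizer.from_pretrained("codewithdark/latent-recurrent-depth-lm")

prompt = "In the realm of language modeling"
input_ids = tokenizer(prompt, return_tensors="pt").input_ids

# Generate a continuation and decode it back to text.
output_ids = model.generate(input_ids, max_new_tokens=50, do_sample=True)
generated_text = tokenizer.decode(output_ids[0], skip_special_tokens=True)
print(generated_text)
```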