Update README.md
README.md (changed)
@@ -117,7 +117,7 @@ inputs = tokenizer(text, return_tensors="pt")
 outputs = model.generate(**inputs.to("cuda"), max_new_tokens=185,do_sample=False,top_k=None,temperature=1.0,top_p=None)
 
 # Decode the output
-result = tokenizer.decode(outputs[0], skip_special_tokens=True)
+result = tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True)
 print(result)
 ```
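The change makes the README example print only the newly generated text: `model.generate` returns the prompt tokens followed by the completion, so slicing `outputs[0]` from `inputs["input_ids"].shape[-1]` onward before decoding drops the echoed prompt. Below is a minimal sketch of the updated snippet in context, assuming a causal LM checkpoint loaded with `transformers`; the model name and prompt here are placeholders, not taken from the README.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder checkpoint; substitute the model name used in the README.
model_name = "your-org/your-model"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.bfloat16).to("cuda")

# Placeholder prompt.
text = "Explain the difference between a list and a tuple in Python."
inputs = tokenizer(text, return_tensors="pt")

# Greedy decoding, matching the README (sampling disabled).
outputs = model.generate(
    **inputs.to("cuda"),
    max_new_tokens=185,
    do_sample=False,
    top_k=None,
    temperature=1.0,
    top_p=None,
)

# outputs[0] holds [prompt tokens | generated tokens], so skip the first
# inputs["input_ids"].shape[-1] positions to decode only the new text.
result = tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True)
print(result)
```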