Update README.md
README.md
```diff
@@ -16,7 +16,7 @@ tokenizer.decode(
     model.generate(
         tokenizer.encode(
             "你饿吗?", return_tensors="pt", add_special_tokens=True
-        ), 128)[0],
+        ), max_length = 128)[0],
     skip_special_tokens = True
 ).split("\n-----\n")
 
```
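For context on the one-line fix: in recent releases of Hugging Face `transformers`, `generate` no longer reads its second positional argument as `max_length` (that slot is now reserved for a `GenerationConfig`), so the bare `128` has to be passed as an explicit keyword. Below is a minimal, self-contained sketch of the corrected snippet; the checkpoint name `gpt2` is a placeholder assumption, since the hunk does not show which model the README actually loads.

```python
# Sketch of the corrected call from the diff above.
# Assumption: "gpt2" stands in for the repo's real checkpoint,
# which is not visible in this hunk.
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2")

chunks = tokenizer.decode(
    model.generate(
        tokenizer.encode(
            "你饿吗?",  # "Are you hungry?"
            return_tensors="pt",
            add_special_tokens=True,
        ),
        # Keyword, not positional: newer `generate` signatures no longer
        # interpret the second positional argument as max_length.
        max_length=128,
    )[0],  # decode the first (and only) generated sequence
    skip_special_tokens=True,
).split("\n-----\n")

print(chunks)
```

Passing `max_length` by keyword also keeps the call working across `transformers` versions, which is presumably why the README was updated rather than pinning an older release.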