Updated max token length
Browse files
README.md — CHANGED

```diff
@@ -87,7 +87,7 @@ model = PeftModel.from_pretrained(
 )

 # Wrapper for encoding and pooling operations
-l2v = LLM2Vec(model, tokenizer, pooling_mode="mean", max_length=<value truncated in page extraction>)
+l2v = LLM2Vec(model, tokenizer, pooling_mode="mean", max_length=8124)

 # Encoding queries using instructions
 instruction = (
```