Spaces:
Sleeping
Sleeping
sourabhzanwar
committed on
Commit
•
a02d3d8
1
Parent(s):
81c7841
changed max token size to 500
Browse files- utils/haystack.py +2 -1
utils/haystack.py
CHANGED
@@ -100,7 +100,8 @@ def start_haystack_extractive(_document_store: BaseDocumentStore, _retriever: Em
|
|
100 |
def start_haystack_rag(_document_store: BaseDocumentStore, _retriever: EmbeddingRetriever, openai_key):
|
101 |
prompt_node = PromptNode(default_prompt_template="deepset/question-answering",
|
102 |
model_name_or_path=model_configs['GENERATIVE_MODEL'],
|
103 |
-
api_key=openai_key
|
|
|
104 |
pipe = Pipeline()
|
105 |
|
106 |
pipe.add_node(component=_retriever, name="Retriever", inputs=["Query"])
|
|
|
100 |
def start_haystack_rag(_document_store: BaseDocumentStore, _retriever: EmbeddingRetriever, openai_key):
|
101 |
prompt_node = PromptNode(default_prompt_template="deepset/question-answering",
|
102 |
model_name_or_path=model_configs['GENERATIVE_MODEL'],
|
103 |
+
api_key=openai_key,
|
104 |
+
max_length=500)
|
105 |
pipe = Pipeline()
|
106 |
|
107 |
pipe.add_node(component=_retriever, name="Retriever", inputs=["Query"])
|