ppsingh commited on
Commit
620ccce
1 Parent(s): 488567e

Update app.py

Files changed (1)
  1. app.py +3 -9
app.py CHANGED
@@ -185,17 +185,11 @@ async def chat(query,history,sources,reports,subtype,year):
 
     # llama-3_1 endpoint = https://howaqfw0lpap12sg.us-east-1.aws.endpoints.huggingface.cloud
     # llama-3 endpoint = https://nhe9phsr2zhs0e36.eu-west-1.aws.endpoints.huggingface.cloud
-    callbacks = [StreamingStdOutCallbackHandler()]
+    #callbacks = [StreamingStdOutCallbackHandler()]
     llm_qa = HuggingFaceEndpoint(
         endpoint_url="https://howaqfw0lpap12sg.us-east-1.aws.endpoints.huggingface.cloud",
-        max_new_tokens=1024,
-        top_k=10,
-        top_p=0.95,
-        typical_p=0.95,
-        temperature=0.01,
-        callbacks=callbacks,
-        streaming=True,
-        repetition_penalty=1.03,)
+        task="text-generation",
+        huggingfacehub_api_token=HF_token)
 
     # create rag chain
     chat_model = ChatHuggingFace(llm=llm_qa)
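
Note: after this change the generation parameters (max_new_tokens, temperature, streaming callbacks, etc.) are no longer set client-side, so the inference endpoint's defaults apply. A minimal, self-contained sketch of the resulting setup, assuming the langchain_huggingface package and that HF_token comes from an environment secret (the secret name below is illustrative, not taken from app.py):

import os

from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint

# Assumption: HF_token is defined elsewhere in app.py; the environment
# variable name used here is illustrative only.
HF_token = os.environ.get("HF_TOKEN")

# Minimal endpoint configuration after this commit: no sampling parameters
# or streaming callbacks are passed, so the endpoint's defaults are used.
llm_qa = HuggingFaceEndpoint(
    endpoint_url="https://howaqfw0lpap12sg.us-east-1.aws.endpoints.huggingface.cloud",
    task="text-generation",
    huggingfacehub_api_token=HF_token,
)

# Wrap the endpoint so it accepts chat-style message input, as in the
# "create rag chain" step that follows in app.py.
chat_model = ChatHuggingFace(llm=llm_qa)

# Illustrative single-turn call, not part of the commit.
print(chat_model.invoke("What does the report say about adaptation finance?").content)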