Update utils.py
utils.py
CHANGED
@@ -35,6 +35,8 @@ from langchain.schema import AIMessage, HumanMessage
 from langchain.llms import HuggingFaceHub
 from langchain.llms import HuggingFaceTextGenInference
 from langchain.embeddings import HuggingFaceInstructEmbeddings, HuggingFaceEmbeddings, HuggingFaceBgeEmbeddings, HuggingFaceInferenceAPIEmbeddings
+from langchain.tools import DuckDuckGoSearchRun
+from langchain.retrievers.tavily_search_api import TavilySearchAPIRetriever
 
 from langchain.embeddings.openai import OpenAIEmbeddings
 from langchain.prompts import PromptTemplate
@@ -540,7 +542,7 @@ def wait_on_run(run, thread, client):
 
 ########################################
 # Tavility Search Machine
-def tavily_search(query):
+def tavily_search(tavily_client, query):
     search_result = tavily_client.get_search_context(query, search_depth="advanced", max_tokens=8000)
     return search_result
 
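The two added imports bring in LangChain's DuckDuckGo search tool and its Tavily retriever; neither is actually used in this hunk. A minimal sketch of how they are typically instantiated, assuming the langchain, duckduckgo-search, and tavily-python packages are installed and TAVILY_API_KEY is set in the environment (the variable names below are illustrative, not taken from utils.py):

from langchain.tools import DuckDuckGoSearchRun
from langchain.retrievers.tavily_search_api import TavilySearchAPIRetriever

# Illustrative only: this commit adds the imports but does not create these objects.
ddg_search = DuckDuckGoSearchRun()          # keyword web search, no API key required
snippets = ddg_search.run("LangChain Tavily retriever")

retriever = TavilySearchAPIRetriever(k=5)   # expects TAVILY_API_KEY in the environment (or api_key=...)
docs = retriever.get_relevant_documents("What does Tavily's get_search_context return?")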
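The second hunk changes tavily_search to take the Tavily client as a parameter instead of relying on a module-level tavily_client, so the caller now owns the client. A hedged usage sketch, assuming the tavily-python package and an API key in the environment (this setup code is not part of the commit):

import os
from tavily import TavilyClient

# Hypothetical caller-side setup; tavily_search itself no longer depends on a global client.
client = TavilyClient(api_key=os.environ["TAVILY_API_KEY"])
context = tavily_search(client, "latest LangChain release notes")
print(context)  # search context string, capped at max_tokens=8000 inside tavily_search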