# streamlit-demo/helpers/foundation_models.py
import os
from typing import List, Tuple

import openai
import streamlit as st
from langchain.agents import AgentType, initialize_agent, load_tools
from langchain.llms import OpenAI as l_OpenAI

# Both API keys are read from the environment and must be set before this
# module is imported; a missing key raises a KeyError here.
OPENAI_API_KEY = os.environ["OPENAI_API_KEY"]
SERPAPI_API_KEY = os.environ["SERPAPI_API_KEY"]

# Shared OpenAI client used by call_chatgpt below.
openai_client = openai.OpenAI(api_key=OPENAI_API_KEY)

def call_chatgpt(query: str, model: str = "gpt-3.5-turbo") -> str:
"""
Generates a response to a query using the specified language model.
Args:
query (str): The user's query that needs to be processed.
model (str, optional): The language model to be used. Defaults to "gpt-3.5-turbo".
Returns:
str: The generated response to the query.
"""
# Prepare the conversation context with system and user messages.
messages = [
{"role": "system", "content": "You are a helpful assistant."},
{"role": "user", "content": f"Question: {query}."},
]
# Use the OpenAI client to generate a response based on the model and the conversation context.
response = openai_client.chat.completions.create(
model=model,
messages=messages,
)
# Extract the content of the response from the first choice.
content: str = response.choices[0].message.content
# Return the generated content.
return content
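
# Example usage (a minimal sketch, not part of the original app; it assumes
# OPENAI_API_KEY is exported and that the call happens inside the Streamlit UI):
#     answer = call_chatgpt("What is the capital of France?")
#     st.write(answer)
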
def call_langchain(prompt: str) -> str:
    """
    Answers a prompt with a zero-shot ReAct agent that can search the web
    (SerpAPI) and do arithmetic (llm-math), returning the agent's final answer.
    """
    # Deterministic LLM shared by the agent and the llm-math tool.
    llm = l_OpenAI(temperature=0, openai_api_key=OPENAI_API_KEY)
    # Load the SerpAPI search tool and the calculator tool.
    tools = load_tools(
        ["serpapi", "llm-math"], llm=llm, serpapi_api_key=SERPAPI_API_KEY
    )
    # Zero-shot ReAct agent that decides which tool to call at each step.
    agent = initialize_agent(
        tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True
    )
    # Run the agent loop and return its final answer as a string.
    output: str = agent.run(prompt)
    return output
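

# Minimal local smoke test (a sketch added for illustration, not part of the
# original app); it assumes OPENAI_API_KEY and SERPAPI_API_KEY are exported.
if __name__ == "__main__":
    # Plain chat completion, no tools involved.
    print(call_chatgpt("In one sentence, what is Streamlit?"))
    # Agent run that may use web search and the calculator tool.
    print(call_langchain("What is the square root of the year the Eiffel Tower opened?"))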