# gradio / test_gradio.py
# karar-shah's picture
# Update test_gradio.py
# b1135e0
# raw history blame
# 650 Bytes
import os

import gradio as gr
from langchain import PromptTemplate, LLMChain
from langchain.llms import GooglePalm
# Read the Google PaLM API key from the environment instead of hard-coding
# a secret in source control — the original committed a live API key, which
# must be considered compromised and revoked.
llm = GooglePalm(temperature=0.1, google_api_key=os.environ["GOOGLE_API_KEY"])

# Chain-of-thought style prompt: the model is nudged to reason step by step
# before answering the user's question.
template = """Question: {question}
Answer: Let's think step by step."""
prompt_open = PromptTemplate(template=template, input_variables=["question"])
open_chain = LLMChain(prompt=prompt_open, llm=llm)
def predict(message, history):
    """Answer one chat turn by running *message* through the LLM chain.

    The ``history`` argument supplied by Gradio is intentionally unused:
    every turn is answered independently of prior conversation.
    """
    return open_chain.run(message)
# BUG FIX: the original called gr.Markdown() at module level, outside any
# gr.Blocks() context, so the heading was created but never rendered in the
# UI. ChatInterface's own title/description parameters display the same text
# where users actually see it.
gr.ChatInterface(
    predict,
    title="Let's think step by step!",
    description="I will break your question into steps :)",
).launch(share=False)