# pizza_order_bot / app.py
import gradio as gr
import os
import time
import google.generativeai as genai
genai.configure(api_key=os.environ["palm_key"])
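# Note: "palm_key" is assumed to be provided by the hosting environment
# (for example, a Hugging Face Space secret); os.environ[...] raises KeyError if it is missing.
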
# Create the model
# See https://ai.google.dev/api/python/google/generativeai/GenerativeModel
generation_config = {
    "temperature": 1,
    "top_p": 0.95,
    "top_k": 64,
    "max_output_tokens": 8192,
    "response_mime_type": "text/plain",
}

safety_settings = [
    {
        "category": "HARM_CATEGORY_HARASSMENT",
        "threshold": "BLOCK_MEDIUM_AND_ABOVE",
    },
    {
        "category": "HARM_CATEGORY_HATE_SPEECH",
        "threshold": "BLOCK_MEDIUM_AND_ABOVE",
    },
    {
        "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
        "threshold": "BLOCK_MEDIUM_AND_ABOVE",
    },
    {
        "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
        "threshold": "BLOCK_MEDIUM_AND_ABOVE",
    },
]

model = genai.GenerativeModel(
    model_name="gemini-1.5-flash-latest",
    safety_settings=safety_settings,
    generation_config=generation_config,
)

# Start a chat session with an empty history; the ordering context and examples are sent as the first message below.
chat_session = model.start_chat(history=[])
context = "You are an order bot with only 3 types of pizzas: Margherita, Cheesy-Bacon and Vegetarian. Prices are small (9$), medium (11$) and large (13$). You can offer 2 types of drinks coke and mountain dew for $1 each."

# Few-shot examples of the expected dialogue, appended to the priming message.
examples = [
    [
        "Hi, I want to order pizzas.",
        "Hello, we have 3 types of pizzas: Margherita, Cheesy-Bacon and Vegetarian. Prices are small (9$), medium (11$) and large (13$).",
    ],
    [
        "I want a large margherita and a medium cheesy-bacon.",
        "Do you want any drinks? We have coke (3$) or Mountain dew (5$).",
    ],
    [
        "Yes 2 cokes.",
        "Total will be 19$. Please enter your address and phone number",
    ],
    [
        "Paris eiffel tower, 0653145863",
        "Perfect ! Do you need anything else?",
    ],
    [
        "No thanks.",
        "Have a nice day!\nHere is the sum up: \n{\npizza_type_1: \"margherita\",\npizza_type_2: \"cheesy-bacon\",\npizza_size_1: \"L\",\npizza_size_2: \"M\",\ndrink_1: \"coke\",\ndrink_2: \"coke\",\ntotal_price: \"19\",\naddress: \"Paris, Eiffel Tower\",\nphone_number: \"0653145863\"\n}\nDo you want to change anything?",
    ],
    [
        "Yes, i'd like to change margherita size to M.",
        "Have a nice day!\nHere is the sum up: \n{\npizza_type_1: \"margherita\",\npizza_type_2: \"cheesy-bacon\",\npizza_size_1: \"M\",\npizza_size_2: \"M\",\ndrink_1: \"coke\",\ndrink_2: \"coke\",\ntotal_price: \"19\",\naddress: \"Paris, Eiffel Tower\",\nphone_number: \"0653145863\"\n}\nDo you want to change anything?",
    ],
    [
        "No thanks",
        "Have a nice day!",
    ],
]

# Prime the chat session with the ordering context and the few-shot example dialogue.
response = chat_session.send_message(
    context
    + "\nHere are some examples for your interactions: \n"
    + ".\n".join("user: " + pair[0] + ".\nyour response: " + pair[1] for pair in examples)
)
print(response.text)
print(chat_session.history)

with gr.Blocks(theme=gr.themes.Soft()) as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    btn = gr.Button("Submit", variant="primary")
    clear = gr.Button("Clear")

    def user(user_message, history):
        # Append the user's turn with an empty bot slot and clear the textbox.
        history.append([user_message, None])
        return gr.update(value=""), history

    def bot(history):
        try:
            bot_message = chat_session.send_message(history[-1][0])
            # Stream the reply character by character for a typing effect.
            history[-1][1] = ""
            for character in bot_message.text:
                history[-1][1] += character
                time.sleep(0.005)
                yield history
        except Exception as e:
            # On any API or input error, show a retry hint instead of crashing.
            print("Error occurred:", str(e))
            history[-1][1] = "Incorrect input, please retry with a longer sentence in English."
            yield history

    response = msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    response = btn.click(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    response.then(lambda: gr.update(interactive=True), None, [msg], queue=False)
    clear.click(lambda: None, None, chatbot, queue=False)

demo.queue()
demo.launch()
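
# Local usage sketch (assumptions: the gradio and google-generativeai packages are installed,
# and a "palm_key" environment variable holds a valid Gemini API key):
#   pip install gradio google-generativeai
#   palm_key=YOUR_API_KEY python app.py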