|
import os |
|
import json |
|
from google.oauth2 import service_account |
|
from cryptography.fernet import Fernet |
|
|
|
from langchain.llms import OpenAI |
|
from langchain.chat_models import ChatOpenAI |
|
from langchain.prompts import PromptTemplate |
|
from langchain.chains import LLMChain |
|
|
|
from vertexai.preview.vision_models import Image |
|
from vertexai.preview.vision_models import ImageQnAModel |
|
import vertexai |
|
# Google Cloud project and region used to initialize Vertex AI below.
PROJECT_ID = "franz-media-1512554302520"
LOCATION = "us-central1"
# Path where the decrypted service-account JSON is written, then read back
# by service_account.Credentials.from_service_account_file().
CRED_PATH = "creds.json"
|
|
|
# Decrypt the committed, Fernet-encrypted service-account blob ("key.json")
# and materialize the plaintext JSON at CRED_PATH for the Google auth library.
#
# BUG FIX: the original opened "key.json" in write mode ("w"), which both
# truncates (destroys) the encrypted key file and makes f.read() raise
# io.UnsupportedOperation. Open it read-binary: Fernet operates on bytes.
with open("key.json", "rb") as f:
    encrypted_data = f.read()

# ENCRYPTION_KEY must hold the urlsafe-base64 Fernet key used at encryption
# time; Fernet.decrypt raises InvalidToken on a mismatched key.
cipher_suite = Fernet(os.environ["ENCRYPTION_KEY"])
decrypted_data = cipher_suite.decrypt(encrypted_data)

with open(CRED_PATH, "wb") as f:
    f.write(decrypted_data)

print("stored")
|
|
|
# Load the just-decrypted service-account credentials and bring up the
# Vertex AI SDK, then fetch the image question-answering model that
# detect_question() uses below.
credentials = service_account.Credentials.from_service_account_file(CRED_PATH)
vertexai.init(project=PROJECT_ID, location=LOCATION,credentials=credentials)
image_qna_model = ImageQnAModel.from_pretrained("imagetext@001")
|
|
|
|
|
# Prompt for the "plant persona": bot() injects the accumulated image-QnA
# status report as {question}, and the model replies with a flirty message.
template = """You are a super smart and charming GPT living inside of a plant, every day you get a text with your status. Your task then is to write a flirty message to your owner.
Status Data:
{question}

Let's think step by step.
Flirty message:
"""

prompt = PromptTemplate(template=template, input_variables=["question"])
# GPT-4 chat model wrapped in a single-prompt chain; consumed by bot()
# via llm_chain.run(status).
llm = ChatOpenAI(model="gpt-4")
llm_chain = LLMChain(prompt=prompt, llm=llm)
|
|
|
|
|
def detect_question(image_path, question):
    """Ask the Vertex image-QnA model *question* about the image stored at
    *image_path* and return the first answer string."""
    img = Image.load_from_file(image_path)
    answers = image_qna_model.ask_question(image=img, question=question)
    return answers[0]
|
|
|
|
|
import gradio as gr |
|
import os |
|
import time |
|
|
|
|
|
|
|
# Initial (empty) conversation shown by the gr.Chatbot component.
local_history = []
# Scratch space written by bot() and change_fn(); never read back in this
# file — presumably a debugging aid.
global_cache = {}
|
|
|
|
|
def add_text(history, text):
    """Gradio callback: append the user's typed message to the history.

    Returns the extended history plus an emptied, disabled Textbox; the
    .then() chain on txt.submit re-enables the box after bot() finishes.

    FIX: dropped the original `global global_history, global_message`
    declaration — neither name is defined or assigned anywhere in the
    module, so the statement was dead code.
    """
    history = history + [(text, None)]
    return history, gr.Textbox(value="", interactive=False)
|
|
|
|
|
def add_file(history, file):
    """Gradio callback: append an uploaded file to the chat history.

    A one-element tuple ``(file.name,)`` is Gradio's convention for a
    media message in the Chatbot component; the bot reply slot is None.
    """
    return history + [((file.name,), None)]
|
|
|
|
|
def bot(history):
    """Gradio generator callback: stream the bot's reply into the last
    history slot.

    If the user's last message is a path to an existing file (an uploaded
    image), run three Vertex image-QnA probes (plant type, soil hydration,
    visible disease), yielding a partial status line after each, then feed
    the combined status through llm_chain for the flirty summary message.
    Otherwise just show a placeholder reply.
    """
    global global_cache
    # history[-1] is [user_msg, bot_msg]; user_msg is a (filename,) tuple
    # for uploads (see add_file) or a plain string for typed text.
    # FIX: original indexed with [-0], which is just a confusing 0.
    last_msg = history[-1][0]
    if isinstance(last_msg, tuple):
        last_msg = last_msg[0]

    # NOTE(review): assumes Gradio delivers history entries as mutable
    # lists (it normalizes tuples on the wire) — confirm for the Gradio
    # version in use.
    history[-1][1] = ""
    global_cache["history"] = history
    global_cache["last_msg"] = last_msg

    if os.path.exists(last_msg):
        history[-1][1] += "Detecting image..."
        yield history
        answer = detect_question(
            last_msg,
            "Your task is to save the main plant, classify what kind of plant it is:",
        )
        history[-1][1] = f"Plant detected: {answer}\n"
        yield history
        answer = detect_question(
            last_msg,
            "Where is orange indicator on the moist level on the soil hydrometer? DRY, MOIST or WET?",
        )
        history[-1][1] += f"Hydration level detected: {answer}\n"
        yield history
        answer = detect_question(
            last_msg,
            "Your task is to save the main plant, does it have a visible disease:",
        )
        history[-1][1] += f"Disease detected: {answer}\n"
        yield history
        # The accumulated status text becomes the LLM prompt's {question}.
        status = history[-1][1]
        chat = llm_chain.run(status)
        history.append((chat, None))
        yield history
    else:
        history[-1][1] = "Thinking..."
        # BUG FIX: the original never yielded on this branch, so the
        # "Thinking..." placeholder was never delivered to the Chatbot.
        yield history
|
|
|
|
|
def change_fn(*args, **kwargs):
    """Debug hook: stash the positional args of the latest event in the
    module-level cache. Keyword args are accepted but ignored."""
    global_cache.update(args=args)
|
|
|
|
|
|
|
|
|
# --- Gradio UI wiring ------------------------------------------------------
with gr.Blocks() as demo:
    # Chat transcript; starts from the empty local_history.
    chatbot = gr.Chatbot(
        local_history,
        elem_id="chatbot",
        bubble_full_width=False,
    )

    with gr.Row():
        txt = gr.Textbox(
            scale=4,
            show_label=False,
            placeholder="Enter text and press enter, or upload an image",
            container=False,
        )
        btn = gr.UploadButton("π", file_types=["image", "video", "audio"])

    # Enter in the textbox: add_text appends the message and disables the
    # box, then bot() streams its reply into the chatbot.
    txt_msg = txt.submit(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
        bot, chatbot, chatbot, api_name="bot_response"
    )
    # Once bot() finishes, re-enable the textbox.
    txt_msg.then(lambda: gr.Textbox(interactive=True), None, [txt], queue=False)
    # Upload button: add_file records the file message, then bot() runs the
    # image-QnA pipeline on it.
    file_msg = btn.upload(add_file, [chatbot, btn], [chatbot], queue=False).then(
        bot, chatbot, chatbot
    )

# Basic-auth gate: username "admin", password from the DEMO_KEY env var.
demo.launch(auth=("admin", os.environ["DEMO_KEY"]))
|
|