Rams901 committed
Commit b3feb3a
1 parent: c22a791

Update app.py

Files changed (1)
  1. app.py +83 -97
app.py CHANGED
@@ -1,65 +1,48 @@
 import gradio as gr
 import random
+
 import time
 import os
+
 import gradio as gr
 import numpy as np
+
 from langchain.chains import LLMChain
-from langchain import PromptTemplate
+
+from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
 import re
 import requests
 from bs4 import BeautifulSoup
+from langchain.chat_models import ChatOpenAI
+from langchain.agents import AgentType, Tool, initialize_agent
+from langchain.tools.render import format_tool_to_openai_function
+from langchain.agents.format_scratchpad import format_to_openai_function_messages
+from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser
+from langchain.agents import AgentExecutor
+
+
 from langchain.schema import (
     SystemMessage,
     HumanMessage,
     AIMessage
 )
-from utils import ClaudeLLM
 
-personalities = ["You are Marcus Aurelius. You have the exact same traits. And you always quote him.", "I want you to act as a travel guide. I will write you my location and you will suggest a place to visit near my location. In some cases, I will also give you the type of places I will visit. You will also suggest me places of similar type that are close to my first location.", "I want you to act as an astrologer. You will learn about the zodiac signs and their meanings, understand planetary positions and how they affect human lives, be able to interpret horoscopes accurately, and share your insights with those seeking guidance or advice.", "I want you to act as a social media influencer. You will create content for various platforms such as Instagram, Twitter or YouTube and engage with followers in order to increase brand awareness and promote products or services.",]
+llm = ChatOpenAI(
+    temperature=0,
+    model='gpt-3.5-turbo-16k'
+)
+
 
 
+personalities = ["You are a weather fact checker. You will check if the user prompts about the temperature in a certain city. You need to use the functions provided to you when needed.",]
+
 def user(user_message, history):
     return "", history + [[user_message, None]]
-def suggest(chatlog):
-    latest_msg = chatlog[-1][1]
-    llm = ClaudeLLM()
-    instruction = f"Generate 3 short responses for the following text message: {latest_msg}"
-
-    prompt = PromptTemplate(
-        input_variables=["instruction",],
-        template="""Generate 3 short replies that answer the text message. Keep your answer short and concise to the task.
-Here is an example:
-Message: Hi Marc, How are you doing today?
-Response:
-1- I'm good, how are you?
-2- I'm fine, thanks!
-3- Great, how about you?
-
-### Task
-{instruction}
-
-### Response:""",
-    )
-
-    chain = LLMChain(llm=llm, prompt = prompt)
-
-    response = chain.run(instruction = instruction,).strip()
-    suggestions = [ remove_numbers(resp.strip().replace('.', '').replace('-', '')).replace('\n', '') for resp in response.split('\n')]
-    print(suggestions)
-    if len(suggestions) < 3:
-        fillers = [''] * (3 - len(suggestions))
-        suggestions += fillers
-    if len(suggestions) > 3:
-        suggestions = suggestions[1:4]
-
-    return suggestions
 
 def remove_numbers(question):
     return question.translate(str.maketrans('', '', '0123456789'))
 
-
-llm = ClaudeLLM()
+# llm = ClaudeLLM()
 
 def add_text(history, text):
 
@@ -68,103 +51,106 @@ def add_text(history, text):
 
     return history, ""
 
-
 def qa_retrieve(chatlog, index):
 
+    msgs = [[('assistant',chat[1]), ('user', chat[0])] for chat in chatlog[:-1]]
+    flat_msgs = [y for x in msgs for y in x]
+    flat_msgs = list([("system", personalities[0])] + flat_msgs + [("user", "{input}")] + [MessagesPlaceholder(variable_name="agent_scratchpad")])
+
+    print(flat_msgs)
+    print(type(flat_msgs))
+    msgs = flat_msgs
+    prompt = ChatPromptTemplate.from_messages(
+        msgs
+    )
+
+    tools = [
+        Tool(
+            name="Search",
+            func= extract_temperature,
+            description="useful for when you want to retrieve temperature degrees in a certain city or country. Input should be in the form of a string containing the city or country provided.",
+        ),]
+    llm_with_tools = llm.bind(functions=[format_tool_to_openai_function(t) for t in tools])
+
+    agent = (
+        {
+            "input": lambda x: x["input"],
+            "agent_scratchpad": lambda x: format_to_openai_function_messages(
+                x["intermediate_steps"]
+            ),
+        }
+        | prompt
+        | llm_with_tools
+        | OpenAIFunctionsAgentOutputParser()
+    )
+
     print(f"Chatlog qa: {chatlog}")
     query = chatlog[-1][0]
-    #docs = ""
-
-
-    # docs = db.similarity_search(query, k=4)
-
-    #docs_page_content = " ".join([d.page_content for d in docs])
-
-
-    prompt = PromptTemplate(
-        input_variables=["query"],
-        template="""
-    Your name is Smartovate bot and you're being developed by Smartovate team. As a consultant, your role is to assist the user in various ways. You're a professional smart assistant and will help users formulating emails, giving ideas to productivity
-    and will always give your best help the users.
-
-    {query}
-
-    """,
-    )
-
-
+    agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)
+
+    gen = agent_executor.invoke(
+        {
+            "input": query
+        })
+
+    # prompt = PromptTemplate(
+    #     input_variables=["query"],
+    #     template="""
+
+    #     {personality}
+
+    #     {query}
+
+    #     """,
+    # )
+
+
     # llm = BardLLM()
-    chain = LLMChain(llm=llm, prompt = prompt)
-
-    response = chain.run(query=query,)
+    # chain = LLMChain(llm=llm, prompt = prompt, )
 
-    chatlog[-1][1] = response
+    # response = chain.run(query=query, personality = personalities[0])
+
+    chatlog[-1][1] = gen['output']
     return chatlog
-
+
 def flush():
     global db
     db = ""
     return None
 
-
-#txt.submit(add_text, [chatbot, txt], [chatbot, txt]).then(
-#    qa_retrieve, [chatbot], chatbot)#
-#btn.upload(add_file, [chatbot, btn], [chatbot,], batch = True).then(qa_retrieve, [chatbot], chatbot)
-
-#send_btn.click(add_text, [chatbot, txt, ], [chatbot, txt]).then(
-#    qa_retrieve, [chatbot, ], chatbot)
-
-#clear.click(flush, None, outputs = chatbot, queue=False)
-
-#demo.queue(concurrency_count = 4)
-#demo.launch()
-
 with gr.Blocks(css = """#white-button {
     background-color: #FFFFFF;
    color: #000000;
 }
-
 #orange-button-1 {
    background-color: #FFDAB9;
    color: #000000;
-
-
 }
-
 #orange-button-2 {
    background-color: #FFA07A;
    color: #FFFFFF;
-
 }
-
 #orange-button-3 {
    background-color: #FF4500;
    color: #FFFFFF;
 }""", theme=gr.themes.Soft()) as demo:
    chatbot = gr.Chatbot().style(height=750)
 
-    with gr.Row():
-        with gr.Column():
-            marcus = gr.Button("Suggestion 1", elem_id = "white-button")
-        with gr.Column(min_width=0):
-            travel_guide = gr.Button("Suggestion 2", elem_id = "orange-button-1")
-        with gr.Column(min_width=0):
-            astrologer = gr.Button("Suggestion 3", elem_id = "orange-button-2")
-
    with gr.Row():
        with gr.Column(scale = 0.75, min_width=0):
            msg = gr.Textbox(placeholder = "Enter text and press enter",show_label=False).style(container = False)
        with gr.Column(scale = 0.25, min_width=0):
            clear = gr.Button("Clear")
 
-    index = gr.Textbox(value = "0", visible = False)
+    index = gr.Textbox(value = "0", visible = False)
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        qa_retrieve, [chatbot, index], chatbot
-    ).then(suggest, chatbot, [marcus, travel_guide, astrologer, ])
-
-    marcus.click(lambda x: x, marcus, msg)
-    travel_guide.click(lambda x: x, travel_guide, msg)
-    astrologer.click(lambda x: x, astrologer, msg)
+
+    )
+
+    # marcus.click(lambda x: x, marcus, msg)
+    # travel_guide.click(lambda x: x, travel_guide, msg)
+    # astrologer.click(lambda x: x, astrologer, msg)
    clear.click(lambda: None, None, chatbot, queue=False)
 
 demo.launch()
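
Note: the new qa_retrieve registers a Tool whose func is extract_temperature, but no definition of that helper appears in the changed hunks, so any body shown for it here is an assumption rather than part of this commit. A minimal sketch, assuming the public wttr.in endpoint and reusing the requests import already present in app.py:

import requests

def extract_temperature(location: str) -> str:
    # Hypothetical helper (not in this commit): ask wttr.in for the current
    # temperature of the given city or country and return a short sentence.
    try:
        resp = requests.get(f"https://wttr.in/{location}", params={"format": "%t"}, timeout=10)
        resp.raise_for_status()
        return f"The current temperature in {location} is {resp.text.strip()}."
    except requests.RequestException as exc:
        return f"Could not retrieve the temperature for {location}: {exc}"

The Tool description already tells the agent to pass a plain city or country string, so a single string parameter is enough for this sketch.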
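Running this version also assumes an OpenAI key is available: ChatOpenAI reads OPENAI_API_KEY from the environment by default, so the Space (or a local run) needs it set before demo.launch() is reached. A minimal guard that could sit near the top of app.py (assumed setup, not part of this commit):

import os

# Fail fast with a clear message if the OpenAI key is missing.
if not os.environ.get("OPENAI_API_KEY"):
    raise RuntimeError("Set the OPENAI_API_KEY environment variable before launching the app.")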