Update app.py
app.py CHANGED
@@ -2,9 +2,9 @@ import os
 import argparse
 import gradio as gr
 from difflib import Differ
-from functools import partial
 from string import Template
 from utils import load_prompt, setup_gemini_client
+from configs.responses import SummaryResponses
 
 def parse_args():
     parser = argparse.ArgumentParser()
@@ -26,8 +26,8 @@ def find_attached_file(filename, attached_files):
     return None
 
 def echo(message, history, state):
-
     attached_file = None
+
     if message['files']:
         path_local = message['files'][0]
         filename = os.path.basename(path_local)
@@ -45,8 +45,6 @@ def echo(message, history, state):
         })
         attached_file = path_gcp
 
-    # [{'role': 'user', 'metadata': None, 'content': 'asdf', 'options': None}, {'role': 'assistant', 'metadata': None, 'content': 'asdf', 'options': None}]
-
     user_message = [message['text']]
     if attached_file: user_message.append(attached_file)
 
@@ -56,7 +54,7 @@ def echo(message, history, state):
 
     response = client.models.generate_content(
         model="gemini-1.5-flash",
-        contents=state['messages']
+        contents=state['messages'],
     )
     model_response = response.text
 
@@ -71,7 +69,10 @@ def echo(message, history, state):
                 previous_summary=state['summary'],
                 latest_conversation=str({"user": message['text'], "assistant": model_response})
             )
-        ]
+        ],
+        config={'response_mime_type': 'application/json',
+                'response_schema': SummaryResponses,
+        },
     )
 
     if state['summary'] != "":
@@ -80,8 +81,16 @@ def echo(message, history, state):
         prev_summary = ""
 
     d = Differ()
-    state['summary'] =
-
+    state['summary'] = (
+        response.parsed.summary
+        if getattr(response.parsed, "summary", None) is not None
+        else response.text
+    )
+    state['summary_history'].append(
+        response.parsed.summary
+        if getattr(response.parsed, "summary", None) is not None
+        else response.text
+    )
     state['summary_diff_history'].append(
         [
             (token[2:], token[0] if token[0] != " " else None)
@@ -188,4 +197,4 @@ def main(args):
 if __name__ == "__main__":
     args = parse_args()
     demo = main(args)
-    demo.launch()
+    demo.launch()
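Note on the new response_schema config: configs/responses.py is not part of this diff, so the exact shape of SummaryResponses is an assumption. A minimal sketch of what it could look like, given that the code requests application/json output and reads response.parsed.summary, follows.

# configs/responses.py -- hypothetical sketch; the real file is not shown in this diff.
# The google-genai SDK accepts a Pydantic model as `response_schema`; with
# response_mime_type set to "application/json" it validates the model's reply
# and exposes the parsed object on `response.parsed`.
from pydantic import BaseModel


class SummaryResponses(BaseModel):
    # Running summary of the conversation; read in app.py as response.parsed.summary.
    summary: str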
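The (token[2:], token[0] ...) comprehension turns difflib.Differ output into (text, marker) pairs for summary_diff_history. The compare() call that produces the tokens sits outside this hunk, so the sketch below assumes the previous and new summaries are compared directly, character by character; all names here are illustrative only.

# Standalone sketch of the diff-highlighting step (illustrative, not the app's exact code).
from difflib import Differ

prev_summary = "short summary"
new_summary = "short updated summary"

d = Differ()
# Differ.compare yields tokens prefixed with a two-character code:
# "  " unchanged, "- " removed, "+ " added, "? " alignment hints.
pairs = [
    (token[2:], token[0] if token[0] != " " else None)
    for token in d.compare(prev_summary, new_summary)
    if not token.startswith("?")
]
# Each pair is (text, marker): marker is None for unchanged text, "-" or "+" otherwise,
# which matches the (text, label) value format used by components like gr.HighlightedText.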