kaleidoskop-hug
committed on
Commit
•
0029ae0
1
Parent(s):
00bca09
Update app.py
Browse files
app.py
CHANGED
@@ -22,51 +22,28 @@ model_links = {
|
|
22 |
"Mistral-Small-Instruct-2409": "mistralai/Mistral-Small-Instruct-2409",
|
23 |
}
|
24 |
|
25 |
-
#Random dog images for error message
|
26 |
-
random_dog = ["0f476473-2d8b-415e-b944-483768418a95.jpg",
|
27 |
-
"1bd75c81-f1d7-4e55-9310-a27595fa8762.jpg",
|
28 |
-
"526590d2-8817-4ff0-8c62-fdcba5306d02.jpg",
|
29 |
-
"1326984c-39b0-492c-a773-f120d747a7e2.jpg",
|
30 |
-
"42a98d03-5ed7-4b3b-af89-7c4876cb14c3.jpg",
|
31 |
-
"8b3317ed-2083-42ac-a575-7ae45f9fdc0d.jpg",
|
32 |
-
"ee17f54a-83ac-44a3-8a35-e89ff7153fb4.jpg",
|
33 |
-
"027eef85-ccc1-4a66-8967-5d74f34c8bb4.jpg",
|
34 |
-
"08f5398d-7f89-47da-a5cd-1ed74967dc1f.jpg",
|
35 |
-
"0fd781ff-ec46-4bdc-a4e8-24f18bf07def.jpg",
|
36 |
-
"0fb4aeee-f949-4c7b-a6d8-05bf0736bdd1.jpg",
|
37 |
-
"6edac66e-c0de-4e69-a9d6-b2e6f6f9001b.jpg",
|
38 |
-
"bfb9e165-c643-4993-9b3a-7e73571672a6.jpg"]
|
39 |
-
|
40 |
-
|
41 |
-
|
42 |
def reset_conversation():
|
43 |
-
|
44 |
-
Resets Conversation
|
45 |
-
'''
|
46 |
-
st.session_state.conversation = []
|
47 |
st.session_state.messages = []
|
48 |
return None
|
49 |
|
50 |
-
def
|
51 |
-
response
|
52 |
-
|
53 |
-
|
54 |
-
|
55 |
-
|
56 |
-
|
57 |
-
|
58 |
-
|
59 |
-
|
60 |
-
|
61 |
-
|
62 |
-
|
63 |
-
|
64 |
|
65 |
-
|
66 |
-
|
67 |
-
|
68 |
-
# except Exception as e:
|
69 |
-
# response = "😵💫 Looks like someone unplugged something!"
|
70 |
|
71 |
return response
|
72 |
|
@@ -156,19 +133,21 @@ if st.session_state.remove:
|
|
156 |
|
157 |
|
158 |
|
159 |
-
|
160 |
# Accept user input
|
161 |
if prompt := st.chat_input(f"Hi I'm {selected_model}, ask me a question"):
|
162 |
-
# Display user message in chat message container
|
163 |
with st.chat_message("user"):
|
164 |
st.markdown(prompt)
|
165 |
-
# Add user message to chat history
|
166 |
st.session_state.messages.append({"role": "user", "content": prompt})
|
167 |
|
168 |
# Display assistant response in chat message container
|
169 |
-
|
170 |
-
|
171 |
-
st.
|
|
|
|
|
|
|
|
|
172 |
|
173 |
st.session_state.messages.append({"role": "assistant", "content": response})
|
174 |
|
|
|
22 |
"Mistral-Small-Instruct-2409": "mistralai/Mistral-Small-Instruct-2409",
|
23 |
}
|
24 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
25 |
def reset_conversation():
    """Reset the chat by clearing every stored message from session state."""
    # Replace the history with a fresh empty list; nothing else is touched.
    st.session_state.messages = []
    return None
|
29 |
|
30 |
+
def ask_assistant_stream(st_model, st_messages, st_temp_value, st_max_tokens):
    """Request a streamed chat completion for the given conversation.

    Parameters:
        st_model: model identifier forwarded to the completions API.
        st_messages: chat history; dicts with "role" and "content" keys.
        st_temp_value: sampling temperature.
        st_max_tokens: maximum number of tokens to generate.

    Returns:
        dict containing a "stream" key (the streaming response iterator)
        on success; on failure the dict has no "stream" key but carries an
        "error" key with the exception text, so callers can still test
        `"stream" in result`.
    """
    # BUG FIX: this was `response = []`. Item assignment on a list raised
    # TypeError inside the try, the blanket except swallowed it, and the
    # function always returned an empty list — every request rendered as
    # "Failure" in the UI. A dict makes `response["stream"] = stream` valid.
    response = {}
    try:
        stream = client.chat.completions.create(
            model=st_model,
            messages=[
                {"role": m["role"], "content": m["content"]}
                for m in st_messages
            ],
            temperature=st_temp_value,
            stream=True,
            max_tokens=st_max_tokens,
        )
        response["stream"] = stream
    except Exception as e:
        # Best-effort: the caller shows a failure message when "stream" is
        # absent; record the reason instead of silently dropping it.
        response["error"] = str(e)
    return response
|
49 |
|
|
|
133 |
|
134 |
|
135 |
|
|
|
136 |
# Accept user input
if prompt := st.chat_input(f"Hi I'm {selected_model}, ask me a question"):
    # Render the user's message and record it in the chat history.
    with st.chat_message("user"):
        st.markdown(prompt)
    st.session_state.messages.append({"role": "user", "content": prompt})

    # Ask the model; render the reply (or a failure notice) in one
    # assistant bubble — both branches opened the same container before.
    assistant = ask_assistant_stream(
        model_links[selected_model],
        st.session_state.messages,
        temp_values,
        max_token_value,
    )
    with st.chat_message("assistant"):
        if "stream" in assistant:
            response = st.write_stream(assistant["stream"])
        else:
            # NOTE(review): st.write returns None, so a failed turn stores
            # None as the assistant content — confirm this is intended.
            response = st.write("Failure")

    st.session_state.messages.append({"role": "assistant", "content": response})
|
153 |
|