kaleidoskop-hug committed on
Commit b8e69b4
1 Parent(s): 42ac914

Update app.py

Files changed (1)
  1. app.py +26 -36
app.py CHANGED
@@ -47,7 +47,29 @@ def reset_conversation():
     st.session_state.conversation = []
     st.session_state.messages = []
     return None
-
+
+def get_assistant_aswer(st_model, st_messages, st_temp_value, st_max_tokens):
+    response = ""
+    try:
+        stream = client.chat.completions.create(
+            model=st_model,
+            messages=[
+                {"role": m["role"], "content": m["content"]}
+                for m in st_messages
+            ],
+            temperature=st_temp_value,
+            stream=True,
+            max_tokens=st_max_tokens,
+        )
+
+        for chunk in stream:
+            response += chunk.choices[0].delta.content
+
+    except Exception as e:
+        response = "😵‍💫 Looks like someone unplugged something!"
+
+    return response
+
 def retry_last():
     return None
 
@@ -118,42 +140,10 @@ if prompt := st.chat_input(f"Hi I'm {selected_model}, ask me a question"):
     # Add user message to chat history
     st.session_state.messages.append({"role": "user", "content": prompt})
 
-
     # Display assistant response in chat message container
+    response = get_assistant_aswer(model_links[selected_model], st.session_state.messages, temp_values, max_token_value)
     with st.chat_message("assistant"):
-        try:
-            stream = client.chat.completions.create(
-                model=model_links[selected_model],
-                messages=[
-                    {"role": m["role"], "content": m["content"]}
-                    for m in st.session_state.messages
-                ],
-                temperature=temp_values,#0.5,
-                stream=True,
-                max_tokens=max_token_value,
-            )
-
-            response = st.write_stream(stream)
-            st.button("retry", on_click=retry_last())
-
-        except Exception as e:
-            # st.empty()
-            response = "😵‍💫 Looks like someone unplugged something!\
-                \n Either the model space is being updated or something is down.\
-                \n\
-                \n Try again later. \
-                \n\
-                \n Here's a random pic of a 🐶:"
-            st.write(response)
-            random_dog_pick = 'https://random.dog/'+ random_dog[np.random.randint(len(random_dog))]
-            st.image(random_dog_pick)
-            st.write("This was the error message:")
-            st.write(e)
-
-
-
-
-
-
+        st.write(response)
+        st.button("retry", on_click=retry_last())
 
     st.session_state.messages.append({"role": "assistant", "content": response})
 
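
For context, a minimal standalone sketch of the chunk-accumulation pattern that the new get_assistant_aswer helper relies on. This is not part of the commit: the OpenAI-compatible client setup, base_url, api_key, stream_answer name, and model id below are placeholders for illustration only. Guarding the delta content against None avoids a TypeError on streams whose final chunk carries no text:

# Sketch only (not from this repo): assumes an OpenAI-compatible endpoint.
from openai import OpenAI

client = OpenAI(
    base_url="https://api-inference.huggingface.co/v1",  # placeholder endpoint
    api_key="hf_xxx",                                     # placeholder token
)

def stream_answer(model, messages, temperature=0.5, max_tokens=1024):
    # Accumulate a streamed chat completion into a single string.
    response = ""
    stream = client.chat.completions.create(
        model=model,
        messages=messages,
        temperature=temperature,
        stream=True,
        max_tokens=max_tokens,
    )
    for chunk in stream:
        # delta.content can be None (e.g. on the final chunk), so guard it.
        response += chunk.choices[0].delta.content or ""
    return response

# Example call with a placeholder model id:
# answer = stream_answer("mistralai/Mistral-7B-Instruct-v0.2",
#                        [{"role": "user", "content": "Hello!"}])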