drmasad committed
Commit
5f10c1e
1 Parent(s): 8bfe0fe

Update app.py

Files changed (1)
  1. app.py +19 -18
app.py CHANGED
@@ -97,18 +97,24 @@ for message in st.session_state.messages:
 if "is_streaming" not in st.session_state:
     st.session_state.is_streaming = False
 
-# Chat input handling
+# Session state initialization
+if "is_streaming" not in st.session_state:
+    st.session_state.is_streaming = False  # Indicate if streaming/processing
+if "messages" not in st.session_state:
+    st.session_state.messages = []
+
+# Prevent input during streaming
 if st.session_state.is_streaming:
-    st.chat_input("The assistant is currently responding. Please wait...")  # Inform the user to wait
+    st.info("Assistant is processing. Please wait...")
 else:
-    # If not streaming, allow user input
     if prompt := st.chat_input("Ask me anything about diabetes"):
-        st.session_state.is_streaming = True  # Set the flag to indicate streaming has started
+        st.session_state.is_streaming = True  # Mark as streaming
 
+        # Display user message
         with st.chat_message("user"):
             st.markdown(prompt)
 
-        # Add the user message to chat history
+        # Append user message to chat history
         st.session_state.messages.append({"role": "user", "content": prompt})
 
         instructions = """
@@ -124,30 +130,25 @@ else:
         You will answer as if you are talking to a patient directly
         """
 
+        # Full prompt
         full_prompt = f"<s>[INST] {prompt} [/INST] {instructions}</s>"
 
-        # Display assistant response in chat message container
+        # Stream assistant's response
         with st.chat_message("assistant"):
-            # Stream the response
             stream = client.chat.completions.create(
                 model=model_links[selected_model],
                 messages=[
-                    {"role": m["role"], "content": full_prompt}
-                    for m in st.session_state.messages
+                    {"role": "assistant", "content": prompt},
+                    {"role": "user", "content": full_prompt},
                 ],
-                temperature=temp_values,
                 stream=True,
+                temperature=temp_values,
                 max_tokens=1024,
             )
-            response = st.write_stream(stream)
-
-            # Process and clean the response
-            response = response.replace('</s>', '').strip()  # Clean unnecessary characters
 
+            response = st.write_stream(stream).replace("</s>", "").strip()
             st.markdown(response)
 
-            # Indicate that streaming is complete
-            st.session_state.is_streaming = False
-
-            # Store the final response
+            # Indicate streaming completion and update chat history
+            st.session_state.is_streaming = False
         st.session_state.messages.append({"role": "assistant", "content": response})
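
For reference, below is a minimal sketch of how the chat-handling block of app.py reads after this commit, assembled from the new side of the diff. The imports and the client, model_links, selected_model, and temp_values definitions at the top are placeholder assumptions (these names are defined earlier in app.py but are not shown in these hunks), and the instructions string is abridged because the diff only shows its final line. After this change the request sends only the current turn (the raw prompt plus the wrapped full_prompt) instead of replaying st.session_state.messages, and the chat input is replaced by an st.info notice while is_streaming is set.

# Sketch of the post-commit chat-handling block.
# The imports and the client/model setup below are assumptions for illustration;
# they come from earlier parts of app.py that this diff does not show.
import streamlit as st
from openai import OpenAI  # assumed: an OpenAI-compatible client

client = OpenAI(base_url="https://api-inference.huggingface.co/v1", api_key="hf_...")  # assumed
model_links = {"Mistral-7B": "mistralai/Mistral-7B-Instruct-v0.2"}  # assumed
selected_model = "Mistral-7B"  # assumed
temp_values = 0.5  # assumed

# Session state initialization
if "is_streaming" not in st.session_state:
    st.session_state.is_streaming = False  # Indicate if streaming/processing
if "messages" not in st.session_state:
    st.session_state.messages = []

# Prevent input during streaming
if st.session_state.is_streaming:
    st.info("Assistant is processing. Please wait...")
else:
    if prompt := st.chat_input("Ask me anything about diabetes"):
        st.session_state.is_streaming = True  # Mark as streaming

        # Display user message
        with st.chat_message("user"):
            st.markdown(prompt)

        # Append user message to chat history
        st.session_state.messages.append({"role": "user", "content": prompt})

        instructions = """
        You will answer as if you are talking to a patient directly
        """  # abridged: the diff only shows the final line of this prompt

        # Full prompt
        full_prompt = f"<s>[INST] {prompt} [/INST] {instructions}</s>"

        # Stream assistant's response
        with st.chat_message("assistant"):
            stream = client.chat.completions.create(
                model=model_links[selected_model],
                messages=[
                    {"role": "assistant", "content": prompt},
                    {"role": "user", "content": full_prompt},
                ],
                stream=True,
                temperature=temp_values,
                max_tokens=1024,
            )

            response = st.write_stream(stream).replace("</s>", "").strip()
            st.markdown(response)

            # Indicate streaming completion and update chat history
            st.session_state.is_streaming = False
        st.session_state.messages.append({"role": "assistant", "content": response})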