Update app.py
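Adds structured logging, request error handling, and response validation around the on-demand chat API calls; splits the flow into create_chat_session, submit_query, and gradio_interface helpers; and guards iface.launch() behind a __main__ check.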
app.py CHANGED
@@ -161,60 +161,109 @@
 
 import requests
 import gradio as gr
+import logging
+import json
+
+# Set up logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
 
 # API key and user ID for on-demand
 api_key = 'KGSjxB1uptfSk8I8A7ciCuNT9Xa3qWC3'
 external_user_id = 'plugin-1717464304'
 
-# Step 1: Create a chat session with the API
 def create_chat_session():
-
-
-
-
-
-
-
-
-
-
-
-
-
-#
-
-
-
-
-
-
-        "endpointId": "predefined-openai-gpt4o",
-        "query": query,
-        "pluginIds": ["plugin-1712327325", "plugin-1713962163"],
-        "responseMode": "sync"
-    }
-    response = requests.post(submit_query_url, headers=submit_query_headers, json=submit_query_body)
-    return response.json()
-
-# Function to handle patient info, query, and image processing (now focusing on LLM)
-def gradio_interface(patient_info, query_type):
-    # Call LLM with patient info and query
-    session_id = create_chat_session()
-    query = f"Patient Info: {patient_info}\nQuery Type: {query_type}"
-    llm_response = submit_query(session_id, query)
+    try:
+        create_session_url = 'https://api.on-demand.io/chat/v1/sessions'
+        create_session_headers = {
+            'apikey': api_key,
+            'Content-Type': 'application/json'
+        }
+        create_session_body = {
+            "pluginIds": [],
+            "externalUserId": external_user_id
+        }
+
+        logger.info("Creating chat session...")
+        response = requests.post(create_session_url, headers=create_session_headers, json=create_session_body)
+        response.raise_for_status()  # Raise an exception for bad status codes
+
+        response_data = response.json()
+        logger.info(f"Session created successfully: {json.dumps(response_data, indent=2)}")
+
+        session_id = response_data['data']['id']
+        return session_id
 
-
-
+    except requests.exceptions.RequestException as e:
+        logger.error(f"Error creating chat session: {str(e)}")
+        if hasattr(e.response, 'text'):
+            logger.error(f"Response content: {e.response.text}")
+        raise
 
-
-
-
-
-
-
+def submit_query(session_id, query):
+    try:
+        submit_query_url = f'https://api.on-demand.io/chat/v1/sessions/{session_id}/query'
+        submit_query_headers = {
+            'apikey': api_key,
+            'Content-Type': 'application/json'
+        }
+        submit_query_body = {
+            "endpointId": "predefined-openai-gpt4o",
+            "query": query,
+            "pluginIds": ["plugin-1712327325", "plugin-1713962163"],
+            "responseMode": "sync"
+        }
+
+        logger.info(f"Submitting query for session {session_id}")
+        logger.info(f"Query content: {query}")
+
+        response = requests.post(submit_query_url, headers=submit_query_headers, json=submit_query_body)
+        response.raise_for_status()
+
+        response_data = response.json()
+        logger.info(f"Query response received: {json.dumps(response_data, indent=2)}")
+        return response_data
+
+    except requests.exceptions.RequestException as e:
+        logger.error(f"Error submitting query: {str(e)}")
+        if hasattr(e.response, 'text'):
+            logger.error(f"Response content: {e.response.text}")
+        raise
 
-
-
+def gradio_interface(patient_info, query_type):
+    try:
+        # Create session
+        session_id = create_chat_session()
+
+        # Construct query
+        query = f"Patient Info: {patient_info}\nQuery Type: {query_type}"
+
+        # Submit query and get response
+        llm_response = submit_query(session_id, query)
+
+        # Enhanced response handling
+        if not llm_response:
+            logger.error("Empty response received from LLM")
+            return "Error: No response received from the LLM service"
+
+        # Navigate the response structure with detailed logging
+        logger.info(f"Processing LLM response: {json.dumps(llm_response, indent=2)}")
+
+        if 'data' not in llm_response:
+            logger.error("Response missing 'data' field")
+            return f"Error: Unexpected response structure\nFull response: {json.dumps(llm_response, indent=2)}"
+
+        message = llm_response.get('data', {}).get('message')
+        if not message:
+            logger.error("No message found in response data")
+            return f"Error: No message in response\nFull response: {json.dumps(llm_response, indent=2)}"
+
+        response = f"Patient Info: {patient_info}\nQuery Type: {query_type}\n\nLLM Response:\n{message}"
+        return response
+
+    except Exception as e:
+        logger.error(f"Error in gradio_interface: {str(e)}", exc_info=True)
+        return f"Error processing request: {str(e)}"
 
 # Gradio interface
 iface = gr.Interface(
@@ -236,4 +285,5 @@ iface = gr.Interface(
     description="Provide patient information and a query type for analysis by the LLM."
 )
 
-
+if __name__ == "__main__":
+    iface.launch()
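
For local testing, the two API helpers can be exercised without the Gradio UI. The following is a minimal sketch, not part of the commit: the module name `app` and the ON_DEMAND_API_KEY environment variable are assumptions, and it relies on the sync response carrying the reply under data.message, as the diff's parsing logic expects.

# smoke_test.py - hedged sketch; `app` module name and ON_DEMAND_API_KEY
# are assumptions, not part of the commit above.
import os

import app  # the updated app.py; importing builds `iface` but no longer launches it

# Override the hardcoded key; the helpers read the module-level global at call time.
app.api_key = os.environ["ON_DEMAND_API_KEY"]

if __name__ == "__main__":
    session_id = app.create_chat_session()  # returns response_data['data']['id']
    reply = app.submit_query(
        session_id,
        "Patient Info: 58-year-old male, chest pain on exertion\nQuery Type: Differential diagnosis",
    )
    # gradio_interface reads the message from the same path:
    print(reply.get("data", {}).get("message"))

Because launching is now guarded by the __main__ check, importing app this way has no side effect beyond constructing the interface object, which is what makes this kind of out-of-process test possible.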