yashbyname committed on
Commit
6eb0efb
·
verified ·
1 Parent(s): f1bd090

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +96 -46
app.py CHANGED
@@ -161,60 +161,109 @@
161
 
162
  import requests
163
  import gradio as gr
 
 
 
 
 
 
164
 
165
  # API key and user ID for on-demand
166
  api_key = 'KGSjxB1uptfSk8I8A7ciCuNT9Xa3qWC3'
167
  external_user_id = 'plugin-1717464304'
168
 
169
- # Step 1: Create a chat session with the API
170
  def create_chat_session():
171
- create_session_url = 'https://api.on-demand.io/chat/v1/sessions'
172
- create_session_headers = {
173
- 'apikey': api_key
174
- }
175
- create_session_body = {
176
- "pluginIds": [],
177
- "externalUserId": external_user_id
178
- }
179
- response = requests.post(create_session_url, headers=create_session_headers, json=create_session_body)
180
- response_data = response.json()
181
- session_id = response_data['data']['id']
182
- return session_id
183
-
184
- # Step 2: Submit query to the API
185
- def submit_query(session_id, query):
186
- submit_query_url = f'https://api.on-demand.io/chat/v1/sessions/{session_id}/query'
187
- submit_query_headers = {
188
- 'apikey': api_key
189
- }
190
- submit_query_body = {
191
- "endpointId": "predefined-openai-gpt4o",
192
- "query": query,
193
- "pluginIds": ["plugin-1712327325", "plugin-1713962163"],
194
- "responseMode": "sync"
195
- }
196
- response = requests.post(submit_query_url, headers=submit_query_headers, json=submit_query_body)
197
- return response.json()
198
-
199
- # Function to handle patient info, query, and image processing (now focusing on LLM)
200
- def gradio_interface(patient_info, query_type):
201
- # Call LLM with patient info and query
202
- session_id = create_chat_session()
203
- query = f"Patient Info: {patient_info}\nQuery Type: {query_type}"
204
- llm_response = submit_query(session_id, query)
205
 
206
- # Debug: Print the full response to inspect it
207
- print("LLM Response:", llm_response) # This will print the full response for inspection
 
 
 
208
 
209
- # Safely handle 'message' if it exists
210
- message = llm_response.get('data', {}).get('message', 'No message returned from LLM')
211
-
212
- # Check if message is empty and print the complete response if necessary
213
- if message == 'No message returned from LLM':
214
- print("Full LLM Response Data:", llm_response) # Inspect the full LLM response for any helpful info
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
215
 
216
- response = f"Patient Info: {patient_info}\nQuery Type: {query_type}\n\nLLM Response:\n{message}"
217
- return response
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
218
 
219
  # Gradio interface
220
  iface = gr.Interface(
@@ -236,4 +285,5 @@ iface = gr.Interface(
236
  description="Provide patient information and a query type for analysis by the LLM."
237
  )
238
 
239
- iface.launch()
 
 
161
 
162
  import requests
163
  import gradio as gr
164
+ import logging
165
+ import json
166
+
167
+ # Set up logging
168
+ logging.basicConfig(level=logging.INFO)
169
+ logger = logging.getLogger(__name__)
170
 
171
import os

# API key and user ID for the on-demand.io service.
# SECURITY NOTE(review): this API key was previously hard-coded (and is now
# in the repo history). Prefer the ONDEMAND_API_KEY environment variable;
# the literal fallback keeps existing deployments working, but the key
# should be rotated and the fallback removed.
api_key = os.environ.get('ONDEMAND_API_KEY', 'KGSjxB1uptfSk8I8A7ciCuNT9Xa3qWC3')
external_user_id = os.environ.get('ONDEMAND_EXTERNAL_USER_ID', 'plugin-1717464304')
174
 
 
175
def create_chat_session():
    """Create a new on-demand.io chat session.

    Returns:
        str: The session id taken from the ``data.id`` field of the
        API's JSON response.

    Raises:
        requests.exceptions.RequestException: On a network failure, a
            timeout, or a non-2xx HTTP status (via ``raise_for_status``).
        KeyError: If the response JSON lacks the expected ``data.id`` field.
    """
    try:
        create_session_url = 'https://api.on-demand.io/chat/v1/sessions'
        create_session_headers = {
            'apikey': api_key,
            'Content-Type': 'application/json'
        }
        create_session_body = {
            "pluginIds": [],
            "externalUserId": external_user_id
        }

        logger.info("Creating chat session...")
        # A timeout prevents the worker from hanging forever on a stalled
        # connection (the original call had none).
        response = requests.post(
            create_session_url,
            headers=create_session_headers,
            json=create_session_body,
            timeout=30,
        )
        response.raise_for_status()  # Raise an exception for bad status codes

        response_data = response.json()
        # Lazy %-style args: json.dumps only runs when INFO is enabled.
        logger.info("Session created successfully: %s",
                    json.dumps(response_data, indent=2))

        session_id = response_data['data']['id']
        return session_id

    except requests.exceptions.RequestException as e:
        logger.error("Error creating chat session: %s", e)
        # e.response is None for connection-level errors; hasattr(None, 'text')
        # is False, so this only logs when an HTTP response exists.
        if hasattr(e.response, 'text'):
            logger.error("Response content: %s", e.response.text)
        raise
202
 
203
def submit_query(session_id, query):
    """Submit a query to an existing on-demand.io chat session.

    Args:
        session_id: Session id previously returned by ``create_chat_session``.
        query: The prompt text to send to the LLM endpoint.

    Returns:
        dict: The parsed JSON response from the sync query endpoint.

    Raises:
        requests.exceptions.RequestException: On a network failure, a
            timeout, or a non-2xx HTTP status (via ``raise_for_status``).
    """
    try:
        submit_query_url = f'https://api.on-demand.io/chat/v1/sessions/{session_id}/query'
        submit_query_headers = {
            'apikey': api_key,
            'Content-Type': 'application/json'
        }
        submit_query_body = {
            "endpointId": "predefined-openai-gpt4o",
            "query": query,
            "pluginIds": ["plugin-1712327325", "plugin-1713962163"],
            "responseMode": "sync"
        }

        logger.info("Submitting query for session %s", session_id)
        logger.info("Query content: %s", query)

        # Sync mode can take a while; bound it so a stalled request cannot
        # hang the caller indefinitely (the original call had no timeout).
        response = requests.post(
            submit_query_url,
            headers=submit_query_headers,
            json=submit_query_body,
            timeout=60,
        )
        response.raise_for_status()

        response_data = response.json()
        # Lazy %-style args: json.dumps only runs when INFO is enabled.
        logger.info("Query response received: %s",
                    json.dumps(response_data, indent=2))
        return response_data

    except requests.exceptions.RequestException as e:
        logger.error("Error submitting query: %s", e)
        # e.response is None for connection-level errors; only log body text
        # when an actual HTTP response came back.
        if hasattr(e.response, 'text'):
            logger.error("Response content: %s", e.response.text)
        raise
232
 
233
def gradio_interface(patient_info, query_type):
    """Gradio handler: forward patient info and query type to the LLM.

    Returns the formatted LLM answer on success, or a human-readable
    error string otherwise. All exceptions are caught at this boundary
    (and logged with tracebacks) so the UI never sees a raw traceback.
    """
    try:
        # Open a session, build the prompt, and ask the LLM.
        chat_session = create_chat_session()
        prompt = f"Patient Info: {patient_info}\nQuery Type: {query_type}"
        result = submit_query(chat_session, prompt)

        # Guard: nothing came back at all.
        if not result:
            logger.error("Empty response received from LLM")
            return "Error: No response received from the LLM service"

        # Pretty-print once; reused in the log line and error strings below.
        dumped = json.dumps(result, indent=2)
        logger.info(f"Processing LLM response: {dumped}")

        # Guard: the expected response envelope is missing entirely.
        if 'data' not in result:
            logger.error("Response missing 'data' field")
            return f"Error: Unexpected response structure\nFull response: {dumped}"

        # Guard: envelope present but carries no message text.
        message = result.get('data', {}).get('message')
        if not message:
            logger.error("No message found in response data")
            return f"Error: No message in response\nFull response: {dumped}"

        # Success: echo the inputs alongside the LLM's answer.
        return (
            f"Patient Info: {patient_info}\nQuery Type: {query_type}"
            f"\n\nLLM Response:\n{message}"
        )

    except Exception as e:
        logger.error(f"Error in gradio_interface: {str(e)}", exc_info=True)
        return f"Error processing request: {str(e)}"
267
 
268
  # Gradio interface
269
  iface = gr.Interface(
 
285
  description="Provide patient information and a query type for analysis by the LLM."
286
  )
287
 
288
# Start the Gradio server only when executed as a script, not on import
# (e.g. when the module is imported by a test or a hosting wrapper).
if __name__ == "__main__":
    iface.launch()