cstr committed on
Commit f97296a · verified · 1 Parent(s): 030aaa2

Update app.py

Files changed (1)
  1. app.py +119 -89
app.py CHANGED
@@ -203,124 +203,147 @@ def build_prompts(snippets: List[str], prompt_instruction: str, custom_prompt: O
 
     return "\n\n".join(prompts)
 
-def send_to_model(*args, **kwargs): # Correct the outputs here
-    try:
-        with gr.Progress() as progress:
-            progress(0, "Preparing to send to model...")
-            summary, download_file = send_to_model_impl(*args, **kwargs) # Get both outputs
-            progress(1, "Complete!")
-            return summary, download_file # Return both outputs
-    except Exception as e:
-        return f"Error: {str(e)}", None # Return error message and None for the file
+def send_to_model(prompt, model_selection, hf_model_choice, hf_custom_model, hf_api_key,
+                  groq_model_choice, groq_api_key, openai_api_key, openai_model_choice):
+    """Wrapper function for send_to_model_impl with proper error handling."""
+    if not prompt:
+        return "Error: No prompt provided", None
+
+    try:
+        with gr.Progress() as progress:
+            progress(0, "Preparing to send to model...")
+
+            # Call implementation with proper error handling
+            summary, download_file = send_to_model_impl(
+                prompt=prompt,
+                model_selection=model_selection,
+                hf_model_choice=hf_model_choice,
+                hf_custom_model=hf_custom_model,
+                hf_api_key=hf_api_key,
+                groq_model_choice=groq_model_choice,
+                groq_api_key=groq_api_key,
+                openai_api_key=openai_api_key,
+                openai_model_choice=openai_model_choice
+            )
+
+            progress(1, "Complete!")
+            return summary, download_file
+
+    except Exception as e:
+        error_msg = f"Error processing request: {str(e)}"
+        logging.error(error_msg)
+        return error_msg, None
 
 def send_to_model_impl(prompt, model_selection, hf_model_choice, hf_custom_model, hf_api_key,
                        groq_model_choice, groq_api_key, openai_api_key, openai_model_choice):
+    """Implementation of model sending with improved error handling."""
     try:
         if model_selection == "Clipboard only":
-            return "Use copy/paste for processing", None
+            return "Text copied to clipboard. Use paste for processing.", None
 
         elif model_selection == "HuggingFace Inference":
             if not hf_api_key:
                 return "Error: HuggingFace API key required", None
             if not hf_model_choice:
-                return "Error: Select a HuggingFace model", None
+                return "Error: Please select a HuggingFace model", None
+
             model_id = hf_custom_model if hf_model_choice == "Custom Model" else model_registry.hf_models[hf_model_choice]
-            try:
-                summary = send_to_hf_inference(prompt, model_id, hf_api_key)
-            except Exception as e:
-                return f"Error with HuggingFace Inference: {e}", None
-
+            summary = send_to_hf_inference(prompt, model_id, hf_api_key)
+            if summary.startswith("Error"):
+                return summary, None
 
         elif model_selection == "Groq API":
             if not groq_api_key:
                 return "Error: Groq API key required", None
             if not groq_model_choice:
-                return "Error: Select a Groq model", None
-            try:
-                summary = send_to_groq(prompt, groq_model_choice, groq_api_key)
-            except Exception as e:
-                return f"Error with Groq API: {e}", None
+                return "Error: Please select a Groq model", None
+
+            summary = send_to_groq(prompt, groq_model_choice, groq_api_key)
+            if summary.startswith("Error"):
+                return summary, None
 
         elif model_selection == "OpenAI ChatGPT":
             if not openai_api_key:
                 return "Error: OpenAI API key required", None
-            try:
-                summary = send_to_openai(prompt, openai_api_key, model=openai_model_choice)
-            except Exception as e:
-                return f"Error with OpenAI API: {e}", None
+            if not openai_model_choice:
+                return "Error: Please select an OpenAI model", None
+
+            summary = send_to_openai(prompt, openai_api_key, model=openai_model_choice)
+            if summary.startswith("Error"):
+                return summary, None
 
         else:
             return "Error: Invalid model selection", None
 
-        try: # for file operations
-            with tempfile.NamedTemporaryFile(mode='w+', suffix='.txt', delete=False) as f: # w+ for write and read
+        # If we get here, we have a valid summary. Create download file.
+        if summary and not summary.startswith("Error"):
+            with tempfile.NamedTemporaryFile(delete=False, mode='w', suffix='.txt') as f:
                 f.write(summary)
-                f.flush() # Ensure data is written to disk
-                f.seek(0) # Rewind to the beginning of the file
-                download_file = f.name
-        except Exception as file_error:
-            return f"Error creating summary file: {file_error}", None
-
-        return summary, download_file
+                return summary, f.name
+
+        return summary, None
 
-    except Exception as e: # Outer exception handler
-        error_msg = f"An unexpected error occurred: {str(e)}"
+    except Exception as e:
+        error_msg = f"Error in model processing: {str(e)}"
         logging.error(error_msg)
         return error_msg, None
 
 def send_to_hf_inference(prompt: str, model_name: str, api_key: str) -> str:
+    """Send prompt to HuggingFace Inference API with better error handling."""
     try:
         client = InferenceClient(token=api_key)
         response = client.text_generation(
             prompt,
             model=model_name,
             max_new_tokens=500,
             temperature=0.7,
             top_p=0.95,
             repetition_penalty=1.1
         )
         return str(response)
     except Exception as e:
-        logging.error(f"Error with HF inference: {e}")
-        return f"Error with HF inference: {e}"
+        logging.error(f"HuggingFace inference error: {e}")
+        return f"Error with HuggingFace inference: {str(e)}"
 
 def send_to_groq(prompt: str, model_name: str, api_key: str) -> str:
+    """Send prompt to Groq API with better error handling."""
     try:
         client = Groq(api_key=api_key)
         response = client.chat.completions.create(
             model=model_name,
             messages=[{
                 "role": "user",
                 "content": prompt
             }],
             temperature=0.7,
             max_tokens=500,
             top_p=0.95
         )
         return response.choices[0].message.content
     except Exception as e:
-        logging.error(f"Error with Groq API: {e}")
-        return f"Error with Groq API: {e}"
+        logging.error(f"Groq API error: {e}")
+        return f"Error with Groq API: {str(e)}"
 
 def send_to_openai(prompt: str, api_key: str, model: str = "gpt-3.5-turbo") -> str:
+    """Send prompt to OpenAI API with better error handling."""
     try:
         import openai
         openai.api_key = api_key
 
         response = openai.ChatCompletion.create(
             model=model,
             messages=[
-                {"role": "system", "content": "You are a helpful assistant that provides detailed responses with examples and references where appropriate."},
+                {"role": "system", "content": "You are a helpful assistant that provides detailed responses."},
                 {"role": "user", "content": prompt}
             ],
             temperature=0.7,
             max_tokens=500,
             top_p=0.95
         )
         return response.choices[0].message.content
     except Exception as e:
-        logging.error(f"Error with OpenAI API: {e}")
-        return f"Error with OpenAI API: {e}"
+        logging.error(f"OpenAI API error: {e}")
+        return f"Error with OpenAI API: {str(e)}"
 
 def copy_text_js(element_id: str) -> str:
     return f"""function() {{
@@ -855,10 +878,17 @@ with gr.Blocks(css="""
     send_to_model_btn.click(
        send_to_model,
        inputs=[
-            generated_prompt, model_choice, hf_model, hf_custom_model, hf_api_key,
-            groq_model, groq_api_key, openai_api_key, openai_model # Add openai_model as input
+            generated_prompt,
+            model_choice,
+            hf_model,
+            hf_custom_model,
+            hf_api_key,
+            groq_model,
+            groq_api_key,
+            openai_api_key,
+            openai_model
        ],
-        outputs=[summary_output, download_summary] # Correct outputs
+        outputs=[summary_output, download_summary]
    )

    groq_refresh_btn.click(
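
The behavioral core of this commit is an error-handling convention: the provider helpers (send_to_hf_inference, send_to_groq, send_to_openai) return a string beginning with "Error" instead of raising, and send_to_model_impl only writes the temporary download file when the result is a real summary. The sketch below is illustrative only and is not part of app.py; fake_backend and run are hypothetical stand-ins for the provider helpers and the wrapper, shown under that assumption.

    # Illustrative sketch only (not from the commit). fake_backend stands in for
    # send_to_hf_inference / send_to_groq / send_to_openai, which return an
    # "Error ..." string on failure instead of raising.
    import logging
    import tempfile

    def fake_backend(prompt: str) -> str:
        if not prompt.strip():
            return "Error with backend: empty prompt"
        return f"Summary of: {prompt[:40]}"

    def run(prompt: str):
        # Mirrors the commit's contract: return (summary, download_path_or_None).
        summary = fake_backend(prompt)
        if summary.startswith("Error"):
            logging.error(summary)
            return summary, None
        # Only create the downloadable .txt file for a genuine summary.
        with tempfile.NamedTemporaryFile(delete=False, mode="w", suffix=".txt") as f:
            f.write(summary)
            return summary, f.name

    print(run("some extracted document text"))  # ('Summary of: ...', '/tmp/tmpXXXX.txt')
    print(run("   "))                           # ('Error with backend: empty prompt', None)

Returning the error string as the first output keeps the summary_output textbox populated with a readable message while the download_summary file component receives None, which is how the .click() wiring in the second hunk consumes the pair.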