hassanelmghari committed
Commit 5fc2eb2
1 Parent(s): 8362128

Update app.py

Files changed (1)
  1. app.py +32 -15
app.py CHANGED
@@ -25,11 +25,15 @@ def encode_image(image_path):
     return base64.b64encode(buffered.getvalue()).decode('utf-8')
 
 def bot_streaming(message, history, together_api_key, max_new_tokens=250, temperature=0.7):
+    if history is None:
+        history = []
     if client is None:
         try:
             initialize_client(together_api_key)
         except Exception as e:
-            yield [("Error initializing client", str(e))]
+            # Initialize history with error message
+            history.append(["Error initializing client", str(e)])
+            yield history
             return
 
     prompt = "You are a helpful AI assistant. Analyze the image provided (if any) and respond to the user's query or comment."
@@ -44,23 +48,37 @@ def bot_streaming(message, history, together_api_key, max_new_tokens=250, temperature=0.7):
     # Prepare the current message
     content = []
     user_text = ""
-    if isinstance(message, dict):
-        if 'text' in message:
-            user_text = message['text']
+
+    try:
+        if isinstance(message, dict):
+            if 'text' in message and message['text']:
+                user_text = message['text']
+                content.append({"type": "text", "text": user_text})
+            if 'files' in message and len(message['files']) > 0:
+                file_info = message['files'][0]
+                if isinstance(file_info, dict) and 'name' in file_info:
+                    image_path = file_info['name']
+                elif isinstance(file_info, str):
+                    image_path = file_info
+                else:
+                    raise ValueError("Invalid file information.")
+
+                image_base64 = encode_image(image_path)
+                content.append({"type": "image_url", "image_url": {"url": f"data:image/png;base64,{image_base64}"}})
+                user_text += "\n[User uploaded an image]"
+        else:
+            user_text = message
             content.append({"type": "text", "text": user_text})
-        if 'files' in message and len(message['files']) > 0:
-            image_path = message['files'][0]['name'] if isinstance(message['files'][0], dict) else message['files'][0]
-            image_base64 = encode_image(image_path)
-            content.append({"type": "image_url", "image_url": {"url": f"data:image/png;base64,{image_base64}"}})
-            user_text += "\n[User uploaded an image]"
-    else:
-        user_text = message
-        content.append({"type": "text", "text": user_text})
+    except Exception as e:
+        # Update history before yielding
+        history.append([user_text, f"An error occurred while processing your input: {str(e)}"])
+        yield history
+        return
 
     messages.append({"role": "user", "content": content})
 
     # Update the history with the new user message (with empty assistant response)
-    history = history + [[user_text, ""]]
+    history.append([user_text, ""])
     yield history
 
     try:
@@ -85,10 +103,9 @@ def bot_streaming(message, history, together_api_key, max_new_tokens=250, temperature=0.7):
     except Exception as e:
         if "Request Entity Too Large" in str(e):
             history[-1][1] = "The image is too large. Please try with a smaller image or compress the existing one."
-            yield history
         else:
             history[-1][1] = f"An error occurred: {str(e)}"
-            yield history
+        yield history
 
 with gr.Blocks() as demo:
     gr.Markdown("# Meta Llama-3.2-11B-Vision-Instruct (FREE)")
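
The opening context line of the first hunk is the tail of encode_image(), and the "Request Entity Too Large" branch in the last hunk suggests uploads are shrunk and re-encoded before being sent. The commit does not touch that helper, so the following is only a minimal sketch of what it plausibly looks like; the PIL-based resizing and the max_size default are assumptions, not code from this repository.

import base64
from io import BytesIO
from PIL import Image

def encode_image(image_path, max_size=(1024, 1024)):
    # Assumed body: downscale the upload so the base64 payload stays small enough
    # to avoid the "Request Entity Too Large" branch handled in bot_streaming().
    img = Image.open(image_path)
    img.thumbnail(max_size)           # in-place resize, preserves aspect ratio
    buffered = BytesIO()
    img.save(buffered, format="PNG")
    # This return line matches the context line shown at the top of the first hunk.
    return base64.b64encode(buffered.getvalue()).decode('utf-8')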
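
The body of the try: block that actually streams the model's reply is unchanged by this commit and therefore omitted from the hunks above. For orientation, here is a hedged sketch of what that section likely does; the model id and the chunk fields are assumptions based on Together's OpenAI-style chat-completions client, not code taken from this Space.

# Assumed interior of the unchanged try: block in bot_streaming().
# `client` is the Together client created by initialize_client(); the model id is a guess.
stream = client.chat.completions.create(
    model="meta-llama/Llama-Vision-Free",   # assumed free vision endpoint
    messages=messages,
    max_tokens=max_new_tokens,
    temperature=temperature,
    stream=True,
)
response = ""
for chunk in stream:
    delta = chunk.choices[0].delta.content or ""
    response += delta
    history[-1][1] = response   # grow the assistant slot appended as [user_text, ""]
    yield history

Read against this, the last hunk is a small cleanup: de-denting the single yield history below the if/else collapses the two duplicated yields into one, so whichever error message was written into history[-1][1] is pushed to the chat UI from a single place.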