Keyven committed
Commit 2f4b8e0 · 1 Parent(s): 2e721d5

update app

Files changed (1):
  1. app.py  +46 -32
app.py CHANGED
@@ -66,8 +66,16 @@ def get_chat_response(chatbot, task_history):
             history_filter.append((pre, a))
             pre = ""
     history, message = history_filter[:-1], history_filter[-1][0]
-    response, history = model.chat(tokenizer, message, history=history)
-    # ... (rest of the code remains the same)
+
+    inputs = tokenizer.encode_plus(message, return_tensors='pt')
+    outputs = model.generate(inputs['input_ids'], max_length=150, num_beams=4, length_penalty=2.0, early_stopping=True)
+    response = tokenizer.decode(outputs[0], skip_special_tokens=True)
+
+    task_history.append((message, response))
+    chatbot.append((format_text(message), format_text(response)))
+
+    return chatbot, task_history
+

 def handle_text_input(history, task_history, text):
     """Handle text input from the user."""
@@ -111,40 +119,46 @@ def handle_regeneration(chatbot, task_history):
     print("After:", task_history, chatbot)
     return get_chat_response(chatbot, task_history)

+chatbot = []
+task_history = []
+
+def main_function(text, image):
+    global chatbot, task_history
+    if text:
+        chatbot, task_history = handle_text_input(chatbot, task_history, text)
+    if image:
+        chatbot, task_history = handle_file_upload(chatbot, task_history, image)
+    chatbot, task_history = get_chat_response(chatbot, task_history)
+    formatted_response = chatbot[-1][1]  # Get the latest response from the chatbot
+    return formatted_response
+
+def clear_history_fn():
+    global chatbot, task_history
+    chatbot.clear()
+    task_history.clear()
+    return "History cleared."
+
 # Custom CSS
 css = '''
 .gradio-container {
     max-width: 800px !important;
 }
-/* ... (add more custom CSS if needed) */
 '''

-# Build and launch the UI
-with gr.Blocks(css=css) as demo:
-    gr.Markdown("# Qwen-VL-Chat Bot")
-    gr.Markdown(
-        "## Developed by Keyvan Hardani (Keyvven on [Twitter](https://twitter.com/Keyvven))\n"
-        "Special thanks to [@Artificialguybr](https://twitter.com/artificialguybr) for the inspiration from his code.\n"
-        "### Qwen-VL: A Multimodal Large Vision Language Model by Alibaba Cloud\n"
-    )
-    chatbot = gr.Chatbot(label='Qwen-VL-Chat', elem_classes="control-height", height=520)
-    query = gr.Textbox(lines=2, label='Input')
-    task_history = gr.State([])
-
-    with gr.Row():
-        upload_btn = gr.UploadButton("📁 Upload", file_types=["image"])
-        submit_btn = gr.Button("🚀 Submit")
-        regen_btn = gr.Button("🤔️ Regenerate")
-        clear_btn = gr.Button("🧹 Clear History")
-
-    gr.Markdown("### Key Features:\n- **Strong Performance**: Surpasses existing LVLMs on multiple English benchmarks including Zero-shot Captioning and VQA.\n- **Multi-lingual Support**: Supports English, Chinese, and multi-lingual conversation.\n- **High Resolution**: Utilizes 448*448 resolution for fine-grained recognition and understanding.")
-    submit_btn.click(handle_text_input, [chatbot, task_history, query], [chatbot, task_history]).then(
-        get_chat_response, [chatbot, task_history], [chatbot], show_progress=True
-    )
-    submit_btn.click(clear_input, [], [query])
-    clear_btn.click(clear_history, [task_history], [chatbot], show_progress=True)
-    regen_btn.click(handle_regeneration, [chatbot, task_history], [chatbot], show_progress=True)
-    upload_btn.upload(handle_file_upload, [chatbot, task_history, upload_btn], [chatbot, task_history], show_progress=True)
-
-# Launch the demo
-demo.launch()
+iface = gr.Interface(
+    fn=main_function,
+    inputs=[
+        gr.inputs.Textbox(lines=2, label='Input'),
+        gr.inputs.Image(type='file', label='Upload Image')
+    ],
+    outputs='text',
+    live=True,
+    layout='vertical',
+    theme='huggingface',
+    css=css
+)
+
+iface.add_button("🧹 Clear History", clear_history_fn)
+
+iface.launch(share=True)
+
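
The replacement UI above is written against Gradio's legacy gr.inputs.* namespace and 2.x-era Interface arguments (layout=, a string theme=), and Interface.add_button does not appear to be part of Gradio's public API in current releases. A rough sketch of the same single-function layout using the newer component style follows; the stub main_function, the gr.Image type, and the output component are assumptions for illustration, not what this commit ships:

# Rough sketch: one text box plus one image input feeding a single function,
# written with current-style Gradio components (gr.Textbox / gr.Image).
import gradio as gr

def main_function(text, image):
    # Stub standing in for the app's real main_function, which routes the
    # inputs through the chat pipeline and returns the latest response.
    return f"text={text!r}, image={image!r}"

css = """
.gradio-container {
    max-width: 800px !important;
}
"""

iface = gr.Interface(
    fn=main_function,
    inputs=[
        gr.Textbox(lines=2, label="Input"),
        gr.Image(type="filepath", label="Upload Image"),
    ],
    outputs=gr.Textbox(label="Response"),
    css=css,
)

if __name__ == "__main__":
    iface.launch()

Without an add_button hook, clear_history_fn would instead be wired to its own gr.Button inside a gr.Blocks layout, much like the one this commit removes; the sketch leaves that out.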
 