rishiraj committed on
Commit
df3bc75
1 Parent(s): 266f102

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -9
app.py CHANGED
@@ -145,8 +145,8 @@ import random
145
 
146
 
147
 
148
- def load_models(inp):
149
- return gr.update(label=models[inp])
150
 
151
  def format_prompt(message, history, cust_p):
152
  prompt = ""
@@ -157,7 +157,7 @@ def format_prompt(message, history, cust_p):
157
  prompt+=cust_p.replace("USER_INPUT",message)
158
  return prompt
159
 
160
- def chat_inf(system_prompt,prompt,history,memory,client_choice,seed,temp,tokens,top_p,rep_p,chat_mem,cust_p):
161
  hist_len=0
162
  if not history:
163
  history = []
@@ -214,7 +214,7 @@ def check_rand(inp,val):
214
 
215
  with gr.Blocks() as app:
216
  memory=gr.State()
217
- gr.HTML("""<center><h1 style='font-size:xx-large;'>Gemma Gemini Multimodal Chatbot</h1><br><h2>Gemini Sprint submission by Rishiraj Acharya. Uses Google's Gemini 1.0 Pro Vision multimodal model from Vertex AI with Google's Gemma 7B Instruct model from Hugging Face. Google Cloud credits are provided for this project.</h2>""")
218
  chat_b = gr.Chatbot(show_label=True, show_share_button=True, show_copy_button=True, likeable=True, layout="bubble", bubble_full_width=False)
219
  with gr.Group():
220
  with gr.Row():
@@ -230,7 +230,7 @@ with gr.Blocks() as app:
230
  with gr.Group():
231
  stop_btn=gr.Button("Stop")
232
  clear_btn=gr.Button("Clear")
233
- client_choice=gr.Dropdown(label="Models",type='index',choices=[c for c in models],value=models[0],interactive=True)
234
  with gr.Column(scale=1):
235
  with gr.Group():
236
  rand = gr.Checkbox(label="Random Seed", value=True)
@@ -242,11 +242,11 @@ with gr.Blocks() as app:
242
  chat_mem=gr.Number(label="Chat Memory", info="Number of previous chats to retain",value=4)
243
 
244
 
245
- client_choice.change(load_models,client_choice,[chat_b])
246
- app.load(load_models,client_choice,[chat_b])
247
 
248
- chat_sub=inp.submit(check_rand,[rand,seed],seed).then(chat_inf,[sys_inp,inp,chat_b,memory,client_choice,seed,temp,tokens,top_p,rep_p,chat_mem,custom_prompt],[chat_b,memory])
249
- go=btn.click(check_rand,[rand,seed],seed).then(chat_inf,[sys_inp,inp,chat_b,memory,client_choice,seed,temp,tokens,top_p,rep_p,chat_mem,custom_prompt],[chat_b,memory])
250
 
251
  stop_btn.click(None,None,None,cancels=[go,chat_sub])
252
  clear_btn.click(clear_fn,None,[inp,sys_inp,chat_b,memory])
 
145
 
146
 
147
 
148
+ # def load_models(inp):
149
+ # return gr.update(label=models[inp])
150
 
151
  def format_prompt(message, history, cust_p):
152
  prompt = ""
 
157
  prompt+=cust_p.replace("USER_INPUT",message)
158
  return prompt
159
 
160
+ def chat_inf(system_prompt,prompt,history,memory,seed,temp,tokens,top_p,rep_p,chat_mem,cust_p):
161
  hist_len=0
162
  if not history:
163
  history = []
 
214
 
215
  with gr.Blocks() as app:
216
  memory=gr.State()
217
+ gr.HTML("""<center><h1 style='font-size:xx-large;'>Gemma Gemini Multimodal Chatbot</h1><br><h3>Gemini Sprint submission by Rishiraj Acharya. Uses Google's Gemini 1.0 Pro Vision multimodal model from Vertex AI with Google's Gemma 7B Instruct model from Hugging Face. Google Cloud credits are provided for this project.</h3>""")
218
  chat_b = gr.Chatbot(show_label=True, show_share_button=True, show_copy_button=True, likeable=True, layout="bubble", bubble_full_width=False)
219
  with gr.Group():
220
  with gr.Row():
 
230
  with gr.Group():
231
  stop_btn=gr.Button("Stop")
232
  clear_btn=gr.Button("Clear")
233
+ # client_choice=gr.Dropdown(label="Models",type='index',choices=[c for c in models],value=models[0],interactive=True)
234
  with gr.Column(scale=1):
235
  with gr.Group():
236
  rand = gr.Checkbox(label="Random Seed", value=True)
 
242
  chat_mem=gr.Number(label="Chat Memory", info="Number of previous chats to retain",value=4)
243
 
244
 
245
+ # client_choice.change(load_models,client_choice,[chat_b])
246
+ # app.load(load_models,client_choice,[chat_b])
247
 
248
+ chat_sub=inp.submit(check_rand,[rand,seed],seed).then(chat_inf,[sys_inp,inp,chat_b,memory,seed,temp,tokens,top_p,rep_p,chat_mem,custom_prompt],[chat_b,memory])
249
+ go=btn.click(check_rand,[rand,seed],seed).then(chat_inf,[sys_inp,inp,chat_b,memory,seed,temp,tokens,top_p,rep_p,chat_mem,custom_prompt],[chat_b,memory])
250
 
251
  stop_btn.click(None,None,None,cancels=[go,chat_sub])
252
  clear_btn.click(clear_fn,None,[inp,sys_inp,chat_b,memory])