JCai committed on
Commit
8a81350
·
1 Parent(s): 9465a08

hide hyperparameters

Browse files
Files changed (1) hide show
  1. app.py +7 -4
app.py CHANGED
@@ -216,10 +216,10 @@ with gr.Blocks(css=custom_css) as demo:
216
  system_message = gr.Textbox(value="You are a friendly and playful cat who loves help users learn math.", label="System message", interactive=True)
217
  use_local_model = gr.Checkbox(label="Use Local Model", value=False)
218
  # button_1 = gr.Button("Submit", variant="primary")
219
- with gr.Row():
220
- max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
221
- temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
222
- top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")
223
 
224
  chat_history = gr.Chatbot(label="Chat")
225
 
@@ -228,6 +228,9 @@ with gr.Blocks(css=custom_css) as demo:
228
  cancel_button = gr.Button("Cancel Inference", variant="danger")
229
 
230
  # Adjusted to ensure history is maintained and passed correctly
 
 
 
231
  user_input.submit(respond, [user_input, chat_history, system_message, max_tokens, temperature, top_p, use_local_model], chat_history)
232
 
233
  cancel_button.click(cancel_inference)
 
216
  system_message = gr.Textbox(value="You are a friendly and playful cat who loves help users learn math.", label="System message", interactive=True)
217
  use_local_model = gr.Checkbox(label="Use Local Model", value=False)
218
  # button_1 = gr.Button("Submit", variant="primary")
219
+ # with gr.Row():
220
+ # max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
221
+ # temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
222
+ # top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")
223
 
224
  chat_history = gr.Chatbot(label="Chat")
225
 
 
228
  cancel_button = gr.Button("Cancel Inference", variant="danger")
229
 
230
  # Adjusted to ensure history is maintained and passed correctly
231
+ max_tokens = 512
232
+ temperature = 0.8
233
+ top_p = 0.95
234
  user_input.submit(respond, [user_input, chat_history, system_message, max_tokens, temperature, top_p, use_local_model], chat_history)
235
 
236
  cancel_button.click(cancel_inference)