nawhgnuj committed on
Commit 149b76e · verified
1 parent: 6836f82

Update app.py

Files changed (1): app.py (+6, −12)
app.py CHANGED
@@ -64,8 +64,6 @@ def generate_response(
     max_new_tokens: int,
     top_p: float,
     top_k: int,
-    repetition_penalty: float,
-    no_repeat_ngram_size: int,
 ):
     system_prompt = """You are a Kamala Harris chatbot. You only answer like Harris in style and tone. In every response:
 1. Maintain a composed and professional demeanor.
@@ -96,8 +94,6 @@ Crucially, Keep responses concise and impactful."""
         top_p=top_p,
         top_k=top_k,
         temperature=temperature,
-        repetition_penalty=repetition_penalty,
-        no_repeat_ngram_size=no_repeat_ngram_size,
         pad_token_id=tokenizer.pad_token_id,
         eos_token_id=tokenizer.eos_token_id,
     )
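Note: with repetition_penalty and no_repeat_ngram_size removed, generation now relies on temperature, top-p, and top-k alone. A minimal sketch of the trimmed generate() call, assuming the model, tokenizer, and tokenized inputs that app.py builds elsewhere (names outside this hunk are assumptions, not copied from the file):

# Sketch only: `model`, `tokenizer`, and `inputs` stand in for objects defined
# elsewhere in app.py; do_sample=True is assumed so that temperature/top_p/top_k
# actually affect sampling.
output_ids = model.generate(
    **inputs,
    do_sample=True,
    max_new_tokens=max_new_tokens,
    top_p=top_p,
    top_k=top_k,
    temperature=temperature,
    pad_token_id=tokenizer.pad_token_id,
    eos_token_id=tokenizer.eos_token_id,
)
response = tokenizer.decode(output_ids[0], skip_special_tokens=True)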
@@ -109,9 +105,9 @@ def add_text(history, text):
     history = history + [(text, None)]
     return history, ""
 
-def bot(history, temperature, max_new_tokens, top_p, top_k, repetition_penalty, no_repeat_ngram_size):
+def bot(history, temperature, max_new_tokens, top_p, top_k):
     user_message = history[-1][0]
-    bot_response = generate_response(user_message, history[:-1], temperature, max_new_tokens, top_p, top_k, repetition_penalty, no_repeat_ngram_size)
+    bot_response = generate_response(user_message, history[:-1], temperature, max_new_tokens, top_p, top_k)
     history[-1][1] = bot_response
     return history
 
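The bot handler keeps the same history contract: the last entry arrives with an empty reply slot and leaves with it filled in. A quick illustrative call with the reduced signature, using the new slider defaults below as placeholder values (Gradio's Chatbot component hands history back as mutable [user, bot] pairs):

history = [["What is your plan for the economy?", None]]  # shape produced by add_text
history = bot(history, 0.8, 1024, 1.0, 20)                # temperature, max_new_tokens, top_p, top_k
# history[-1] is now [user_message, bot_response]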
@@ -136,11 +132,9 @@ with gr.Blocks(css=CSS, theme=gr.themes.Default()) as demo:
 
     with gr.Accordion("Advanced Settings", open=False):
         temperature = gr.Slider(minimum=0.1, maximum=1.5, value=0.8, step=0.1, label="Temperature")
-        max_new_tokens = gr.Slider(minimum=50, maximum=500, value=256, step=1, label="Max New Tokens")
-        top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.9, step=0.1, label="Top-p")
+        max_new_tokens = gr.Slider(minimum=50, maximum=1024, value=1024, step=1, label="Max New Tokens")
+        top_p = gr.Slider(minimum=0.1, maximum=1.5, value=1.0, step=0.1, label="Top-p")
         top_k = gr.Slider(minimum=1, maximum=100, value=20, step=1, label="Top-k")
-        repetition_penalty = gr.Slider(minimum=1.0, maximum=2.0, value=1.2, step=0.1, label="Repetition Penalty")
-        no_repeat_ngram_size = gr.Slider(minimum=1, maximum=10, value=3, step=1, label="No Repeat N-gram Size")
 
     gr.Examples(
         examples=[
@@ -152,11 +146,11 @@ with gr.Blocks(css=CSS, theme=gr.themes.Default()) as demo:
     )
 
     submit.click(add_text, [chatbot, msg], [chatbot, msg], queue=False).then(
-        bot, [chatbot, temperature, max_new_tokens, top_p, top_k, repetition_penalty, no_repeat_ngram_size], chatbot
+        bot, [chatbot, temperature, max_new_tokens, top_p, top_k], chatbot
     )
     clear.click(lambda: [], outputs=[chatbot], queue=False)
     msg.submit(add_text, [chatbot, msg], [chatbot, msg], queue=False).then(
-        bot, [chatbot, temperature, max_new_tokens, top_p, top_k, repetition_penalty, no_repeat_ngram_size], chatbot
+        bot, [chatbot, temperature, max_new_tokens, top_p, top_k], chatbot
     )
 
 if __name__ == "__main__":
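The .click()/.submit() chains above reference components created earlier in the gr.Blocks context, which this diff does not touch. A minimal sketch of that surrounding layout; the component constructors and the final launch call are assumptions, not copied from app.py:

import gradio as gr

CSS = ""  # placeholder for the app's stylesheet

with gr.Blocks(css=CSS, theme=gr.themes.Default()) as demo:
    chatbot = gr.Chatbot()            # holds the [user, bot] history consumed by add_text/bot
    msg = gr.Textbox(label="Message")
    with gr.Row():
        submit = gr.Button("Submit")
        clear = gr.Button("Clear")
    # The Advanced Settings accordion, gr.Examples block, and the event wiring
    # from the hunks above complete the UI.

if __name__ == "__main__":
    demo.launch()  # assumed entry point; the diff only shows the guard line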
 