carlosrosash commited on
Commit
ea1df65
1 Parent(s): 68cc827

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -59
app.py CHANGED
@@ -13,39 +13,6 @@ from gradio.layouts import Column, Row
13
  from gradio.data_classes import InterfaceTypes
14
  import inspect
15
 
16
- ########################################################################################################################################################################
17
- #### Create the custom Interface for the buttoms
18
-
19
class CustomInterface(gr.Interface):
    """gr.Interface subclass that rebuilds the input column with French button labels.

    Only the widget labels differ from the stock implementation; the returned
    tuple keeps the exact shape the base class expects.
    """

    def render_input_column(self):
        """Render the input panel and return the base-class 7-tuple.

        Returns:
            (submit_btn, clear_btn, stop_btn, flag_btns,
             input_component_column, duplicate_btn, None)
            Unused slots stay None.
        """
        # NOTE(review): nesting below was reconstructed from a flattened diff —
        # confirm against gradio's Interface.render_input_column for this version.
        submit = clear = stop = flag_buttons = duplicate = None

        with Column(variant="panel"):
            inputs_col = Column()
            with inputs_col:
                for comp in self.input_components:
                    comp.render()
            with Row():
                if self.interface_type in (InterfaceTypes.STANDARD, InterfaceTypes.INPUT_ONLY):
                    clear = ClearButton(value="Effacer ❌")
                    if not self.live:
                        # Custom French label for the submit action.
                        submit = Button("Soumettez votre requête ✅", variant="primary")
                        if inspect.isgeneratorfunction(self.fn) or inspect.isasyncgenfunction(self.fn):
                            # Streaming fn: stop button exists but starts hidden.
                            stop = Button("Stop", variant="stop", visible=False)
                elif self.interface_type == InterfaceTypes.UNIFIED:
                    clear = ClearButton(value="Effacer ❌")
                    submit = Button("Soumettez votre requête ✅", variant="primary")
                    if (inspect.isgeneratorfunction(self.fn) or inspect.isasyncgenfunction(self.fn)) and not self.live:
                        stop = Button("Stop", variant="stop")
                if self.allow_flagging == "manual":
                    flag_buttons = self.render_flag_btns()
                elif self.allow_flagging == "auto":
                    # Auto-flagging piggybacks on the submit button.
                    flag_buttons = [submit]

        return submit, clear, stop, flag_buttons, inputs_col, duplicate, None
47
-
48
- ########################################################################################################################################################################
49
  # Define the device
50
  device = "cuda" if torch.cuda.is_available() else "cpu"
51
  #Define variables
@@ -97,9 +64,9 @@ class MistralChatBot:
97
def __init__(self, system_prompt="Le dialogue suivant est une conversation"):
    """Store the default system prompt for this chatbot instance.

    Args:
        system_prompt: fallback system prompt text (French default).
    """
    # NOTE(review): the predict methods visible in this diff look prompts up in
    # styles_prompts_dict instead of reading this attribute — confirm it is used.
    self.system_prompt = system_prompt
99
 
100
- def predict(self, user_message, selected_style, temperature):
101
 
102
- system_prompt = styles_prompts_dict[selected_style]
103
 
104
  sampling_params = SamplingParams(temperature=temperature, top_p=.95, max_tokens=500, presence_penalty = 2)
105
  detailed_prompt = "<|im_start|>system\n" + system_prompt + "<|im_end|>\n<|im_start|>user"""
@@ -109,17 +76,14 @@ class MistralChatBot:
109
  generated_text = outputs[0].outputs[0].text
110
  return generated_text
111
 
112
def predict_simple(self, user_message, selected_style):
    """Format *user_message* as a ChatML prompt for the chosen style.

    No model call is made; only the prompt string is returned.
    """
    style_prompt = styles_prompts_dict[selected_style]
    segments = [
        "<|im_start|>system\n", style_prompt, "<|im_end|>\n",
        "<|im_start|>user\n", user_message, "<|im_end|>\n",
        "<|im_start|>assistant\n",
    ]
    return "".join(segments)
116
 
117
  # Create the Mistral chatbot instance
118
  mistral_bot = MistralChatBot()
119
 
120
- ########################################################################################################################################################################
121
-
122
- # Define the Gradio interface
123
  title = """
124
  <h1 style='text-align: center; font-weight: bold;'> Bellay, le premier LLM de la littérature française</h1>
125
  """
@@ -213,17 +177,6 @@ explanations = {
213
  def update_explanation(choice):
214
  return explanations.get(choice, "No explanation available.")
215
 
216
# Adjusted predict function to handle None temperature
def predict(user_message, selected_style, temperature=0.7):
    """Generate a reply for *user_message* with the vLLM engine.

    Args:
        user_message: user's text, inserted into the ChatML user turn.
        selected_style: key into the module-level styles_prompts_dict.
        temperature: sampling temperature; None falls back to 0.7.

    Returns:
        The generated assistant text (str).

    Raises:
        KeyError: if selected_style is not in styles_prompts_dict.
    """
    # Bug fix: the function claimed to "handle None temperature" but an
    # explicit temperature=None used to reach SamplingParams unchanged.
    if temperature is None:
        temperature = 0.7
    system_prompt = styles_prompts_dict[selected_style]
    sampling_params = SamplingParams(
        temperature=temperature, top_p=.95, max_tokens=500, presence_penalty=2
    )
    detailed_prompt = (
        "<|im_start|>system\n" + system_prompt + "<|im_end|>\n<|im_start|>user"
        + "\n" + user_message + "<|im_end|>\n<|im_start|>assistant\n"
    )
    outputs = llm.generate([detailed_prompt], sampling_params, use_tqdm=False)
    return outputs[0].outputs[0].text
226
-
227
  # Define the Gradio interface using Blocks
228
  with gr.Blocks() as demo:
229
  gr.Markdown(title)
@@ -238,17 +191,15 @@ with gr.Blocks() as demo:
238
  with gr.Column():
239
  user_input = gr.Textbox(label="Question or your instruction", type="text", lines=5, placeholder="Start typing here")
240
  temperature_slider = gr.Slider(label="Temperature", value=0.7, minimum=0.05, maximum=1.0, step=0.05, interactive=True)
 
 
 
241
  with gr.Column():
242
  output_text = gr.Textbox(label="Response", type="text", lines=10)
243
 
244
- # Define the buttons
245
- with gr.Row():
246
- submit_button = gr.Button("Submit")
247
- clear_button = gr.Button("Clear")
248
-
249
  # Function to handle the button click for submit
250
def on_submit(user_message, selected_style, temperature):
    """Submit-click handler: relay the form values to the module-level predict()."""
    generated = predict(user_message, selected_style, temperature)
    return generated
252
 
253
  # Function to handle the button click for clear
254
  def on_clear():
@@ -261,4 +212,4 @@ with gr.Blocks() as demo:
261
  submit_button.click(on_submit, inputs=[user_input, dropdown, temperature_slider], outputs=output_text)
262
  clear_button.click(on_clear)
263
 
264
- demo.launch()
 
13
  from gradio.data_classes import InterfaceTypes
14
  import inspect
15
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
16
  # Define the device
17
  device = "cuda" if torch.cuda.is_available() else "cpu"
18
  #Define variables
 
64
  def __init__(self, system_prompt="Le dialogue suivant est une conversation"):
65
  self.system_prompt = system_prompt
66
 
67
+ def predict(self, user_message, style, temperature):
68
 
69
+ system_prompt = styles_prompts_dict[style]
70
 
71
  sampling_params = SamplingParams(temperature=temperature, top_p=.95, max_tokens=500, presence_penalty = 2)
72
  detailed_prompt = "<|im_start|>system\n" + system_prompt + "<|im_end|>\n<|im_start|>user"""
 
76
  generated_text = outputs[0].outputs[0].text
77
  return generated_text
78
 
79
def predict_simple(self, user_message, style):
    """Build the ChatML conversation string for *style*; performs no generation."""
    system_prompt = styles_prompts_dict[style]
    system_turn = "<|im_start|>system\n" + system_prompt + "<|im_end|>\n"
    user_turn = "<|im_start|>user\n" + user_message + "<|im_end|>\n"
    return system_turn + user_turn + "<|im_start|>assistant\n"
83
 
84
  # Create the Mistral chatbot instance
85
  mistral_bot = MistralChatBot()
86
 
 
 
 
87
  title = """
88
  <h1 style='text-align: center; font-weight: bold;'> Bellay, le premier LLM de la littérature française</h1>
89
  """
 
177
  def update_explanation(choice):
178
  return explanations.get(choice, "No explanation available.")
179
 
 
 
 
 
 
 
 
 
 
 
 
180
  # Define the Gradio interface using Blocks
181
  with gr.Blocks() as demo:
182
  gr.Markdown(title)
 
191
  with gr.Column():
192
  user_input = gr.Textbox(label="Question or your instruction", type="text", lines=5, placeholder="Start typing here")
193
  temperature_slider = gr.Slider(label="Temperature", value=0.7, minimum=0.05, maximum=1.0, step=0.05, interactive=True)
194
+ with gr.Column():
195
+ clear_button = gr.Button("Clear")
196
+ submit_button = gr.Button("Submit")
197
  with gr.Column():
198
  output_text = gr.Textbox(label="Response", type="text", lines=10)
199
 
 
 
 
 
 
200
  # Function to handle the button click for submit
201
def on_submit(user_message, style, temperature):
    """Submit-click handler: delegate generation to the shared MistralChatBot."""
    reply = mistral_bot.predict(user_message, style, temperature)
    return reply
203
 
204
  # Function to handle the button click for clear
205
  def on_clear():
 
212
  submit_button.click(on_submit, inputs=[user_input, dropdown, temperature_slider], outputs=output_text)
213
  clear_button.click(on_clear)
214
 
215
+ demo.launch()