Pclanglais committed on
Commit
b31117e
1 Parent(s): 9c111b6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -66,7 +66,7 @@ class MistralChatBot:
66
 
67
  sampling_params = SamplingParams(temperature=0.7, top_p=.95, max_tokens=500, presence_penalty = 2)
68
  detailed_prompt = "<|im_start|>system\n" + system_prompt + "<|im_end|>\n<|im_start|>user"""
69
- detailed_prompt = detailed_prompt + "\n" + user_input + "<|im_end|>\n<|im_start|>assistant\n"
70
  prompts = [detailed_prompt]
71
  outputs = llm.generate(prompts, sampling_params, use_tqdm = False)
72
  generated_text = outputs[0].outputs[0].text
@@ -77,7 +77,7 @@ class MistralChatBot:
77
  conversation = "<|im_start|>system\n" + system_prompt + "<|im_end|>\n<|im_start|>user\n" + user_message + "<|im_end|>\n<|im_start|>assistant\n"
78
  return conversation
79
 
80
- # Create the Falcon chatbot instance
81
  mistral_bot = MistralChatBot()
82
 
83
  # Define the Gradio interface
 
66
 
67
  sampling_params = SamplingParams(temperature=0.7, top_p=.95, max_tokens=500, presence_penalty = 2)
68
  detailed_prompt = "<|im_start|>system\n" + system_prompt + "<|im_end|>\n<|im_start|>user"""
69
+ detailed_prompt = detailed_prompt + "\n" + user_message + "<|im_end|>\n<|im_start|>assistant\n"
70
  prompts = [detailed_prompt]
71
  outputs = llm.generate(prompts, sampling_params, use_tqdm = False)
72
  generated_text = outputs[0].outputs[0].text
 
77
  conversation = "<|im_start|>system\n" + system_prompt + "<|im_end|>\n<|im_start|>user\n" + user_message + "<|im_end|>\n<|im_start|>assistant\n"
78
  return conversation
79
 
80
+ # Create the Mistral chatbot instance
81
  mistral_bot = MistralChatBot()
82
 
83
  # Define the Gradio interface