awacke1 committed on
Commit
874645e
β€’
1 Parent(s): 6f69474

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -6
app.py CHANGED
@@ -83,29 +83,29 @@ additional_inputs=[
83
 
84
  css = """
85
  #mkd {
86
- height: 500px;
87
  overflow: auto;
88
  border: 1px solid #ccc;
89
  }
90
  """
91
 
92
  with gr.Blocks(css=css) as demo:
93
- gr.HTML("""πŸ€– Mistral 7B Instruct πŸ€–
94
  In this demo, you can chat with <a href='https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.1'>Mistral-7B-Instruct</a> model. πŸ’¬
95
- πŸ›  Model Features πŸ› 
 
96
  <ul>
97
  <li>πŸͺŸ Sliding Window Attention with 128K tokens span</li>
98
  <li>πŸš€ GQA for faster inference</li>
99
  <li>πŸ“ Byte-fallback BPE tokenizer</li>
100
  </ul>
101
- πŸ“œ License πŸ“œ Released under Apache 2.0 License
102
- πŸ“¦ Usage πŸ“¦
103
  <ul>
104
  <li>πŸ“š Available on Huggingface Hub</li>
105
  <li>🐍 Python code snippets for easy setup</li>
106
  <li>πŸ“ˆ Expected speedups with Flash Attention 2</li>
107
  </ul>
108
- Learn more about the model <a href='https://huggingface.co/docs/transformers/main/model_doc/mistral'>here</a>. πŸ“š
109
  """)
110
 
111
 
 
83
 
84
  css = """
85
  #mkd {
86
+ height: 300px;
87
  overflow: auto;
88
  border: 1px solid #ccc;
89
  }
90
  """
91
 
92
  with gr.Blocks(css=css) as demo:
93
+ gr.HTML("""<h2>πŸ€– Mistral Chat - Gradio πŸ€–</h2>
94
  In this demo, you can chat with <a href='https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.1'>Mistral-7B-Instruct</a> model. πŸ’¬
95
+ Learn more about the model <a href='https://huggingface.co/docs/transformers/main/model_doc/mistral'>here</a>. πŸ“š
96
+ <h2>πŸ›  Model Features πŸ› </h2>
97
  <ul>
98
  <li>πŸͺŸ Sliding Window Attention with 128K tokens span</li>
99
  <li>πŸš€ GQA for faster inference</li>
100
  <li>πŸ“ Byte-fallback BPE tokenizer</li>
101
  </ul>
102
+ <h3>πŸ“œ License πŸ“œ Released under Apache 2.0 License</h3>
103
+ <h3>πŸ“¦ Usage πŸ“¦</h3>
104
  <ul>
105
  <li>πŸ“š Available on Huggingface Hub</li>
106
  <li>🐍 Python code snippets for easy setup</li>
107
  <li>πŸ“ˆ Expected speedups with Flash Attention 2</li>
108
  </ul>
 
109
  """)
110
 
111