Update app.py
app.py CHANGED
@@ -4,12 +4,12 @@ from st_audiorec import st_audiorec
 
 
 # Constants
-TITLE = "Llama2
+TITLE = "Llama2 7B Chatbot"
 DESCRIPTION = """
-This Space demonstrates model [Llama-2-
+This Space demonstrates model [Llama-2-7b-chat-hf](https://huggingface.co/meta-llama/Llama-2-7b-chat-hf) by Meta, a Llama 2 model with 7B parameters fine-tuned for chat instructions.
 | Model | Llama2 | Llama2-hf | Llama2-chat | Llama2-chat-hf |
 |---|---|---|---|---|
-|
+| 7B | [Link](https://huggingface.co/meta-llama/Llama-2-7b) | [Link](https://huggingface.co/meta-llama/Llama-2-7b-hf) | [Link](https://huggingface.co/meta-llama/Llama-2-7b-chat) | [Link](https://huggingface.co/meta-llama/Llama-2-7b-chat-hf) |
 
 ---
 """
@@ -38,7 +38,7 @@ def transcribe(wav_path):
 # Prediction function
 def predict(message, system_prompt='', temperature=0.7, max_new_tokens=4096,Topp=0.5,Repetitionpenalty=1.2):
     with st.status("Starting client"):
-        client = Client("https://
+        client = Client("https://huggingface-projects-llama-2-7b-chat.hf.space/--replicas/68mtz/")
         st.write("Requesting client")
     with st.status("Requesting LLama-2"):
         st.write("Requesting API")
@@ -49,7 +49,7 @@ def predict(message, system_prompt='', temperature=0.7, max_new_tokens=4096,Topp
             max_new_tokens,  # int | float (numeric value between 0 and 4096)
             Topp,  # int | float (numeric value between 0.0 and 1)
             Repetitionpenalty,  # int | float (numeric value between 1.0 and 2.0)
-            api_name="/
+            api_name="/chat"
         )
         st.write("Done")
     return response
@@ -70,7 +70,7 @@ for message in st.session_state.messages:
     with st.chat_message(message["role"], avatar=("🧑💻" if message["role"] == 'human' else '🦙')):
         st.markdown(message["content"])
 
-textinput = st.chat_input("Ask LLama-2-
+textinput = st.chat_input("Ask LLama-2-7b anything...")
 wav_audio_data = st_audiorec()
 
 if wav_audio_data != None:
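For reference, the updated `predict` drives the hosted Space through a Gradio client. The sketch below reproduces the call pattern this commit points at; it is not the committed app.py. Assumptions: `Client` comes from `gradio_client` (the import is outside the shown hunks), the arguments elided between the hunks pass `message`, `system_prompt`, and `temperature` positionally before `max_new_tokens`, and the hard-coded replica URL copied from the diff may no longer resolve.

```python
# Minimal sketch of the client.predict(...) pattern suggested by the diff,
# under the assumptions stated in the text above.
from gradio_client import Client  # assumed import; not shown in the diff

# Replica URL taken verbatim from the commit; such URLs are ephemeral.
client = Client("https://huggingface-projects-llama-2-7b-chat.hf.space/--replicas/68mtz/")

response = client.predict(
    "What is Llama 2?",  # message (assumed first positional argument)
    "",                  # system_prompt (assumed)
    0.7,                 # temperature (assumed; 0.0 to 1.0)
    4096,                # max_new_tokens (0 to 4096, per the diff's comment)
    0.5,                 # Topp (0.0 to 1.0)
    1.2,                 # Repetitionpenalty (1.0 to 2.0)
    api_name="/chat",    # endpoint name set in this commit
)
print(response)
```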
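The last hunk only swaps the chat-input placeholder, but it sits next to the audio path: `st_audiorec()` plus the `transcribe(wav_path)` helper named in the second hunk header. The handler body is outside the shown hunks, so the following is one plausible wiring rather than the author's code; it assumes the recording is written to a temporary WAV file, transcribed, and then routed through `predict` like a typed message.

```python
# Hypothetical sketch of the input handling around the shown lines; the real
# branch bodies are not part of this diff. transcribe() and predict() are the
# functions defined earlier in app.py.
import tempfile

import streamlit as st
from st_audiorec import st_audiorec

textinput = st.chat_input("Ask LLama-2-7b anything...")
wav_audio_data = st_audiorec()

if wav_audio_data is not None:
    # Assumed flow: persist the recording, transcribe it, and treat the text
    # as the user's message.
    with tempfile.NamedTemporaryFile(suffix=".wav", delete=False) as f:
        f.write(wav_audio_data)
        wav_path = f.name
    textinput = transcribe(wav_path)

if textinput:
    # Role names follow the diff's 'human' check; the assistant role is assumed.
    st.session_state.messages.append({"role": "human", "content": textinput})
    response = predict(textinput)
    st.session_state.messages.append({"role": "assistant", "content": response})
```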