Update app.py
app.py CHANGED
@@ -1,14 +1,14 @@
 import gradio as gr
-from
+from huggingface_hub import InferenceClient
 import os
 
-client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1", token=os.getenv("
+client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1", token=os.getenv("HF_TOKEN"))
 
 def respond(
     message,
     history: list[tuple[str, str]],
-
-
+    system_message,
+    max_tokens,
     temperature,
     top_p,
 ):
@@ -50,12 +50,7 @@ demo = gr.ChatInterface(
             label="Top-p (nucleus sampling)",
         ),
     ],
-    examples=[
-        ["한글로 답변할것", "", "", 512, 0.7, 0.95],
-        ["계속 이어서 작성하라", "", "", 512, 0.7, 0.95],
-        ["재밌는 이야기 해줘", "", "", 512, 0.7, 0.95],
-    ],
 )
 
-if
+if __name__ == "__main__":
     demo.launch()
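
The diff only shows the head (lines 1-14) and tail (lines 50-56) of app.py. Judging by the respond() signature and the slider labels, the file appears to follow the stock Gradio ChatInterface chatbot template, so the hidden middle most likely builds a chat-completion request from the history and streams the reply. Below is a minimal sketch of the whole file under that assumption; the respond() body, the default system message, and the slider ranges are taken from the standard template, not from this commit.

import os

import gradio as gr
from huggingface_hub import InferenceClient

# Only the imports, the respond() signature, and the ChatInterface tail are
# confirmed by the diff; the rest is a hypothetical reconstruction.
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1", token=os.getenv("HF_TOKEN"))


def respond(message, history: list[tuple[str, str]], system_message, max_tokens, temperature, top_p):
    # Rebuild the conversation in chat-completion message format.
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    # Stream tokens back to the UI as they arrive.
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        response += chunk.choices[0].delta.content or ""
        yield response


demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="You are a helpful assistant.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
    ],
)

if __name__ == "__main__":
    demo.launch()

Note that os.getenv("HF_TOKEN") only returns a value if HF_TOKEN is configured as a secret in the Space settings; otherwise it returns None and the client makes unauthenticated calls to the Inference API.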