Update app.py
app.py CHANGED
@@ -1,2 +1,77 @@
+# -*- coding: utf-8 -*-
+
+import gradio as gr
+from huggingface_hub import InferenceClient
 import os
-
+import requests
+
+# Set up the inference API client
+hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN"))
+
+def respond(
+    message,
+    history: list[tuple[str, str]],
+    system_message="",
+    max_tokens=7860,
+    temperature=0.8,
+    top_p=0.9,
+):
+    system_prefix = """
+[System prompt content...]
+"""
+
+    messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
+    for val in history:
+        if val[0]:
+            messages.append({"role": "user", "content": val[0]})
+        if val[1]:
+            messages.append({"role": "assistant", "content": val[1]})
+    messages.append({"role": "user", "content": message})
+
+    response = ""
+    try:
+        for chunk in hf_client.chat_completion(
+            messages,
+            max_tokens=max_tokens,
+            stream=True,
+            temperature=temperature,
+            top_p=top_p,
+        ):
+            token = chunk.choices[0].delta.content
+            if token is not None:
+                response += token
+            yield response  # yield the cumulative response so far
+    except Exception as e:
+        yield f"Error: {str(e)}"
+
+# Gradio interface setup
+interface = gr.ChatInterface(
+    respond,
+    additional_inputs=[
+        gr.Textbox(label="System Message", value="Write(output) in Korean."),
+        gr.Slider(minimum=1, maximum=8000, value=7000, label="Max Tokens"),
+        gr.Slider(minimum=0, maximum=1, value=0.7, label="Temperature"),
+        gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P"),
+    ],
+    examples=[
+        ["Suggest 10 interesting premises for a fantasy novel"],
+        ["Continue the story from where it left off"],
+        ["Translate into English"],
+        ["Explain the magic system in more detail"],
+        ["Describe the battle scene more dramatically"],
+        ["Add a new fantasy race"],
+        ["Explain the ancient prophecy in more detail"],
+        ["Add a description of the protagonist's inner state"],
+    ],
+    title="Fantasy Novel AI Generation",
+    cache_examples=False,
+    theme="Yntec/HaleyCH_Theme_Orange"
+)
+
+# Run the application
+if __name__ == "__main__":
+    interface.launch(
+        server_name="0.0.0.0",  # accessible from any IP
+        server_port=7860,       # fixed port
+        share=True              # create a public share link
+    )
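For reference, a minimal sketch of how the respond() generator can be driven outside Gradio; each yield is the cumulative response text so far, not a delta. This snippet is not part of the commit, and it assumes the file above is saved as app.py and that HF_TOKEN is set in the environment (the example history turns are hypothetical):

    from app import respond  # importing app.py builds the client and interface but does not launch them

    # Hypothetical prior conversation turns, matching the list[tuple[str, str]] signature
    history = [("Suggest a fantasy premise", "A kingdom where magic is taxed by weight...")]
    final = ""
    for partial in respond("Continue the story", history, system_message="Write(output) in Korean.", max_tokens=512):
        final = partial  # each value is the full response accumulated so far
    print(final)

gr.ChatInterface consumes the generator the same way, rendering each yielded string as the current state of the assistant's reply; if the endpoint is unreachable, the try/except in respond() makes the last value an "Error: ..." string instead of raising.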