fantaxy committed
Commit 03ceb55 · verified · 1 Parent(s): af12ef6

Update app.py

Files changed (1)
  1. app.py +76 -1
app.py CHANGED
@@ -1,2 +1,77 @@
+ # -*- coding: utf-8 -*-
+
+ import gradio as gr
+ from huggingface_hub import InferenceClient
  import os
- exec(os.environ.get('APP'))
+ import requests
+
+ # Inference API client setup
+ hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN"))
+
+ def respond(
+     message,
+     history: list[tuple[str, str]],
+     system_message="",
+     max_tokens=7860,
+     temperature=0.8,
+     top_p=0.9,
+ ):
+     system_prefix = """
+     [System prompt content...]
+     """
+
+     messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
+     for val in history:
+         if val[0]:
+             messages.append({"role": "user", "content": val[0]})
+         if val[1]:
+             messages.append({"role": "assistant", "content": val[1]})
+     messages.append({"role": "user", "content": message})
+
+     response = ""
+     try:
+         # Stream tokens and yield the accumulated response
+         for chunk in hf_client.chat_completion(
+             messages,
+             max_tokens=max_tokens,
+             stream=True,
+             temperature=temperature,
+             top_p=top_p,
+         ):
+             token = chunk.choices[0].delta.content
+             if token is not None:
+                 response += token
+                 yield response
+     except Exception as e:
+         yield f"Error: {str(e)}"
+
+ # Gradio interface setup
+ interface = gr.ChatInterface(
+     respond,
+     additional_inputs=[
+         gr.Textbox(label="System Message", value="Write(output) in 한국어."),
+         gr.Slider(minimum=1, maximum=8000, value=7000, label="Max Tokens"),
+         gr.Slider(minimum=0, maximum=1, value=0.7, label="Temperature"),
+         gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P"),
+     ],
+     examples=[
+         ["판타지 소설의 흥미로운 소재 10가지를 제시하라"],  # Suggest 10 interesting premises for a fantasy novel
+         ["계속 이어서 작성하라"],  # Continue writing from where you left off
+         ["Translate into English"],
+         ["마법 시스템에 대해 더 자세히 설명하라"],  # Explain the magic system in more detail
+         ["전투 장면을 더 극적으로 묘사하라"],  # Describe the battle scene more dramatically
+         ["새로운 판타지 종족을 추가하라"],  # Add a new fantasy race
+         ["고대 예언에 대해 더 자세히 설명하라"],  # Explain the ancient prophecy in more detail
+         ["주인공의 내면 묘사를 추가하라"],  # Add more of the protagonist's inner life
+     ],
+     title="Fantasy Novel AI Generation",
+     cache_examples=False,
+     theme="Yntec/HaleyCH_Theme_Orange"
+ )
+
+ # Run the application
+ if __name__ == "__main__":
+     interface.launch(
+         server_name="0.0.0.0",  # accessible from any IP
+         server_port=7860,       # fixed port
+         share=True              # create a public share link
+     )