Spaces: Runtime error
Commit 9b0d427
Parent(s): 46e83b2

app.py CHANGED
@@ -7,6 +7,12 @@ from io import BytesIO
 # Initialize the client
 client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
 
+# Default settings
+default_system_message = "You are a friendly Chatbot meant to assist users in managing social media posts ensuring they meet community guidelines"
+default_max_tokens = 512
+default_temperature = 0.7
+default_top_p = 0.95
+
 # Define the function to respond to user inputs
 def respond(message, history, system_message, max_tokens, temperature, top_p):
     messages = [{"role": "system", "content": system_message}]
@@ -29,7 +35,7 @@ def respond(message, history, system_message, max_tokens, temperature, top_p):
     return response.choices[0].message['content']
 
 # Define the function to generate posts
-def generate_post(prompt, max_tokens, temperature, top_p):
+def generate_post(prompt, system_message, max_tokens, temperature, top_p):
     response = client.chat_completion(
         [{"role": "user", "content": prompt}],
         max_tokens=max_tokens,
@@ -83,34 +89,14 @@ with demo:
     with gr.Column():
         chat_interface = gr.ChatInterface(
             respond,
-            additional_inputs=[
-                gr.Textbox(value="You are a friendly Chatbot meant to assist users in managing social media posts ensuring they meet community guidelines", label="System message", visible=False),
-                gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens", visible=False),
-                gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature", visible=False),
-                gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)", visible=False),
-            ],
+            additional_inputs=[],
         )
-        advanced_button = gr.Button("Show Advanced Settings")
-        advanced_settings = gr.Column(visible=False)
-        with advanced_settings:
-            chat_interface.additional_inputs[0].visible = True
-            chat_interface.additional_inputs[1].visible = True
-            chat_interface.additional_inputs[2].visible = True
-            chat_interface.additional_inputs[3].visible = True
-
-        def toggle_advanced_settings():
-            advanced_settings.visible = not advanced_settings.visible
-
-        advanced_button.click(toggle_advanced_settings, [], advanced_settings)
 
     with gr.TabItem("Generate Post"):
         post_prompt = gr.Textbox(label="Post Prompt")
-        max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
-        temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
-        top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")
         generate_button = gr.Button("Generate Post")
         generated_post = gr.Textbox(label="Generated Post")
-        generate_button.click(generate_post, [post_prompt,
+        generate_button.click(generate_post, [post_prompt, default_system_message, default_max_tokens, default_temperature, default_top_p], generated_post)
 
     with gr.TabItem("Moderate Post"):
         post_content = gr.Textbox(label="Post Content")
@@ -129,6 +115,17 @@ with demo:
         moderate_image_button = gr.Button("Moderate Image")
        image_moderation_result = gr.Textbox(label="Image Moderation Result")
         moderate_image_button.click(moderate_image, uploaded_image, image_moderation_result)
+
+    with gr.TabItem("Settings"):
+        system_message = gr.Textbox(value=default_system_message, label="System message")
+        max_tokens = gr.Slider(minimum=1, maximum=2048, value=default_max_tokens, step=1, label="Max new tokens")
+        temperature = gr.Slider(minimum=0.1, maximum=4.0, value=default_temperature, step=0.1, label="Temperature")
+        top_p = gr.Slider(minimum=0.1, maximum=1.0, value=default_top_p, step=0.05, label="Top-p (nucleus sampling)")
+
+        settings_button = gr.Button("Save Settings")
+        settings_button.click(lambda sm, mt, temp, tp: (sm, mt, temp, tp),
+                              inputs=[system_message, max_tokens, temperature, top_p],
+                              outputs=[system_message, max_tokens, temperature, top_p])
 
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch()
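
For reference, a minimal sketch (not part of this commit) of one way the new defaults could be routed into generate_post. Gradio's Button.click() expects components in its inputs list, so plain Python values such as default_max_tokens would typically be wrapped in gr.State first; the Blocks layout and the stub generate_post below are simplified, illustrative stand-ins for the definitions in app.py.

import gradio as gr

# Hypothetical stand-in for the generate_post defined in app.py.
def generate_post(prompt, system_message, max_tokens, temperature, top_p):
    return f"[{system_message}] {prompt} (max_tokens={max_tokens}, T={temperature}, top_p={top_p})"

default_system_message = "You are a friendly Chatbot meant to assist users in managing social media posts ensuring they meet community guidelines"
default_max_tokens = 512
default_temperature = 0.7
default_top_p = 0.95

with gr.Blocks() as demo:
    post_prompt = gr.Textbox(label="Post Prompt")
    generate_button = gr.Button("Generate Post")
    generated_post = gr.Textbox(label="Generated Post")

    # Wrap the plain default values in gr.State so they are valid .click() inputs.
    sm = gr.State(default_system_message)
    mt = gr.State(default_max_tokens)
    temp = gr.State(default_temperature)
    tp = gr.State(default_top_p)

    generate_button.click(
        generate_post,
        inputs=[post_prompt, sm, mt, temp, tp],
        outputs=generated_post,
    )

if __name__ == "__main__":
    demo.launch()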