freddyaboulton HF staff committed on
Commit
2e74f63
1 Parent(s): b5bfb9f
Files changed (2) hide show
  1. app.py +72 -0
  2. requirements.txt +1 -0
app.py ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import gradio as gr
3
+ from text_generation import Client
4
+
5
# Configuration read from the environment (set as Space secrets).
HF_TOKEN = os.getenv("HF_TOKEN")
INFERENCE_ENDPOINT = os.getenv("INFERENCE_ENDPOINT")


# Speaker labels used to delimit turns inside the prompt.
USER_NAME = "User"
BOT_NAME = "Falcon"

# System preamble prepended to every conversation.
DEFAULT_INSTRUCTIONS = """The following is a conversation between a highly knowledgeable and intelligent AI assistant, called Falcon, and a human user, called User. In the following interactions, User and Falcon will converse in natural language, and Falcon will answer User's questions. Falcon was built to be respectful, polite and inclusive. Falcon was built by the Technology Innovation Institute in Abu Dhabi. Falcon will never decline to answer a question, and always attempts to give an answer that User would be satisfied with. It knows a lot, and always tells the truth. The conversation begins.
"""

# Chat command recognised by the bot front end.
RETRY_COMMAND = "/retry"

# Generation stops when the model starts a new user turn.
STOP_STR = "\n" + USER_NAME + ":"
# Tokens that hint a stop sequence may be starting (unused here; kept for callers).
STOP_SUSPECT_LIST = [":", "\n", "User"]

# Only build a client when an endpoint URL is configured; otherwise stay None.
client = (
    Client(INFERENCE_ENDPOINT, headers={"Authorization": f"Bearer {HF_TOKEN}"})
    if INFERENCE_ENDPOINT
    else None
)
20
+
21
+
22
+
23
def format_chat_prompt(message: str, chat_history, instructions: str) -> str:
    """Assemble the full LLM prompt: instructions, prior turns, then the new message.

    The prompt ends with an open "Falcon:" tag so the model continues as the bot.
    """
    # Same cleanup order as before: strip spaces first, then newlines.
    header = instructions.strip(" ").strip("\n")
    past_turns = [
        f"\n{USER_NAME}: {user_text}\n{BOT_NAME}: {bot_text}"
        for user_text, bot_text in chat_history
    ]
    return "".join([header, *past_turns, f"\n{USER_NAME}: {message}\n{BOT_NAME}:"])
31
+
32
+
33
+
34
+
35
+
36
def run_chat(message: str, chat_history):
    """Send one user message to the inference endpoint and extend the history.

    Parameters:
        message: the new user utterance.
        chat_history: list of [user_text, bot_text] turns so far.

    Returns:
        (bot_reply_text, updated_chat_history).

    Raises:
        RuntimeError: if no inference client was configured (INFERENCE_ENDPOINT unset).
    """
    # Fail loudly and clearly instead of an opaque AttributeError on None.
    if client is None:
        raise RuntimeError(
            "INFERENCE_ENDPOINT is not configured; cannot reach the text-generation server."
        )
    prompt = format_chat_prompt(message, chat_history, DEFAULT_INSTRUCTIONS)
    # Copy-and-append so the caller's list is not mutated in place.
    chat_history = chat_history + [[message, ""]]
    response = client.generate(
        prompt,
        do_sample=True,
        max_new_tokens=1024,
        stop_sequences=[STOP_STR, "<|endoftext|>"],
        temperature=0.8,
        top_p=0.9,
    )
    # BUG FIX: Client.generate returns a Response object, not a string; the
    # original stored/returned the object itself. Extract the generated text.
    bot_reply = response.generated_text
    chat_history[-1][1] = bot_reply
    return bot_reply, chat_history
49
+
50
+
51
# Headless UI: the components are hidden/plain because this Space is meant to be
# driven programmatically (the Markdown below shows the gradio_client recipe),
# not used interactively by humans.
with gr.Blocks() as demo:
    gr.Markdown("""
# Falcon-7b-instruct Discord Bot Powered by Gradio and Hugging Face Endpoints

To deploy this space as a discord bot, you will need to deploy your own Falcon model to Hugging Face Endpoints.
Don't worry it's super easy.


```bash
pip install gradio_client
```

grc.Client.duplicate("gradio-discord-bots/falcon-7b-instruct", private=False, secrets={"HF_TOKEN": "<your-key-here>", "INFERENCE_ENDPOINT": "<endpoint-url>"}).deploy_discord()
""")
    # Invisible button whose click event registers `run_chat` as a named API route.
    button = gr.Button(visible=False)
    # Server-side conversation state: list of [user_text, bot_text] turns.
    history = gr.State([])
    message = gr.Textbox()
    response = gr.Textbox()
    # NOTE(review): api_name="chat" is presumably the endpoint name that
    # deploy_discord() looks up — confirm against gradio_client docs.
    button.click(run_chat, [message, history], [response, history], api_name="chat")


# Queue requests before launching; concurrency_count is the Gradio 3.x knob for
# how many events may run at once (removed in Gradio 4.x — pin the version).
demo.queue(concurrency_count=70).launch()
requirements.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ text-generation