Tonic committed on
Commit 239a985
1 Parent(s): 80a9208

Update maker.py

Files changed (1)
  1. maker.py +13 -8
maker.py CHANGED
@@ -8,11 +8,11 @@ import os
 HF_TOKEN = os.environ["HF_TOKEN"]
 HEADERS = {"Authorization": f"Bearer {HF_TOKEN}"}
 
-zephyr_7b_beta = "https://api-inference.huggingface.co/models/HuggingFaceH4/zephyr-7b-beta/"
+tulu = "https://tonic1-tulu.hf.space/--replicas/9sffh/"
 
 
 welcome_message = """
-Hi! I'll help you **build a GPT**. You can say something like, "make a bot that gives advice on how to grow your startup."
+Hi! I'm using [Tulu from AlenAi](https://huggingface.co/spaces/Tonic1/Tulu) I'll help you **build a GPT**. You can say something like, "make a bot that gives advice on how to grow your startup."
 
 What would you like to make?
 """
@@ -31,7 +31,7 @@ Welcome to **{}**! Say something like:
 # Example User Input: Vegetarian dinner ideas under 30 minutes
 # """
 
-zephyr_system_prompt = """
+system_prompt = """
 You are an AI whose job it is to help users create their own chatbots. In particular, you need to respond succintly in a friendly tone, write a system prompt for an LLM, a catchy title for the chatbot, and a very short example user input. Make sure each part is included.
 
 For example, if a user says, "make a bot that gives advice on how to grow your startup", first do a friendly response, then add the title, system prompt, and example user input. Immediately STOP after the example input. It should be EXACTLY in this format:
@@ -62,14 +62,14 @@ def build_input_prompt(message, chatbot, system_prompt):
 
 def post_request_beta(payload):
     """
-    Sends a POST request to the predefined Zephyr-7b-Beta URL and returns the JSON response.
+    Sends a POST request to the predefined Tulu and returns the JSON response.
     """
-    response = requests.post(zephyr_7b_beta, headers=HEADERS, json=payload)
+    response = requests.post(tulu, headers=HEADERS, json=payload)
     response.raise_for_status()  # Will raise an HTTPError if the HTTP request returned an unsuccessful status code
     return response.json()
 
 
-def predict_beta(message, chatbot=[], system_prompt=zephyr_system_prompt):
+def predict_beta(message, chatbot=[], system_prompt=system_prompt):
     input_prompt = build_input_prompt(message, chatbot, system_prompt)
     data = {
         "inputs": input_prompt
@@ -123,7 +123,7 @@ def extract_title_prompt_example(text, title, system_prompt, example_input):
     return text, title, system_prompt, example_input
 
 def make_open_gpt(message, history, current_title, current_system_prompt, current_example_input):
-    response = predict_beta(message, history, zephyr_system_prompt)
+    response = predict_beta(message, history, system_prompt)
     response, title, system_prompt, example_input = extract_title_prompt_example(response, current_title, current_system_prompt, current_example_input)
     return "", history + [(message, response)], title, system_prompt, example_input, [(None, welcome_preview_message.format(title, example_input))], example_input, gr.Column(visible=True), gr.Group(visible=True)
 
@@ -207,7 +207,12 @@ css = """
 """
 
 with gr.Blocks(css=css) as demo:
-    gr.Markdown("🥧 **GPT Baker** lets you create your own **open-source GPTs**. Start chatting to automatically below to automatically bake your GPT (or you can manually configure the recipe in the second tab). You can build and test them for free, but will need a [HF Pro account](https://huggingface.co/subscribe/pro) to publish them on Spaces (as Open GPTs are powered by the Zephyr 7B beta model using the HF Inference API). You will **not be charged** for usage of your Open GPT as the HF Inference API Pro membership does not charge per-query. Find your token here: https://huggingface.co/settings/tokens")
+    gr.Markdown(""" # 👋🏻Welcome to 🕵🏻‍♂️Agent🌷Tulu
+    **A🕵🏻‍♂️Agent🌷Tulu** lets you create your own **open-source GPTs** using [allenai/tulu-2-dpo-13b](https://huggingface.co/allenai/tulu-2-dpo-13b). Start chatting to automatically below to automatically bake your GPT (or you can manually configure the recipe in the second tab). You can build and test them for free & publish them on Spaces (as Open GPTs are powered by the [Tulu DPO model](https://huggingface.co/allenai/tulu-2-dpo-70b) ).
+    You think this is cool + want to make your own ? check out [GPTBaker](https://huggingface.co/abidlabs/GPT-Baker) from [AbidLabs](https://huggingface.co/abidlabs) of 🤗[Gradio](https://www.gradio.app/)
+    ### Join us:
+    TeamTonic is always making cool demos! Join our active builder's community on Discord: [Discord](https://discord.gg/GWpVpekp) On Huggingface: [TeamTonic](https://huggingface.co/TeamTonic) & [MultiTransformer](https://huggingface.co/MultiTransformer) On Github: [Polytonic](https://github.com/tonic-ai) & contribute to [PolyGPT](https://github.com/tonic-ai/polygpt-alpha) """
+    )
     with gr.Row():
         with gr.Column(scale=3):
             with gr.Tab("Create"):
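
For context (not part of the commit), a minimal sketch of how the updated request helper could be exercised against the new `tulu` endpoint, assuming the Space accepts the same `{"inputs": ...}` JSON payload that the previous Inference API call used; the standalone script and prompt string are illustrative only:

```python
# Minimal sketch -- assumes the Tulu Space endpoint accepts the same payload
# shape as the old Inference API call. Not part of maker.py itself.
import os

import requests

HF_TOKEN = os.environ["HF_TOKEN"]  # same token the app reads at startup
HEADERS = {"Authorization": f"Bearer {HF_TOKEN}"}

# Endpoint URL introduced by this commit.
tulu = "https://tonic1-tulu.hf.space/--replicas/9sffh/"


def post_request_beta(payload):
    """Send a POST request to the Tulu endpoint and return the JSON response."""
    response = requests.post(tulu, headers=HEADERS, json=payload)
    response.raise_for_status()  # raise HTTPError on a non-2xx status code
    return response.json()


if __name__ == "__main__":
    # Illustrative prompt; in maker.py the input is assembled by build_input_prompt().
    print(post_request_beta({"inputs": "make a bot that gives advice on how to grow your startup"}))
```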