Spaces:
Build error
Build error
update
Browse files
app.py
CHANGED
@@ -31,11 +31,16 @@ A: Let’s think step by step.
|
|
31 |
"""
|
32 |
|
33 |
|
34 |
-
def text_generate(prompt):
|
35 |
|
36 |
-
#prints
|
37 |
-
print(f"*****Inside text_generate function******")
|
38 |
-
|
|
|
|
|
|
|
|
|
|
|
39 |
"parameters":
|
40 |
{
|
41 |
"top_p": 0.9,
|
@@ -60,15 +65,21 @@ demo = gr.Blocks()
|
|
60 |
with demo:
|
61 |
gr.Markdown("<h1><center>Step By Step With Bloom</center></h1>")
|
62 |
gr.Markdown(
|
63 |
-
""" [BigScienceW Bloom](https://twitter.com/BigscienceW) \n\n Large language models have demonstrated a capability of 'Chain-of-thought reasoning'. Some amazing researchers( [Jason Wei et al.](https://arxiv.org/abs/
|
64 |
)
|
65 |
with gr.Row():
|
|
|
66 |
example_prompt = gr.Radio( ["Q: A juggler can juggle 16 balls. Half of the balls are golf balls, and half of the golf balls are blue. How many blue golf balls are there?\nA: Let’s think step by step.\n", "Q: Roger has 5 tennis balls already. He buys 2 more cans of tennis balls. Each can has 3 tennis balls. How many tennis balls does he have now?\nA: Let’s think step by step.\n", "Q: On an average Joe throws 25 punches per minute. His fight lasts 5 rounds of 3 minutes each. How many punches did he throw?\nA: Let’s think about this logically.\n"], label= "Choose a sample Prompt")
|
|
|
|
|
|
|
|
|
|
|
67 |
#input_word = gr.Textbox(placeholder="Enter a word here to generate text ...")
|
68 |
generated_txt = gr.Textbox(lines=7)
|
69 |
|
70 |
|
71 |
b1 = gr.Button("Generate Text")
|
72 |
-
b1.click(text_generate,inputs=example_prompt, outputs=generated_txt)
|
73 |
|
74 |
demo.launch(enable_queue=True, debug=True)
|
|
|
31 |
"""
|
32 |
|
33 |
|
34 |
+
def text_generate(prompt, problem, template):
|
35 |
|
36 |
+
#prints to debug
|
37 |
+
print(f"*****Inside text_generate function******")
|
38 |
+
print(f"Prompt is :{prompt}")
|
39 |
+
print(f"Problem is :{problem}")
|
40 |
+
print(f"Template is :{template}")
|
41 |
+
|
42 |
+
p = problem + "A: " + template + "\n"
|
43 |
+
json_ = {"inputs": p,
|
44 |
"parameters":
|
45 |
{
|
46 |
"top_p": 0.9,
|
|
|
65 |
with demo:
|
66 |
gr.Markdown("<h1><center>Step By Step With Bloom</center></h1>")
|
67 |
gr.Markdown(
|
68 |
+
""" [BigScienceW Bloom](https://twitter.com/BigscienceW) \n\n Large language models have demonstrated a capability of 'Chain-of-thought reasoning'. Some amazing researchers( [Jason Wei et al.](https://arxiv.org/abs/2205.11916)) recently found out that by adding **Lets think step by step** it improves the model's zero-shot performance. Some might say — You can get good results out of LLMs if you know how to speak to them.\n\nThis Space is created by [Yuvraj Sharma](https://twitter.com/yvrjsharma) for EuroPython 2022 Demo."""
|
69 |
)
|
70 |
with gr.Row():
|
71 |
+
|
72 |
example_prompt = gr.Radio( ["Q: A juggler can juggle 16 balls. Half of the balls are golf balls, and half of the golf balls are blue. How many blue golf balls are there?\nA: Let’s think step by step.\n", "Q: Roger has 5 tennis balls already. He buys 2 more cans of tennis balls. Each can has 3 tennis balls. How many tennis balls does he have now?\nA: Let’s think step by step.\n", "Q: On an average Joe throws 25 punches per minute. His fight lasts 5 rounds of 3 minutes each. How many punches did he throw?\nA: Let’s think about this logically.\n"], label= "Choose a sample Prompt")
|
73 |
+
|
74 |
+
example_problem = gr.Radio( ["Q: A juggler can juggle 16 balls. Half of the balls are golf balls, and half of the golf balls are blue. How many blue golf balls are there?\n", "Q: Roger has 5 tennis balls already. He buys 2 more cans of tennis balls. Each can has 3 tennis balls. How many tennis balls does he have now?\n", "Q: On an average Joe throws 25 punches per minute. His fight lasts 5 rounds of 3 minutes each. How many punches did he throw?\n"], label= "Choose a sample Prompt")
|
75 |
+
|
76 |
+
example_template = gr.Radio( ["Let’s think step by step."," First, ", " Let’s think about this logically.", "Let’s solve this problem by splitting it into steps.", " Let’s be realistic and think step by step.", "Let’s think like a detective step by step.", "Let’s think", "Before we dive into the answer,", "The answer is after the proof."], label= "Choose a sample Template for Zero-Shot CoT")
|
77 |
+
|
78 |
#input_word = gr.Textbox(placeholder="Enter a word here to generate text ...")
|
79 |
generated_txt = gr.Textbox(lines=7)
|
80 |
|
81 |
|
82 |
b1 = gr.Button("Generate Text")
|
83 |
+
b1.click(text_generate,inputs=[example_prompt, example_problem, example_template], outputs=generated_txt)
|
84 |
|
85 |
demo.launch(enable_queue=True, debug=True)
|