srush committed
Commit: 0e4ded8
Parent: 4e3dc76

Upload with huggingface_hub

Files changed (6)
  1. app.py +23 -4
  2. math_demo.py +3 -2
  3. ner.py +1 -1
  4. pal.py +2 -1
  5. temp +0 -0
  6. temp.log +0 -0
app.py CHANGED
@@ -9,14 +9,33 @@ from qa import gradio as qa
 from stats import gradio as stats
 from selfask import gradio as selfask
 
-css = "#clean div.form {border: 0px} #response {border: 0px; background: #ffeec6} #prompt {border: 0px;background: aliceblue} #json {border: 0px} #result {border: 0px; background: #c5e0e5} #inner {padding: 20px} #inner textarea {border: 0px} .tabs div.tabitem {border: 0px}"
+CSS = """
+#clean div.form {border: 0px}
+#response {border: 0px; background: #ffeec6}
+#prompt {border: 0px;background: aliceblue}
+#json {border: 0px}
+#result {border: 0px; background: #c5e0e5}
+#inner {margin: 10px; padding: 10px; font-size: 20px; }
+#inner textarea {border: 0px}
+div.gradio-container {color: black}
+span.head {font-size: 60pt; font-family: cursive;}
+body {
+--text-sm: 15px;
+--text-md: 20px;
+--text-lg: 22px;
+--input-text-size: 20px;
+--section-text-size: 20px;
+}
+"""
 
-with gr.Blocks(css=css) as demo:
-    gr.HTML("<center> <img width='10%' style='display:inline; padding: 5px' src='https://user-images.githubusercontent.com/35882/218286642-67985b6f-d483-49be-825b-f62b72c469cd.png'> <h1 style='display:inline; font-size:40pt; padding:20px'> Mini-Chain </h1> <img width='10%' style='display:inline;padding: 5px' src='https://avatars.githubusercontent.com/u/25720743?s=200&v=4'> </center><br><center><a href='https://github.com/srush/minichain'>[code]</a> <a href='https://user-images.githubusercontent.com/35882/218286642-67985b6f-d483-49be-825b-f62b72c469cd.png'>[docs]</a></center>")
+
+
+with gr.Blocks(css=CSS, theme=gr.themes.Monochrome()) as demo:
+    gr.HTML("<center style='background:#B6B7BA'> <span class='head'>Mini</span><img src='https://user-images.githubusercontent.com/35882/227017900-0cacdfb7-37e2-47b1-9347-a233810d3544.png' width='20%' style='display:inline'><span class='head'>Chain</span> <br><a href='https://github.com/srush/minichain'>[code]</a> <a href='https://user-images.githubusercontent.com/35882/218286642-67985b6f-d483-49be-825b-f62b72c469cd.png'>[docs]</a></center>")
 
     gr.TabbedInterface([math_demo, qa, chat, gatsby, ner, bash, pal, stats, selfask],
                        ["Math", "QA", "Chat", "Book", "NER", "Bash", "PAL", "Stats", "SelfAsk"],
-                       css = css)
+                       css = CSS)
 
 demo.launch()
 
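This change unpacks the old one-line css string into a readable multi-line constant, raises the default text sizes through CSS variables, swaps in a new banner, and moves the Blocks container onto Gradio's Monochrome theme. Below is a minimal standalone sketch of the same Blocks-plus-custom-CSS-plus-theme pattern; the textbox and its elem_id are illustrative, not part of this commit.

# Minimal sketch (not from this repo): custom CSS plus a built-in theme on gr.Blocks.
import gradio as gr

CSS = """
#inner {margin: 10px; padding: 10px; font-size: 20px;}
#inner textarea {border: 0px}
"""

with gr.Blocks(css=CSS, theme=gr.themes.Monochrome()) as demo:
    # A component opts into the custom rules by pointing elem_id at a styled selector.
    gr.Textbox(label="Prompt", elem_id="inner")

demo.launch()
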
math_demo.py CHANGED
@@ -18,10 +18,11 @@ def math_prompt(model, question):
     "Prompt to call GPT with a Jinja template"
     return model(dict(question=question))
 
-@prompt(Python())
+@prompt(Python(), template="import math\n{{code}}")
 def python(model, code):
     "Prompt to call Python interpreter"
-    return int(model(code))
+    code = "\n".join(code.strip().split("\n")[1:-1])
+    return int(model(dict(code=code)))
 
 def math_demo(question):
     "Chain them together"
 
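The updated python prompt now renders through a Jinja template that prepends import math, and it drops the first and last lines of the model's reply before execution, presumably to shed the triple-backtick fence that chat models usually wrap around generated code. A small sketch of just that stripping step; the sample string is invented for illustration.

# Sketch of the added stripping line, applied to a hypothetical fenced reply.
raw = "```python\nmath.sqrt(16) * 2\n```"        # stand-in for the LLM's code answer
code = "\n".join(raw.strip().split("\n")[1:-1])  # remove the opening and closing fence lines
print(code)                                      # -> math.sqrt(16) * 2
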
ner.py CHANGED
@@ -14,7 +14,7 @@ Chain that does named entity recognition with arbitrary labels. [[Code](https://
 from minichain import prompt, show, OpenAI
 
 @prompt(OpenAI(), template_file = "ner.pmpt.tpl", parser="json")
-def ner_extract(model, **kwargs):
+def ner_extract(model, kwargs):
     return model(kwargs)
 
 @prompt(OpenAI())
 
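Here ner_extract switches from unpacked keyword arguments to a single positional dict, which matches how the chained prompts in these demos pass one payload from step to step (compare python(pal_prompt(question)) in pal.py). A plain-Python illustration of the signature difference; minichain is not involved and the payload keys are invented.

# Illustration only: one positional dict versus unpacked keyword arguments.
def extract_old(model, **kwargs):   # had to be called as extract_old(m, text=..., labels=...)
    return kwargs

def extract_new(model, kwargs):     # is called with a single dict: extract_new(m, {...})
    return kwargs

payload = {"text": "Barack Obama visited Paris.", "labels": "PER, LOC"}
print(extract_new(None, payload) == extract_old(None, **payload))  # True
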
pal.py CHANGED
@@ -16,7 +16,7 @@ def pal_prompt(model, question):
 
 @prompt(Python())
 def python(model, inp):
-    return int(model(inp + "\nprint(solution())"))
+    return float(model(inp + "\nprint(solution())"))
 
 def pal(question):
     return python(pal_prompt(question))
@@ -32,6 +32,7 @@ gradio = show(pal,
               examples=[question],
               subprompts=[pal_prompt, python],
               description=desc,
+              out_type="json",
               code=open("pal.py", "r").read().split("$")[1].strip().strip("#").strip(),
               )
 
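Parsing the interpreter output with float instead of int means a generated solution() that prints a fractional answer no longer breaks the chain, and the new out_type="json" argument to show presumably controls how the result is displayed. A small sketch of why the looser cast matters; the string stands in for whatever print(solution()) emits.

# Why float() is the safer cast for the printed result of solution().
printed = "3.5"          # stand-in for the interpreter's printed output
print(float(printed))    # 3.5
try:
    int(printed)         # int() rejects a fractional string literal
except ValueError as err:
    print(err)           # invalid literal for int() with base 10: '3.5'
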
temp CHANGED
Binary files a/temp and b/temp differ
 
temp.log CHANGED
Binary files a/temp.log and b/temp.log differ