# + tags=["hide_inp"]

desc = """
### Bash Command Suggestion

Chain that asks for a command-line question and then runs the resulting bash command. [[Code](https://github.com/srush/MiniChain/blob/main/examples/bash.py)]

(Adapted from LangChain [BashChain](https://langchain.readthedocs.io/en/latest/modules/chains/examples/llm_bash.html))
"""
# -

# $

import minichain

# Prompt that asks LLM to produce a bash command.


class CLIPrompt(minichain.TemplatePrompt):
    template_file = "bash.pmpt.tpl"

    def parse(self, out: str, inp):
        out = out.strip()
        assert out.startswith("```bash")
        return out.split("\n")[1:-1]
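
# A small sketch of what ``parse`` returns (hypothetical values, assuming the
# model answers with the fenced block the template asks for):
#
#     CLIPrompt().parse("```bash\nls\ncat file.txt\n```", {})
#     # -> ["ls", "cat file.txt"]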


# Prompt that runs the bash command.


class BashPrompt(minichain.Prompt):
    def prompt(self, inp) -> str:
        return ";".join(inp).replace("\n", "")

    def parse(self, out: str, inp) -> str:
        return out
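
# A small sketch of the two methods (hypothetical values): ``prompt`` joins the
# suggested commands into a single bash line, and ``parse`` returns the
# command output unchanged.
#
#     BashPrompt().prompt(["cd ..", "ls"])
#     # -> "cd ..;ls"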


# Generate and run bash command.
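# The chained prompt first maps the question to a list of bash commands
# (CLIPrompt with the OpenAI backend), then hands that list to BashPrompt,
# which executes it with the BashProcess backend and returns its output.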

with minichain.start_chain("bash") as backend:
    prompt = CLIPrompt(backend.OpenAI()).chain(BashPrompt(backend.BashProcess()))

# $
    
gradio = prompt.to_gradio(
    fields=["question"],
    examples=[
        "Go up one directory, and then into the minichain directory, "
        "and list the files in the directory",
        "Please write a bash script that prints 'Hello World' to the console.",
    ],
    out_type="markdown",
    description=desc,
    code=open("bash.py", "r").read().split("$")[1].strip().strip("#").strip(),
    templates=[open("bash.pmpt.tpl")],
)

if __name__ == "__main__":
    gradio.launch()
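
# Running this file launches the Gradio demo locally (a usage sketch; the
# OpenAI backend is assumed to read the standard OPENAI_API_KEY environment
# variable):
#
#     $ python bash.py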


    
# View the prompts.

# + tags=["hide_inp"]
# CLIPrompt().show(
#     {"question": "list the files in the directory"}, """```bash\nls\n```"""
# )
# -


# + tags=["hide_inp"]
# BashPrompt().show(["ls", "cat file.txt"], "hello")
# -

# View the run log.

# minichain.show_log("bash.log")