Update app.py
app.py CHANGED
@@ -6,7 +6,7 @@ import os
 
 # Initialize Hugging Face pipelines
 text_generator = pipeline("text-generation", model="gpt2")
-code_generator = pipeline("text2text-generation", model="
+code_generator = pipeline("text2text-generation", model="t5-base")
 
 # Streamlit App
 st.title("AI Dev Tool Kit")
@@ -22,7 +22,7 @@ if app_mode == "Explorer":
 
 elif app_mode == "In-Chat Terminal":
     st.header("In-Chat Terminal")
-
+
     def run_terminal_command(command):
         try:
             result = subprocess.run(command, shell=True, capture_output=True, text=True)
@@ -43,7 +43,7 @@ elif app_mode == "In-Chat Terminal":
     terminal_input = gr.Textbox(label="Enter Command or Code")
     terminal_output = gr.Textbox(label="Terminal Output", lines=10)
     terminal_button = gr.Button("Run")
-
+
     terminal_button.click(
         nlp_code_interpreter,
         inputs=terminal_input,
@@ -63,30 +63,26 @@ elif app_mode == "Tool Box":
     def deploy_to_huggingface(app_name):
         code = f"""
 import gradio as gr
-
 def run_terminal_command(command):
     try:
         result = subprocess.run(command, shell=True, capture_output=True, text=True)
         return result.stdout if result.returncode == 0 else result.stderr
     except Exception as e:
         return str(e)
-
 def nlp_code_interpreter(text):
     response = code_generator(text, max_length=150)
     code = response[0]['generated_text']
     return code, run_terminal_command(code)
-
 with gr.Blocks() as iface:
     terminal_input = gr.Textbox(label="Enter Command or Code")
     terminal_output = gr.Textbox(label="Terminal Output", lines=10)
     terminal_button = gr.Button("Run")
-
+
     terminal_button.click(
         nlp_code_interpreter,
         inputs=terminal_input,
         outputs=[terminal_output, terminal_output]
     )
-
     iface.launch()
 """
 
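The functional change in this commit is line 9 of app.py, which now builds code_generator on the t5-base checkpoint. A minimal standalone sketch of what that pipeline call does, assuming the transformers package is installed; the prompt string is illustrative and not part of the commit:

from transformers import pipeline

# Same configuration as the updated line 9 of app.py: a text2text-generation
# pipeline backed by the t5-base checkpoint.
code_generator = pipeline("text2text-generation", model="t5-base")

# Illustrative prompt; in the app the text comes from the user-facing textbox.
response = code_generator("list all files in the current directory", max_length=150)
print(response[0]["generated_text"])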
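The terminal helper that appears in the In-Chat Terminal branch and again inside the generated deployment code is plain subprocess. A self-contained copy, with an illustrative call added so it can be tried outside the app:

import subprocess

def run_terminal_command(command):
    # Run the command through the shell; return stdout on success, stderr otherwise.
    # shell=True executes whatever string it receives, so in the app this runs
    # model-generated text verbatim.
    try:
        result = subprocess.run(command, shell=True, capture_output=True, text=True)
        return result.stdout if result.returncode == 0 else result.stderr
    except Exception as e:
        return str(e)

print(run_terminal_command("echo hello"))  # illustrative call, not in the commit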
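Finally, a runnable sketch of the gr.Blocks wiring the diff sets up, assuming gradio is installed. The commit routes both return values of nlp_code_interpreter to the same terminal_output box; this sketch substitutes a dummy callback for the t5 pipeline and splits the two return values into separate textboxes so both are visible, an adjustment for illustration rather than what the commit does:

import gradio as gr

def nlp_code_interpreter(text):
    # Stand-in for the app's pipeline call: pretend the model turned the request
    # into a shell command and report what would be executed.
    code = f"echo {text}"
    return code, f"(would run: {code})"

with gr.Blocks() as iface:
    terminal_input = gr.Textbox(label="Enter Command or Code")
    generated_code = gr.Textbox(label="Generated Code")  # extra box, not in the commit
    terminal_output = gr.Textbox(label="Terminal Output", lines=10)
    terminal_button = gr.Button("Run")

    terminal_button.click(
        nlp_code_interpreter,
        inputs=terminal_input,
        outputs=[generated_code, terminal_output],
    )

iface.launch()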