acecalisto3 committed
Commit 9c8e622
1 Parent(s): d4bcbcd

Update app.py

Files changed (1)
  1. app.py +100 -224
app.py CHANGED
@@ -1,226 +1,102 @@
  import gradio as gr
- from transformers import pipeline, AutoModelForCausalLM, AutoTokenizer
- from sentence_transformers import SentenceTransformer, util
  import os
- import requests
- from huggingface_hub import HfApi, HfFolder
- import openai
-
- client = openai.OpenAI()
-
- # Constants
- GITHUB_API_BASE_URL = "https://api.github.com/repos"
- DEFAULT_MODEL = "acecalisto3/PhiCoDInstruck"
- MAX_RELATED_ISSUES = 3
-
- # Initialize global variables
- model = None
- tokenizer = None
- similarity_model = SentenceTransformer('all-MiniLM-L6-v2')
-
- def load_model(hf_token):
-     """Loads the model and tokenizer using the provided Hugging Face token."""
-     global model, tokenizer
-
-     # Set the token for Hugging Face authentication
-     HfApi().set_access_token(hf_token)
-     HfFolder.save_token(hf_token)  # Save the token to a file
-
-     # Load the model
-     model = AutoModelForCausalLM.from_pretrained(DEFAULT_MODEL)
-     tokenizer = AutoTokenizer.from_pretrained(DEFAULT_MODEL)
-
-     # Cache the model and tokenizer for future use (optional)
-     # model.save_pretrained("model")
-     # tokenizer.save_pretrained("tokenizer")
-
-     return "Model loaded successfully!"
-
- def analyze_issues(issue_text: str, severity: str = None, programming_language: str = None) -> dict:
-     """Analyzes issues and provides solutions using a specified language model."""
-     global model, tokenizer
-
-     if model is not None and tokenizer is not None:
-         response = model.generate(
-             tokenizer(f"{issue_text}\nAssistant: ", return_tensors="pt")['input_ids'],
-             max_length=2048,
-             do_sample=True,
-             temperature=0.7,
-             top_k=50,
-         )
-         assistant_response = tokenizer.batch_decode(response, skip_special_tokens=True)[0].strip()
-
-         # Extract severity and programming language from the response
-         if "Severity" in assistant_response:
-             severity = assistant_response.split(":")[1].strip()
-         if "Programming Language" in assistant_response:
-             programming_language = assistant_response.split(":")[1].strip()
-
-         return {
-             'assistant_response': assistant_response,
-             'severity': severity,
-             'programming_language': programming_language,
-         }
-     else:
-         return {"assistant_response": "Please enter your Hugging Face API token and click 'Load Model'."}
-
- def find_related_issues(issue_text: str, issues: list) -> list:
-     """Finds semantically related issues from a list of issues based on the input issue text."""
-     issue_embedding = similarity_model.encode(issue_text, convert_to_tensor=True)
-     similarities = [util.cos_sim(issue_embedding, similarity_model.encode(issue['title'], convert_to_tensor=True)) for issue in issues]
-     sorted_issues = sorted(enumerate(similarities), key=lambda x: x[1], reverse=True)
-     related_issues = [issues[i] for i, similarity in sorted_issues[:MAX_RELATED_ISSUES]]
-     return related_issues
-
- def fetch_github_issues(github_api_token: str, github_username: str, github_repository: str) -> list:
-     """Fetches issues from a specified GitHub repository using the GitHub API."""
-     headers = {'Authorization': f'token {github_api_token}'}
-     url = f"{GITHUB_API_BASE_URL}/{github_username}/{github_repository}/issues"
-     response = requests.get(url, headers=headers)
-     issues = response.json()
-     return issues
-
- def respond(command, history, system_message, max_tokens, temperature, top_p, github_api_token, github_username, github_repository, *args, **kwargs):
-     """Handles user commands and generates responses using the selected language model."""
-     global model, tokenizer
-
-     command = command.strip()
-
-     if model is not None and tokenizer is not None:
-         response = model.generate(
-             tokenizer(f"{system_message}\n{command}\n{history}\n{github_username}/{github_repository}\nAssistant: ", return_tensors="pt")['input_ids'],
-             max_length=max_tokens,
-             do_sample=True,
-             temperature=temperature,
-             top_k=top_p,
-         )
-         assistant_response = tokenizer.batch_decode(response, skip_special_tokens=True)[0].strip() if response else ''
-         return {
-             'assistant_response': assistant_response,
-         }
-     else:
-         return {"assistant_response": "Please enter your Hugging Face API token and click 'Load Model'."}
-
- class MyChatbot(gr.Chatbot):
-     """Custom Chatbot class for enhanced functionality."""
-     def __init__(self, **kwargs):
-         super().__init__(**kwargs)
-         self.issues = []  # Store fetched issues
-         self.current_issue = None  # Store the currently selected issue
-
-     def postprocess(self, y):
-         """Post-processes the response to handle commands and display results."""
-         # Ensure `y` is not None
-         if y is None:
-             return "No response available."
-
-         # Extract the response from the dictionary
-         assistant_response = y.get('assistant_response', "No response available.")
-         command = y.get('command', "")
-
-         # Handle commands
-         if command == "/github":
-             if not y['github_api_token']:
-                 return "Please enter your GitHub API token first."
-             else:
-                 try:
-                     self.issues = fetch_github_issues(y['github_api_token'], y['github_username'], y['github_repository'])
-                     issue_list = "\n".join([f"{i+1}. {issue['title']}" for i, issue in enumerate(self.issues)])
-                     return f"Available GitHub Issues:\n{issue_list}\n\nEnter the issue number to analyze:"
-                 except Exception as e:
-                     return f"Error fetching GitHub issues: {e}"
-         elif command == "/help":
-             return """Available commands:
- - `/github`: Analyze a GitHub issue
- - `/help`: Show this help message
- - `/generate_code [code description]`: Generate code based on the description
- - `/explain_concept [concept]`: Explain a concept
- - `/write_documentation [topic]`: Write documentation for a given topic
- - `/translate_code [code] to [target language]`: Translate code to another language"""
-         elif command.isdigit() and self.issues:
-             try:
-                 issue_number = int(command) - 1
-                 if 0 <= issue_number < len(self.issues):
-                     self.current_issue = self.issues[issue_number]
-                 else:
-                     return "Invalid issue number."
-
-                 # Store the selected issue
-                 issue_text = self.current_issue['title'] + "\n\n" + self.current_issue['body']
-                 resolution = analyze_issues(issue_text)
-                 related_issues = find_related_issues(issue_text, self.issues)
-                 related_issue_text = "\n".join([f"- {issue['title']} (Similarity: {similarity:.2f})" for issue, similarity in related_issues])
-                 return f"Resolution for Issue '{self.current_issue['title']}':\n{resolution['assistant_response']}\n\nRelated Issues:\n{related_issue_text}"
-             except Exception as e:
-                 return f"Error analyzing issue: {e}"
-         elif command.startswith("/"):
-             # Handle commands like `/generate_code`, `/explain_concept`, etc.
-             if self.current_issue:
-                 # Use the current issue's context for these commands
-                 issue_text = self.current_issue['title'] + "\n\n" + self.current_issue['body']
-                 return analyze_issues(issue_text)['assistant_response']
-             else:
-                 return "Please select an issue first using `/github`."
-         else:
-             # For free-form text, simply display the assistant's response
-             return assistant_response
-
- def setup_demo():
-     with gr.Blocks() as demo:
-         with gr.Row():
-             hf_token = gr.Textbox(placeholder="Enter Hugging Face API Token", type="password")
-             github_api_token = gr.Textbox(placeholder="Enter GitHub API Token", type="password")
-             github_username = gr.Textbox(placeholder="Enter GitHub Username")
-             github_repository = gr.Textbox(placeholder="Enter GitHub Repository")
-
-         system_message = gr.Textbox(
-             value="You are GitBot, the Github project guardian angel. You resolve issues and propose implementation of feature requests.",
-             label="System message"
-         )
-
-         # Load the model when the token is entered
-         load_button = gr.Button("Load Model")
-         status_box = gr.Textbox(label="Status")
-
-         load_button.click(fn=load_model, inputs=[hf_token], outputs=[status_box])
-
-         chatbot = MyChatbot()
-
-         # Add a command input
-         command = gr.Textbox(label="Command")
-
-         # Add sliders for model generation settings
-         max_tokens = gr.Slider(minimum=1, maximum=8192, value=2048, step=1, label="Max new tokens")
-         temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.71, step=0.1, label="Temperature")
-         top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.01, label="Top-p (nucleus sampling)")
-
-         # Set up the main interaction
-         respond_button = gr.Button("Respond")
-         respond_button.click(
-             fn=respond,
-             inputs=[
-                 command,
-                 gr.State(),  # history placeholder
-                 system_message,
-                 max_tokens,
-                 temperature,
-                 top_p,
-                 github_api_token,
-                 github_username,
-                 github_repository,
-             ],
-             outputs=[chatbot]
-         )
-
-         # Add a button to fetch GitHub issues
-         fetch_issues_button = gr.Button("Fetch Issues")
-         fetch_issues_button.click(
-             fn=lambda github_api_token, github_username, github_repository: fetch_github_issues(github_api_token, github_username, github_repository),
-             inputs=[github_api_token, github_username, github_repository],
-             outputs=[gr.Dropdown(choices=[], label="Select Issue")]
-         )
-
-     return demo
-
- demo = setup_demo()
- setup_demo.launch()
 
  import gradio as gr
+ import subprocess
  import os
+ import json
+ import shutil
+ from git import Repo  # GitPython
+
+ iface = gr.Blocks()
+
+ with iface:
+     gr.Markdown("## Issue Resolution Workshop")
+     issue_number = gr.Number(label="Issue Number")
+     # Button text is the first positional argument (value); Button has no
+     # label parameter. The _btn suffix avoids shadowing the handler
+     # functions defined below.
+     replicate_issue_btn = gr.Button("Replicate Issue")
+     resolve_issue_btn = gr.Button("Resolve Issue")
+     issue_description = gr.Markdown(label="Issue Description")
+     resolution_notes = gr.Markdown(label="Resolution Notes")
+     resolution_proof = gr.Markdown(label="Resolution Proof")
+     submit_resolution_btn = gr.Button("Submit Resolution")
+
+     # Set up GitPython with a writable working directory
+     os.environ["HOME"] = "/tmp"
+     repo = Repo.init("/tmp/repo")
+
+     # Read the GitHub token from the environment and record it in the
+     # repository config
+     token = os.environ["GITHUB_TOKEN"]
+     repo.config_writer().set_value("user", "token", token).release()
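+     # Note: writing the token into the git config does not by itself
+     # authenticate HTTPS operations; a common pattern (an assumption, not
+     # part of this commit) is to embed the token in the remote URL, e.g.
+     #   repo.create_remote("origin", f"https://{token}@github.com/<user>/<repo>.git")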
+
+     # Pull issues from enricoros/big-agi; the endpoint returns a single
+     # JSON array, so parse it with json.loads rather than splitting lines
+     # and eval-ing them
+     issues_url = "https://api.github.com/repos/enricoros/big-agi/issues"
+     issues = json.loads(subprocess.check_output(["curl", "-s", issues_url]).decode())
+
+     # Loop through the issues and store them in a dictionary
+     issues_dict = {}
+     for issue in issues:
+         title = issue["title"]
+         number = issue["number"]
+         issues_dict[number] = title
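+     # Note: this endpoint returns only the first page of open issues
+     # (30 per page by default) and also lists pull requests; pagination
+     # and PR filtering are left out here.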
+
+     # Define the replicate issue function
+     def replicate_issue(number):
+         number = int(number)  # gr.Number passes its value as a float
+
+         # Check if the issue number exists
+         if number not in issues_dict:
+             return "Issue not found"
+
+         # Clone your miagi fork
+         miagi_repo = Repo.clone_from("https://github.com/Ig0tU/miagi.git", "/tmp/miagi")
+
+         # Create a new branch for the issue
+         miagi_repo.create_head(f"issue-{number}", miagi_repo.head.reference)
+         miagi_branch = miagi_repo.heads[f"issue-{number}"]
+
+         # Checkout the new branch
+         miagi_branch.checkout()
+
+         # Replicate the issue in the new branch
+         shutil.copytree(os.path.expanduser("~/enricoros/big-agi/issues"), os.path.expanduser("~/miagi/issues"))
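+         # Note: the copytree source assumes a checkout of enricoros/big-agi
+         # already exists under $HOME; nothing above clones that repository,
+         # so this step fails unless it is provided separately.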
+
+         # Return the issue description; the click wiring below routes the
+         # return value into the issue_description component (calling
+         # issue_description.update() here would have no effect)
+         return issues_dict[number]
+
+     # Define the resolve issue function
+     def resolve_issue(number):
+         number = int(number)
+
+         # Check if the issue number exists
+         if number not in issues_dict:
+             return "Issue not found"
+
+         # Resolve the issue in the new branch
+         # (Replace this with your own code to resolve the issue)
+         with open(os.path.expanduser(f"~/miagi/issues/{number}/{number}.md"), "a") as f:
+             f.write("Resolved!\n\n")
+
+         return "Issue resolved"
+
+     # Define the submit resolution function
+     def submit_resolution(number):
+         number = int(number)
+
+         # Check if the issue number exists
+         if number not in issues_dict:
+             return "Issue not found"
+
+         # Document the resolution in a README file
+         with open(os.path.expanduser("~/miagi/README.md"), "a") as f:
+             f.write(f"Issue {number}: {issues_dict[number]}\n")
+             f.write("--------------------\n")
+             f.write(open(os.path.expanduser(f"~/miagi/issues/{number}/{number}.md")).read())
+             f.write("\n\n")
+         # Commit and push the changes; reopen the existing clone rather
+         # than re-initializing it with Repo.init
+         miagi_repo = Repo("/tmp/miagi")
+         miagi_repo.git.add(".")
+         miagi_repo.git.commit(m=f"Resolved issue {number}")
+         miagi_repo.git.push()
+
+         return "Resolution submitted"
+
+     # Wire each button to its handler; the handler's return value is
+     # rendered in the corresponding output component
+     replicate_issue_btn.click(fn=replicate_issue, inputs=issue_number, outputs=issue_description)
+     resolve_issue_btn.click(fn=resolve_issue, inputs=issue_number, outputs=resolution_notes)
+     submit_resolution_btn.click(fn=submit_resolution, inputs=issue_number, outputs=resolution_proof)
+
+ iface.launch()