# GitBot / app.py
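# Gradio Space: chat with HuggingFaceH4/zephyr-7b-beta and, on request, list a
# repository's GitHub issues, generate a suggested resolution for one of them,
# and surface the most similar issues via sentence-transformer embeddings.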
import gradio as gr
from huggingface_hub import InferenceClient
import os
import requests
from transformers import pipeline
from sentence_transformers import SentenceTransformer, util
# Hugging Face Inference Client
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
# Load a pre-trained model for sentence similarity
similarity_model = SentenceTransformer('all-mpnet-base-v2')
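# The embedding model is downloaded on first run; it is used below to embed
# issue titles and rank them by cosine similarity against the selected issue.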
# Function to analyze issues and provide solutions
def analyze_issues(issue_text, model_name):
    nlp = pipeline("text-generation", model=model_name)
    result = nlp(issue_text)
    return result[0]['generated_text']
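# Hypothetical usage sketch (the model name matches the default used further down):
#   analyze_issues("Bug: crash on startup\n\nSteps to reproduce: ...", "gpt2")
# The pipeline is rebuilt on every call; caching it per model_name would avoid
# reloading weights for repeated analyses.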
# Function to find related issues
def find_related_issues(issue_text, issues):
    issue_embedding = similarity_model.encode(issue_text)
    related_issues = []
    for issue in issues:
        title_embedding = similarity_model.encode(issue['title'])
        # cos_sim returns a 1x1 tensor; .item() converts it to a plain float
        similarity = util.cos_sim(issue_embedding, title_embedding)[0][0].item()
        related_issues.append((issue, similarity))
    related_issues = sorted(related_issues, key=lambda x: x[1], reverse=True)
    return related_issues[:3]  # Return top 3 most similar issues
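# Illustrative call with hypothetical issue dicts shaped like the GitHub API
# response (only 'title' is used here); returns [(issue, similarity), ...]:
#   find_related_issues("App crashes on upload",
#                       [{"title": "Crash when uploading a file"},
#                        {"title": "Add dark mode"}])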
# Function to handle chat responses
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
    github_api_token,
    github_username,
    github_repository,
):
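    """Chat handler with three modes.

    "/github" lists open issues for the configured repository, a bare issue
    number analyzes that issue and shows the most similar ones, and any other
    message is streamed to the Zephyr chat model.
    """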
    global GITHUB_API_TOKEN
    GITHUB_API_TOKEN = github_api_token
    messages = [{"role": "system", "content": system_message}]
    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})
    if message.startswith("/github"):
        if not GITHUB_API_TOKEN:
            yield "Please enter your GitHub API token first. [Click here to get your token](https://github.com/settings/tokens)"
        else:
            try:
                url = f"https://api.github.com/repos/{github_username}/{github_repository}/issues"
                headers = {
                    "Authorization": f"Bearer {GITHUB_API_TOKEN}",
                    "Accept": "application/vnd.github.v3+json"
                }
                response = requests.get(url, headers=headers)
                if response.status_code == 200:
                    issues = response.json()
                    issue_list = "\n".join([f"{i+1}. {issue['title']}" for i, issue in enumerate(issues)])
                    yield f"Available GitHub Issues:\n{issue_list}\n\nEnter the issue number to analyze:"
                else:
                    yield f"Error fetching GitHub issues: {response.status_code}"
            except Exception as e:
                yield f"Error fetching GitHub issues: {e}"
    elif message.isdigit():
        if not GITHUB_API_TOKEN:
            yield "Please enter your GitHub API token first. [Click here to get your token](https://github.com/settings/tokens)"
        else:
            try:
                issue_number = int(message) - 1
                url = f"https://api.github.com/repos/{github_username}/{github_repository}/issues"
                headers = {
                    "Authorization": f"Bearer {GITHUB_API_TOKEN}",
                    "Accept": "application/vnd.github.v3+json"
                }
                response = requests.get(url, headers=headers)
                if response.status_code == 200:
                    issues = response.json()
                    issue = issues[issue_number]
                    # The issue body can be null in the API response, so guard against None
                    issue_text = issue['title'] + "\n\n" + (issue['body'] or "")
                    resolution = analyze_issues(issue_text, "gpt2")  # Default to gpt2 for now
                    # Find and display related issues
                    related_issues = find_related_issues(issue_text, issues)
                    related_issue_text = "\n".join([f"- {related['title']} (Similarity: {similarity:.2f})" for related, similarity in related_issues])
                    yield f"Resolution for Issue '{issue['title']}':\n{resolution}\n\nRelated Issues:\n{related_issue_text}"
                else:
                    yield f"Error fetching GitHub issues: {response.status_code}"
            except Exception as e:
                yield f"Error analyzing issue: {e}"
    else:
        messages.append({"role": "user", "content": message})
        response = ""
        # Stream the model's reply, yielding the accumulated text as it grows
        for chunk in client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            token = chunk.choices[0].delta.content or ""
            response += token
            yield response
with gr.Blocks() as demo:
    with gr.Row():
        github_api_token = gr.Textbox(label="GitHub API Token", type="password")
        github_username = gr.Textbox(label="GitHub Username")
        github_repository = gr.Textbox(label="GitHub Repository")
    chatbot = gr.ChatInterface(
        respond,
        additional_inputs=[
            gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
            gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
            gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
            gr.Slider(
                minimum=0.1,
                maximum=1.0,
                value=0.95,
                step=0.05,
                label="Top-p (nucleus sampling)",
            ),
            github_api_token,
            github_username,
            github_repository,
        ],
    )
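# ChatInterface passes the additional_inputs to respond() in order, after the
# message and history arguments: system message, max tokens, temperature,
# top-p, then the three GitHub fields defined in the Row above.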
if __name__ == "__main__":
    demo.queue().launch(share=True, server_name="0.0.0.0", server_port=7860)