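"""Gradio chat app: chats with Zephyr-7B via the Hugging Face Inference API and
can list, analyze, and cross-reference GitHub issues for a given repository."""
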
import gradio as gr
from huggingface_hub import InferenceClient
import requests
from transformers import pipeline
from sentence_transformers import SentenceTransformer, util

# Hugging Face Inference Client for chat completions
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")

# GitHub API token; set from the UI each time respond() runs
GITHUB_API_TOKEN = ""

# Pre-trained sentence-similarity model used to find related issues
similarity_model = SentenceTransformer('all-mpnet-base-v2')

# Analyze an issue and generate a suggested resolution.
# Note: this loads a fresh text-generation pipeline on every call; cache it if latency matters.
def analyze_issues(issue_text, model_name):
    nlp = pipeline("text-generation", model=model_name)
    result = nlp(issue_text)
    return result[0]['generated_text']

# Find the issues whose titles are most similar to the given text.
# Note: the queried issue is itself in `issues`, so it will usually rank first.
def find_related_issues(issue_text, issues):
    issue_embedding = similarity_model.encode(issue_text)
    related_issues = []
    for issue in issues:
        title_embedding = similarity_model.encode(issue['title'])
        similarity = util.cos_sim(issue_embedding, title_embedding)[0][0]
        related_issues.append((issue, similarity))
    # Sort by similarity, highest first, and keep the top 3
    related_issues = sorted(related_issues, key=lambda x: x[1], reverse=True)
    return related_issues[:3]

# Function to handle chat responses
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
    github_api_token,
    github_username,
    github_repository
):
    global GITHUB_API_TOKEN
    GITHUB_API_TOKEN = github_api_token

    messages = [{"role": "system", "content": system_message}]

    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})

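    # The /github command lists open issues for the configured repository.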
    if message.startswith("/github"):
        if not GITHUB_API_TOKEN:
            yield "Please enter your GitHub API token first. [Click here to get your token](https://github.com/settings/tokens)"
        else:
            try:
                url = f"https://api.github.com/repos/{github_username}/{github_repository}/issues"
                headers = {
                    "Authorization": f"Bearer {GITHUB_API_TOKEN}",
                    "Accept": "application/vnd.github.v3+json"
                }
                response = requests.get(url, headers=headers)
                if response.status_code == 200:
                    # The issues endpoint also returns pull requests; drop them.
                    issues = [i for i in response.json() if 'pull_request' not in i]
                    issue_list = "\n".join([f"{i+1}. {issue['title']}" for i, issue in enumerate(issues)])
                    yield f"Available GitHub Issues:\n{issue_list}\n\nEnter the issue number to analyze:"
                else:
                    yield f"Error fetching GitHub issues: {response.status_code}"
            except Exception as e:
                yield f"Error fetching GitHub issues: {e}"
    elif message.isdigit():
        if not GITHUB_API_TOKEN:
            yield "Please enter your GitHub API token first. [Click here to get your token](https://github.com/settings/tokens)"
        else:
            try:
                issue_number = int(message) - 1
                url = f"https://api.github.com/repos/{github_username}/{github_repository}/issues"
                headers = {
                    "Authorization": f"Bearer {GITHUB_API_TOKEN}",
                    "Accept": "application/vnd.github.v3+json"
                }
                response = requests.get(url, headers=headers)
                if response.status_code == 200:
                    # Apply the same pull-request filter so numbering matches the listing.
                    issues = [i for i in response.json() if 'pull_request' not in i]
                    issue = issues[issue_number]
                    # An issue body can be None; guard before concatenating.
                    issue_text = issue['title'] + "\n\n" + (issue.get('body') or "")
                    resolution = analyze_issues(issue_text, "gpt2")  # Default to gpt2 for now

                    # Find and display related issues
                    related_issues = find_related_issues(issue_text, issues)
                    related_issue_text = "\n".join([f"- {issue['title']} (Similarity: {similarity:.2f})" for issue, similarity in related_issues])

                    yield f"Resolution for Issue '{issue['title']}':\n{resolution}\n\nRelated Issues:\n{related_issue_text}"
                else:
                    yield f"Error fetching GitHub issues: {response.status_code}"
            except Exception as e:
                yield f"Error analyzing issue: {e}"
    else:
        messages.append({"role": "user", "content": message})

        response = ""
        for message in client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            token = message.choices[0].delta.content
            response += token
            yield response

with gr.Blocks() as demo:
    with gr.Row():
        github_api_token = gr.Textbox(label="GitHub API Token", type="password")
        github_username = gr.Textbox(label="GitHub Username")
        github_repository = gr.Textbox(label="GitHub Repository")

    chatbot = gr.ChatInterface(
        respond,
        additional_inputs=[
            gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
            gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
            gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
            gr.Slider(
                minimum=0.1,
                maximum=1.0,
                value=0.95,
                step=0.05,
                label="Top-p (nucleus sampling)",
            ),
            github_api_token,
            github_username,
            github_repository
        ],
    )

if __name__ == "__main__":
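    # queue() enables streaming (generator) responses from the chat handler.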
    demo.queue().launch(share=True, server_name="0.0.0.0", server_port=7860)