|
import ast
import json
import os

import nest_asyncio
import openai
import streamlit as st
from llama_index.llms.openai import OpenAI

# Patch the running event loop so async llama-index/OpenAI calls work
# inside Streamlit's script runner.
nest_asyncio.apply()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Load the OpenAI API key from the environment and propagate it both to the
# openai client and back into os.environ (downstream libraries such as
# llama-index read the env var directly).
key = os.getenv("OPENAI_API_KEY")
if not key:
    # Original code crashed with TypeError on `os.environ[...] = None`
    # when the variable was unset; fail with a clear message instead.
    st.error("OPENAI_API_KEY environment variable is not set.")
    st.stop()
openai.api_key = key
os.environ["OPENAI_API_KEY"] = key
|
|
|
|
|
# Page chrome: heading plus the multi-file .py uploader that drives the app.
st.title("Auto Test Case Generation using LLM")

uploaded_files = st.file_uploader(
    "Upload a python(.py) file",
    type=".py",
    accept_multiple_files=True,
)
|
|
|
if uploaded_files:
    # Ensure the target directory exists before writing; on a fresh checkout
    # open(..., "wb") would otherwise raise FileNotFoundError.
    os.makedirs("./data", exist_ok=True)
    for uploaded_file in uploaded_files:
        # Persist each upload to disk so it can be re-read and AST-parsed.
        with open(f"./data/{uploaded_file.name}", "wb") as f:
            f.write(uploaded_file.getbuffer())
        st.success("File uploaded...")

    st.success("Fetching list of functions...")
    # NOTE(review): only the *last* uploaded file is analysed below — the loop
    # variable deliberately(?) leaks out of the for loop. Confirm multi-file intent.
    file_path = f"./data/{uploaded_file.name}"
|
def extract_functions_from_file(file_path): |
|
with open(file_path, "r") as file: |
|
file_content = file.read() |
|
|
|
parsed_content = ast.parse(file_content) |
|
functions = {} |
|
|
|
for node in ast.walk(parsed_content): |
|
if isinstance(node, ast.FunctionDef): |
|
func_name = node.name |
|
func_body = ast.get_source_segment(file_content, node) |
|
functions[func_name] = func_body |
|
|
|
return functions |
|
|
|
functions = extract_functions_from_file(file_path) |
|
list_of_functions = list(functions.keys()) |
|
st.write(list_of_functions) |
|
|
|
def res(prompt): |
|
|
|
response = openai.chat.completions.create( |
|
model=model, |
|
messages=[ |
|
{"role":"system", |
|
"content":"You are a helpful coding assistant. Your task is to generate test cases. If the function can't be found, politely refuse" |
|
}, |
|
{"role": "user", |
|
"content": prompt, |
|
} |
|
] |
|
) |
|
|
|
return response.choices[0].message.content |
|
|
|
|
|
if "messages" not in st.session_state: |
|
st.session_state.messages = [] |
|
|
|
|
|
for message in st.session_state.messages: |
|
with st.chat_message(message["role"]): |
|
st.markdown(message["content"]) |
|
|
|
|
|
if func := st.chat_input("Enter the function name for generating test cases:"): |
|
st.session_state.messages.append({"role": "assistant", "content": f"Generating test cases for {func}"}) |
|
st.success(f"Generating test cases for {func}") |
|
|
|
func = ''.join(func.split()) |
|
|
|
if func not in list_of_functions: |
|
st.write("Incorrect function name") |
|
|
|
else: |
|
snippet = functions[func] |
|
|
|
|
|
model = "gpt-3.5-turbo" |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
prompt=f"""You are a helpful coding assistant. Your task is to generate unit test cases for this function : {snippet}\ |
|
\n\nPolitely refuse if the function is not suitable for generating test cases. |
|
\n\nGenerate atleast 5 unit test case. Include couple of edge cases as well. |
|
\n\nThere should be no duplicate test cases. Avoid generating repeated statements. |
|
""" |
|
|
|
print(prompt) |
|
|
|
resp = res(prompt) |
|
st.session_state.messages.append({"role": "assistant", "content": f"{resp}"}) |
|
st.markdown(resp) |
|
|
|
|
|
|