import os
import ast
import json

import nest_asyncio
import openai
import streamlit as st
from llama_index.llms.openai import OpenAI

nest_asyncio.apply()

# Alternative local backend (commented out):
# import ollama
# from llama_index.llms.ollama import Ollama
# from llama_index.core.llms import ChatMessage

# OpenAI credentials
key = os.getenv('OPENAI_API_KEY')
openai.api_key = key
os.environ["OPENAI_API_KEY"] = key

# Model used for generation
model = "gpt-3.5-turbo"

# Directory where uploaded files are stored
os.makedirs("./data", exist_ok=True)


def extract_functions_from_file(file_path):
    """Parse a Python file and return {function_name: source_code} for every function."""
    with open(file_path, "r") as file:
        file_content = file.read()

    parsed_content = ast.parse(file_content)
    functions = {}
    for node in ast.walk(parsed_content):
        if isinstance(node, ast.FunctionDef):
            func_name = node.name
            func_body = ast.get_source_segment(file_content, node)
            functions[func_name] = func_body
    return functions


def res(prompt):
    """Send the prompt to the OpenAI chat completions API and return the reply text."""
    response = openai.chat.completions.create(
        model=model,
        messages=[
            {
                "role": "system",
                "content": "You are a helpful coding assistant. Your task is to generate test cases. "
                           "If the function can't be found, politely refuse.",
            },
            {"role": "user", "content": prompt},
        ],
    )
    return response.choices[0].message.content


# Streamlit UI
st.title("Auto Test Case Generation using LLM")

uploaded_files = st.file_uploader("Upload Python (.py) files", type=".py", accept_multiple_files=True)

if uploaded_files:
    for uploaded_file in uploaded_files:
        # Save the uploaded file to disk
        with open(f"./data/{uploaded_file.name}", 'wb') as f:
            f.write(uploaded_file.getbuffer())
        st.success("File uploaded...")
        st.success("Fetching list of functions...")

        file_path = f"./data/{uploaded_file.name}"

        functions = extract_functions_from_file(file_path)
        list_of_functions = list(functions.keys())
        st.write(list_of_functions)

    # Initialize session state for chat messages
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Display chat messages from history on app rerun
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    # Accept user input
    if func := st.chat_input("Enter the function name for generating test cases:"):
        st.session_state.messages.append({"role": "assistant", "content": f"Generating test cases for {func}"})
        st.success(f"Generating test cases for {func}")

        func = ''.join(func.split())
        if func not in list_of_functions:
            st.write("Incorrect function name")
        else:
            snippet = functions[func]

            # Generation with a local model (commented out):
            # resp = ollama.generate(model='codellama',
            #     prompt=f"""You are a helpful coding assistant. Your task is to generate unit test cases for this function : {snippet}\
            #     \n\nPolitely refuse if the function is not suitable for generating test cases.
            #     \n\nGenerate at least 5 unit test cases. Include a couple of edge cases as well.
            #     \n\nThere should be no duplicate test cases. Avoid generating repeated statements.
            #     """)

            # Generation with OpenAI
            prompt = f"""You are a helpful coding assistant. Your task is to generate unit test cases for this function : {snippet}\
            \n\nPolitely refuse if the function is not suitable for generating test cases.
            \n\nGenerate at least 5 unit test cases. Include a couple of edge cases as well.
            \n\nThere should be no duplicate test cases. Avoid generating repeated statements.
            """
            print(prompt)
            resp = res(prompt)

            st.session_state.messages.append({"role": "assistant", "content": f"{resp}"})
            st.markdown(resp)
            # With the Ollama backend the response is a dict, so the text would be read as resp['response']:
            # st.session_state.messages.append({"role": "assistant", "content": f"{resp['response']}"})
            # st.markdown(resp['response'])
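
# Usage note (a sketch, not part of the original script): assuming this file is
# saved as app.py and OPENAI_API_KEY is set in the environment, the app can be
# launched with Streamlit's CLI:
#
#     streamlit run app.py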