datawithsuman committed
Commit 2429b6f · verified · 1 Parent(s): 924deef

Update app.py

Files changed (1)
  1. app.py +13 -7
app.py CHANGED
@@ -6,6 +6,7 @@ import openai
  from llama_index.llms.openai import OpenAI
  from llama_index.core.llms import ChatMessage
  from llama_index.llms.anthropic import Anthropic
+ from llama_index.llms.mistralai import MistralAI
  import nest_asyncio

  nest_asyncio.apply()
@@ -21,8 +22,13 @@ nest_asyncio.apply()
  # os.environ["OPENAI_API_KEY"] = key

  # Anthropic credentials
- key = os.getenv('CLAUDE_API_KEY')
- os.environ["ANTHROPIC_API_KEY"] = key
+ # key = os.getenv('CLAUDE_API_KEY')
+ # os.environ["ANTHROPIC_API_KEY"] = key
+
+ # Mistral
+ key = os.getenv('CODESTRAL_KEY')
+ os.environ["MISTRAL_API_KEY"] = key
+

  # Streamlit UI
  st.title("Auto Test Case Generation using LLM")
@@ -56,7 +62,7 @@ if uploaded_files:
  list_of_functions = list(functions.keys())
  st.write(list_of_functions)

- def res(prompt, model):
+ def res(prompt, model=None):

  # response = openai.chat.completions.create(
  # model=model,
@@ -73,7 +79,8 @@ if uploaded_files:
  ChatMessage(role="system", content="You are a sincere and helpful coding assistant"),
  ChatMessage(role="user", content=prompt),
  ]
- resp = Anthropic(model=model).chat(response)
+ # resp = Anthropic(model=model).chat(response)
+ resp = MistralAI().chat(messages)
  return resp

  # Initialize session state for chat messages
@@ -100,10 +107,9 @@ if uploaded_files:

  # Generation
  # model = "gpt-3.5-turbo"
- # claude-3-sonnet-20240229
  # model = "claude-3-haiku-20240307"
  # model = "claude-3-sonnet-20240229"
- model = "claude-3-opus-20240229"
+ # model = "claude-3-opus-20240229"


  # Generation
@@ -127,7 +133,7 @@ if uploaded_files:

  # print(prompt)

- resp = res(prompt, model)
+ resp = res(prompt, model = None)
  st.session_state.messages.append({"role": "assistant", "content": f"{resp}"})
  st.markdown(resp)
  # st.session_state.messages.append({"role": "assistant", "content": f"{resp['response']}"})
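
Note for readers skimming the diff: the net effect of this commit is that the res() helper now sends the prompt to Mistral via llama_index instead of Anthropic, with the Codestral key read from CODESTRAL_KEY and mapped onto MISTRAL_API_KEY. The snippet below is a minimal standalone sketch of that call path, not the full app.py; it assumes llama-index-llms-mistralai is installed and CODESTRAL_KEY is set, and the example prompt and print() call at the end are illustrative only.

import os
from llama_index.core.llms import ChatMessage
from llama_index.llms.mistralai import MistralAI

# Map the Codestral key onto the variable llama_index's MistralAI client reads.
os.environ["MISTRAL_API_KEY"] = os.getenv("CODESTRAL_KEY", "")

def res(prompt, model=None):
    # 'model' is kept only for signature compatibility with the old Anthropic path;
    # MistralAI() falls back to the library's default model when none is given.
    messages = [
        ChatMessage(role="system", content="You are a sincere and helpful coding assistant"),
        ChatMessage(role="user", content=prompt),
    ]
    resp = MistralAI().chat(messages)  # returns a llama_index ChatResponse
    return resp

# Illustrative usage (hypothetical prompt, not part of app.py):
print(res("Write pytest unit tests for a function add(a, b) that returns a + b."))

Passing an explicit model name to MistralAI(model=...) instead of relying on the default would pin the generation behavior, much as the earlier Anthropic path selected between the commented-out Claude variants.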