Ley_Fill7 committed on
Commit
07f7e2b
·
1 Parent(s): e950dd2

Changed app.py to preserve conversation history

Browse files
Files changed (1) hide show
  1. app.py +45 -30
app.py CHANGED
@@ -1,6 +1,6 @@
1
- from openai import OpenAI
2
- import streamlit as st
3
  import os
 
 
4
 
5
  api_key = os.getenv("NVIDIANIM_API_KEY")
6
 
@@ -11,32 +11,47 @@ client = OpenAI(
11
 
12
  model_name = "meta/llama-3.1-405b-instruct"
13
 
14
def get_llama_response(question):
    """Send a single, history-free question to the model and return the
    streamed reply as one stripped string.

    Args:
        question: The user's question text, sent as a lone "user" message.

    Returns:
        The complete assistant reply with surrounding whitespace removed.
    """
    stream = client.chat.completions.create(
        model=model_name,
        messages=[{"role": "user", "content": question}],
        temperature=0.2,
        top_p=0.7,
        max_tokens=1024,
        stream=True,
    )
    # Collect streamed deltas and join once — avoids repeated string concat.
    parts = []
    for event in stream:
        delta = event.choices[0].delta.content
        if delta is not None:
            parts.append(delta)
    return "".join(parts).strip()
29
-
30
# Simple one-shot Q&A UI: a text box plus a Submit button.
st.title("Ask Llama 3.1 405B on Nvidia NIM")
user_question = st.text_input("Enter your question:")

if st.button("Submit"):
    if not user_question:
        # Nothing typed — prompt the user instead of calling the model.
        st.warning("Please enter a question.")
    else:
        st.write("**Llama 3.1 405B Response:**")
        st.write(get_llama_response(user_question))
40
-
41
-
42
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import os
2
+ import streamlit as st
3
+ from openai import OpenAI
4
 
5
  api_key = os.getenv("NVIDIANIM_API_KEY")
6
 
 
11
 
12
  model_name = "meta/llama-3.1-405b-instruct"
13
 
14
# Create the conversation history exactly once per browser session;
# reruns reuse the existing list.
st.session_state.setdefault("messages", [])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
16
 
17
def get_llama_response(question):
    """Record *question* in the chat history, query the model with the full
    history, stream the reply, store it in history, and return it.

    Args:
        question: The user's latest message text.

    Returns:
        The complete assistant reply as a single string.
    """
    st.session_state.messages.append({"role": "user", "content": question})
    stream = client.chat.completions.create(
        model=model_name,
        messages=st.session_state.messages,  # full history for context
        temperature=0.2,
        top_p=0.7,
        max_tokens=1024,
        stream=True,
    )
    # Join streamed deltas once at the end rather than concatenating per chunk.
    chunks = []
    for event in stream:
        delta = event.choices[0].delta.content
        if delta is not None:
            chunks.append(delta)
    reply = "".join(chunks)
    st.session_state.messages.append({"role": "assistant", "content": reply})
    return reply
35
+
36
def generate_response():
    """Consume the pending user input from session state and return the
    model's reply for it."""
    pending = st.session_state.user_input
    # Clear the stored input so a later rerun does not re-submit it.
    st.session_state.user_input = ""
    return get_llama_response(pending)
41
+
42
# Render the conversation so far (history is rebuilt on every rerun).
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# BUG FIX: st.chat_input already submits on Enter, so the extra
# st.button("Submit") gate was broken — on the rerun triggered by the
# button click, chat_input returns None and the user's message was lost.
# Generate the reply directly when chat_input yields a value.
user_input = st.chat_input("Your message")
if user_input:
    # Echo the user's message now; get_llama_response also appends it to
    # history, so the next rerun renders it from the loop above.
    with st.chat_message("user"):
        st.markdown(user_input)
    response = get_llama_response(user_input)
    with st.chat_message("assistant"):
        st.markdown(response)