mazed committed on
Commit
185a310
·
verified ·
1 Parent(s): 9012107

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +61 -61
app.py CHANGED
@@ -1,61 +1,61 @@
1
- from dotenv import load_dotenv
2
- import streamlit as st
3
- import os
4
- import google.generativeai as genai
5
-
6
- # Load environment variables from .env file
7
- load_dotenv()
8
-
9
- # Configure Gemini Pro model
10
- genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
11
- model = genai.GenerativeModel("gemini-1.5-pro")
12
-
13
- # Initialize Streamlit app
14
- st.set_page_config(page_title="Gemmy")
15
-
16
- # Custom header with centered text
17
- header_html = """
18
- <style>
19
- .header {
20
- text-align: center;
21
- font-family: Arial, sans-serif;
22
- font-size: 2em;
23
- color: #F8F9F9;
24
- margin-top: 50px;
25
- margin-bottom: 20px;
26
- }
27
- </style>
28
- <div class="header">
29
- Gemmy
30
- </div>
31
- """
32
- st.markdown(header_html, unsafe_allow_html=True)
33
-
34
- # Initialize session state for chat history
35
- if "chat_history" not in st.session_state:
36
- st.session_state.chat_history = []
37
- if 'chat_session' not in st.session_state:
38
- st.session_state['chat_session'] = model.start_chat(history=[])
39
-
40
- # Function to get response from Gemini model
41
- def get_gemini_response():
42
- question = st.session_state.input
43
- chat = st.session_state['chat_session']
44
- response = chat.send_message(question, stream=True)
45
- response_text = "".join(chunk.text for chunk in response)
46
-
47
- # Save question and response to chat history
48
- st.session_state.chat_history.append({"question": question, "response": response_text})
49
-
50
- # Clear the input box after submission
51
- st.session_state.input = ""
52
-
53
- # Display chat history
54
- for entry in st.session_state.chat_history:
55
- st.write(f"***You :*** {entry['question']}")
56
- st.write(f"***Gemmy :*** {entry['response']}")
57
- st.markdown("<hr>", unsafe_allow_html=True) # Add a line below each response
58
-
59
-
60
- # Input box
61
- st.text_input(" ", key="input", on_change=get_gemini_response, placeholder="Ask Gemmy")
 
1
+ from dotenv import load_dotenv
2
+ import streamlit as st
3
+ import os
4
+ import google.generativeai as genai
5
+
6
+ # Load environment variables
7
+ load_dotenv()
8
+
9
+ # Configure Gemini Pro model
10
+ genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
11
+ model = genai.GenerativeModel("gemini-1.5-pro")
12
+
13
+ # Initialize Streamlit app
14
+ st.set_page_config(page_title="Gemmy")
15
+
16
+ # Custom header with centered text
17
+ header_html = """
18
+ <style>
19
+ .header {
20
+ text-align: center;
21
+ font-family: Arial, sans-serif;
22
+ font-size: 2em;
23
+ color: #F8F9F9;
24
+ margin-top: 50px;
25
+ margin-bottom: 20px;
26
+ }
27
+ </style>
28
+ <div class="header">
29
+ Gemmy
30
+ </div>
31
+ """
32
+ st.markdown(header_html, unsafe_allow_html=True)
33
+
34
+ # Initialize session state for chat history
35
+ if "chat_history" not in st.session_state:
36
+ st.session_state.chat_history = []
37
+ if 'chat_session' not in st.session_state:
38
+ st.session_state['chat_session'] = model.start_chat(history=[])
39
+
40
+ # Function to get response from Gemini model
41
def get_gemini_response():
    """Streamlit ``on_change`` callback: send the typed question to Gemini.

    Reads the question from ``st.session_state.input``, streams the model's
    reply, appends ``{"question": ..., "response": ...}`` to
    ``st.session_state.chat_history``, and clears the input box.
    """
    question = st.session_state.input.strip()
    # Guard: pressing Enter on an empty input box still fires on_change;
    # without this early return a blank prompt would be sent to the API,
    # which raises an error.
    if not question:
        return

    chat = st.session_state["chat_session"]
    # Stream the reply and stitch the chunks into a single string.
    response = chat.send_message(question, stream=True)
    response_text = "".join(chunk.text for chunk in response)

    # Persist the exchange so the display loop can replay it on rerun.
    st.session_state.chat_history.append(
        {"question": question, "response": response_text}
    )

    # Clearing the widget's session-state key empties the input box
    # after submission.
    st.session_state.input = ""
52
+
53
+ # Display chat history
54
+ for entry in st.session_state.chat_history:
55
+ st.write(f"***You :*** {entry['question']}")
56
+ st.write(f"***Gemmy :*** {entry['response']}")
57
+ st.markdown("<hr>", unsafe_allow_html=True) # Add a line below each response
58
+
59
+
60
+ # Input box
61
+ st.text_input(" ", key="input", on_change=get_gemini_response, placeholder="Ask Gemmy")