drkareemkamal committed
Commit 059e5e6 · verified · 1 Parent(s): 3b2373b

Upload 2 files

Files changed (2)
  1. qachat.py +46 -0
  2. requirements.txt +3 -0
qachat.py ADDED
@@ -0,0 +1,46 @@
+ from dotenv import load_dotenv
+ load_dotenv()
+
+ import streamlit as st
+ import os
+ import google.generativeai as genai
+
+ genai.configure(api_key=os.getenv('GOOGLE_API_KEY'))
+
+ # Load the Gemini Pro model and start a chat session with empty history
+ model = genai.GenerativeModel('gemini-pro')
+
+ chat = model.start_chat(history=[])
+
+ def get_gemini_response(question):
+     response = chat.send_message(question, stream=True)
+     return response
+
+
+ ## Initialize our Streamlit app
+
+ st.set_page_config(page_title='Q&A Demo')
+
+ st.header('Gemini LLM Application')
+
+ ## Initialize session state for chat history if it doesn't exist
+ if 'chat_history' not in st.session_state:
+     st.session_state['chat_history'] = []
+
+ user_input = st.text_input('Input: ', key='input')
+
+ submit = st.button('Ask the question')
+
+ if submit and user_input:
+     response = get_gemini_response(user_input)
+     # Add the user query and the streamed response to the session chat history
+     st.session_state['chat_history'].append(('You', user_input))
+     st.subheader('The response is')
+     for chunk in response:
+         st.write(chunk.text)
+         st.session_state['chat_history'].append(('Bot', chunk.text))
+
+ st.subheader('The chat history is')
+
+ for role, text in st.session_state['chat_history']:
+     st.write(f"{role}: {text}")
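
For reference, the same chat flow can be exercised outside Streamlit as a quick smoke test. The snippet below is a minimal sketch, assuming GOOGLE_API_KEY is provided through a local .env file and the google-generativeai package from requirements.txt is installed; the prompt string is only an example.

# Standalone smoke test mirroring the chat flow in qachat.py.
from dotenv import load_dotenv
import os

import google.generativeai as genai

load_dotenv()  # expects GOOGLE_API_KEY in a local .env file (assumption)
genai.configure(api_key=os.getenv('GOOGLE_API_KEY'))

model = genai.GenerativeModel('gemini-pro')
chat = model.start_chat(history=[])

# Stream the reply and print each chunk as it arrives, mirroring the
# chunk-by-chunk st.write() loop in the Streamlit app.
response = chat.send_message('What is Streamlit?', stream=True)
for chunk in response:
    print(chunk.text, end='')
print()

The app itself would typically be launched with streamlit run qachat.py once the dependencies below are installed.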
requirements.txt ADDED
@@ -0,0 +1,3 @@
+ streamlit
+ google-generativeai
+ python-dotenv