Docfile committed on
Commit
2ca54a6
·
verified ·
1 Parent(s): e692355

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +23 -55
app.py CHANGED
@@ -2,15 +2,12 @@ import streamlit as st
2
  import google.generativeai as genai
3
  import os
4
  from dotenv import load_dotenv
5
- from PIL import Image
6
- import io
7
- import mimetypes
8
 
9
  load_dotenv()
10
-
11
  # Configure the API key
12
  genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
13
 
 
14
  safety_settings = [
15
  {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
16
  {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE"},
@@ -18,11 +15,17 @@ safety_settings = [
18
  {"category": "HARM_CATEGORY_DANGEROUS_CONTENT", "threshold": "BLOCK_NONE"},
19
  ]
20
 
21
- model = genai.GenerativeModel(
22
- 'gemini-1.5-flash',
23
- safety_settings=safety_settings,
24
- system_instruction="Tu es un assistant intelligent. ton but est d'assister au mieux que tu peux. tu as été créé par Aenir et tu t'appelles Mariam"
25
- )
 
 
 
 
 
 
26
 
27
  def role_to_streamlit(role):
28
  if role == "model":
@@ -30,6 +33,7 @@ def role_to_streamlit(role):
30
  else:
31
  return role
32
 
 
33
  # Add a Gemini Chat history object to Streamlit session state
34
  if "chat" not in st.session_state:
35
  st.session_state.chat = model.start_chat(history=[])
@@ -40,52 +44,16 @@ st.title("Mariam AI!")
40
  # Display chat messages from history above current input box
41
  for message in st.session_state.chat.history:
42
  with st.chat_message(role_to_streamlit(message.role)):
43
- for part in message.parts:
44
- if part.text: # Check for text content
45
- st.markdown(part.text)
46
- elif part.file_data: # Check for file data
47
- try:
48
- # Infer MIME type if not provided
49
- mime_type = part.file_data.mime_type
50
- if not mime_type:
51
- mime_type = mimetypes.guess_type(part.file_data.file_name)[0]
52
-
53
- if mime_type and mime_type.startswith("image/"):
54
- image = Image.open(io.BytesIO(part.file_data.data))
55
- st.image(image)
56
- else:
57
- st.write(f"File: {part.file_data.file_name} (MIME type: {mime_type})")
58
- except Exception as e:
59
- st.error(f"Error displaying file: {e}")
60
-
61
- # Accept user's next message and file uploads
62
  if prompt := st.chat_input("Hey?"):
63
- uploaded_file = st.file_uploader("Choose a file", type=["jpg", "jpeg", "png", "pdf"])
64
-
65
- parts = [prompt]
66
- if uploaded_file:
67
- bytes_data = uploaded_file.getvalue()
68
- parts.append({
69
- "file_data": {
70
- "mime_type": uploaded_file.type,
71
- "file_name": uploaded_file.name,
72
- "data": bytes_data
73
- }
74
- })
75
-
76
- # Display the uploaded image
77
- if uploaded_file.type.startswith("image/"):
78
- image = Image.open(uploaded_file)
79
- with st.chat_message("user"):
80
- st.image(image, caption=f"Uploaded Image: {uploaded_file.name}")
81
-
82
- # Display user's message
83
- with st.chat_message("user"):
84
- st.markdown(prompt)
85
-
86
- # Send message to Gemini
87
- response = st.session_state.chat.send_message(parts)
88
-
89
- # Display Gemini's response
90
  with st.chat_message("assistant"):
91
  st.markdown(response.text)
 
2
  import google.generativeai as genai
3
  import os
4
  from dotenv import load_dotenv
 
 
 
5
 
6
  load_dotenv()
 
7
  # Configure the API key
8
  genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
9
 
10
+
11
  safety_settings = [
12
  {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
13
  {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE"},
 
15
  {"category": "HARM_CATEGORY_DANGEROUS_CONTENT", "threshold": "BLOCK_NONE"},
16
  ]
17
 
18
+
19
+ genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
20
+
21
+ model = genai.GenerativeModel('gemini-1.5-flash',safety_settings=safety_settings,
22
+ system_instruction="Tu es un assistant intelligent. ton but est d'assister au mieux que tu peux. tu as été créé par Aenir et tu t'appelles Mariam")
23
+
24
+
25
+
26
+ # Function to get response from the model
27
+ # Gemini uses 'model' for assistant; Streamlit uses 'assistant'
28
+
29
 
30
  def role_to_streamlit(role):
31
  if role == "model":
 
33
  else:
34
  return role
35
 
36
+
37
  # Add a Gemini Chat history object to Streamlit session state
38
  if "chat" not in st.session_state:
39
  st.session_state.chat = model.start_chat(history=[])
 
44
  # Display chat messages from history above current input box
45
  for message in st.session_state.chat.history:
46
  with st.chat_message(role_to_streamlit(message.role)):
47
+ st.markdown(message.parts[0].text)
48
+
49
+ # Accept user's next message, add to context, resubmit context to Gemini
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
50
  if prompt := st.chat_input("Hey?"):
51
+ # Display user's last message
52
+ st.chat_message("user").markdown(prompt)
53
+
54
+ # Send user entry to Gemini and read the response
55
+ response = st.session_state.chat.send_message(prompt)
56
+
57
+ # Display last
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
58
  with st.chat_message("assistant"):
59
  st.markdown(response.text)