import streamlit as st
import google.generativeai as genai
import os
from dotenv import load_dotenv
import http.client
import json
load_dotenv()
# Configure the API key
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
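# Note on configuration: credentials are read from a local .env file loaded above.
# A minimal .env would contain at least (SERPER_API_KEY is an assumed, optional
# variable name used as an override in perform_web_search below):
#   GOOGLE_API_KEY=<your Gemini API key>
#   SERPER_API_KEY=<your serper.dev key>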
safety_settings = [
    {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_DANGEROUS_CONTENT", "threshold": "BLOCK_NONE"},
]
model = genai.GenerativeModel('gemini-2.0-flash-exp', tools='code_execution',
                              safety_settings=safety_settings,
                              system_instruction="Tu es un assistant intelligent. ton but est d'assister au mieux que tu peux. tu as été créé par Aenir et tu t'appelles Mariam")
def perform_web_search(query):
    conn = http.client.HTTPSConnection("google.serper.dev")
    payload = json.dumps({"q": query})
    headers = {
        # SERPER_API_KEY is an assumed environment variable name; the original
        # hard-coded key is kept as a fallback so behaviour is unchanged.
        'X-API-KEY': os.getenv("SERPER_API_KEY", '9b90a274d9e704ff5b21c0367f9ae1161779b573'),
        'Content-Type': 'application/json'
    }
    try:
        conn.request("POST", "/search", payload, headers)
        res = conn.getresponse()
        data = json.loads(res.read().decode("utf-8"))
        return data
    except Exception as e:
        st.error(f"Erreur lors de la recherche web : {e}")
        return None
    finally:
        conn.close()
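# For reference, a sketch of the Serper response fields consumed below (abridged;
# field names are taken from format_search_results, the real payload has more keys):
#   {
#     "knowledgeGraph": {"title": "...", "type": "...", "description": "..."},
#     "organic": [{"title": "...", "snippet": "...", "link": "..."}],
#     "peopleAlsoAsk": [{"question": "...", "snippet": "..."}]
#   }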
def format_search_results(data):
    if not data:
        return "Aucun résultat trouvé"

    result = ""

    # Knowledge Graph
    if 'knowledgeGraph' in data:
        kg = data['knowledgeGraph']
        result += f"### {kg.get('title', '')}\n"
        result += f"*{kg.get('type', '')}*\n\n"
        result += f"{kg.get('description', '')}\n\n"

    # Organic Results
    if 'organic' in data:
        result += "### Résultats principaux:\n"
        for item in data['organic'][:3]:  # Limit to top 3 results
            result += f"- **{item['title']}**\n"
            result += f" {item['snippet']}\n"
            result += f" [Lien]({item['link']})\n\n"

    # People Also Ask
    if 'peopleAlsoAsk' in data:
        result += "### Questions fréquentes:\n"
        for item in data['peopleAlsoAsk'][:2]:  # Limit to top 2 questions
            result += f"- **{item['question']}**\n"
            result += f" {item['snippet']}\n\n"

    return result
def role_to_streamlit(role):
    if role == "model":
        return "assistant"
    else:
        return role
# Add chat and settings to session state
if "chat" not in st.session_state:
st.session_state.chat = model.start_chat(history=[])
if "web_search" not in st.session_state:
st.session_state.web_search = False
# Display Form Title
st.title("Mariam AI!")
# Settings section
with st.sidebar:
    st.title("Paramètres")
    st.session_state.web_search = st.toggle("Activer la recherche web", value=st.session_state.web_search)

    # File upload section
    uploaded_file = st.file_uploader("Télécharger un fichier (image/document)", type=['jpg', 'mp4', 'mp3', 'jpeg', 'png', 'pdf', 'txt'])
# Display chat messages
for message in st.session_state.chat.history:
    with st.chat_message(role_to_streamlit(message.role)):
        st.markdown(message.parts[0].text)
# Function to handle file upload with Gemini
def process_uploaded_file(file):
    if file is not None:
        # Make sure the temp directory exists before writing (it is also created
        # at the end of the script).
        os.makedirs("temp", exist_ok=True)
        with open(os.path.join("temp", file.name), "wb") as f:
            f.write(file.getbuffer())
        try:
            gemini_file = genai.upload_file(os.path.join("temp", file.name))
            return gemini_file
        except Exception as e:
            st.error(f"Erreur lors du téléchargement du fichier : {e}")
            return None
# Chat input and processing
if prompt := st.chat_input("Hey?"):
    uploaded_gemini_file = None
    if uploaded_file:
        uploaded_gemini_file = process_uploaded_file(uploaded_file)

    # Display user message
    st.chat_message("user").markdown(prompt)
    print(prompt)
    print("------------")

    try:
        # Perform web search if enabled
        web_results = None
        if st.session_state.web_search:
            with st.spinner("Recherche web en cours..."):
                web_results = perform_web_search(prompt)
            if web_results:
                formatted_results = format_search_results(web_results)
                prompt = f"""Question: {prompt}\n\nRésultats de recherche web:\n{formatted_results}\n\nPourrais-tu analyser ces informations et me donner une réponse complète?"""

        # Send message to Gemini
        if uploaded_gemini_file:
            response = st.session_state.chat.send_message([uploaded_gemini_file, "\n\n", prompt])
        else:
            response = st.session_state.chat.send_message(prompt)
        print(response.text)

        # Display assistant response
        with st.chat_message("assistant"):
            st.markdown(response.text)
    except Exception as e:
        st.error(f"Erreur lors de l'envoi du message : {e}")
# Create temp directory
os.makedirs("temp", exist_ok=True)