# AI4PE-STATIC / hogwats_gemini.py
import google.generativeai as genai
from config import gemini_api
import tiktoken

def get_answer(query, company_name, chunked_raw_content):
    """Answer a due-diligence query about `company_name` from document chunks."""
    genai.configure(api_key=gemini_api)
    # Create the model
    # See https://ai.google.dev/api/python/google/generativeai/GenerativeModel
    generation_config = {
        "temperature": 1,
        "top_p": 0.95,
        "top_k": 64,
        "max_output_tokens": 8192,
        "response_mime_type": "text/plain",
    }
    # Block content rated medium-or-higher risk in all four harm categories.
    safety_settings = [
        {
            "category": "HARM_CATEGORY_HARASSMENT",
            "threshold": "BLOCK_MEDIUM_AND_ABOVE",
        },
        {
            "category": "HARM_CATEGORY_HATE_SPEECH",
            "threshold": "BLOCK_MEDIUM_AND_ABOVE",
        },
        {
            "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
            "threshold": "BLOCK_MEDIUM_AND_ABOVE",
        },
        {
            "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
            "threshold": "BLOCK_MEDIUM_AND_ABOVE",
        },
    ]
    # Strip internal storage prefixes from the chunks before building the context.
    chunks = []
    for chunk in chunked_raw_content:
        chunk = chunk.replace("PDF_FILE_____data_dumpster_", "")
        chunk = chunk.replace("data_dumpster_", "")
        chunks.append(chunk)
    context = str(chunks)
    # Keep the context within the model's input limit: count tokens with
    # tiktoken's cl100k_base encoding (OpenAI's tokenizer, so only an estimate
    # for Gemini) and, if there are 900k or more, truncate the string
    # proportionally as a character-level approximation of the token cut.
    enc = tiktoken.get_encoding("cl100k_base")
    toks = enc.encode(context)
    if len(toks) >= 900000:
        chunk_size = int(len(context) // (len(toks) / 900000))
        context = context[:chunk_size]
    model = genai.GenerativeModel(
        model_name="gemini-1.5-flash-latest",
        safety_settings=safety_settings,
        generation_config=generation_config,
        system_instruction=(
            f"You are an expert at a Private Equity fund. You are helping a "
            f"colleague with their due diligence on {company_name}. All the "
            "questions you will receive are in the context of this due "
            "diligence. Always specify the sources from the context (given "
            "below) that you use.\n"
            "You answer any question based on the following context elements:\n"
            f"{context}"
        ),
    )
    chat_session = model.start_chat(history=[])
    response = chat_session.send_message(
        f"{query} - (Bain style answer + sources properly renamed if needed)"
    )
    return response.text
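
# A minimal usage sketch, assuming `config.py` provides a valid `gemini_api`
# key. The company name and chunks below are illustrative placeholders, not
# real data-room content.
if __name__ == "__main__":
    sample_chunks = [
        "PDF_FILE_____data_dumpster_acme_financials.pdf: Revenue grew 12% YoY.",
        "data_dumpster_acme_contracts.pdf: Key supplier contracts expire in 2026.",
    ]
    print(get_answer("Summarize the revenue trend.", "Acme Corp", sample_chunks))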