# fury-bot / main.py
from flask import Flask
from flask import request
from groq import Groq
import google.generativeai
import os

app = Flask(__name__)

# Groq and Gemini clients are configured from environment variables.
groq_client = Groq(
    api_key=os.environ.get("GROQ_API_KEY")
)
google.generativeai.configure(
    api_key=os.environ.get("GEMINI_API_KEY"))
@app.route("/api/groq/generate", methods=['POST'])
def groq_completion():
"""
{
"model": "llama3-70b-8192",
"prompt": "why is the sky blue?"
}
"""
message = request.get_json()
model = message['model']
prompt = message['prompt']
chat_completion = groq_client.chat.completions.create(
messages=[
{
"role": "user",
"content": prompt,
}
],
model=model,
)
return chat_completion.to_dict()
@app.route("/api/google/generate", methods=['POST'])
def google_completion():
"""
{
"model": "gemini-1.5-flash",
"prompt": ""
}
"""
message = request.get_json()
model = message['model']
prompt = message['prompt']
llm_model = google.generativeai.GenerativeModel(model)
chat_completion = llm_model.generate_content(prompt)
return chat_completion.to_dict()
# Example requests:
# curl -v -X POST 'https://robinroy03-fury-bot.hf.space/api/groq/generate' --header 'Content-Type: application/json' --data '{"model": "llama3-70b-8192", "prompt": "why is sky blue?"}'
# curl -v -X POST 'http://127.0.0.1:8000/api/google/generate' --header 'Content-Type: application/json' --data '{"model": "gemini-1.5-flash", "prompt": "why is sky blue?"}'
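
# A minimal local entry point, added as a sketch: the original file does not show
# how the app is launched (a Hugging Face Space typically starts it via its own
# launcher, e.g. gunicorn), so the host and port below are assumptions taken from
# the 127.0.0.1:8000 curl example above.
if __name__ == "__main__":
    app.run(host="0.0.0.0", port=8000)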