from flask import Flask, request, jsonify
from mistral import Mistral7B
from gpt import ChatGpt
from news import News
from datetime import datetime
from os import listdir
from web import Online_Scraper
import requests
from RT import RealTimeGemini
from time import time as t
import os

app = Flask(__name__)

# Usage counter returned by get_counters(); it is not defined anywhere in the
# original dump, so an empty dict is declared here as a placeholder.
counter = {}
# The route decorators are missing from the dump; the paths used below are assumptions.
@app.route('/generate', methods=['POST'])
def generate():
    # Get data from the JSON request body
    data = request.json
    prompt = data.get('prompt', '')
    messages = data.get('messages', [])
    key = data.get('key', '')
    # Call the Mistral7B wrapper
    response, updated_messages, execution_time = Mistral7B(prompt, messages, key)
    # Prepare the response
    result = {
        'response': response,
        'messages': updated_messages,
        'execution_time': execution_time
    }
    return jsonify(result)
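# Example call for the endpoint above (illustrative sketch; the '/generate' path
# and the localhost port are assumptions, not part of the original file):
#
#   requests.post("http://localhost:5000/generate",
#                 json={"prompt": "Hi", "messages": [], "key": "<api-key>"})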
@app.route('/chat', methods=['POST'])  # assumed path
def chat():
    # Get data from the JSON request body
    data = request.json
    user_message = data.get('message', '')
    messages = data.get('messages', [])
    # Call the ChatGpt wrapper
    response, updated_messages, execution_time = ChatGpt(user_message, messages)
    # Prepare the response
    result = {
        'response': response,
        'messages': updated_messages,
        'execution_time': execution_time
    }
    return jsonify(result)
@app.route('/news', methods=['GET'])  # assumed path
def get_news():
    # Get data from the query string
    key = request.args.get('key', '')
    cache_flag = request.args.get('cache', 'True').lower() == 'true'
    # Call the News wrapper
    news, error, execution_time = News(key, cache_flag)
    # Prepare the response
    result = {
        'news': news,
        'error': error,
        'execution_time': execution_time
    }
    return jsonify(result)
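# Example call (illustrative; the '/news' path is an assumption):
#
#   requests.get("http://localhost:5000/news",
#                params={"key": "<newsapi-key>", "cache": "true"})
#
# The 'cache' flag is parsed case-insensitively and defaults to True.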
@app.route('/web', methods=['GET'])  # assumed path
def Web():
    # The search query is passed as the 'prompt' query parameter
    key = request.args.get('prompt', '')
    result = {
        'response': Online_Scraper(key)
    }
    return jsonify(result)
@app.route('/imgen', methods=['POST'])  # assumed path
def IMGEN():
    # Forward the prompt to the Hugging Face Inference API (Stable Diffusion 2.1)
    data = request.json
    prompt = data.get('prompt', '')
    key = data.get('key', '')
    API_URL = "https://api-inference.huggingface.co/models/stabilityai/stable-diffusion-2-1"
    headers = {"Authorization": f"Bearer {key}"}
    # Return the raw image bytes from the API as the response body
    return requests.post(API_URL, headers=headers, json={"inputs": prompt}).content
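# The endpoint above returns the raw image bytes produced by the Inference API.
# A client could persist them like this (sketch; host and '/imgen' path are assumptions):
#
#   r = requests.post("http://localhost:5000/imgen",
#                     json={"prompt": "a watercolor fox", "key": "<hf-token>"})
#   with open("image.png", "wb") as f:
#       f.write(r.content)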
@app.route('/gemini', methods=['POST'])  # assumed path
def Genration():
    # Start the timer before the try block so the except handler can also use it
    C = t()
    try:
        import google.generativeai as genai
        generation_config = {
            "temperature": 0.7,
            "top_p": 1,
            "top_k": 1,
            "max_output_tokens": 300,
        }
        safety_settings = [
            {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
            {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE"},
            {"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", "threshold": "BLOCK_NONE"},
            {"category": "HARM_CATEGORY_DANGEROUS_CONTENT", "threshold": "BLOCK_NONE"},
        ]
        model = genai.GenerativeModel(
            model_name="gemini-pro",
            generation_config=generation_config,
            safety_settings=safety_settings)
        data = request.json
        messages = data.get('messages', [])
        key = data.get('key', '')
        genai.configure(api_key=key)
        response = model.generate_content(messages)
        # Prepare the response
        result = {
            'response': response.text,
            'execution_time': t() - C
        }
        return jsonify(result)
    except Exception as e:
        result = {
            'response': f"{e}",
            'execution_time': t() - C
        }
        return jsonify(result)
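# generate_content accepts either a plain string or a list of structured messages,
# so the 'messages' payload for the endpoint above would look roughly like this
# (sketch; the conversation content is illustrative):
#
#   [{"role": "user", "parts": ["What is Flask?"]},
#    {"role": "model", "parts": ["Flask is a Python micro web framework."]},
#    {"role": "user", "parts": ["Show a minimal route."]}]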
@app.route('/gemini-rt', methods=['POST'])  # assumed path
def GenrationRT():
    # Start the timer before the try block so the except handler can also use it
    C = t()
    try:
        import google.generativeai as genai
        generation_config = {
            "temperature": 0.9,
            "top_p": 1,
            "top_k": 1,
            "max_output_tokens": 2048,
        }
        safety_settings = [
            {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
            {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE"},
            {"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", "threshold": "BLOCK_NONE"},
            {"category": "HARM_CATEGORY_DANGEROUS_CONTENT", "threshold": "BLOCK_NONE"},
        ]
        model = genai.GenerativeModel(
            model_name="gemini-pro",
            generation_config=generation_config,
            safety_settings=safety_settings)
        data = request.json
        query = data.get('prompt', "hello ?")
        messages = data.get('messages', [])
        key = data.get('key', '')
        genai.configure(api_key=key)
        # RealTimeGemini (defined in RT.py) wraps the Gemini call for this endpoint
        response = RealTimeGemini(query, messages, model)
        # Prepare the response
        result = {
            'response': response,
            'execution_time': t() - C
        }
        return jsonify(result)
    except Exception as e:
        result = {
            'response': f"{e}",
            'execution_time': t() - C
        }
        return jsonify(result)
@app.route('/counters', methods=['GET'])  # assumed path
def get_counters():
    # Combine both values into a single JSON response; returning two jsonify
    # objects as a tuple would be interpreted by Flask as (body, status code).
    return jsonify({"counter": counter, "data": str(listdir(r"static/data/"))})
if __name__ == '__main__':
    app.run()
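A minimal client sketch for exercising these endpoints once the app is running. The route paths, the localhost port, and the placeholder keys are assumptions introduced above, not part of the original file:

    import requests

    BASE = "http://localhost:5000"

    # Text generation via the Mistral7B wrapper
    r = requests.post(f"{BASE}/generate",
                      json={"prompt": "Hello", "messages": [], "key": "<api-key>"})
    print(r.json()["response"])

    # Gemini endpoint; 'messages' follows the structured format shown earlier
    r = requests.post(f"{BASE}/gemini",
                      json={"messages": [{"role": "user", "parts": ["Hi"]}],
                            "key": "<gemini-key>"})
    print(r.json()["response"], r.json()["execution_time"])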