Update app.py
app.py CHANGED
@@ -1,66 +0,0 @@
from fastapi import FastAPI
from pydantic import BaseModel
import requests
import os
import uuid
import re


app = FastAPI()


class RequestBody(BaseModel):
    model: str
    key_body: str
    text: str


@app.post("/api/v1")
async def generate_response(request_body: RequestBody):
    input_text = request_body.text
    model = request_body.model
    key_true = os.environ['key']      # shared secret the caller must present
    key_body = request_body.key_body
    all_chunk = ""                    # fallback if no model branch matches

    if key_body == key_true:
        # Google Gemini 1.5 Flash
        if model == "gemini":
            key_gemini = os.environ['key_gemini']
            headers = {'Content-Type': 'application/json'}
            params = {'key': key_gemini}
            json_data = {'contents': [{'parts': [{'text': input_text}]}]}
            response = requests.post(
                'https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash-latest:generateContent',
                params=params, headers=headers, json=json_data,
            )
            all_chunk = response.json()['candidates'][0]['content']['parts'][0]['text']

        # Groq (OpenAI-compatible chat completions)
        if model == 'groq':
            key_groq = os.environ['key_groq']
            headers = {'Authorization': f'Bearer {key_groq}', 'Content-Type': 'application/json'}
            json_data = {'messages': [{'role': 'user', 'content': input_text}], 'model': 'llama-3.1-70b-versatile'}
            response = requests.post('https://api.groq.com/openai/v1/chat/completions', headers=headers, json=json_data)
            all_chunk = response.json()["choices"][0]["message"]["content"]

        # Cohere Command R+
        if model == "cohere":
            key_cohere = os.environ['key_cohere']
            headers = {'accept': 'application/json', 'content-type': 'application/json', 'Authorization': f'Bearer {key_cohere}'}
            data = {"model": "command-r-plus", "messages": [{"role": "user", "content": input_text}]}
            response = requests.post('https://api.cohere.com/v2/chat', headers=headers, json=data)
            all_chunk = response.json()['message']['content'][0]['text']

        # Blackbox image generation: the raw reply is scanned for the first image URL
        if model == 'blackbox':
            headers = {
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36",
                "Accept": "*/*",
                "Accept-Language": "en-US,en;q=0.5",
                "Accept-Encoding": "gzip, deflate, br",
                "Referer": "https://www.blackbox.ai",
                "Content-Type": "application/json",
                "Origin": "https://www.blackbox.ai",
                "DNT": "1",
                "Sec-GPC": "1",
                "Alt-Used": "www.blackbox.ai",
                "Connection": "keep-alive",
            }
            json_data = {
                'messages': [{'id': uuid.uuid4().hex, 'content': input_text, 'role': 'user'}],
                'id': uuid.uuid4().hex,
                'previewToken': None,
                'userId': None,
                'codeModelMode': True,
                'agentMode': {'mode': True, 'id': 'ImageGenerationLV45LJp', 'name': 'Image Generation'},
                'trendingAgentMode': {},
                'isMicMode': False,
                'maxTokens': 1024,
                'playgroundTopP': None,
                'playgroundTemperature': None,
                'isChromeExt': False,
                'githubToken': None,
                'clickedAnswer2': False,
                'clickedAnswer3': False,
                'clickedForceWebSearch': False,
                'visitFromDelta': False,
                'mobileClient': False,
                'userSelectedModel': None,
                'validated': '00f37b34-a166-4efb-bce5-1312d87f2f94',
            }
            response = requests.post('https://www.blackbox.ai/api/chat', headers=headers, json=json_data)
            # Strip NUL/backspace bytes, then pull out the first image URL
            decoded_string = ''.join(char for char in response.text if char != '\x00' and char != '\x08')
            pattern = r'https?://\S+\.(?:jpg|jpeg|png|gif)'
            match2 = re.search(pattern, decoded_string)
            all_chunk = match2.group() if match2 else ""   # empty string if no URL was found

        # Google Gemini 2.0 Flash (experimental)
        if model == 'gemini2':
            key_gemini2 = os.environ['key_gemini2']
            headers = {'Content-Type': 'application/json'}
            params = {'key': key_gemini2}
            json_data = {'contents': [{'parts': [{'text': input_text}]}]}
            response = requests.post(
                'https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash-exp:generateContent',
                params=params, headers=headers, json=json_data,
            )
            all_chunk = response.json()['candidates'][0]['content']['parts'][0]['text']

    # Wrong key: return a canned message instead of calling any model
    if key_body != key_true:
        all_chunk = "How's the hack going?"
    return {"response": all_chunk}