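# Anonymous client for the ChatGPT web backend (https://chat.openai.com/backend-anon),
# using curl_cffi so requests can impersonate a Chrome TLS fingerprint.
# Expects a `secrets.json` one directory above this file; an optional
# "http_proxy" entry there is applied to every request.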
import copy
import uuid
from pathlib import Path

from curl_cffi import requests
from tclogger import logger, OSEnver

secrets_path = Path(__file__).parents[1] / "secrets.json"
ENVER = OSEnver(secrets_path)

class OpenaiAPI:
    def __init__(self):
        self.init_requests_params()

    def init_requests_params(self):
        self.api_base = "https://chat.openai.com/backend-anon"
        self.api_me = f"{self.api_base}/me"
        self.api_models = f"{self.api_base}/models"
        self.api_chat_requirements = f"{self.api_base}/sentinel/chat-requirements"
        self.api_conversation = f"{self.api_base}/conversation"
        self.uuid = str(uuid.uuid4())
        self.requests_headers = {
            # "Accept": "*/*",
            "Accept-Encoding": "gzip, deflate, br, zstd",
            "Accept-Language": "en-US,en;q=0.9",
            "Cache-Control": "no-cache",
            "Content-Type": "application/json",
            "Oai-Device-Id": self.uuid,
            "Oai-Language": "en-US",
            "Pragma": "no-cache",
            "Referer": "https://chat.openai.com/",
"Sec-Ch-Ua": 'Google Chrome";v="123", "Not:A-Brand";v="8", "Chromium";v="123"',
"Sec-Ch-Ua-Mobile": "?0",
"Sec-Ch-Ua-Platform": '"Windows"',
"Sec-Fetch-Dest": "empty",
"Sec-Fetch-Mode": "cors",
"Sec-Fetch-Site": "same-origin",
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36",
}
http_proxy = ENVER["http_proxy"]
if http_proxy:
self.requests_proxies = {
"http": http_proxy,
"https": http_proxy,
}
else:
self.requests_proxies = None
    def log_request(self, url, method="GET"):
        if ENVER["http_proxy"]:
            logger.note(f"> Using Proxy:", end=" ")
            logger.mesg(f"{ENVER['http_proxy']}")
        logger.note(f"> {method}:", end=" ")
        logger.mesg(f"{url}", end=" ")

    def log_response(self, res: requests.Response):
        status_code = res.status_code
        status_code_str = f"[{status_code}]"
        if status_code == 200:
            logger.success(status_code_str)
            logger.success(res.json())
        else:
            logger.warn(status_code_str)
            logger.warn(res.json())

    def get_models(self):
        self.log_request(self.api_models)
        res = requests.get(
            self.api_models,
            headers=self.requests_headers,
            proxies=self.requests_proxies,
            timeout=10,
            impersonate="chrome120",
        )
        self.log_response(res)

    def auth(self):
        self.log_request(self.api_chat_requirements, method="POST")
        res = requests.post(
            self.api_chat_requirements,
            headers=self.requests_headers,
            proxies=self.requests_proxies,
            timeout=10,
            impersonate="chrome120",
        )
        self.log_response(res)

    def chat_completions(self, prompt: str):
        new_headers = {
            "Accept": "text/event-stream",
        }
        requests_headers = copy.deepcopy(self.requests_headers)
        requests_headers.update(new_headers)

        post_data = {
            "action": "next",
            "messages": [
                {
                    "id": self.uuid,
                    "author": {"role": "user"},
                    "content": {"content_type": "text", "parts": [prompt]},
                    "metadata": {},
                }
            ],
            # "parent_message_id": "aaa1de6f-3d50-4d57-8591-ec7042deb594",
            "model": "text-davinci-002-render-sha",
            "timezone_offset_min": -480,
            "suggestions": [],
            "history_and_training_disabled": False,
            "conversation_mode": {"kind": "primary_assistant"},
            "force_paragen": False,
            "force_paragen_model_slug": "",
            "force_nulligen": False,
            "force_rate_limit": False,
            # "websocket_request_id": "f4bd44ac-64ad-4832-b6ca-3603ac6b38c5",
        }

        self.log_request(self.api_conversation, method="POST")
        res = requests.post(
            self.api_conversation,
            headers=requests_headers,
            json=post_data,
            proxies=self.requests_proxies,
            timeout=10,
            impersonate="chrome120",
        )
        self.log_response(res)
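
    # Note: /conversation answers with a `text/event-stream` body, so the
    # `res.json()` call in `log_response` only decodes non-streaming (error)
    # responses. A minimal sketch for consuming the stream instead, assuming a
    # curl_cffi version that supports `stream=True` and `Response.iter_lines()`:
    #
    #   res = requests.post(self.api_conversation, ..., stream=True)
    #   for line in res.iter_lines():
    #       if line.startswith(b"data: "):
    #           print(line[len("data: "):].decode())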

if __name__ == "__main__":
    api = OpenaiAPI()
    # api.get_models()
    # api.auth()
    prompt = "who are you?"
    api.chat_completions(prompt)

    # python -m tests.openai