import json
import os
import re
from typing import Generator, Union
from uuid import uuid4

import requests
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()
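# Assumed .env layout: the chat endpoint URL is stored under the `v2` key, e.g.
#   v2=<chat endpoint URL>
# (read via os.getenv("v2") in v2.__init__ below)
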
class v2:
    """Minimal client for a streaming answer endpoint whose URL is read from the `v2` environment variable."""

    def __init__(
        self,
        timeout: int = 100,
        proxies: dict = None,
    ):
        self.session = requests.Session()
        # Endpoint URL comes from the environment (loaded above via python-dotenv)
        self.chat_endpoint = os.getenv("v2")
        self.stream_chunk_size = 64
        self.timeout = timeout
        self.last_response = {}
        self.headers = {
            "accept": "*/*",
            "accept-encoding": "gzip, deflate, br, zstd",
            "accept-language": "en-US,en;q=0.9,en-IN;q=0.8",
            "content-type": "application/json",
        }
        self.session.headers.update(self.headers)
        # Avoid a mutable default argument; fall back to an empty proxy mapping
        self.session.proxies = proxies or {}
    def ask(
        self,
        prompt: str,
        stream: bool = False,
        raw: bool = False,
    ) -> Union[dict, Generator[dict, None, None]]:
        """Query the endpoint; return the last-response dict, or a generator of text deltas when `stream=True`."""
        conversation_prompt = f"Tell me everything about {prompt}"
        payload = {
            "query": conversation_prompt,
            "search_uuid": uuid4().hex,
            "lang": "",
            "agent_lang": "en",
            "search_options": {
                "langcode": "en-US"
            },
            "search_video": True,
            "contexts_from": "google",
        }
        def for_stream():
            response = self.session.post(
                self.chat_endpoint, json=payload, stream=True, timeout=self.timeout
            )
            if not response.ok:
                raise Exception(
                    f"Failed to generate response - ({response.status_code}, {response.reason}) - {response.text}"
                )
            streaming_text = ""
            # The endpoint streams server-sent events; answer payloads arrive on lines prefixed with "data:"
            for line in response.iter_lines(decode_unicode=True):
                if line.startswith("data:"):
                    try:
                        data = json.loads(line[5:].strip())
                        if data.get("type") == "answer" and "text" in data.get("data", {}):
                            new_text = data["data"]["text"]
                            # The server resends the full answer each time; yield only the new suffix
                            if len(new_text) > len(streaming_text):
                                delta = new_text[len(streaming_text):]
                                streaming_text = new_text
                                resp = dict(text=delta)
                                self.last_response.update(dict(text=streaming_text))
                                yield line if raw else resp
                    except json.JSONDecodeError:
                        pass
def for_non_stream():
full_response = ""
for chunk in for_stream():
if not raw:
full_response += chunk['text']
self.last_response = dict(text=full_response)
return self.last_response
return for_stream() if stream else for_non_stream()
    def chat(
        self,
        prompt: str,
        stream: bool = False,
    ) -> Union[str, Generator[str, None, None]]:
        """Return the answer text, or a generator of text chunks when `stream=True`."""

        def for_stream():
            for response in self.ask(prompt, stream=True):
                yield self.get_message(response)

        def for_non_stream():
            return self.get_message(self.ask(prompt, stream=False))

        return for_stream() if stream else for_non_stream()
    def get_message(self, response: dict) -> str:
        """Extract the text from a response dict, stripping inline citation markers such as [[1]]."""
        assert isinstance(response, dict), "Response should be of dict data-type only"
        if "text" in response:
            return re.sub(r"\[\[\d+\]\]", "", response["text"])
        return ""
if __name__ == '__main__':
from rich import print
ai = v2()
response = ai.chat("HelpingAI-9B", stream=True)
for chunk in response:
print(chunk, end="", flush=True)
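
    # Non-streaming usage (a sketch under the same endpoint/.env assumptions as above):
    # full_text = ai.chat("HelpingAI-9B", stream=False)
    # print(full_text)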