rbn2008k committed · Commit 1fd740a · verified · Parent(s): 5146b90

Update app.py

Files changed (1)
  1. app.py +140 -147
app.py CHANGED
@@ -9,12 +9,10 @@ import asyncio
 from PIL import Image
 import base64
 from io import BytesIO
-from huggingface_hub import InferenceClient
-from transformers import AutoProcessor, AutoTokenizer

 def load_system_prompt():
-    with open('prompt.txt', 'r') as file:
-        return file.read()
+    with open('prompt.txt', 'r') as file:
+        return file.read()

 system_prompt = load_system_prompt()

@@ -25,178 +23,173 @@ openai_api_key = os.getenv('glhf')
 yolo = os.getenv('yolo')

 openai_client = OpenAI(
-    base_url=f"https://api.cloudflare.com/client/v4/accounts/{yolo}/ai/v1",
-    api_key=openai_api_key,
+    base_url=f"https://api.cloudflare.com/client/v4/accounts/{yolo}/ai/v1",
+    api_key=openai_api_key,
 )

-idefics_processor = AutoProcessor.from_pretrained("HuggingFaceM4/idefics2-8b")
-idefics_client = InferenceClient("HuggingFaceM4/idefics2-8b-chatty")
-tokenizer = AutoTokenizer.from_pretrained("HuggingFaceM4/idefics2-8b")
-
-chat_template = """<|user|>: Describe this image at its finest, mentioning the exact names of the objects present in it.
-<|assistant|>:"""
-
-tokenizer.chat_template = chat_template
-
 def encode_local_image(image):
-    pil_image = Image.open(image)
-    buffer = BytesIO()
-    pil_image.save(buffer, format="JPEG")
-    base64_image = base64.b64encode(buffer.getvalue()).decode("utf-8")
-    return f"data:image/jpeg;base64,{base64_image}"
-
-def describe_image(image_path):
-    image_string = encode_local_image(image_path)
-    messages = [
-        {
-            "role": "user",
-            "content": [
-                {"type": "image"},
-                {"type": "text", "text": "Describe this image in detail and explain what is in this image basically."},
-            ],
-        },
-    ]
-    prompt_with_template = idefics_processor.apply_chat_template(
-        messages, add_generation_prompt=True
-    )
-    prompt_with_images = prompt_with_template.replace("<image>", "![]({}) ").format(image_string)
-    payload = {
-        "inputs": prompt_with_images,
-        "parameters": {
-            "return_full_text": False,
-            "max_new_tokens": 2048,
-        },
-    }
-    response = idefics_client.post(json=payload).decode()
-    return response
+    pil_image = Image.open(image)
+    buffer = BytesIO()
+    pil_image.save(buffer, format="JPEG")
+    base64_image = base64.b64encode(buffer.getvalue()).decode("utf-8")
+    return f"data:image/jpeg;base64,{base64_image}"

 client = TelegramClient('bot', api_id, api_hash).start(bot_token=bot_token)

 class CircularBuffer:
-    def __init__(self, size: int):
-        self.size = size
-        self.buffer = [None] * size
-        self.start = 0
-        self.end = 0
-
-    def add(self, role: str, content: str):
-        self.buffer[self.end] = {'role': role, 'content': content}
-        self.end = (self.end + 1) % self.size
-        if self.end == self.start:
-            self.start = (self.start + 1) % self.size
-
-    def get_history(self):
-        history = []
-        i = self.start
-        while i != self.end:
-            history.append(self.buffer[i])
-            i = (i + 1) % self.size
-        return history
-
-    def reset(self):
-        self.buffer = [None] * self.size
-        self.start = 0
-        self.end = 0
-
+    def __init__(self, size: int):
+        self.size = size
+        self.buffer = [None] * size
+        self.start = 0
+        self.end = 0
+
+    def add(self, role: str, content: str):
+        self.buffer[self.end] = {'role': role, 'content': content}
+        self.end = (self.end + 1) % self.size
+        if self.end == self.start:
+            self.start = (self.start + 1) % self.size
+
+    def get_history(self):
+        history = []
+        i = self.start
+        while i != self.end:
+            history.append(self.buffer[i])
+            i = (i + 1) % self.size
+        return history
+
+    def reset(self):
+        self.buffer = [None] * self.size
+        self.start = 0
+        self.end = 0
+
+# Only store history for 3 users
 user_histories = {}

 def get_user_history(user_id):
-    if user_id not in user_histories:
-        user_histories[user_id] = CircularBuffer(99)
-    return user_histories[user_id]
-
-async def get_completion(prompt: str, user_id) -> str:
-    user_history = get_user_history(user_id)
-    messages = [
-        {"role": "system", "content": system_prompt},
-        *user_history.get_history(),
-        {"role": "user", "content": prompt},
-    ]
-    try:
-        completion = openai_client.chat.completions.create(
-            model="@cf/meta/llama-3.2-11b-vision-instruct",
-            messages=messages,
-            stream=True,
-            temperature=0.8,
-            top_p=0.9,
-            max_tokens=4096,
-            frequency_penalty=0.2,
-            presence_penalty=0.6
-        )
-        message = ''
-        for chunk in completion:
-            if chunk.choices[0].delta.content is not None:
-                message += chunk.choices[0].delta.content
-    except Exception as e:
-        message = f"Error: {str(e)}"
-        print(e)
-    user_history.add("user", prompt)
-    user_history.add("assistant", message)
-    return message
+    if user_id not in user_histories:
+        # If more than 3 users, remove the oldest one
+        if len(user_histories) >= 3:
+            oldest_user_id = list(user_histories.keys())[0]
+            del user_histories[oldest_user_id]
+        user_histories[user_id] = CircularBuffer(99)
+    return user_histories[user_id]
+
+async def fetch_telegram_history(user_id):
+    messages = await client.get_messages(user_id, limit=50)  # Fetch the last 10 messages
+    user_history = get_user_history(user_id)
+    for message in messages:
+        role = 'user' if message.sender_id == user_id else 'assistant'
+        user_history.add(role, message.text)
+
+async def get_completion(prompt: str, user_id, image_base64=None) -> str:
+    user_history = get_user_history(user_id)
+
+    # If the user has no history, fetch from Telegram
+    if not user_history.get_history():
+        await fetch_telegram_history(user_id)
+
+    # Prepare message content
+    if image_base64:
+        user_message = [
+            {"type": "text", "text": prompt},
+            {"type": "image_url", "image_url": {"url": f"data:image/jpeg;base64,{image_base64}"}}
+        ]
+    else:
+        user_message = [{"type": "text", "text": prompt}]
+
+    messages = [
+        {"role": "system", "content": system_prompt},
+        *user_history.get_history(),
+        {"role": "user", "content": user_message},
+    ]
+
+    try:
+        completion = openai_client.chat.completions.create(
+            model="@cf/meta/llama-3.2-11b-vision-instruct",
+            messages=messages,
+            stream=True,
+            temperature=0.8,
+            top_p=0.9,
+            max_tokens=4096,
+            frequency_penalty=0.2,
+            presence_penalty=0.6
+        )
+        message = ''
+        for chunk in completion:
+            if chunk.choices[0].delta.content is not None:
+                message += chunk.choices[0].delta.content
+    except Exception as e:
+        message = f"Error: {str(e)}"
+        print(e)
+
+    # Add the user prompt and assistant response to the history
+    user_history.add("user", prompt)
+    user_history.add("assistant", message)
+    return message

 @client.on(events.NewMessage(pattern='/start'))
 async def start(event):
-    await event.respond("Hello! I am your boo.")
+    await event.respond("Hello! I am your boo.")

 @client.on(events.NewMessage(pattern='/help'))
 async def help(event):
-    await event.respond("Here is how I can help you:\n/start - Start the bot\n/help - Get help\n/reset - Reset chat history")
+    await event.respond("Here is how I can help you:\n/start - Start the bot\n/help - Get help\n/reset - Reset chat history")

 @client.on(events.NewMessage(pattern='/reset'))
 async def reset(event):
-    user_history = get_user_history(event.sender_id)
-    user_history.reset()
-    await event.respond("History has been reset.")
+    user_history = get_user_history(event.sender_id)
+    user_history.reset()
+    await event.respond("History has been reset.")

 @client.on(events.NewMessage)
 async def handle_message(event):
-    if event.raw_text.startswith('/start') or event.raw_text.startswith('/help') or event.raw_text.startswith('/reset'):
-        return
-    try:
-        async with client.action(event.chat_id, 'typing'):
-            await asyncio.sleep(3)
+    if event.raw_text.startswith('/start') or event.raw_text.startswith('/help') or event.raw_text.startswith('/reset'):
+        return
+    try:
+        async with client.action(event.chat_id, 'typing'):
+            await asyncio.sleep(3)

-        user_id = event.sender_id
-        user_history = get_user_history(user_id)
+        user_id = event.sender_id
+        user_history = get_user_history(user_id)
+        user_message = event.raw_text

-        if event.photo:
-            photo = await event.download_media()
-            image_description = describe_image(photo)
-            user_message = event.raw_text + "\n\nContent of the image: " + image_description
-        else:
-            user_message = event.raw_text
+        # Check if the message contains an image
+        image_base64 = None
+        if event.photo:
+            image = await event.download_media(file=BytesIO())
+            image_base64 = encode_local_image(image)

-        response = await get_completion(user_message, user_id)
-        await event.respond(response)
+        response = await get_completion(user_message, user_id, image_base64)
+        await event.respond(response)

-    except Exception as e:
-        print(f"An error occurred: {e}")
-        await event.respond("Whoopsie 🤭")
+    except Exception as e:
+        print(f"An error occurred: {e}")
+        await event.respond("Whoopsie 🤭")

 def launch_gradio():
-    welcome_message = """
-    # Meet Scarlett!
-    - Scarlett is a girl residing in Telegram
-    - She can chat with you without any censorship.
-    - Head over to https://t.me/Scarlett_Rogue_isNotBot
-    """
-    with gr.Blocks() as demo:
-        gr.Markdown(f"""
-        # Follow me: https://example.com
-        {welcome_message}
-        """)
-    demo.launch(show_api=False)
+    welcome_message = """
+    # Meet Scarlett!
+    - Scarlett is a girl residing in Telegram
+    - She can chat with you without any censorship.
+    - Head over to https://t.me/Scarlett_Rogue_isNotBot
+    """
+    with gr.Blocks() as demo:
+        gr.Markdown(f"""
+        # Follow me: https://example.com
+        {welcome_message}
+        """)
+    demo.launch(show_api=False)

 def keep_alive():
-    while True:
-        try:
-            requests.get("https://rbn2008k-Scarlett.hf.space")
-            print("Ping success")
-        except Exception as e:
-            print(f"Keep-alive request failed: {e}")
-        time.sleep(180)
+    while True:
+        try:
+            requests.get("https://rbn2008k-Scarlett.hf.space")
+            print("Ping success")
+        except Exception as e:
+            print(f"Keep-alive request failed: {e}")
+        time.sleep(180)

 if __name__ == "__main__":
-    threading.Thread(target=launch_gradio).start()
-    threading.Thread(target=keep_alive).start()
-    client.run_until_disconnected()
+    threading.Thread(target=launch_gradio).start()
+    threading.Thread(target=keep_alive).start()
+    client.run_until_disconnected()
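
Note: the core of this commit is that a photo is no longer captioned by IDEFICS and appended as text; it is base64-encoded and passed directly to the vision model as an image_url content part. The following is a minimal offline sketch (not part of the commit) of that payload shape as built by the updated get_completion; the generated 1x1 JPEG and the placeholder system prompt are stand-ins for a downloaded Telegram photo and prompt.txt.

    import base64
    from io import BytesIO

    from PIL import Image

    # Stand-in for a photo downloaded from Telegram into a BytesIO buffer.
    buffer = BytesIO()
    Image.new("RGB", (1, 1), (255, 0, 0)).save(buffer, format="JPEG")
    image_base64 = base64.b64encode(buffer.getvalue()).decode("utf-8")

    prompt = "What colour is this image?"

    # Mirrors the content layout get_completion() sends to the OpenAI-compatible
    # endpoint: one text part plus one image_url part carrying a base64 data URI.
    user_message = [
        {"type": "text", "text": prompt},
        {"type": "image_url", "image_url": {"url": f"data:image/jpeg;base64,{image_base64}"}},
    ]

    messages = [
        {"role": "system", "content": "placeholder system prompt"},  # stand-in for prompt.txt
        {"role": "user", "content": user_message},
    ]

    print(messages[-1]["content"][0])  # {'type': 'text', 'text': 'What colour is this image?'}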