mfoud444 committed on
Commit
0700e99
·
1 Parent(s): bd1f5d0
Files changed (5) hide show
  1. copy.md +397 -0
  2. join.py +66 -260
  3. join_groups.log +29 -0
  4. session/mbot2.session +3 -0
  5. session/mbot3.session +3 -0
copy.md ADDED
@@ -0,0 +1,397 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import csv
2
+ import asyncio
3
+ import time
4
+ from telethon import TelegramClient
5
+ from tqdm import tqdm # Import tqdm for progress bar
6
+ from telethon.tl.functions.channels import JoinChannelRequest
7
+ from telethon.tl.functions.messages import ImportChatInviteRequest
8
+ from telethon.errors.rpcerrorlist import InviteHashExpiredError
9
+ from flask import Flask, jsonify, send_from_directory
10
+ # Directory for storing files
11
+ from flask import Flask, render_template, send_from_directory
12
+ from telethon.tl.functions.channels import GetParticipantsRequest
13
+ from telethon.tl.types import ChannelParticipantsSearch
14
+ from telethon.errors import FloodWaitError, UserAdminInvalidError
15
+ import json
16
+ import asyncio
17
+ import nest_asyncio
18
+ import logging
19
+ from telethon import TelegramClient, events
20
+ from supabase import create_client, Client
21
+ from flask import Flask, jsonify
22
+ from threading import Thread
23
+ from multiprocessing import Process, Queue
24
+ import unicodedata
25
+ from telegram.helpers import escape_markdown
26
+ import re
27
+ import os
28
+
29
+ from telethon.tl.functions.channels import JoinChannelRequest, InviteToChannelRequest
30
+ from telethon.tl.functions.channels import EditBannedRequest
31
+ from telethon.tl.types import ChatBannedRights
32
+ from telethon.errors.rpcerrorlist import UserAdminInvalidError, UserNotParticipantError
33
+ from telethon.errors.rpcerrorlist import InviteHashExpiredError, UserAlreadyParticipantError
34
+ from telethon.tl.types import Channel, Chat
35
+
36
# Configure the root logger once: INFO level, timestamped format,
# mirrored to join_groups.log and to the console.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(levelname)s - %(message)s",
    handlers=[
        logging.FileHandler("join_groups.log"),  # Log to a file
        logging.StreamHandler()  # Log to console
    ])
43
+
44
# Telegram API credentials (from https://my.telegram.org/apps).
# SECURITY NOTE(review): the API hash and phone number below were committed in
# plain text and must be treated as leaked — rotate them. Environment
# variables take precedence; the original hard-coded values remain as
# fallbacks so existing deployments keep working unchanged.
API_ID = int(os.environ.get("TELEGRAM_API_ID", "25216912"))  # Your API ID
API_HASH = os.environ.get("TELEGRAM_API_HASH", "f65f6050fe9b342a4996c59e4283ab5e")
PHONE_NUMBER = os.environ.get("TELEGRAM_PHONE_NUMBER", "+967730426743")  # with country code

OUTPUT_CSV = "groups_with_status.csv"  # join results: input rows + "status" column
# Path to your CSV file listing groups to join
CSV_FILENAME = "8.csv"
session_dir = "session/mbot1"          # Telethon session file path (no extension)
FILE_DIRECTORY = os.getcwd()           # Current working directory, served by Flask

SLEEP_TIME = 280                       # seconds between Telegram actions (rate-limit guard)
55
+
56
# Flask App — exposes the working directory for browsing/downloading files.
app = Flask(__name__)

# 🔹 Flask API Endpoints
@app.route('/')
def index():
    """Show available files for download as an HTML page.

    Renders templates/index.html with the names of every entry in
    FILE_DIRECTORY (the process working directory).
    """
    files = os.listdir(FILE_DIRECTORY)
    return render_template("index.html", files=files)
65
+
66
+
67
@app.route('/download/<filename>')
def download_file(filename):
    """Allow downloading any file from the directory.

    SECURITY NOTE(review): this serves *any* file in FILE_DIRECTORY (the
    working directory), which for this project includes scraped CSV exports
    and Telethon .session files — confirm exposing those is intended.
    """
    return send_from_directory(FILE_DIRECTORY, filename, as_attachment=True)


def run_flask():
    # Blocking Flask dev server; launched in its own Process (see __main__).
    app.run(host='0.0.0.0', port=7860)
75
+
76
+
77
# CSV file that the iter_participants-based scraper writes users to.
USER_CSV = "user_list.csv"
# SLEEP_TIME = 280  # Delay between adding users

# Logging setup.
# NOTE(review): the root logger was already configured by the basicConfig call
# near the top of the file, so this second call is a no-op.
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")

BATCH_SIZE = 30  # Users fetched per GetParticipantsRequest page (value is 30, not 200)
MAX_USERS = 200  # Default cap on users fetched per group
85
+
86
+
87
+
88
async def fetch_users_from_source_group(source_group, max_users=MAX_USERS):
    """
    Fetch up to `max_users` members of a Telegram group via paginated
    GetParticipantsRequest calls and append them to "<source_group>.csv",
    showing progress with a tqdm bar.

    :param source_group: The source group username or ID.
    :param max_users: The maximum number of users to fetch. Use `None` to fetch all users.
    """
    async with TelegramClient(session_dir, API_ID, API_HASH) as client:
        await client.start(PHONE_NUMBER)

        try:
            entity = await client.get_entity(source_group)
            offset = 0          # pagination cursor
            total_users = 0
            csv_filename = f"{source_group}.csv"  # save with group name

            with open(csv_filename, mode="a", newline="", encoding="utf-8") as file:
                writer = csv.writer(file)
                # Fix: only write the header when the file is new/empty —
                # the original wrote a duplicate header row on every run
                # because the file is opened in append mode.
                if file.tell() == 0:
                    writer.writerow(["user_id", "username", "first_name", "last_name"])

                with tqdm(desc=f"Fetching users from {source_group}", unit="user", ncols=100) as pbar:
                    while max_users is None or total_users < max_users:
                        # Shrink the last batch near the cap; full batch when unlimited.
                        remaining_users = max_users - total_users if max_users else BATCH_SIZE
                        batch_size = min(BATCH_SIZE, remaining_users)

                        try:
                            participants = await client(GetParticipantsRequest(
                                entity, ChannelParticipantsSearch(''), offset, batch_size, hash=0
                            ))

                            if not participants.users:
                                break  # no more members to page through

                            for user in participants.users:
                                writer.writerow([user.id, user.username or "N/A",
                                                 user.first_name or "N/A", user.last_name or "N/A"])
                                pbar.update(1)
                                pbar.set_postfix(user=user.username or "N/A")  # last processed user

                            total_users += len(participants.users)
                            offset += len(participants.users)  # advance the cursor

                        except FloodWaitError as e:
                            pbar.set_postfix(waiting=f"{e.seconds}s")  # show cooldown time
                            await asyncio.sleep(e.seconds)  # honor Telegram cooldown

                    pbar.set_description(f"✅ Done! Fetched {total_users} users")

        except Exception as e:
            print(f"❌ Failed to fetch users from {source_group}: {e}")
137
+
138
+
139
async def fetch_users_from_source_group1(source_group, max_users=MAX_USERS):
    """
    Fetch up to `max_users` from a Telegram group using pagination and save them to a CSV file.

    NOTE(review): dead code — a second `async def fetch_users_from_source_group1`
    later in this file rebinds the name, so this version is never the one
    callable at module level. Kept as-is pending a rename or removal.

    :param source_group: The source group username or ID.
    :param max_users: The maximum number of users to fetch. Use `None` to fetch all users.
    """
    logging.info(f"Fetching users from {source_group} (Limit: {max_users if max_users else 'All'})...")

    async with TelegramClient(session_dir, API_ID, API_HASH) as client:
        await client.start(PHONE_NUMBER)

        try:
            entity = await client.get_entity(source_group)
            offset = 0  # Pagination start
            total_users = 0
            csv_filename = f"{source_group}.csv"  # Save with group name

            # NOTE(review): mode "w" truncates any previous export for this group.
            with open(csv_filename, mode="w", newline="", encoding="utf-8") as file:
                writer = csv.writer(file)
                writer.writerow(["user_id", "username", "first_name", "last_name"])  # CSV headers

                while max_users is None or total_users < max_users:
                    remaining_users = max_users - total_users if max_users else BATCH_SIZE
                    batch_size = min(BATCH_SIZE, remaining_users)  # Adjust batch size if close to max

                    try:
                        participants = await client(GetParticipantsRequest(
                            entity, ChannelParticipantsSearch(''), offset, batch_size, hash=0
                        ))

                        if not participants.users:
                            break  # Stop when no more users are found

                        for user in participants.users:
                            writer.writerow([user.id, user.username or "N/A", user.first_name or "N/A", user.last_name or "N/A"])
                            logging.info(f"✔ User saved: {user.id} | {user.username}")

                        total_users += len(participants.users)
                        offset += len(participants.users)  # Move offset forward

                    except FloodWaitError as e:
                        logging.warning(f"⚠️ Telegram rate limit hit! Waiting {e.seconds} seconds...")
                        await asyncio.sleep(e.seconds)  # Wait for Telegram cooldown

                    # Long pause after every page to avoid hitting limits.
                    await asyncio.sleep(SLEEP_TIME)

                logging.info(f"✅ Fetched {total_users} users from {source_group}. Saved to {csv_filename}.")

        except Exception as e:
            logging.error(f"❌ Failed to fetch users from {source_group}: {e}")
190
+
191
async def fetch_users_from_source_group1(source_group, max_users=MAX_USERS):
    """
    Fetch up to `max_users` members via client.iter_participants() and save
    them to USER_CSV.

    NOTE(review): this redefinition shadows the pagination-based
    `fetch_users_from_source_group1` defined above; only this version is live.

    :param source_group: The source group username or ID.
    :param max_users: The maximum number of users to fetch; `None` fetches all.
    """
    logging.info(f"Fetching up to {max_users} users from {source_group}...")

    async with TelegramClient(session_dir, API_ID, API_HASH) as client:
        await client.start(PHONE_NUMBER)

        try:
            entity = await client.get_entity(source_group)
            count = 0  # users written so far

            with open(USER_CSV, mode="w", newline="", encoding="utf-8") as file:
                writer = csv.writer(file)
                writer.writerow(["user_id", "username", "first_name", "last_name"])  # CSV headers

                async for user in client.iter_participants(entity, limit=None, aggressive=True):
                    # Fix: guard against max_users=None — the original
                    # `count >= max_users` raised TypeError when None was passed.
                    if max_users is not None and count >= max_users:
                        break

                    writer.writerow([user.id, user.username or "N/A",
                                     user.first_name or "N/A", user.last_name or "N/A"])
                    logging.info(f"✔ User saved: {user.id} | {user.username}")

                    count += 1
                    await asyncio.sleep(1)  # soft throttle to prevent rate limits

            logging.info(f"✅ Fetched {count}/{max_users} users from {source_group}. Saved to {USER_CSV}.")

        except Exception as e:
            logging.error(f"❌ Failed to fetch users from {source_group}: {e}")
225
async def add_users_to_destination_group(destination_group, csvfile):
    """
    Read user IDs from `csvfile` and invite them to `destination_group`,
    skipping users already in the group and throttling between invites.

    :param destination_group: The destination group username or ID.
    :param csvfile: CSV file whose rows start with [user_id, username, ...].
    """
    logging.info(f"Adding users to {destination_group} from {csvfile}...")

    async with TelegramClient(session_dir, API_ID, API_HASH) as client:
        await client.start(PHONE_NUMBER)

        try:
            # Get the destination group entity
            dest_entity = await client.get_entity(destination_group)

            # Fetch existing members so we never try to re-add them.
            existing_user_ids = set()
            async for user in client.iter_participants(dest_entity, limit=None):
                existing_user_ids.add(user.id)
            logging.info(f"Fetched {len(existing_user_ids)} existing users from {destination_group}.")

            # Read candidates from the CSV, dropping malformed/short rows,
            # users without a public username ("N/A"), and existing members.
            users = []
            with open(csvfile, mode="r", encoding="utf-8") as file:
                reader = csv.reader(file)
                header = next(reader, None)  # skip header row
                for row in reader:
                    try:
                        user_id = int(row[0].strip())
                        user_name = row[1].strip()
                    # Fix: also catch IndexError — rows with fewer than two
                    # columns previously crashed the whole import.
                    except (ValueError, IndexError):
                        logging.debug(f"Skipping row with non-numeric user_id: {row}")
                        continue
                    if user_id not in existing_user_ids and user_name != 'N/A':
                        users.append(user_id)

            logging.info(f"Filtered CSV: {len(users)} users to add after removing existing members.")

            count = 0
            for index, user_id in enumerate(users, start=1):
                try:
                    # Fix: was time.sleep(SLEEP_TIME), which blocked the entire
                    # asyncio event loop; await the async sleep instead.
                    await asyncio.sleep(SLEEP_TIME)
                    logging.info(f"[{index}/{len(users)}] Adding user {user_id} to {destination_group}...")
                    await client(InviteToChannelRequest(dest_entity, [user_id]))
                    logging.info(f"✅ Successfully added user {user_id}.")

                    count += 1
                    if count % BATCH_SIZE == 0:  # extra pause after each batch
                        logging.info(f"⏳ Waiting {SLEEP_TIME} seconds to avoid rate limits...")
                        await asyncio.sleep(SLEEP_TIME)

                except FloodWaitError as e:
                    logging.warning(f"⚠️ FloodWait: Waiting {e.seconds} seconds...")
                    await asyncio.sleep(e.seconds)

                except UserAdminInvalidError:
                    logging.error(f"❌ Cannot add {user_id}: Bot lacks admin rights.")

                except Exception as e:
                    logging.error(f"❌ Failed to add {user_id}: {e}")

            logging.info(f"✅ Process completed: Added {count} new users to {destination_group}.")

        except Exception as e:
            logging.error(f"❌ Failed to add users to {destination_group}: {e}")
296
+
297
async def get_user_groups(client):
    """Return the set of t.me links (or "private_group") for every group or
    channel the logged-in account is currently a member of, via get_dialogs."""
    memberships = set()

    for dialog in await client.get_dialogs():
        # Only groups and channels count as memberships here.
        if not (dialog.is_group or dialog.is_channel):
            continue

        entity = dialog.entity
        if getattr(entity, "username", None):
            link = f"https://t.me/{entity.username}"
        else:
            link = "private_group"  # no public username available

        memberships.add(link)
        logging.info(f"Joined group/channel: {entity.title} (ID: {link})")

    return memberships
311
+
312
+
313
async def join_groups():
    """
    Join every group listed in CSV_FILENAME that the account is not already a
    member of, appending a per-group status row to OUTPUT_CSV.

    CSV rows are expected as [phone_number, group_name, username, group_id],
    where username is a t.me link or the literal "private_group" and group_id
    carries the invite hash for private groups.
    """
    async with TelegramClient(session_dir, API_ID, API_HASH) as client:
        await client.start(PHONE_NUMBER)
        me = await client.get_me()
        logging.info(f"Logged in as {me.first_name} (ID: {me.id})")

        # Fetch all groups/channels the user is already a member of.
        user_groups = await get_user_groups(client)
        logging.info(f"✅ Retrieved {len(user_groups)} joined groups/channels.")
        logging.info(f"✅ Retrieved {user_groups} joined groups/channels.")

        # Read the CSV file containing group information.
        with open(CSV_FILENAME, mode="r", newline="", encoding="utf-8") as file:
            reader = csv.reader(file)
            header = next(reader)  # skip header row
            groups = [row for row in reader]

        # Filter out groups the user is already a member of.
        filtered_groups = []
        for row in groups:
            phone_number, group_name, username, group_id = row

            if username and username in user_groups:
                logging.info(f"⚡ Already a member: {group_name} ({username}) - Skipping")
            else:
                filtered_groups.append(row)

        # Append results; NOTE(review): the header row is re-written on every
        # run because the file is opened in append mode.
        with open(OUTPUT_CSV, mode="a", newline="", encoding="utf-8") as output_file:
            writer = csv.writer(output_file)
            writer.writerow(header + ["status"])  # add "status" column

            for index, row in enumerate(filtered_groups, start=2):
                phone_number, group_name, username, group_id = row
                status = ""

                try:
                    if username != "private_group":
                        # Join a public group/channel by its username/link.
                        await client(JoinChannelRequest(username))
                        status = "Joined (public)"
                        logging.info(f"[{index}/{len(filtered_groups)}] ✅ Joined public group: {group_name} ({username})")

                        # Fix: was time.sleep(SLEEP_TIME), which blocked the
                        # asyncio event loop; sleep only after a successful join.
                        await asyncio.sleep(SLEEP_TIME)
                    else:
                        # Join a private group using its invite hash (group_id).
                        await client(ImportChatInviteRequest(group_id))
                        status = "Joined (private)"
                        logging.info(f"[{index}/{len(filtered_groups)}] ✅ Joined private group: {group_name}")

                        # Sleep only after a successful join.
                        await asyncio.sleep(SLEEP_TIME)

                except UserAlreadyParticipantError:
                    status = "Already a member"
                    logging.info(f"[{index}/{len(filtered_groups)}] ⚡ Already a member: {group_name} ({username})")
                except InviteHashExpiredError:
                    status = "Failed (private) - Invite link expired"
                    logging.error(f"[{index}/{len(filtered_groups)}] ❌ Failed to join private group: {group_name} - Invite link expired")
                except Exception as e:
                    status = f"Failed - {e}"
                    logging.error(f"[{index}/{len(filtered_groups)}] ❌ Failed to join {group_name}: {e}")

                writer.writerow(row + [status])

        logging.info(f"✅ Process completed. Results saved to {OUTPUT_CSV}")
379
+
380
+
381
def run_telegram():
    """Process entry point: join all groups listed in CSV_FILENAME."""
    asyncio.run(join_groups())
383
+
384
def run_telegram_mov():
    """Process entry point: scrape ALL members of UT_CHEM (max_users=None)."""
    asyncio.run(fetch_users_from_source_group("UT_CHEM", None))
386
def run_telegram_mov2():
    """Process entry point: invite users from UT_CHEM.csv into @searchai090."""
    asyncio.run(add_users_to_destination_group("@searchai090", 'UT_CHEM.csv'))
388
+
389
if __name__ == "__main__":
    # Run the Flask file server and the Telegram task in separate processes
    # so the blocking Flask server cannot stall the Telethon work.
    p1 = Process(target=run_flask)
    # p2 = Process(target=run_telegram)
    p2 = Process(target=run_telegram_mov2)  # currently: add scraped users to @searchai090
    p1.start()
    p2.start()

    p1.join()
    p2.join()
join.py CHANGED
@@ -41,14 +41,25 @@ logging.basicConfig(
41
  logging.StreamHandler() # Log to console
42
  ])
43
 
44
- # Replace with your API credentials (from https://my.telegram.org/apps)
45
- API_ID = 25216912 # Your API ID
 
46
  API_HASH = "f65f6050fe9b342a4996c59e4283ab5e"
47
- PHONE_NUMBER = "+967730426743" # Your phone number with country code
48
- OUTPUT_CSV = "groups_with_status.csv"
 
 
 
 
 
49
  # Path to your CSV file
50
  CSV_FILENAME = "8.csv"
51
- session_dir = "session/mbot1"
 
 
 
 
 
52
  FILE_DIRECTORY = os.getcwd() # Current working directory
53
 
54
  SLEEP_TIME = 280
@@ -72,8 +83,8 @@ def download_file(filename):
72
 
73
  def run_flask():
74
  app.run(host='0.0.0.0', port=7860)
75
-
76
-
77
  USER_CSV = "user_list.csv"
78
  # SLEEP_TIME = 280 # Delay between adding users
79
 
@@ -83,157 +94,36 @@ logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(
83
  BATCH_SIZE = 30 # Fetch 200 users at a time (Telegram's limit)
84
  MAX_USERS = 200 # Set your desired limit here
85
 
86
-
87
-
88
- async def fetch_users_from_source_group(source_group, max_users=MAX_USERS):
89
  """
90
- Fetch up to `max_users` from a Telegram group using pagination and save them to a CSV file.
91
-
92
- :param source_group: The source group username or ID.
93
- :param max_users: The maximum number of users to fetch. Use `None` to fetch all users.
94
  """
95
- async with TelegramClient(session_dir, API_ID, API_HASH) as client:
96
- await client.start(PHONE_NUMBER)
97
-
98
- try:
99
- entity = await client.get_entity(source_group)
100
- offset = 0 # Pagination start
101
- total_users = 0
102
- csv_filename = f"{source_group}.csv" # Save with group name
103
-
104
- with open(csv_filename, mode="a", newline="", encoding="utf-8") as file:
105
- writer = csv.writer(file)
106
- writer.writerow(["user_id", "username", "first_name", "last_name"]) # CSV headers
107
-
108
- with tqdm(desc=f"Fetching users from {source_group}", unit="user", ncols=100) as pbar:
109
- while max_users is None or total_users < max_users:
110
- remaining_users = max_users - total_users if max_users else BATCH_SIZE
111
- batch_size = min(BATCH_SIZE, remaining_users) # Adjust batch size if close to max
112
-
113
- try:
114
- participants = await client(GetParticipantsRequest(
115
- entity, ChannelParticipantsSearch(''), offset, batch_size, hash=0
116
- ))
117
-
118
- if not participants.users:
119
- break # Stop when no more users are found
120
-
121
- for user in participants.users:
122
- writer.writerow([user.id, user.username or "N/A", user.first_name or "N/A", user.last_name or "N/A"])
123
- pbar.update(1) # Update progress bar
124
- pbar.set_postfix(user=user.username or "N/A") # Show last processed user
125
-
126
- total_users += len(participants.users)
127
- offset += len(participants.users) # Move offset forward
128
-
129
- except FloodWaitError as e:
130
- pbar.set_postfix(waiting=f"{e.seconds}s") # Show cooldown time
131
- await asyncio.sleep(e.seconds) # Wait for Telegram cooldown
132
-
133
- pbar.set_description(f"✅ Done! Fetched {total_users} users")
134
-
135
- except Exception as e:
136
- print(f"❌ Failed to fetch users from {source_group}: {e}")
137
-
138
-
139
- async def fetch_users_from_source_group1(source_group, max_users=MAX_USERS):
140
- """
141
- Fetch up to `max_users` from a Telegram group using pagination and save them to a CSV file.
142
 
143
- :param source_group: The source group username or ID.
144
- :param max_users: The maximum number of users to fetch. Use `None` to fetch all users.
145
- """
146
- logging.info(f"Fetching users from {source_group} (Limit: {max_users if max_users else 'All'})...")
147
-
148
- async with TelegramClient(session_dir, API_ID, API_HASH) as client:
149
- await client.start(PHONE_NUMBER)
150
-
151
- try:
152
- entity = await client.get_entity(source_group)
153
- offset = 0 # Pagination start
154
- total_users = 0
155
- csv_filename = f"{source_group}.csv" # Save with group name
156
-
157
- with open(csv_filename, mode="w", newline="", encoding="utf-8") as file:
158
- writer = csv.writer(file)
159
- writer.writerow(["user_id", "username", "first_name", "last_name"]) # CSV headers
160
-
161
- while max_users is None or total_users < max_users:
162
- remaining_users = max_users - total_users if max_users else BATCH_SIZE
163
- batch_size = min(BATCH_SIZE, remaining_users) # Adjust batch size if close to max
164
-
165
- try:
166
- participants = await client(GetParticipantsRequest(
167
- entity, ChannelParticipantsSearch(''), offset, batch_size, hash=0
168
- ))
169
-
170
- if not participants.users:
171
- break # Stop when no more users are found
172
-
173
- for user in participants.users:
174
- writer.writerow([user.id, user.username or "N/A", user.first_name or "N/A", user.last_name or "N/A"])
175
- logging.info(f"✔ User saved: {user.id} | {user.username}")
176
-
177
- total_users += len(participants.users)
178
- offset += len(participants.users) # Move offset forward
179
-
180
- except FloodWaitError as e:
181
- logging.warning(f"⚠️ Telegram rate limit hit! Waiting {e.seconds} seconds...")
182
- await asyncio.sleep(e.seconds) # Wait for Telegram cooldown
183
-
184
- await asyncio.sleep(SLEEP_TIME) # Avoid hitting limits
185
-
186
- logging.info(f"✅ Fetched {total_users} users from {source_group}. Saved to {csv_filename}.")
187
-
188
- except Exception as e:
189
- logging.error(f"❌ Failed to fetch users from {source_group}: {e}")
190
-
191
- async def fetch_users_from_source_group1(source_group, max_users=MAX_USERS):
192
- """
193
- Fetch up to `max_users` from a Telegram group using client.iter_participants() and save them to a CSV file.
194
 
195
- :param source_group: The source group username or ID.
196
- :param max_users: The maximum number of users to fetch.
197
- """
198
- logging.info(f"Fetching up to {max_users} users from {source_group}...")
199
 
200
- async with TelegramClient(session_dir, API_ID, API_HASH) as client:
201
- await client.start(PHONE_NUMBER)
202
-
203
- try:
204
- entity = await client.get_entity(source_group)
205
- count = 0 # Counter for fetched users
206
-
207
- with open(USER_CSV, mode="w", newline="", encoding="utf-8") as file:
208
- writer = csv.writer(file)
209
- writer.writerow(["user_id", "username", "first_name", "last_name"]) # CSV headers
210
-
211
- async for user in client.iter_participants(entity, limit=None, aggressive=True):
212
- if count >= max_users:
213
- break # Stop when max_users limit is reached
214
-
215
- writer.writerow([user.id, user.username or "N/A", user.first_name or "N/A", user.last_name or "N/A"])
216
- logging.info(f"✔ User saved: {user.id} | {user.username}")
217
 
218
- count += 1 # Increase user count
219
- await asyncio.sleep(1) # Delay to prevent rate limits
220
-
221
- logging.info(f"✅ Fetched {count}/{max_users} users from {source_group}. Saved to {USER_CSV}.")
222
-
223
- except Exception as e:
224
- logging.error(f"❌ Failed to fetch users from {source_group}: {e}")
225
- async def add_users_to_destination_group(destination_group, csvfile):
226
  """
227
- Reads users from the CSV file and adds them to the destination group while handling rate limits.
228
- Before adding, it fetches current members of the destination group and filters out any users already present.
229
-
230
  :param destination_group: The destination group username or ID.
231
- :param csvfile: The CSV file containing the list of user IDs.
 
 
232
  """
233
- logging.info(f"Adding users to {destination_group} from {csvfile}...")
234
 
235
  async with TelegramClient(session_dir, API_ID, API_HASH) as client:
236
- await client.start(PHONE_NUMBER)
237
 
238
  try:
239
  # Get the destination group entity
@@ -245,26 +135,19 @@ async def add_users_to_destination_group(destination_group, csvfile):
245
  existing_user_ids.add(user.id)
246
  logging.info(f"Fetched {len(existing_user_ids)} existing users from {destination_group}.")
247
 
248
- # Read users from CSV file and filter out those already in the destination group
249
  users = []
250
- with open(csvfile, mode="r", encoding="utf-8") as file:
251
- reader = csv.reader(file)
252
- # Skip header row
253
- header = next(reader, None)
254
- for row in reader:
255
- # Check if the first cell is a valid integer (skip row if not)
256
- try:
257
- user_id = int(row[0].strip())
258
- user_name = row[1].strip()
259
- except ValueError:
260
- logging.debug(f"Skipping row with non-numeric user_id: {row}")
261
- continue
262
- if user_id not in existing_user_ids and user_name != 'N/A':
263
- users.append(user_id)
264
-
265
-
266
-
267
- logging.info(f"Filtered CSV: {len(users)} users to add after removing existing members.")
268
 
269
  count = 0
270
  for index, user_id in enumerate(users, start=1):
@@ -273,7 +156,7 @@ async def add_users_to_destination_group(destination_group, csvfile):
273
  logging.info(f"[{index}/{len(users)}] Adding user {user_id} to {destination_group}...")
274
  await client(InviteToChannelRequest(dest_entity, [user_id]))
275
  logging.info(f"✅ Successfully added user {user_id}.")
276
-
277
  count += 1
278
  if count % BATCH_SIZE == 0: # Pause after each batch to avoid rate limits
279
  logging.info(f"⏳ Waiting {SLEEP_TIME} seconds to avoid rate limits...")
@@ -294,104 +177,27 @@ async def add_users_to_destination_group(destination_group, csvfile):
294
  except Exception as e:
295
  logging.error(f"❌ Failed to add users to {destination_group}: {e}")
296
 
297
- async def get_user_groups(client):
298
- """Fetch all groups/channels the user is already a member of using get_dialogs."""
299
- joined_groups = set()
300
- dialogs = await client.get_dialogs()
301
-
302
- # Filter only groups and channels
303
- groups = [d for d in dialogs if d.is_group or d.is_channel]
304
-
305
- for group in groups:
306
- username = f"https://t.me/{group.entity.username}" if hasattr(group.entity, "username") and group.entity.username else "private_group" # Get the group/channel ID
307
- joined_groups.add(username)
308
- logging.info(f"Joined group/channel: {group.entity.title} (ID: {username})")
309
 
310
- return joined_groups
 
311
 
312
 
313
- async def join_groups():
314
- async with TelegramClient(session_dir, API_ID, API_HASH) as client:
315
- await client.start(PHONE_NUMBER)
316
- me = await client.get_me()
317
- logging.info(f"Logged in as {me.first_name} (ID: {me.id})")
318
-
319
- # Fetch all groups/channels the user is already a member of
320
- user_groups = await get_user_groups(client)
321
- logging.info(f"✅ Retrieved {len(user_groups)} joined groups/channels.")
322
- logging.info(f"✅ Retrieved {user_groups} joined groups/channels.")
323
- # Read the CSV file containing group information
324
- with open(CSV_FILENAME, mode="r", newline="", encoding="utf-8") as file:
325
- reader = csv.reader(file)
326
- header = next(reader) # Skip header row
327
- groups = [row for row in reader]
328
-
329
- # Filter out groups the user is already a member of 1183631472
330
- filtered_groups = []
331
- for row in groups:
332
- phone_number, group_name, username, group_id = row
333
-
334
- if username and username in user_groups:
335
- logging.info(f"⚡ Already a member: {group_name} ({username}) - Skipping")
336
- else:
337
- filtered_groups.append(row)
338
-
339
- # Prepare output CSV file
340
- with open(OUTPUT_CSV, mode="a", newline="", encoding="utf-8") as output_file:
341
- writer = csv.writer(output_file)
342
- writer.writerow(header + ["status"]) # Add "status" column
343
-
344
- for index, row in enumerate(filtered_groups, start=2):
345
- phone_number, group_name, username, group_id = row
346
- status = ""
347
-
348
- try:
349
- if username != "private_group":
350
- # Join a public group/channel
351
- await client(JoinChannelRequest(username))
352
- status = "Joined (public)"
353
- logging.info(f"[{index}/{len(filtered_groups)}] ✅ Joined public group: {group_name} ({username})")
354
-
355
- # Sleep only after a successful join
356
- time.sleep(SLEEP_TIME)
357
- else:
358
- # Join a private group using its invite hash (group_id)
359
- await client(ImportChatInviteRequest(group_id))
360
- status = "Joined (private)"
361
- logging.info(f"[{index}/{len(filtered_groups)}] ✅ Joined private group: {group_name}")
362
-
363
- # Sleep only after a successful join
364
- time.sleep(SLEEP_TIME)
365
-
366
- except UserAlreadyParticipantError:
367
- status = "Already a member"
368
- logging.info(f"[{index}/{len(filtered_groups)}] ⚡ Already a member: {group_name} ({username})")
369
- except InviteHashExpiredError:
370
- status = "Failed (private) - Invite link expired"
371
- logging.error(f"[{index}/{len(filtered_groups)}] ❌ Failed to join private group: {group_name} - Invite link expired")
372
- except Exception as e:
373
- status = f"Failed - {e}"
374
- logging.error(f"[{index}/{len(filtered_groups)}] ❌ Failed to join {group_name}: {e}")
375
-
376
- writer.writerow(row + [status])
377
-
378
- logging.info(f"✅ Process completed. Results saved to {OUTPUT_CSV}")
379
-
380
-
381
- def run_telegram():
382
- asyncio.run(join_groups())
383
-
384
- def run_telegram_mov():
385
- asyncio.run(fetch_users_from_source_group("UT_CHEM", None))
386
- def run_telegram_mov2():
387
- asyncio.run(add_users_to_destination_group("@searchai090", 'UT_CHEM.csv'))
388
-
389
  if __name__ == "__main__":
 
 
 
 
390
  p1 = Process(target=run_flask)
391
- # p2 = Process(target=run_telegram)
392
- p2 = Process(target=run_telegram_mov2)
393
  p1.start()
394
- p2.start()
395
 
 
 
 
 
 
 
 
 
396
  p1.join()
397
- p2.join()
 
 
41
  logging.StreamHandler() # Log to console
42
  ])
43
 
44
+
45
+ # Replace with your API credentials (from https://my.telegram.org/apps) , "session/mbot1"
46
+ API_ID = 25216912 # Your API ID
47
  API_HASH = "f65f6050fe9b342a4996c59e4283ab5e"
48
+ PHONE_NUMBERS = [
49
+ "+967730426743",
50
+ "+967730446721",
51
+ "+967730436848"
52
+
53
+ ] # Your phone numbers with country code
54
+ OUTPUT_CSV = "groups_with_status.csv"
55
  # Path to your CSV file
56
  CSV_FILENAME = "8.csv"
57
+ SESSION_DIRS = [
58
+ "session/mbot1",
59
+ "session/mbot2",
60
+ "session/mbot3"
61
+
62
+ ]
63
  FILE_DIRECTORY = os.getcwd() # Current working directory
64
 
65
  SLEEP_TIME = 280
 
83
 
84
  def run_flask():
85
  app.run(host='0.0.0.0', port=7860)
86
+
87
+
88
  USER_CSV = "user_list.csv"
89
  # SLEEP_TIME = 280 # Delay between adding users
90
 
 
94
  BATCH_SIZE = 30 # Fetch 200 users at a time (Telegram's limit)
95
  MAX_USERS = 200 # Set your desired limit here
96
 
97
+ def split_csv(csvfile, num_parts):
 
 
98
  """
99
+ Splits the CSV file into `num_parts` chunks.
100
+ :param csvfile: Path to the CSV file.
101
+ :param num_parts: Number of parts to split the CSV into.
102
+ :return: List of lists, where each sublist contains rows for a specific part.
103
  """
104
+ with open(csvfile, mode="r", encoding="utf-8") as file:
105
+ reader = csv.reader(file)
106
+ rows = list(reader)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
107
 
108
+ # Split rows into `num_parts` chunks
109
+ chunk_size = len(rows) // num_parts
110
+ chunks = [rows[i:i + chunk_size] for i in range(0, len(rows), chunk_size)]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
111
 
112
+ return chunks
 
 
 
113
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
114
 
115
+ async def add_users_to_destination_group(destination_group, user_chunk, session_dir, phone_number):
 
 
 
 
 
 
 
116
  """
117
+ Adds users from a specific chunk to the destination group while handling rate limits.
 
 
118
  :param destination_group: The destination group username or ID.
119
+ :param user_chunk: A list of rows (users) from the CSV file.
120
+ :param session_dir: The session directory for the Telegram client.
121
+ :param phone_number: The phone number associated with the Telegram account.
122
  """
123
+ logging.info(f"Adding users to {destination_group} from chunk...")
124
 
125
  async with TelegramClient(session_dir, API_ID, API_HASH) as client:
126
+ await client.start(phone_number)
127
 
128
  try:
129
  # Get the destination group entity
 
135
  existing_user_ids.add(user.id)
136
  logging.info(f"Fetched {len(existing_user_ids)} existing users from {destination_group}.")
137
 
138
+ # Filter out users already in the destination group
139
  users = []
140
+ for row in user_chunk:
141
+ try:
142
+ user_id = int(row[0].strip())
143
+ user_name = row[1].strip()
144
+ except ValueError:
145
+ logging.debug(f"Skipping row with non-numeric user_id: {row}")
146
+ continue
147
+ if user_id not in existing_user_ids and user_name != 'N/A':
148
+ users.append(user_id)
149
+
150
+ logging.info(f"Filtered chunk: {len(users)} users to add after removing existing members.")
 
 
 
 
 
 
 
151
 
152
  count = 0
153
  for index, user_id in enumerate(users, start=1):
 
156
  logging.info(f"[{index}/{len(users)}] Adding user {user_id} to {destination_group}...")
157
  await client(InviteToChannelRequest(dest_entity, [user_id]))
158
  logging.info(f"✅ Successfully added user {user_id}.")
159
+
160
  count += 1
161
  if count % BATCH_SIZE == 0: # Pause after each batch to avoid rate limits
162
  logging.info(f"⏳ Waiting {SLEEP_TIME} seconds to avoid rate limits...")
 
177
  except Exception as e:
178
  logging.error(f"❌ Failed to add users to {destination_group}: {e}")
179
 
 
 
 
 
 
 
 
 
 
 
 
 
180
 
181
+ def run_telegram_mov2(session_dir, phone_number, user_chunk):
182
+ asyncio.run(add_users_to_destination_group("@searchai090", user_chunk, session_dir, phone_number))
183
 
184
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
185
  if __name__ == "__main__":
186
+ # Split the CSV file into chunks for each account
187
+ user_chunks = split_csv('UT_CHEM.csv', len(PHONE_NUMBERS))
188
+
189
+ # Start Flask server
190
  p1 = Process(target=run_flask)
 
 
191
  p1.start()
 
192
 
193
+ # Start Telegram processes
194
+ processes = []
195
+ for i, (session_dir, phone_number) in enumerate(zip(SESSION_DIRS, PHONE_NUMBERS)):
196
+ p = Process(target=run_telegram_mov2, args=(session_dir, phone_number, user_chunks[i]))
197
+ p.start()
198
+ processes.append(p)
199
+
200
+ # Wait for all processes to finish
201
  p1.join()
202
+ for p in processes:
203
+ p.join()
join_groups.log CHANGED
@@ -2971,3 +2971,32 @@
2971
  2025-02-11 23:45:46,711 - WARNING - ⚠️ FloodWait: Waiting 82371 seconds...
2972
  2025-02-11 23:49:22,702 - INFO - Disconnecting from 149.154.167.92:443/TcpFull...
2973
  2025-02-11 23:49:22,703 - INFO - Disconnection from 149.154.167.92:443/TcpFull complete!
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2971
  2025-02-11 23:45:46,711 - WARNING - ⚠️ FloodWait: Waiting 82371 seconds...
2972
  2025-02-11 23:49:22,702 - INFO - Disconnecting from 149.154.167.92:443/TcpFull...
2973
  2025-02-11 23:49:22,703 - INFO - Disconnection from 149.154.167.92:443/TcpFull complete!
2974
+ 2025-02-19 02:09:49,934 - INFO - Adding users to @searchai090 from UT_CHEM.csv...
2975
+ 2025-02-19 02:09:49,976 - INFO - Connecting to 149.154.167.51:443/TcpFull...
2976
+ 2025-02-19 02:09:51,573 - INFO - Connection to 149.154.167.51:443/TcpFull complete!
2977
+ 2025-02-19 02:11:36,730 - INFO - Adding users to @searchai090 from UT_CHEM.csv...
2978
+ 2025-02-19 02:11:36,732 - INFO - Connecting to 149.154.167.51:443/TcpFull...
2979
+ 2025-02-19 02:11:36,893 - INFO - Connection to 149.154.167.51:443/TcpFull complete!
2980
+ 2025-02-19 02:11:51,382 - INFO - Phone migrated to 4
2981
+ 2025-02-19 02:11:51,586 - INFO - Reconnecting to new data center 4
2982
+ 2025-02-19 02:11:51,795 - INFO - Disconnecting from 149.154.167.51:443/TcpFull...
2983
+ 2025-02-19 02:11:51,796 - INFO - Disconnection from 149.154.167.51:443/TcpFull complete!
2984
+ 2025-02-19 02:11:51,796 - INFO - Connecting to 149.154.167.91:443/TcpFull...
2985
+ 2025-02-19 02:11:53,944 - INFO - Connection to 149.154.167.91:443/TcpFull complete!
2986
+ 2025-02-19 02:12:10,839 - INFO - Fetched 97 existing users from @searchai090.
2987
+ 2025-02-19 02:12:10,852 - INFO - Filtered CSV: 2856 users to add after removing existing members.
2988
+ 2025-02-19 02:12:14,155 - INFO - Disconnecting from 149.154.167.91:443/TcpFull...
2989
+ 2025-02-19 02:12:14,158 - INFO - Disconnection from 149.154.167.91:443/TcpFull complete!
2990
+ 2025-02-19 02:13:49,099 - INFO - Adding users to @searchai090 from UT_CHEM.csv...
2991
+ 2025-02-19 02:13:49,118 - INFO - Connecting to 149.154.167.51:443/TcpFull...
2992
+ 2025-02-19 02:13:50,972 - INFO - Connection to 149.154.167.51:443/TcpFull complete!
2993
+ 2025-02-19 02:14:00,469 - INFO - Phone migrated to 4
2994
+ 2025-02-19 02:14:00,614 - INFO - Reconnecting to new data center 4
2995
+ 2025-02-19 02:14:00,751 - INFO - Disconnecting from 149.154.167.51:443/TcpFull...
2996
+ 2025-02-19 02:14:00,752 - INFO - Disconnection from 149.154.167.51:443/TcpFull complete!
2997
+ 2025-02-19 02:14:00,752 - INFO - Connecting to 149.154.167.91:443/TcpFull...
2998
+ 2025-02-19 02:14:02,315 - INFO - Connection to 149.154.167.91:443/TcpFull complete!
2999
+ 2025-02-19 02:14:15,818 - INFO - Fetched 97 existing users from @searchai090.
3000
+ 2025-02-19 02:14:15,836 - INFO - Filtered CSV: 2856 users to add after removing existing members.
3001
+ 2025-02-19 02:14:19,138 - INFO - Disconnecting from 149.154.167.91:443/TcpFull...
3002
+ 2025-02-19 02:14:19,141 - INFO - Disconnection from 149.154.167.91:443/TcpFull complete!
session/mbot2.session ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:942efa8eabeaba789a667d945a5f4ae3c3201f963ab5738b1db940afd9e68aa5
3
+ size 36864
session/mbot3.session ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:97179840522e85b0504859cb4ff95b2a8a254268cb00e63375865b4b13a79551
3
+ size 36864