xjf6b committed on
Commit edb8a16
1 Parent(s): 302fe60

Update merged2upload.py

Files changed (1)
  1. merged2upload.py +50 -39
merged2upload.py CHANGED
@@ -1,52 +1,63 @@
  import requests
  import os
- import logging
- import glob
- import tempfile

- logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

- def fetch_content(url):
-     proxy = os.getenv('PROXY')
-     proxies = {'http': proxy, 'https': proxy} if proxy else None
      try:
-         session = requests.Session()
-         session.trust_env = False
-         response = session.get(url, verify=False, proxies=proxies, timeout=10)
          response.raise_for_status()
-         return response.text
      except requests.RequestException as e:
-         logging.error(f"Error fetching {url} with proxy: {e}")
-         try:
-             # Try without proxy
-             session = requests.Session()
-             session.trust_env = False
-             response = session.get(url, verify=False, timeout=10)
-             response.raise_for_status()
-             return response.text
-         except requests.RequestException as e:
-             logging.error(f"Error fetching {url} without proxy: {e}")
-             return None
-
- # ... [rest of the functions remain the same]

  def main():
-     # Find the most recent temporary file
-     temp_dir = tempfile.gettempdir()
-     files = glob.glob(os.path.join(temp_dir, 'subscribes_*.txt'))
-     if not files:
-         logging.error("No subscribes file found. Exiting.")
-         return
-     file_path = max(files, key=os.path.getctime)
-
-     try:
-         with open(file_path, 'r') as file:
-             urls = file.read().strip().split('\n')
-     except FileNotFoundError:
-         logging.error(f"Error: {file_path} not found. Exiting.")
-         return

-     # ... [rest of the main function remains the same]

  if __name__ == "__main__":
      main()
 
  import requests
+ import base64
  import os

+ def fetch_and_decode_base64(url):
+     print(url)
+     try:
+         response = requests.get(url, verify=False)
+         response.raise_for_status()
+         decoded_content = base64.b64decode(response.text)
+         return decoded_content.decode('utf-8')
+     except requests.RequestException as e:
+         print(f"Error fetching {url}: {e}")
+         return None

+ def upload_to_gist(content, gist_id, github_token):
+     url = f"https://api.github.com/gists/{gist_id}"
+     headers = {
+         'Authorization': f'token {github_token}',
+         'Accept': 'application/vnd.github.v3+json'
+     }
+     data = {
+         "files": {
+             "configsub.yaml": {
+                 "content": content
+             }
+         }
+     }
      try:
+         response = requests.patch(url, headers=headers, json=data)
          response.raise_for_status()
+         print(f"Successfully updated Gist: {gist_id}")
      except requests.RequestException as e:
+         print(f"Error updating Gist: {e}")

  def main():
+     file_path = '/app/aggregator/data/subscribes.txt'
+
+     with open(file_path, 'r') as file:
+         urls = file.read().strip().split('\n')
+
+     all_decoded_texts = []
+
+     for url in urls:
+         decoded_content = fetch_and_decode_base64(url)
+         if decoded_content:
+             all_decoded_texts.append(decoded_content)
+
+     merged_content = "\n".join(all_decoded_texts)
+     encoded_merged_content = base64.b64encode(merged_content.encode('utf-8')).decode('utf-8')
+
+     merged_file_path = '/app/aggregator/data/merged.txt'
+     with open(merged_file_path, 'w') as file:
+         file.write(encoded_merged_content)
+     print(f"Encoded merged content written to {merged_file_path}")

+     # Upload the merged content to the Gist
+     github_token = os.getenv('GITHUB_TOKEN')
+     gist_id = os.getenv('GITHUB_GIST_ID')
+     upload_to_gist(encoded_merged_content, gist_id, github_token)

  if __name__ == "__main__":
      main()
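
Not part of the commit: a minimal sketch of how the updated script might be exercised locally, assuming the GITHUB_TOKEN and GITHUB_GIST_ID environment variables read in main() are set; the placeholder values and the subprocess invocation are illustrative only.

# Hypothetical local run; the env var names come from the diff, the values are placeholders.
import os
import subprocess

os.environ['GITHUB_TOKEN'] = '<personal-access-token>'  # placeholder token
os.environ['GITHUB_GIST_ID'] = '<gist-id>'              # placeholder gist id
subprocess.run(['python', 'merged2upload.py'], check=True)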