xjf6b committed on
Commit
32cc9d5
·
verified ·
1 Parent(s): 8a0a3c0

Update merged2upload.py

Browse files
Files changed (1) hide show
  1. merged2upload.py +60 -16
merged2upload.py CHANGED
@@ -3,16 +3,37 @@ import base64
3
  import os
4
  import time
5
  import schedule
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
 
7
  def fetch_and_decode_base64(url):
8
- print(url)
9
  try:
10
- response = requests.get(url, verify=False)
11
  response.raise_for_status()
12
  decoded_content = base64.b64decode(response.text)
13
  return decoded_content.decode('utf-8')
14
  except requests.RequestException as e:
15
- print(f"Error fetching {url}: {e}")
16
  return None
17
 
18
  def upload_to_gist(content, gist_id, github_token):
@@ -29,23 +50,42 @@ def upload_to_gist(content, gist_id, github_token):
29
  }
30
  }
31
  try:
32
- response = requests.patch(url, headers=headers, json=data)
33
  response.raise_for_status()
34
- print(f"Successfully updated Gist: {gist_id}")
35
  except requests.RequestException as e:
36
- print(f"Error updating Gist: {e}")
 
 
 
 
 
 
 
 
 
 
37
 
38
  def run_task():
39
- print("执行任务...")
40
-
41
- # 运行 collect.py
42
- os.system("cd /app/aggregator && python -u subscribe/collect.py -si")
43
 
 
 
 
 
44
  file_path = '/app/aggregator/data/subscribes.txt'
 
 
 
 
45
 
46
  with open(file_path, 'r') as file:
47
  urls = file.read().strip().split('\n')
48
 
 
 
 
 
49
  all_decoded_texts = []
50
 
51
  for url in urls:
@@ -53,31 +93,35 @@ def run_task():
53
  if decoded_content:
54
  all_decoded_texts.append(decoded_content)
55
 
 
 
 
 
56
  merged_content = "\n".join(all_decoded_texts)
57
  encoded_merged_content = base64.b64encode(merged_content.encode('utf-8')).decode('utf-8')
58
 
59
  merged_file_path = '/app/aggregator/data/merged.txt'
60
  with open(merged_file_path, 'w') as file:
61
  file.write(encoded_merged_content)
62
- print(f"Encoded merged content written to {merged_file_path}")
63
 
64
- # Upload the merged content to the Gist
65
  github_token = os.getenv('GITHUB_TOKEN')
66
  gist_id = os.getenv('GITHUB_GIST_ID')
67
  upload_to_gist(encoded_merged_content, gist_id, github_token)
68
 
69
- print("任务完成")
70
 
71
  def main():
72
- # 立即运行一次任务
73
  run_task()
74
 
75
- # 设置每6小时运行一次任务
76
  schedule.every(6).hours.do(run_task)
77
 
78
- while True:
79
  schedule.run_pending()
80
  time.sleep(1)
81
 
 
 
82
  if __name__ == "__main__":
83
  main()
 
import os
import time
import schedule
import signal
import sys
import logging
from urllib3.exceptions import InsecureRequestWarning

# Timestamped log lines make the 6-hourly scheduled runs easy to correlate.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# Subscription URLs are fetched with verify=False; silence the resulting
# InsecureRequestWarning noise from urllib3.
requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)

# Polled by the scheduler loop in main(); flipped to False by the signal
# handler so the process can shut down gracefully.
# NOTE(review): `sys` is imported but not used anywhere visible — confirm
# before removing.
running = True


def signal_handler(signum, frame):
    """Request a graceful shutdown of the scheduler loop (SIGINT/SIGTERM)."""
    global running
    logging.info("Received signal to terminate. Shutting down gracefully...")
    running = False


signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
 
28
def fetch_and_decode_base64(url):
    """Fetch `url` and return its body decoded from base64 as UTF-8 text.

    Returns None on any fetch or decode failure so the caller can skip a
    bad subscription source instead of aborting the whole merge run.
    """
    logging.info(f"Fetching: {url}")
    try:
        # NOTE(review): verify=False deliberately disables TLS verification
        # (the matching urllib3 warning is silenced at import time) — confirm
        # the sources really need this before hardening.
        response = requests.get(url, verify=False, timeout=30)
        response.raise_for_status()
        decoded_content = base64.b64decode(response.text)
        return decoded_content.decode('utf-8')
    except requests.RequestException as e:
        logging.error(f"Error fetching {url}: {e}")
        return None
    except (ValueError, UnicodeDecodeError) as e:
        # Bug fix: a body that is not valid base64 raises binascii.Error
        # (a ValueError), and non-UTF-8 payloads raise UnicodeDecodeError;
        # neither is a RequestException, so they previously escaped this
        # function and crashed the entire task.  Treat them as a bad source.
        logging.error(f"Error decoding content from {url}: {e}")
        return None
38
 
39
  def upload_to_gist(content, gist_id, github_token):
 
50
  }
51
  }
52
  try:
53
+ response = requests.patch(url, headers=headers, json=data, timeout=30)
54
  response.raise_for_status()
55
+ logging.info(f"Successfully updated Gist: {gist_id}")
56
  except requests.RequestException as e:
57
+ logging.error(f"Error updating Gist: {e}")
58
+
59
+ def run_collect_script():
60
+ logging.info("Running collect.py script...")
61
+ try:
62
+ os.system("cd /app/aggregator && python -u subscribe/collect.py -si")
63
+ logging.info("collect.py script completed successfully")
64
+ except Exception as e:
65
+ logging.error(f"Error running collect.py script: {e}")
66
+ return False
67
+ return True
68
 
69
  def run_task():
70
+ logging.info("Starting task execution...")
 
 
 
71
 
72
+ if not run_collect_script():
73
+ logging.warning("Failed to run collect.py, skipping this task execution")
74
+ return
75
+
76
  file_path = '/app/aggregator/data/subscribes.txt'
77
+
78
+ if not os.path.exists(file_path):
79
+ logging.warning(f"File {file_path} does not exist, skipping this task execution")
80
+ return
81
 
82
  with open(file_path, 'r') as file:
83
  urls = file.read().strip().split('\n')
84
 
85
+ if not urls:
86
+ logging.warning("subscribes.txt is empty, skipping this task execution")
87
+ return
88
+
89
  all_decoded_texts = []
90
 
91
  for url in urls:
 
93
  if decoded_content:
94
  all_decoded_texts.append(decoded_content)
95
 
96
+ if not all_decoded_texts:
97
+ logging.warning("No content was successfully decoded, skipping this task execution")
98
+ return
99
+
100
  merged_content = "\n".join(all_decoded_texts)
101
  encoded_merged_content = base64.b64encode(merged_content.encode('utf-8')).decode('utf-8')
102
 
103
  merged_file_path = '/app/aggregator/data/merged.txt'
104
  with open(merged_file_path, 'w') as file:
105
  file.write(encoded_merged_content)
106
+ logging.info(f"Encoded merged content written to {merged_file_path}")
107
 
 
108
  github_token = os.getenv('GITHUB_TOKEN')
109
  gist_id = os.getenv('GITHUB_GIST_ID')
110
  upload_to_gist(encoded_merged_content, gist_id, github_token)
111
 
112
+ logging.info("Task execution completed")
113
 
114
def main():
    """Run the merge task once at startup, then every six hours until a
    termination signal flips the module-level `running` flag."""
    logging.info("Script started")

    # Immediate first run so a fresh container produces output right away.
    run_task()

    # Re-run on a fixed 6-hour cadence.
    schedule.every(6).hours.do(run_task)

    # Poll the scheduler once per second; SIGINT/SIGTERM end the loop.
    while True:
        if not running:
            break
        schedule.run_pending()
        time.sleep(1)

    logging.info("Script terminated")


if __name__ == "__main__":
    main()