import requests
import base64
import os
import time
import schedule
import signal
import logging
from urllib3.exceptions import InsecureRequestWarning
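
# Note: `requests` and `schedule` are third-party packages (pip install requests schedule);
# the remaining imports are from the standard library.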

# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# Suppress InsecureRequestWarning: the requests below intentionally use verify=False
requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)

# Global flag that controls the main scheduler loop
running = True

def signal_handler(signum, frame):
    global running
    logging.info("Received signal to terminate. Shutting down gracefully...")
    running = False

signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
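
# SIGINT (Ctrl+C) and SIGTERM (e.g. from `docker stop`) both clear the `running`
# flag, so the scheduler loop in main() exits after its current one-second sleep.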

def fetch_and_decode_base64(url):
    logging.info(f"Fetching: {url}")
    try:
        response = requests.get(url, verify=False, timeout=30)
        response.raise_for_status()
        decoded_content = base64.b64decode(response.text)
        return decoded_content.decode('utf-8')
    except (requests.RequestException, ValueError) as e:
        # ValueError also covers binascii.Error (malformed base64) and
        # UnicodeDecodeError, so a bad payload cannot crash the caller's loop
        logging.error(f"Error fetching {url}: {e}")
        return None
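
# Usage sketch (hypothetical URL): fetch_and_decode_base64("https://example.com/sub")
# returns the decoded subscription text on success, or None so the caller can skip it.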

def upload_to_gist(content, gist_id, github_token):
    url = f"https://api.github.com/gists/{gist_id}"
    headers = {
        'Authorization': f'token {github_token}',
        'Accept': 'application/vnd.github.v3+json'
    }
    data = {
        "files": {
            "configsub.yaml": {
                "content": content
            }
        }
    }
    try:
        response = requests.patch(url, headers=headers, json=data, timeout=30)
        response.raise_for_status()
        logging.info(f"Successfully updated Gist: {gist_id}")
    except requests.RequestException as e:
        logging.error(f"Error updating Gist: {e}")

def run_collect_script():
    logging.info("Running collect.py script...")
    # os.system does not raise on command failure, so inspect its exit status
    # instead of wrapping it in try/except
    exit_status = os.system("cd /app/aggregator && python -u subscribe/collect.py -si")
    if exit_status != 0:
        logging.error(f"collect.py script failed with exit status {exit_status}")
        return False
    logging.info("collect.py script completed successfully")
    return True
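
# On POSIX systems os.system() returns the raw wait status, so any non-zero
# value (a non-zero exit code or a terminating signal) is treated as failure.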

def run_task():
    logging.info("Starting task execution...")
    if not run_collect_script():
        logging.warning("Failed to run collect.py, skipping this task execution")
        return
    file_path = '/app/aggregator/data/subscribes.txt'
    if not os.path.exists(file_path):
        logging.warning(f"File {file_path} does not exist, skipping this task execution")
        return
    with open(file_path, 'r') as file:
        # splitlines() plus filtering drops blank lines; a bare split('\n') on an
        # empty file would yield [''] and defeat the emptiness check below
        urls = [line.strip() for line in file.read().splitlines() if line.strip()]
    if not urls:
        logging.warning("subscribes.txt is empty, skipping this task execution")
        return
    all_decoded_texts = []
    for url in urls:
        decoded_content = fetch_and_decode_base64(url)
        if decoded_content:
            all_decoded_texts.append(decoded_content)
    if not all_decoded_texts:
        logging.warning("No content was successfully decoded, skipping this task execution")
        return
    merged_content = "\n".join(all_decoded_texts)
    encoded_merged_content = base64.b64encode(merged_content.encode('utf-8')).decode('utf-8')
    merged_file_path = '/app/aggregator/data/merged.txt'
    with open(merged_file_path, 'w') as file:
        file.write(encoded_merged_content)
    logging.info(f"Encoded merged content written to {merged_file_path}")
    github_token = os.getenv('GITHUB_TOKEN')
    gist_id = os.getenv('GITHUB_GIST_ID')
    if not github_token or not gist_id:
        logging.error("GITHUB_TOKEN or GITHUB_GIST_ID is not set, skipping Gist upload")
        return
    upload_to_gist(encoded_merged_content, gist_id, github_token)
    logging.info("Task execution completed")

def main():
    logging.info("Script started")
    run_task()
    schedule.every(6).hours.do(run_task)
    while running:
        schedule.run_pending()
        time.sleep(1)
    logging.info("Script terminated")

if __name__ == "__main__":
    main()
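
# To run locally (hypothetical values and file name):
#   GITHUB_TOKEN=ghp_xxx GITHUB_GIST_ID=abc123 python app.py
# The script runs the task once immediately, then repeats it every 6 hours
# until it receives SIGINT or SIGTERM.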