# ceshidddyyy/merged2upload.py
import base64
import logging
import os
import signal
import subprocess
import time

import requests
import schedule
import urllib3
from urllib3.exceptions import InsecureRequestWarning

# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# Disable insecure request warnings
urllib3.disable_warnings(category=InsecureRequestWarning)
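# Note: the subscription fetches below use verify=False (no TLS certificate
# checks), which is why InsecureRequestWarning is silenced above.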
# Global flag that keeps the main loop running
running = True

def signal_handler(signum, frame):
    global running
    logging.info("Received signal to terminate. Shutting down gracefully...")
    running = False

signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
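# Overall flow of each task run: execute the aggregator's collect.py, read the
# generated subscribes.txt, fetch and base64-decode every subscription URL,
# merge the decoded results, re-encode the merge, write merged.txt, and
# PATCH the result into a GitHub Gist.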
def fetch_and_decode_base64(url):
    """Fetch a subscription URL and return its base64-decoded text, or None on failure."""
    logging.info(f"Fetching: {url}")
    try:
        response = requests.get(url, verify=False, timeout=30)
        response.raise_for_status()
        decoded_content = base64.b64decode(response.text)
        return decoded_content.decode('utf-8')
    except requests.RequestException as e:
        logging.error(f"Error fetching {url}: {e}")
    except (ValueError, UnicodeDecodeError) as e:
        # base64.b64decode raises binascii.Error (a ValueError subclass) on bad input
        logging.error(f"Error decoding content from {url}: {e}")
    return None
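# Example (hypothetical URL): fetch_and_decode_base64("https://example.com/sub")
# returns the decoded subscription text on success, or None on any failure,
# which lets run_task() skip dead sources.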
def upload_to_gist(content, gist_id, github_token):
    """PATCH the content into the configsub.yaml file of the given Gist; return True on success."""
    url = f"https://api.github.com/gists/{gist_id}"
    headers = {
        'Authorization': f'token {github_token}',
        'Accept': 'application/vnd.github.v3+json'
    }
    data = {
        "files": {
            "configsub.yaml": {
                "content": content
            }
        }
    }
    try:
        response = requests.patch(url, headers=headers, json=data, timeout=30)
        response.raise_for_status()
        logging.info(f"Successfully updated Gist: {gist_id}")
        return True
    except requests.RequestException as e:
        logging.error(f"Error updating Gist: {e}")
        return False
def run_collect_script():
    """Run the aggregator's collect.py; return True only if it exits with status 0."""
    logging.info("Running collect.py script...")
    try:
        # os.system would not raise on a non-zero exit, so run the script via
        # subprocess and check the return code explicitly
        result = subprocess.run(["python", "-u", "subscribe/collect.py", "-si"],
                                cwd="/app/aggregator")
        if result.returncode != 0:
            logging.error(f"collect.py exited with code {result.returncode}")
            return False
    except OSError as e:
        logging.error(f"Error running collect.py script: {e}")
        return False
    logging.info("collect.py script completed successfully")
    return True
def run_task():
    logging.info("Starting task execution...")
    if not run_collect_script():
        logging.warning("Failed to run collect.py, skipping this task execution")
        return
    file_path = '/app/aggregator/data/subscribes.txt'
    if not os.path.exists(file_path):
        logging.warning(f"File {file_path} does not exist, skipping this task execution")
        return
    with open(file_path, 'r') as file:
        # Keep non-empty lines only; a bare split('\n') on an empty file would
        # yield [''] and defeat the emptiness check below
        urls = [line.strip() for line in file if line.strip()]
    if not urls:
        logging.warning("subscribes.txt is empty, skipping this task execution")
        return
    all_decoded_texts = []
    for url in urls:
        decoded_content = fetch_and_decode_base64(url)
        if decoded_content:
            all_decoded_texts.append(decoded_content)
    if not all_decoded_texts:
        logging.warning("No content was successfully decoded, skipping this task execution")
        return
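    # Re-encode the merged plain-text list as base64 so merged.txt and the Gist
    # keep the same format as the upstream subscription sources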
    merged_content = "\n".join(all_decoded_texts)
    encoded_merged_content = base64.b64encode(merged_content.encode('utf-8')).decode('utf-8')
    merged_file_path = '/app/aggregator/data/merged.txt'
    with open(merged_file_path, 'w') as file:
        file.write(encoded_merged_content)
    logging.info(f"Encoded merged content written to {merged_file_path}")
    github_token = os.getenv('GITHUB_TOKEN')
    gist_id = os.getenv('GITHUB_GIST_ID')
    if not github_token or not gist_id:
        logging.warning("GITHUB_TOKEN or GITHUB_GIST_ID is not set, skipping Gist upload")
        return
    upload_to_gist(encoded_merged_content, gist_id, github_token)
    logging.info("Task execution completed")
def main():
    logging.info("Script started")
    run_task()
    schedule.every(6).hours.do(run_task)
    while running:
        schedule.run_pending()
        time.sleep(1)
    logging.info("Script terminated")

if __name__ == "__main__":
    main()