"""Filter the airtrain-ai/fineweb-edu-fortified dataset by educational score.

Streams every Common Crawl snapshot config in parallel, keeps entries whose
score meets the threshold, and appends them to per-folder CSV and text files
with resumable progress checkpoints.
"""

import csv
import os
import signal
import sys
import time
from functools import partial
from multiprocessing import Pool, Manager

import requests
from datasets import load_dataset

# Minimum score an entry must reach to be kept.
score_threshold = 4
error_log_file = "error_log.txt"
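
# NOTE (assumption): FineWeb-Edu scores come from an educational-quality
# classifier on roughly a 0-5 scale, so a threshold of 4 keeps only the
# highest-scoring entries. Verify the range for your copy of the dataset.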
|

# Common Crawl snapshot configs available in the dataset.
dataset_folders = [
    "CC-MAIN-2013-20", "CC-MAIN-2013-48", "CC-MAIN-2014-10", "CC-MAIN-2014-15",
    "CC-MAIN-2014-23", "CC-MAIN-2014-35", "CC-MAIN-2014-41", "CC-MAIN-2014-42",
    "CC-MAIN-2014-49", "CC-MAIN-2014-52", "CC-MAIN-2015-06", "CC-MAIN-2015-11",
    "CC-MAIN-2015-14", "CC-MAIN-2015-18", "CC-MAIN-2015-22", "CC-MAIN-2015-27",
    "CC-MAIN-2015-32", "CC-MAIN-2015-35", "CC-MAIN-2015-40", "CC-MAIN-2015-48",
    "CC-MAIN-2016-07", "CC-MAIN-2016-18", "CC-MAIN-2016-22", "CC-MAIN-2016-26",
    "CC-MAIN-2016-30", "CC-MAIN-2016-36", "CC-MAIN-2016-40", "CC-MAIN-2016-44",
    "CC-MAIN-2016-50", "CC-MAIN-2017-04", "CC-MAIN-2017-09", "CC-MAIN-2017-13",
    "CC-MAIN-2017-17", "CC-MAIN-2017-22", "CC-MAIN-2017-26", "CC-MAIN-2017-30",
    "CC-MAIN-2017-34", "CC-MAIN-2017-39", "CC-MAIN-2017-43", "CC-MAIN-2017-47",
    "CC-MAIN-2017-51", "CC-MAIN-2018-05", "CC-MAIN-2018-09", "CC-MAIN-2018-13",
    "CC-MAIN-2018-17", "CC-MAIN-2018-22", "CC-MAIN-2018-26", "CC-MAIN-2018-30",
    "CC-MAIN-2018-34", "CC-MAIN-2018-39", "CC-MAIN-2018-43", "CC-MAIN-2018-47",
    "CC-MAIN-2018-51", "CC-MAIN-2019-04", "CC-MAIN-2019-09", "CC-MAIN-2019-13",
    "CC-MAIN-2019-18", "CC-MAIN-2019-22", "CC-MAIN-2019-26", "CC-MAIN-2019-30",
    "CC-MAIN-2019-35", "CC-MAIN-2019-39", "CC-MAIN-2019-43", "CC-MAIN-2019-47",
    "CC-MAIN-2019-51", "CC-MAIN-2020-05", "CC-MAIN-2020-10", "CC-MAIN-2020-16",
    "CC-MAIN-2020-24", "CC-MAIN-2020-29", "CC-MAIN-2020-34", "CC-MAIN-2020-40",
    "CC-MAIN-2020-45", "CC-MAIN-2020-50", "CC-MAIN-2021-04", "CC-MAIN-2021-10",
    "CC-MAIN-2021-17", "CC-MAIN-2021-21", "CC-MAIN-2021-25", "CC-MAIN-2021-31",
    "CC-MAIN-2021-39", "CC-MAIN-2021-43", "CC-MAIN-2021-49", "CC-MAIN-2022-05",
    "CC-MAIN-2022-21", "CC-MAIN-2022-27", "CC-MAIN-2022-33", "CC-MAIN-2022-40",
    "CC-MAIN-2022-49", "CC-MAIN-2023-06", "CC-MAIN-2023-14", "CC-MAIN-2023-23",
    "CC-MAIN-2023-40", "CC-MAIN-2023-50", "CC-MAIN-2024-10"
]
|

# Shared flag the workers poll so they can stop gracefully on interrupt.
interrupt_flag = Manager().Value('i', False)
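# NOTE (assumption): the flag is created at import time and relies on the
# "fork" start method (the default on Linux), so pool workers inherit the
# same Manager proxy; under "spawn" each worker would get its own flag.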
|


def log_error(error_message):
    """Append an error message to the shared error log."""
    with open(error_log_file, "a") as error_log:
        error_log.write(f"{error_message}\n")
|


def retry_request(load_dataset_function, max_retries=5, wait_time=5):
    """Call load_dataset_function, retrying transient failures up to max_retries."""
    retries = 0
    while retries < max_retries:
        try:
            return load_dataset_function()
        except requests.exceptions.ConnectionError as e:
            log_error(f"Connection error: {e}. Retrying in {wait_time} seconds...")
        except Exception as e:
            log_error(f"Unexpected error: {e}. Retrying in {wait_time} seconds...")
        retries += 1
        time.sleep(wait_time)
    log_error("Max retries exceeded.")
    return None
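
# Design note: a fixed wait keeps the retry loop simple; exponential backoff
# (e.g. time.sleep(wait_time * 2 ** retries)) is a common alternative when
# the server is rate-limiting rather than briefly unreachable.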
|


def save_text_column(entry, output_text_file):
    """Append an entry's text, wrapped in <s>...</s> markers, to the text file."""
    try:
        text = entry["text"]
        with open(output_text_file, "a", encoding='utf-8') as f:
            f.write(f"<s>\n{text}</s>\n")
    except KeyError as e:
        log_error(f"Missing 'text' field: {e}")


def save_to_csv(entry, output_csv_file, write_header=False):
    """Append an entry's score, text, and URL as one CSV row."""
    try:
        with open(output_csv_file, mode='a', newline='', encoding='utf-8') as file:
            writer = csv.writer(file)
            if write_header:
                writer.writerow(["score", "text", "url"])
            writer.writerow([entry["score"], entry["text"], entry.get("url", "N/A")])
    except KeyError as e:
        log_error(f"Missing field in entry: {e}")
|


def signal_handler(sig, frame):
    """Tell all workers to stop, then exit this process."""
    print("Interrupt received, saving progress and exiting...")
    interrupt_flag.value = True
    sys.exit(0)


signal.signal(signal.SIGINT, signal_handler)
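# The handler is installed at import time, so forked pool workers inherit it
# and each can flip the shared flag when Ctrl+C reaches the process group.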
|


def process_folder(folder, score_threshold):
    """Stream one snapshot config and save entries that meet score_threshold."""
    log_file = f"processing_log_{folder}.txt"

    def log_progress(last_id):
        """Checkpoint the id of the last entry seen."""
        with open(log_file, "w") as log:
            log.write(f"{last_id}")

    def resume_progress():
        """Return the checkpointed entry id, or None when starting fresh."""
        if os.path.exists(log_file):
            with open(log_file, "r") as log:
                last_id = log.read().strip()
            if last_id in ('None', ''):
                return None
            return last_id
        return None
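
    # The checkpoint file holds a single line with the last entry id seen;
    # on restart the stream is replayed and entries are skipped until that
    # id comes around again.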
|

    print(f"Processing dataset folder: {folder}")

    output_text_file = f"forti-sampled_text_dataset_{folder}.txt"
    output_csv_file = f"forti-sampled_dataset_{folder}.csv"

    dataset = retry_request(lambda: load_dataset(
        "airtrain-ai/fineweb-edu-fortified",
        folder,
        split="train",
        streaming=True
    ))
    if not dataset:
        log_error(f"Failed to load dataset {folder}. Skipping.")
        return

    last_processed_id = resume_progress()

    # Skip ahead to the checkpointed entry unless starting fresh.
    found_last_id = last_processed_id is None
    processed_entries = 0
    # Write the CSV header only when the file does not exist yet, so a
    # resumed run does not repeat it mid-file.
    write_header = not os.path.exists(output_csv_file)

    while True:
        try:
            for entry in dataset:
                if interrupt_flag.value:
                    break

                entry_id = entry.get('id')
                if not found_last_id:
                    if entry_id == last_processed_id:
                        found_last_id = True
                    continue

                last_processed_id = entry_id

                if entry.get('score', 0) >= score_threshold:
                    save_to_csv(entry, output_csv_file, write_header=write_header)
                    write_header = False
                    save_text_column(entry, output_text_file)

                    processed_entries += 1

                    if processed_entries % 100 == 0:
                        log_progress(last_processed_id)
                        print(f"Processed {processed_entries} entries from {folder}...")

            break  # stream exhausted or interrupted: leave the retry loop
|

        except requests.exceptions.ConnectionError as e:
            log_error(f"Connection error during iteration in {folder}: {e}")
            print(f"Connection error during iteration in {folder}: {e}. Retrying in 5 seconds...")
            time.sleep(5)

            dataset = retry_request(lambda: load_dataset(
                "airtrain-ai/fineweb-edu-fortified",
                folder,
                split="train",
                streaming=True
            ))
            if not dataset:
                log_error(f"Failed to reload dataset {folder} after connection error. Skipping.")
                break

            # The reloaded stream starts from the beginning, so rewind the
            # skip marker and fast-forward to the checkpoint again.
            found_last_id = False
|

        except Exception as e:
            log_error(f"Error during processing in {folder}: {e}")
            print(f"Error during processing in {folder}: {e}. Restarting from last checkpoint.")
            # Re-iterating a streaming dataset also starts from the beginning,
            # so rewind the skip marker here as well to avoid duplicates.
            found_last_id = False
            continue
|

    log_progress(last_processed_id)
    print(f"Completed processing folder: {folder}")

    if interrupt_flag.value:
        print(f"Processing interrupted in folder: {folder}")
|


def process_all_folders_parallel(dataset_folders, score_threshold):
    """Process every folder in parallel, one worker per CPU core."""
    with Pool(processes=os.cpu_count()) as pool:
        try:
            func = partial(process_folder, score_threshold=score_threshold)
            pool.map(func, dataset_folders)
        except KeyboardInterrupt:
            print("KeyboardInterrupt received, terminating pool...")
            pool.terminate()
            pool.join()
            print("Pool terminated.")
            interrupt_flag.value = True

    print("Processing complete.")
|


if __name__ == "__main__":
    process_all_folders_parallel(dataset_folders, score_threshold)
    print("Filtered datasets saved to individual files per folder.")