import gzip
import json
import re
from pathlib import Path

import requests
from urlextract import URLExtract

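# Target URL prefixes for each repository type and the README path suffixes.
# utid is used both as the input-file prefix ("<utid>_<type>.txt") and the
# output file name. Note that Hugging Face serves raw markdown at
# /raw/main/README.md, while the /blob/main/README.md suffix used for source
# repos returns GitHub's rendered HTML page rather than raw markdown.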
utid = 'fchernow'

base = {
    'model': 'https://huggingface.co/',
    'data': 'https://huggingface.co/datasets/',
    'source': 'https://'
}

post_hf = '/raw/main/README.md'
post_gh = '/blob/main/README.md'

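# Extraction targets: any URL (via urlextract), DOIs of the form "10.NNNN/...",
# and top-level BibTeX entries ("@type{...}"). The BibTeX pattern stops at the
# first closing brace, so nested braces inside an entry are not captured.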
extU = URLExtract()
DOIpattern = r'\b(10\.\d{4,9}/[-._;()/:A-Z0-9]+)\b'
BibTeXPattern = r'@\w+\{[^}]+\}'

REQUEST_TIMEOUT = 10

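# Helper wrappers applied to the text of each fetched README.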
def extractURLs(content):
    return extU.find_urls(content)


def extractDOIs(content):
    return re.findall(DOIpattern, content, re.IGNORECASE)


def extractBibTeX(content):
    return re.findall(BibTeXPattern, content, re.DOTALL)


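# Results are streamed into a single gzipped JSON-lines file: one JSON object
# per processed repository.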
output_path = f"output_data/{utid}.json.gz"
Path("output_data").mkdir(parents=True, exist_ok=True)

successful_entries = 0

with gzip.open(output_path, 'wt', encoding='utf-8') as fo:

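    # Fetch the README for every entry listed in "<utid>_<tp>.txt", extract
    # URLs / DOIs / BibTeX, and append one JSON record per entry to the archive.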
    def run(tp):
        global successful_entries

        post_suffix = post_hf if tp != 'source' else post_gh

        with open(f"{utid}_{tp}.txt", 'r', encoding='utf-8') as f:
            line_count = 0
            for line in f:
                line = line.strip()
                if not line:
                    continue

                if tp == 'source':
                    # Expected format: "<npapers>;<repo path without https://>".
                    if ';' in line:
                        npapers, line = line.split(';', 1)
                    else:
                        continue

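                # Build the README URL and fetch it; a failed request is
                # recorded rather than aborting the whole run.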
                url = base[tp] + f"{line}{post_suffix}"

                try:
                    r = requests.get(url, timeout=REQUEST_TIMEOUT)
                    r.raise_for_status()
                    content = r.text
                    status = "success"
                except requests.RequestException:
                    content = ""
                    status = "failed"

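                # Extract references from whatever content was fetched
                # (empty string when the request failed).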
                urls = extractURLs(content)
                dois = extractDOIs(content)
                bibs = extractBibTeX(content)

                res = {
                    'id': line,
                    'type': tp,
                    'url': url,
                    'content': content.replace("\n", " ") if content else "",
                    'links': urls,
                    'dois': dois,
                    'bibs': bibs,
                    'status': status
                }
                out = json.dumps(res, ensure_ascii=False)
                fo.write(out + "\n")

                if status == "success":
                    successful_entries += 1

                line_count += 1

            print(f"Processed {line_count} lines in {tp} file.")

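    # Process the three ID lists: Hugging Face models, datasets, then source repos.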
    run('model')
    run('data')
    run('source')

print(f"Data successfully saved to {output_path} with {successful_entries} successful entries.")