# Source metadata (from file viewer): 1,901 bytes, commit ae1fbb3.
# Text_toolkit.
puncts_replacements = {
"“": '"',
"”": '"',
"’": "'",
"\n\n\n\n": "\n\n",
"\n\n\n": "\n\n",
"\xa0": " ",
"—": "-",
". …": "...",
"…": "...",
" ": " ",
", , ": ", ",
". ": ". ",
", ": ", ",
"; ": "; ",
}
def normalize_puncts(html: str, replacements: "dict[str, str] | None" = None) -> str:
    """Strip surrounding whitespace and apply ordered literal substitutions.

    Args:
        html: Raw text (typically extracted from HTML) to normalize.
        replacements: Mapping of literal substrings to their replacements,
            applied in dict insertion order.  Defaults to the module-level
            ``puncts_replacements`` table (smart quotes, ellipses, excess
            blank lines, ...).

    Returns:
        The normalized text.
    """
    if replacements is None:
        replacements = puncts_replacements
    text = html.strip()
    # Substitutions are order-sensitive (e.g. four-newline runs must collapse
    # before three-newline runs), so iterate in insertion order.
    for old, new in replacements.items():
        text = text.replace(old, new)
    return text
def merge_consecutive_lines(lines: str) -> str:
    """Unwrap hard-wrapped text: join runs of consecutive non-blank lines.

    Each non-blank line is appended (space-separated) to the previous output
    line until a blank line intervenes.  A blank line ends the current
    paragraph and is kept as a separator; runs of blank lines collapse into
    a single separator, and leading blank lines are dropped.

    Fixes two defects in the earlier version: the merge state is now reset
    on a blank line (so later blank runs actually collapse), and the first
    content line after a blank is kept instead of being lost.
    """
    merged = []
    # True at start of input or right after a blank line: the next content
    # line must begin a new output line instead of extending the last one.
    after_break = True
    for raw in lines.split("\n"):
        if raw.strip():
            if after_break:
                merged.append(raw)
            else:
                merged[-1] = f"{merged[-1]} {raw}"
            after_break = False
        elif not after_break:
            # First blank after content: emit a paragraph separator.  The
            # trailing "\n" mirrors the original output format; a later
            # normalize_puncts() pass collapses the excess newlines.
            merged.append(raw + "\n")
            after_break = True
    return "\n".join(merged)
def clean_newlines(lines: str) -> str:
    """Left-strip every line and collapse runs of blank lines into one.

    The very first line is always emitted (even when blank); after that, a
    blank line is kept only if the previously emitted line had content, so
    consecutive blank lines shrink to a single one.
    """
    emitted = []
    previous = None  # lstripped form of the last emitted line; None at start
    for raw in lines.split("\n"):
        stripped = raw.lstrip()
        # Emit when: first line, the line has content, or it is the first
        # blank after a content line.  Otherwise drop it (repeated blank).
        if previous is None or stripped or previous:
            emitted.append(stripped)
            previous = stripped
    return "\n".join(emitted)