Datasets:
File size: 1,081 Bytes
91e0ed2 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 |
import argparse
from collections import Counter
def remove_duplicates(file_path):
    """Remove duplicate lines from the file at *file_path*, in place.

    Keeps the first occurrence of each line (preserving the original
    order) and prints a report of the duplicated lines.  If no line
    appears more than once, the file is left untouched.

    Args:
        file_path: Path to a text file to deduplicate.
    """
    with open(file_path, 'r') as file:
        lines = file.readlines()
    line_counts = Counter(lines)
    duplicates = [line for line, count in line_counts.items() if count > 1]
    if not duplicates:
        print(f"No duplicates found in {file_path}.")
        return
    # dict.fromkeys keeps first-seen order (guaranteed since Python 3.7);
    # a plain set() would rewrite the file with the lines scrambled.
    unique_lines = dict.fromkeys(lines)
    with open(file_path, 'w') as file:
        file.writelines(unique_lines)
    # Report the number of lines actually removed, not just how many
    # distinct lines were duplicated.
    removed = len(lines) - len(unique_lines)
    print(f"{removed} duplicates removed from {file_path}.")
    print("Removed duplicates:")
    print("")
    for duplicate in duplicates:
        print(duplicate.strip())
def main():
    """Parse the command line and deduplicate the requested file.

    The file path may be given as a positional argument; when omitted,
    the user is prompted for it interactively.
    """
    arg_parser = argparse.ArgumentParser(description='Remove duplicate lines from a text file.')
    arg_parser.add_argument('file', nargs='?', help='Path to the text file to remove duplicates')
    parsed = arg_parser.parse_args()
    # Fall back to an interactive prompt when no path was supplied.
    target = parsed.file if parsed.file else input("Enter the path to the text file: ")
    remove_duplicates(target)
# Run the CLI entry point only when executed as a script, not on import.
if __name__ == "__main__":
    main()
|