import os
import hashlib

from tqdm import tqdm


def calculate_hash(file_path, chunk_size=1024):
    """Calculate the MD5 hash of a file, reading it in fixed-size chunks."""
    hash_md5 = hashlib.md5()
    with open(file_path, "rb") as f:
        # Read in chunks so large files never need to be held in memory at once.
        for chunk in iter(lambda: f.read(chunk_size), b""):
            hash_md5.update(chunk)
    return hash_md5.hexdigest()


def find_and_remove_duplicates(directory):
    """Find and remove duplicate files in the given directory."""
    if not os.path.isdir(directory):
        print(f"The directory {directory} does not exist.")
        return

    file_hashes = {}       # hash -> first file path seen with that hash
    files_to_remove = []   # files whose hash has already been seen

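    # Walk the tree once: remember the first path seen for each content hash
    # and mark every later file with an identical hash for removal.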
    for root, _, files in tqdm(os.walk(directory), desc="Scanning"):
        for file in files:
            file_path = os.path.join(root, file)
            file_hash = calculate_hash(file_path)
            if file_hash in file_hashes:
                files_to_remove.append(file_path)
            else:
                file_hashes[file_hash] = file_path

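    # Delete everything that was flagged as a duplicate of an earlier file.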
    for file_path in tqdm(files_to_remove, desc="Removing duplicates"):
        os.remove(file_path)

    print(f"Removed {len(files_to_remove)} duplicate files.")


if __name__ == "__main__":
    directory = input("Enter the directory to check for duplicates: ")
    find_and_remove_duplicates(directory)