Datasets:
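The script below walks an input directory, computes an MD5 checksum for every file it finds, and records the results in a CSV with filepath, filename, and md5 columns.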
import argparse
import os
import hashlib
import csv

from tqdm import tqdm


def md5_checksum(file_path):
    """Compute the MD5 hex digest of a file, reading it in 4 KiB chunks."""
    hash_md5 = hashlib.md5()
    with open(file_path, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hash_md5.update(chunk)
    return hash_md5.hexdigest()


def get_checksums(input_directory, output_filepath):
    """Walk input_directory and write one (filepath, filename, md5) CSV row per file."""
    with open(output_filepath, 'w', newline='') as csvfile:
        writer = csv.writer(csvfile)
        writer.writerow(["filepath", "filename", "md5"])
        for root, dirs, files in os.walk(input_directory):
            n_files = len(files)
            for name in tqdm(files, total=n_files, desc="MD5ing"):
                file_path = os.path.join(root, name)
                checksum = md5_checksum(file_path)
                writer.writerow([file_path, name, checksum])
    print(f"Checksums written to {output_filepath}")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Generate MD5 checksums for files in a directory")
    parser.add_argument("--input-directory", required=True, help="Directory to traverse for files")
    parser.add_argument("--output-filepath", required=True, help="Filepath for the output CSV file")
    args = parser.parse_args()
    get_checksums(args.input_directory, args.output_filepath)
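Assuming the script is saved as md5_checksums.py (the filename is not given above, so this is a placeholder), it can be run as python md5_checksums.py --input-directory ./data --output-filepath checksums.csv. The resulting CSV can later be used to spot files that have changed or been corrupted. A minimal verification sketch against the filepath,filename,md5 layout written by get_checksums might look like the following; verify_checksums and the paths are illustrative and not part of the original script.

import csv
import hashlib


def md5_checksum(file_path):
    # Same chunked MD5 routine as in the script above.
    hash_md5 = hashlib.md5()
    with open(file_path, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hash_md5.update(chunk)
    return hash_md5.hexdigest()


def verify_checksums(csv_path):
    # Re-hash every file listed in the CSV and collect paths whose digest no longer matches.
    mismatches = []
    with open(csv_path, newline="") as csvfile:
        for row in csv.DictReader(csvfile):
            if md5_checksum(row["filepath"]) != row["md5"]:
                mismatches.append(row["filepath"])
    return mismatches


if __name__ == "__main__":
    bad = verify_checksums("checksums.csv")  # placeholder path
    print("All files match" if not bad else f"Mismatched files: {bad}")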