"""
This script checks the resolution of all images in a specified directory and its subdirectories.
If the resolution of an image exceeds a certain limit, the path of the image is written to an output file.
The script uses multiprocessing to speed up the process.
"""
from pathlib import Path
import multiprocessing
import os
from PIL import Image


def check_image_resolution(filepath, output_file):
    """
    Checks the resolution of an image and appends its path to a file if the resolution exceeds 16,777,216 pixels.

    Parameters:
        filepath (Path): The path of the image file.
        output_file (str): The path of the output file where the paths of oversized images will be written.

    Returns:
        None
    """
    # Compare suffixes case-insensitively so files such as "photo.JPG" are not skipped.
    if filepath.suffix.lower() in [".jpg", ".jpeg", ".png"]:
        # Use a context manager so the image file handle is released promptly.
        with Image.open(filepath) as img:
            width, height = img.size
        resolution = width * height
        if resolution > 16777216:
            normalized_path = os.path.normpath(str(filepath))
            print(
                f"The image {normalized_path} has a resolution of {resolution} pixels, "
                "which is more than 16777216 pixels."
            )
            with open(output_file, "a", encoding="utf-8") as f:
                f.write(f"{normalized_path}\n")
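
# Note (environment assumption, not from the original script): Pillow's
# decompression-bomb guard emits a DecompressionBombWarning when an opened image
# exceeds Image.MAX_IMAGE_PIXELS (about 89.5 million pixels by default) and raises
# DecompressionBombError beyond twice that. If the scanned directories contain
# such images, the guard can be relaxed for trusted data, e.g.:
#     Image.MAX_IMAGE_PIXELS = None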


def process_directory(directory, output_file):
    """
    Processes all files in a directory and its subdirectories.

    Parameters:
        directory (str): The path of the directory to be processed.
        output_file (str): The path of the output file where the paths of oversized images will be written.

    Returns:
        None
    """
    for filepath in Path(directory).rglob("*"):
        # rglob also yields directories; skip anything that is not a regular file.
        if filepath.is_file():
            check_image_resolution(filepath, output_file)


def main(output_file):
    """
    Main function that creates a pool of worker processes and submits the process_directory call asynchronously.

    Parameters:
        output_file (str): The path of the output file where the paths of oversized images will be written.

    Returns:
        None
    """
    # Get the number of available CPU cores
    num_cores = multiprocessing.cpu_count()
    # Create a pool of worker processes
    pool = multiprocessing.Pool(num_cores)
    # Call the function with the path to your directory. Note that a single
    # apply_async call runs the entire scan inside one worker process, so only
    # one core does the work even though num_cores workers are available.
    # pool.apply_async(process_directory, args=(r"E:\training_dir", output_file))
    pool.apply_async(process_directory, args=(r"E:\training_dir_staging", output_file))
    # Close the pool and wait for all tasks to complete
    pool.close()
    pool.join()
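
# A minimal sketch (not part of the original script) of how the scan could be
# spread across all pool workers instead of running in a single apply_async
# call: list the files up front, fan them out with imap_unordered, and write
# the results from the parent process. The names _resolution_of and
# parallel_main are illustrative assumptions.
def _resolution_of(filepath):
    """Returns (normalized path, pixel count) for supported image types, else None."""
    if filepath.suffix.lower() not in (".jpg", ".jpeg", ".png"):
        return None
    with Image.open(filepath) as img:
        width, height = img.size
    return os.path.normpath(str(filepath)), width * height


def parallel_main(directory, output_file, limit=16777216):
    """Checks every image under `directory` in parallel and records oversized ones."""
    files = [p for p in Path(directory).rglob("*") if p.is_file()]
    with multiprocessing.Pool() as pool, open(output_file, "a", encoding="utf-8") as f:
        # imap_unordered yields results as soon as individual workers finish them.
        for result in pool.imap_unordered(_resolution_of, files):
            if result is not None and result[1] > limit:
                path, resolution = result
                print(f"The image {path} has a resolution of {resolution} pixels.")
                f.write(f"{path}\n")
# Example use: parallel_main(r"E:\training_dir_staging", "oversized.txt")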


if __name__ == "__main__":
    OUTPUT_FILE = "oversized.txt"
    main(OUTPUT_FILE)