use over 61 workers

Nathan
2025-12-23 17:01:00 -07:00
parent f1cf096069
commit fd5c8a9ab4
2 changed files with 60 additions and 1 deletion


@@ -7,12 +7,21 @@ Compresses all PNG files in subdirectories with maximum parallelism.
import os
import sys
import argparse
import platform
from pathlib import Path
from concurrent.futures import ProcessPoolExecutor, as_completed
from PIL import Image
import multiprocessing
import time
# Try to unlock ProcessPoolExecutor on Windows to bypass 61-worker limit
try:
    import unlock_processpool
    unlock_processpool.please()
    UNLOCKED = True
except ImportError:
    UNLOCKED = False


def compress_png(input_path, output_path, force_bitdepth=None):
    """Compress a single PNG file.
@@ -311,7 +320,17 @@ def main():
print(f"Corrupted files will be moved to: {corrupted_dir}")
# Use all available CPU cores
max_workers = multiprocessing.cpu_count()
cpu_count = multiprocessing.cpu_count()
if platform.system() == 'Windows' and not UNLOCKED:
# Windows ProcessPoolExecutor has a maximum of 61 workers (unless unlocked)
max_workers = min(cpu_count, 61)
if cpu_count > 61:
print(f"Detected {cpu_count} CPU threads, but Windows limits ProcessPoolExecutor to 61 workers.")
print("Install 'unlock-processpool-win' package to use all cores: pip install unlock-processpool-win")
else:
max_workers = cpu_count
if UNLOCKED:
print(f"Using unlock-processpool-win to bypass Windows 61-worker limit")
print(f"Using {max_workers} worker processes for compression...")
print("-" * 80)