Compare commits

...

1 Commits

Author SHA1 Message Date
Nathan
fd5c8a9ab4 use over 61 workers 2025-12-23 17:01:00 -07:00
2 changed files with 60 additions and 1 deletion

View File

@@ -0,0 +1,40 @@
<!-- Generated by SpecStory, Markdown v2.1.0 -->
<!-- cursor Session c6b9ac3c-667a-4529-8923-79fe43eb7838 (2025-12-23 23:54Z) -->
# Untitled (2025-12-23 23:54Z)
_**User (2025-12-23 23:54Z)**_
it didn't work on the threadripper. additional context: @.specstory/history/2025-12-11_18-21Z-compress-png-sequences-with-multithreading.md
Mode: Auto-detect bit depth (preserve 16-bit if present)
Input directory: input
Output directory: output
Scanning for image files...
Found 646 JPG/JPEG files - ignoring (skipping)
Found 6371 PNG files to process.
Corrupted files will be moved to: corrupted
Using 64 worker processes for compression...
--------------------------------------------------------------------------------
Traceback (most recent call last):
File "E:\SequenceCompressor\compress_pngs.py", line 427, in <module>
main()
~~~~^^
File "E:\SequenceCompressor\compress_pngs.py", line 328, in main
with ProcessPoolExecutor(max_workers=max_workers) as executor:
~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Python314\Lib\concurrent\futures\process.py", line 671, in __init__
raise ValueError(
f"max_workers must be <= {_MAX_WINDOWS_WORKERS}")
ValueError: max_workers must be <= 61
Press any key to continue . . .
---
_**Agent (model default, mode Agent)**_
---

View File

@@ -7,12 +7,21 @@ Compresses all PNG files in subdirectories with maximum parallelism.
import os
import sys
import argparse
import platform
from pathlib import Path
from concurrent.futures import ProcessPoolExecutor, as_completed
from PIL import Image
import multiprocessing
import time
# Try to unlock ProcessPoolExecutor on Windows to bypass 61-worker limit
try:
import unlock_processpool
unlock_processpool.please()
UNLOCKED = True
except ImportError:
UNLOCKED = False
def compress_png(input_path, output_path, force_bitdepth=None):
    """Compress a single PNG file.
@@ -311,7 +320,17 @@ def main():
print(f"Corrupted files will be moved to: {corrupted_dir}")
# Use all available CPU cores
cpu_count = multiprocessing.cpu_count()
if platform.system() == 'Windows' and not UNLOCKED:
# Windows ProcessPoolExecutor has a maximum of 61 workers (unless unlocked)
max_workers = min(cpu_count, 61)
if cpu_count > 61:
print(f"Detected {cpu_count} CPU threads, but Windows limits ProcessPoolExecutor to 61 workers.")
print("Install 'unlock-processpool-win' package to use all cores: pip install unlock-processpool-win")
else:
max_workers = cpu_count
if UNLOCKED:
print(f"Using unlock-processpool-win to bypass Windows 61-worker limit")
print(f"Using {max_workers} worker processes for compression...")
print("-" * 80)