#!/usr/bin/env python3
"""Maintain zipped render sequences for Git hooks.

Default mode scans `Renders/`, produces compressed archives (plus `.meta.json`
sidecars) under `Renders/_zipped/`, and reports any updated archives so that
commits only need to track the compact files (stage them with
`git add Renders/_zipped`). Switch to `--mode expand` to inflate the tracked
archives back into the ignored working directories after checkouts or pulls.
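
Typical hook wiring (illustrative; actual hook names depend on the repo setup):

    pre-commit:     python3 zip_sequences.py --mode zip
    post-checkout:  python3 zip_sequences.py --mode expand
    post-merge:     python3 zip_sequences.py --mode expand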
"""

from __future__ import annotations

import argparse
import json
import os
import shutil
import subprocess
import sys
import tempfile
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from pathlib import Path
from typing import Iterator, Sequence


RENDER_ROOT = Path("Renders")
ARCHIVE_ROOT = RENDER_ROOT / "_zipped"
SEQUENCE_EXTENSIONS = {
    ".png",
    ".jpg",
    ".jpeg",
    ".tif",
    ".tiff",
    ".exr",
}
STATE_SUFFIX = ".meta.json"
DEFAULT_CONFIG = {
    "zipper": "7z",
    "compression": 9,
    "dailyFormat": "daily_YYMMDD",
}
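
# Example `.config/config.json` overriding the defaults above (illustrative):
#
#     {
#         "zipper": "zip",
#         "compression": 6
#     }
#
# load_config() merges this over DEFAULT_CONFIG, and the legacy boolean form of
# "zipper" (true -> "7z", false -> "zip") is still accepted further below.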


def load_config() -> dict:
    # First try to load from project's .config folder (current working directory)
    # Then fall back to ProjectStructure repo config (next to zip_sequences.py)
    cwd = Path.cwd()
    project_config = cwd / ".config" / "config.json"
    repo_config = Path(__file__).resolve().with_name("config.json")

    config_paths = [
        ("project", project_config),
        ("repo", repo_config),
    ]

    for source, config_path in config_paths:
        try:
            if config_path.exists():
                text = config_path.read_text(encoding="utf-8")
                try:
                    data = json.loads(text)
                    if isinstance(data, dict):
                        merged = DEFAULT_CONFIG.copy()
                        merged.update(data)
                        return merged
                except json.JSONDecodeError:
                    continue
        except OSError:
            continue

    # If no config found, return defaults
    return DEFAULT_CONFIG.copy()


CONFIG = load_config()
zipper_val = CONFIG.get("zipper", "7z")
# Handle both old boolean format and new string format
if isinstance(zipper_val, bool):
    ZIPPER_TYPE = "7z" if zipper_val else "zip"
else:
    ZIPPER_TYPE = str(zipper_val).lower()

COMPRESSION_LEVEL = CONFIG.get("compression", 9)
if isinstance(COMPRESSION_LEVEL, str):
    try:
        COMPRESSION_LEVEL = int(COMPRESSION_LEVEL)
    except ValueError:
        COMPRESSION_LEVEL = 9
if not isinstance(COMPRESSION_LEVEL, int):
    COMPRESSION_LEVEL = 9
COMPRESSION_LEVEL = max(0, min(9, COMPRESSION_LEVEL))
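# The clamped 0-9 level drives both backends: 7z's -mx=N switch and zipfile's
# compresslevel for DEFLATE both use the same 0 (store/fastest) to 9 (max) scale.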

SEVEN_Z_EXE: str | None = None
if ZIPPER_TYPE == "7z":
    SEVEN_Z_EXE = shutil.which("7z") or shutil.which("7za")


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(description="Sync render sequences with zipped archives.")
    parser.add_argument(
        "--mode",
        choices=("zip", "expand"),
        default="zip",
        help="zip sequences for commit (default) or expand tracked archives",
    )
    parser.add_argument("--jobs", type=int, help="max parallel workers")
    parser.add_argument("--verbose", action="store_true", help="print extra progress details")
    return parser.parse_args()


def max_workers(requested: int | None) -> int:
    cpu = os.cpu_count() or 1
    limit = max(1, min(8, cpu))
    if requested and requested > 0:
        return min(requested, max(1, cpu))
    return limit


def log(mode: str, message: str, *, verbose_only: bool = False, verbose: bool = False) -> None:
    if verbose_only and not verbose:
        return
    print(f"[{mode}] {message}")


def is_archive_path(path: Path) -> bool:
    return any(part == "_archive" for part in path.parts)


def find_sequence_dirs(root: Path) -> Iterator[Path]:
    for dirpath, dirnames, filenames in os.walk(root):
        path = Path(dirpath)
        # Never descend into archived frames or the zipped archive output tree.
        dirnames[:] = [d for d in dirnames if d not in ("_archive", "_zipped")]
        if is_archive_path(path):
            continue
        has_frames = any(Path(dirpath, f).suffix.lower() in SEQUENCE_EXTENSIONS for f in filenames)
        if has_frames:
            yield path


def iter_sequence_files(seq_dir: Path) -> Iterator[Path]:
    for dirpath, dirnames, filenames in os.walk(seq_dir):
        path = Path(dirpath)
        # Same pruning as find_sequence_dirs, so archives never nest archives.
        dirnames[:] = [d for d in dirnames if d not in ("_archive", "_zipped")]
        if is_archive_path(path):
            continue
        for filename in filenames:
            yield path / filename


def compute_state(seq_dir: Path) -> dict:
    entries = []
    files = sorted(
        iter_sequence_files(seq_dir),
        key=lambda p: p.relative_to(seq_dir).as_posix(),
    )
    for file_path in files:
        stat = file_path.stat()
        entries.append(
            {
                "path": file_path.relative_to(seq_dir).as_posix(),
                "size": stat.st_size,
                "mtime_ns": stat.st_mtime_ns,
            }
        )
    return {"files": entries}


def current_state(seq_dir: Path) -> dict:
    if not seq_dir.exists() or not seq_dir.is_dir():
        return {"files": []}
    return compute_state(seq_dir)


def load_state(state_path: Path) -> dict | None:
    if not state_path.exists():
        return None
    try:
        return json.loads(state_path.read_text())
    except (OSError, json.JSONDecodeError):
        return None


def state_changed(seq_state: dict, stored_state: dict | None) -> bool:
    if stored_state is None:
        return True
    return seq_state != stored_state


def archive_path_for(seq_dir: Path) -> Path:
    rel = seq_dir.relative_to(RENDER_ROOT)
    suffix = ".7z" if ZIPPER_TYPE == "7z" else ".zip"
    return (ARCHIVE_ROOT / rel).with_suffix(suffix)
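
# Mapping example (illustrative): Renders/shotA/v001 <-> Renders/_zipped/shotA/v001.7z,
# with its sidecar at Renders/_zipped/shotA/v001.7z.meta.json (see state_path_for).
# Note: with_suffix() assumes sequence folder names contain no dots; a folder named
# "v001.final" would round-trip back to "v001".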


def sequence_dir_for(zip_path: Path) -> Path:
    rel = zip_path.relative_to(ARCHIVE_ROOT)
    return (RENDER_ROOT / rel).with_suffix("")


def state_path_for(zip_path: Path) -> Path:
    return zip_path.with_suffix(zip_path.suffix + STATE_SUFFIX)


def zip_sequence(seq_dir: Path, zip_path: Path) -> None:
    if ZIPPER_TYPE == "7z":
        if SEVEN_Z_EXE is None:
            raise RuntimeError(
                "7z compression requested but 7z executable not found in PATH. "
                "Please install 7z (e.g., via Chocolatey: choco install 7zip) "
                "or set zipper to 'zip' in config.json"
            )
        zip_path.parent.mkdir(parents=True, exist_ok=True)

        # If creating a .7z file, remove any existing .zip file for the same sequence
        if zip_path.suffix == ".7z":
            old_zip_path = zip_path.with_suffix(".zip")
            if old_zip_path.exists():
                old_zip_path.unlink(missing_ok=True)
                old_state_path = state_path_for(old_zip_path)
                if old_state_path.exists():
                    old_state_path.unlink(missing_ok=True)

        # Build list of files to archive with relative paths
        file_list = []
        for file_path in iter_sequence_files(seq_dir):
            rel_path = file_path.relative_to(seq_dir).as_posix()
            file_list.append(rel_path)

        if not file_list:
            raise RuntimeError(f"No files found to archive in {seq_dir}")

        # Create the archive in a temporary location first to avoid issues with
        # corrupted existing files
        temp_zip = None
        list_file_path = None
        try:
            # Create temporary archive file path (but don't create the file - let 7z create it)
            temp_zip_path = tempfile.mktemp(suffix=".7z", dir=zip_path.parent)
            temp_zip = Path(temp_zip_path)

            # Create list file with absolute path
            fd, temp_path = tempfile.mkstemp(suffix=".lst", text=True)
            list_file_path = Path(temp_path)
            with os.fdopen(fd, "w", encoding="utf-8") as list_file:
                for rel_path in file_list:
                    list_file.write(rel_path + "\n")
                list_file.flush()
                os.fsync(list_file.fileno())  # Ensure data is written to disk
            # File is closed here by the context manager; small delay to ensure
            # the OS releases the handle
            time.sleep(0.1)

            # Use absolute paths for both list file and temp zip
            list_file_abs = list_file_path.resolve()
            temp_zip_abs = temp_zip.resolve()
            # Create archive in temp location first (7z will create it fresh)
            cmd = [
                SEVEN_Z_EXE,
                "a",
                "-y",
                "-bb0",  # Suppress progress output
                f"-mx={COMPRESSION_LEVEL}",
                "-t7z",  # Use 7z format, not zip
                str(temp_zip_abs),
                f"@{list_file_abs}",
            ]
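            # Roughly: 7z a -y -bb0 -mx=<level> -t7z <abs-temp>.7z @<abs-list>.lst,
            # run with cwd=seq_dir so the listed relative paths resolve there.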
            result = subprocess.run(
                cmd,
                cwd=seq_dir,
                check=False,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True,
            )
            if result.returncode != 0:
                error_msg = result.stderr.strip() if result.stderr else "Unknown error"
                if result.stdout:
                    error_msg += f"\nstdout: {result.stdout.strip()}"
                raise RuntimeError(f"7z compression failed: {error_msg}")

            # Move temp zip to final location, replacing any existing file
            if zip_path.exists():
                zip_path.unlink()
            temp_zip.replace(zip_path)
            temp_zip = None  # Mark as moved so we don't delete it
        finally:
            # Clean up temp zip if it wasn't moved
            if temp_zip and temp_zip.exists():
                try:
                    temp_zip.unlink(missing_ok=True)
                except OSError:
                    pass
            # Clean up list file, with retry in case 7z still has it open
            if list_file_path and list_file_path.exists():
                for attempt in range(3):
                    try:
                        list_file_path.unlink(missing_ok=True)
                        break
                    except PermissionError:
                        if attempt < 2:
                            time.sleep(0.1)  # Wait 100ms before retry
                        else:
                            # Last attempt failed; give up - the OS will clean
                            # up the temp file eventually
                            pass
        return

    # Use zipfile (only if ZIPPER_TYPE == "zip")
    if ZIPPER_TYPE == "zip":
        from zipfile import ZIP_DEFLATED, ZIP_STORED, ZipFile

        zip_path.parent.mkdir(parents=True, exist_ok=True)
        if COMPRESSION_LEVEL <= 0:
            compression = ZIP_STORED
            zip_kwargs = {}
        else:
            compression = ZIP_DEFLATED
            zip_kwargs = {"compresslevel": COMPRESSION_LEVEL}

        with ZipFile(zip_path, "w", compression=compression, **zip_kwargs) as archive:
            for file_path in iter_sequence_files(seq_dir):
                archive.write(file_path, arcname=file_path.relative_to(seq_dir).as_posix())
        return

    # Unknown ZIPPER_TYPE - fail with clear error
    raise RuntimeError(
        f"Unsupported ZIPPER_TYPE: {ZIPPER_TYPE!r}. "
        f"Expected '7z' or 'zip'. "
        f"Config zipper value: {CONFIG.get('zipper', 'not set')!r}"
    )


def expand_sequence(zip_path: Path, seq_state: dict) -> None:
    target_dir = sequence_dir_for(zip_path)
    if target_dir.exists():
        shutil.rmtree(target_dir)
    target_dir.mkdir(parents=True, exist_ok=True)

    if ZIPPER_TYPE == "7z":
        if SEVEN_Z_EXE is None:
            raise RuntimeError(
                "7z extraction requested but 7z executable not found in PATH. "
                "Please install 7z or set zipper to 'zip' in config.json"
            )
        cmd = [
            SEVEN_Z_EXE,
            "x",
            "-y",
            str(zip_path),
            f"-o{target_dir}",
        ]
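        # 7z expects the output switch glued to its value (-o<dir>, no space).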
        result = subprocess.run(
            cmd,
            check=False,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
        )
        if result.returncode != 0:
            error_msg = result.stderr.strip() if result.stderr else "Unknown error"
            if result.stdout:
                error_msg += f"\nstdout: {result.stdout.strip()}"
            raise RuntimeError(f"7z extraction failed: {error_msg}")
    elif ZIPPER_TYPE == "zip":
        from zipfile import ZipFile

        with ZipFile(zip_path, "r") as archive:
            archive.extractall(target_dir)
    else:
        raise RuntimeError(
            f"Unsupported ZIPPER_TYPE: {ZIPPER_TYPE!r}. "
            f"Expected '7z' or 'zip'. "
            f"Config zipper value: {CONFIG.get('zipper', 'not set')!r}"
        )

    for entry in seq_state.get("files", []):
        file_path = target_dir / entry["path"]
        if file_path.exists():
            os.utime(file_path, ns=(entry["mtime_ns"], entry["mtime_ns"]))


def process_zip(seq_dir: Path, zip_path: Path, state_path: Path, seq_state: dict, *, verbose: bool) -> Sequence[Path]:
    log("zip", f"{seq_dir} -> {zip_path}", verbose_only=True, verbose=verbose)
    zip_sequence(seq_dir, zip_path)
    state_path.write_text(json.dumps(seq_state, indent=2))
    return (zip_path, state_path)


def process_expand(zip_path: Path, state: dict, *, verbose: bool) -> None:
    log("expand", f"{zip_path} -> {sequence_dir_for(zip_path)}", verbose_only=True, verbose=verbose)
    expand_sequence(zip_path, state)


def run_zip(worker_count: int, *, verbose: bool) -> int:
    work_items: list[tuple[Path, Path, Path, dict]] = []

    if RENDER_ROOT.exists():
        for seq_dir in find_sequence_dirs(RENDER_ROOT):
            seq_state = compute_state(seq_dir)
            if not seq_state["files"]:
                continue

            # Get the target archive path (will be .7z if ZIPPER_TYPE is "7z")
            zip_path = archive_path_for(seq_dir)
            state_path = state_path_for(zip_path)

            # Check if we need to upgrade from .zip to .7z
            old_zip_path = None
            if ZIPPER_TYPE == "7z":
                # Check if an old .zip file exists
                old_zip_path = zip_path.with_suffix(".zip")
                if old_zip_path.exists():
                    # Check if the old .zip's metadata matches current state
                    old_state_path = state_path_for(old_zip_path)
                    old_stored_state = load_state(old_state_path)
                    if not state_changed(seq_state, old_stored_state):
                        # Old .zip is up to date, skip conversion
                        continue
                    # Old .zip is out of date, will be replaced with .7z

            # Check if the target archive (e.g., .7z) already exists and is up to date
            stored_state = load_state(state_path)
            if not state_changed(seq_state, stored_state):
                # Target archive is up to date, but we might still need to clean up old .zip
                if old_zip_path and old_zip_path.exists():
                    # Old .zip exists but we have a newer .7z, remove the old one
                    old_zip_path.unlink(missing_ok=True)
                    old_state_path = state_path_for(old_zip_path)
                    if old_state_path.exists():
                        old_state_path.unlink(missing_ok=True)
                continue

            work_items.append((seq_dir, zip_path, state_path, seq_state))

    if not work_items:
        if not RENDER_ROOT.exists():
            log("zip", "Render root 'Renders' not found; nothing to zip.")
        else:
            log("zip", "Archives already up to date; no sequences needed zipping.")
        return 0

    updated_paths: list[Path] = []

    total = len(work_items)
    completed = 0

    with ThreadPoolExecutor(max_workers=worker_count) as executor:
        future_map = {
            executor.submit(process_zip, seq_dir, zip_path, state_path, seq_state, verbose=verbose): seq_dir
            for seq_dir, zip_path, state_path, seq_state in work_items
        }

        for future in as_completed(future_map):
            updated_paths.extend(future.result())
            completed += 1
            seq_dir = future_map[future]
            rel = seq_dir.relative_to(RENDER_ROOT)
            log("zip", f"{completed}/{total} {rel}")

    updated_count = len(updated_paths) // 2  # each item yields an archive plus its sidecar
    log("zip", f"Updated {updated_count} sequence archive(s).", verbose=verbose)
    if updated_paths:
        log(
            "zip",
            "Archives updated. Stage manually with `git add Renders/_zipped`, if desired.",
            verbose_only=True,
            verbose=verbose,
        )

    removed = cleanup_orphan_archives(verbose=verbose)
    if removed:
        log("zip", f"Removed {removed} orphan archive(s).", verbose=verbose)

    return updated_count


def run_expand(worker_count: int, *, verbose: bool) -> int:
    if not ARCHIVE_ROOT.exists():
        log("expand", "No archives to expand (missing 'Renders/_zipped').")
        return 0

    work_items: list[tuple[Path, dict]] = []

    # Look for both .zip and .7z archives
    archive_patterns = ["*.zip", "*.7z"]
    for pattern in archive_patterns:
        for zip_path in ARCHIVE_ROOT.rglob(pattern):
            state_path = state_path_for(zip_path)
            seq_state = load_state(state_path)
            if seq_state is None:
                log("expand", f"Skipping {zip_path} (missing metadata)")
                continue

            target_dir = sequence_dir_for(zip_path)
            if current_state(target_dir) == seq_state:
                continue

            work_items.append((zip_path, seq_state))

    if not work_items:
        log("expand", "Working folders already match archives; nothing to expand.")
        return 0

    total = len(work_items)
    completed = 0

    with ThreadPoolExecutor(max_workers=worker_count) as executor:
        future_map = {
            executor.submit(process_expand, zip_path, seq_state, verbose=verbose): zip_path
            for zip_path, seq_state in work_items
        }

        for future in as_completed(future_map):
            future.result()
            completed += 1
            zip_path = future_map[future]
            rel = zip_path.relative_to(ARCHIVE_ROOT)
            log("expand", f"{completed}/{total} {rel}")

    log("expand", f"Refreshed {len(work_items)} sequence folder(s).", verbose=verbose)
    return len(work_items)


def cleanup_orphan_archives(*, verbose: bool) -> int:
    if not ARCHIVE_ROOT.exists():
        return 0

    removed: list[Path] = []

    # Look for both .zip and .7z archives
    archive_patterns = ["*.zip", "*.7z"]
    for pattern in archive_patterns:
        for zip_path in ARCHIVE_ROOT.rglob(pattern):
            seq_dir = sequence_dir_for(zip_path)
            if seq_dir.exists():
                continue

            rel = zip_path.relative_to(ARCHIVE_ROOT)
            log("zip", f"Removing orphan archive {rel}", verbose_only=True, verbose=verbose)

            zip_path.unlink(missing_ok=True)
            state_path = state_path_for(zip_path)
            if state_path.exists():
                state_path.unlink()
            removed.append(zip_path)

    if not removed:
        return 0

    # Prune now-empty directories left behind, deepest first.
    for parent in sorted({p.parent for p in removed}, key=lambda p: len(p.parts), reverse=True):
        if not parent.exists():
            continue
        while parent != ARCHIVE_ROOT and not any(parent.iterdir()):
            parent.rmdir()
            parent = parent.parent

    return len(removed)


def main() -> int:
    args = parse_args()
    workers = max_workers(args.jobs)

    if args.mode == "expand":
        run_expand(workers, verbose=args.verbose)
        return 0

    # run_zip returns the non-negative count of updated archives, so reaching
    # this point always means success.
    run_zip(workers, verbose=args.verbose)
    return 0


if __name__ == "__main__":
    try:
        raise SystemExit(main())
    except Exception as exc:  # broad to surface unexpected errors cleanly
        print(f"Sequence sync failed: {exc}", file=sys.stderr)
        raise