Compare commits

..

33 Commits

Author SHA1 Message Date
aad97c9c7a config for zipseq run on renderscrap 2025-11-10 22:30:47 -07:00
c94d1eaa79 append zipseq batches 2025-11-10 19:22:23 -07:00
0754593671 reignore RnR 2025-11-10 19:21:53 -07:00
aa84a5fe48 deploy config 2025-11-10 19:20:37 -07:00
ffc6939279 deliver final startupscreen YAY 2025-11-08 02:31:29 -07:00
0984f12064 forgot to rename nearly a year ago looool 2025-11-08 02:30:45 -07:00
666ec70a85 finally denoise the huge sequence 2025-11-07 18:22:13 -07:00
d4cb142724 denoise shot2 and 3 2025-11-07 08:59:19 -07:00
dd59f5266a sheepit scripts for vast ai 2025-11-05 08:29:39 -07:00
4c8fb82536 vast.ai last night 2025-11-04 18:10:25 -07:00
1c9f11bd6d queue everything else 65k samps 2025-11-03 22:25:50 -07:00
b888183599 denoise sheepit results 2025-11-03 19:22:58 -07:00
ac74a036c7 test queue 4x the samps monkaX 2025-11-03 00:26:39 -07:00
a3f279688d queue exr test 2025-11-03 00:08:26 -07:00
4c1d193f1c queue shot1 dark more samps 2025-11-02 23:46:59 -07:00
f0f6c28c67 background 4k 0-900 export 2025-11-02 17:03:21 -07:00
7652eb2142 move background to deliverable 2025-11-02 16:59:40 -07:00
f596b0769d jk i already did that just copy it over 2025-11-02 16:41:39 -07:00
66bf6b87b9 background dark 4k 0-900 denoise 2025-11-02 16:16:25 -07:00
9bba131159 queue test startupscreen 16k samps 2025-11-02 16:16:02 -07:00
d37d118ee1 background loop: 1080p FINAL EXPORT 2025-11-02 15:57:01 -07:00
5f9bbeb002 blend denoise in 1080p 0-900 2025-11-02 11:36:13 -07:00
f8e1cb92aa fix final frame? 2025-11-02 11:24:31 -07:00
c64478f7e7 decrease to 1080p to re-RnR 2025-11-02 11:13:06 -07:00
f57fe4fa59 working on getting dark sequence rendered 2025-11-02 11:09:02 -07:00
a9dacc6189 remap paths and set compression to 0 2025-10-31 17:13:33 -06:00
80322cc470 just denoise render local for now 2025-10-30 02:21:40 -06:00
99fd5560c0 flamenco batpack retardation 2025-10-30 01:48:29 -06:00
9b63f2fff3 sorted denoising cheat 2025-10-30 01:03:40 -06:00
bd35ecc46f render from yesterday 2025-10-30 00:48:35 -06:00
1cfbbe6f13 prproj gzip to LFS 2025-10-30 00:36:03 -06:00
a9b7eabf0f attempt denoise/encode composite 2025-10-28 23:25:04 -06:00
f2ac1be1e0 background dark render! 2025-10-28 22:45:18 -06:00
88 changed files with 92683 additions and 16475 deletions

68
.config/GetStructDir.ps1 Normal file
View File

@@ -0,0 +1,68 @@
# Simple helper script to get structDir from project config.json
# Reads config.json from .config folder in project root
param(
    [string]$ProjectRoot
)

Set-StrictMode -Version Latest
$ErrorActionPreference = 'Stop'

if ([string]::IsNullOrWhiteSpace($ProjectRoot)) {
    # Try to determine project root from script location
    if ($PSScriptRoot) {
        $ProjectRoot = Split-Path -Parent $PSScriptRoot
    }
    elseif ($MyInvocation.MyCommand.Path) {
        $ProjectRoot = Split-Path -Parent (Split-Path -Parent $MyInvocation.MyCommand.Path)
    }
    else {
        Write-Error "Unable to determine project root. Please provide -ProjectRoot parameter."
        exit 1
    }
}

$configPath = Join-Path -Path $ProjectRoot -ChildPath '.config\config.json'
if (-not (Test-Path -LiteralPath $configPath -PathType Leaf)) {
    Write-Error "config.json not found at: $configPath"
    exit 1
}

try {
    $config = Get-Content -LiteralPath $configPath -Raw -ErrorAction Stop | ConvertFrom-Json
    if ($config.PSObject.Properties.Name -contains 'structDir') {
        $structDir = $config.structDir
        if ($null -ne $structDir -and ($structDir -isnot [string] -or $structDir.Trim().Length -gt 0)) {
            # If it's an absolute path, resolve it
            if ([System.IO.Path]::IsPathRooted($structDir)) {
                $resolved = Resolve-Path -LiteralPath $structDir -ErrorAction SilentlyContinue
                if ($null -ne $resolved) {
                    Write-Output $resolved.Path
                    exit 0
                }
                Write-Output $structDir
                exit 0
            }
            # Relative path - resolve relative to config location
            $candidate = Join-Path -Path (Split-Path -Parent $configPath) -ChildPath $structDir
            $resolvedCandidate = Resolve-Path -LiteralPath $candidate -ErrorAction SilentlyContinue
            if ($null -ne $resolvedCandidate) {
                Write-Output $resolvedCandidate.Path
                exit 0
            }
            Write-Output $candidate
            exit 0
        }
    }
    # Default: return the directory containing config.json (project root)
    Write-Output $ProjectRoot
    exit 0
}
catch {
    Write-Error "Failed to read or parse config.json: $($_.Exception.Message)"
    exit 1
}

8
.config/config.json Normal file
View File

@@ -0,0 +1,8 @@
{
"dailyFormat": "daily_YYMMDD",
"structDir": "D:\\0 ProjectStructure",
"zipper": "7z",
"compression": 9,
"Max7zInst": 0
}
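zip_sequences.py, which the Renders/*.bat wrappers call, is referenced later but not shown here; a hypothetical consumer of these keys might map them roughly as below. The interpretation of `Max7zInst` (0 = unlimited/auto) is an assumption.

```python
# Hedged sketch of how the config keys could be consumed; not the actual zip_sequences.py.
import json
from pathlib import Path

cfg = json.loads(Path(".config/config.json").read_text())

archiver = cfg.get("zipper", "7z")          # archiver executable, e.g. 7z
level = cfg.get("compression", 9)           # 7z -mx compression level (0-9)
max_instances = cfg.get("Max7zInst", 0)     # concurrent 7z processes; 0 assumed to mean "no limit"

# Example archive command built from the config (paths are placeholders)
cmd = [archiver, "a", f"-mx={level}", "Renders/_zipped/seq.7z", "Renders/seq/"]
print(cmd)
```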

12
.gitattributes vendored
View File

@@ -23,6 +23,7 @@
*.zip filter=lfs diff=lfs merge=lfs -text
*.fbx filter=lfs diff=lfs merge=lfs -text
*.fbxkey filter=lfs diff=lfs merge=lfs -text
*.blend2 filter=lfs diff=lfs merge=lfs -text
*.mb filter=lfs diff=lfs merge=lfs -text
*.ma filter=lfs diff=lfs merge=lfs -text
*.tiff filter=lfs diff=lfs merge=lfs -text
@@ -40,11 +41,16 @@
*.dae filter=lfs diff=lfs merge=lfs -text
*.mp3 filter=lfs diff=lfs merge=lfs -text
*.mp3 filter=lfs diff=lfs merge=lfs -text
*.wav filter=lfs diff=lfs merge=lfs -text
*.vdb filter=lfs diff=lfs merge=lfs -text
*.aep filter=lfs diff=lfs merge=lfs -text
*.prproj filter=gzipped
*.aegraphic filter=lfs diff=lfs merge=lfs -text
*.prproj filter=lfs diff=lfs merge=lfs -text
*.prin filter=lfs diff=lfs merge=lfs -text
*.prel filter=lfs diff=lfs merge=lfs -text
*.wav filter=lfs diff=lfs merge=lfs -text
*.docx filter=lfs diff=lfs merge=lfs -text
*.MOV filter=lfs diff=lfs merge=lfs -text
*.gif filter=lfs diff=lfs merge=lfs -text
*.sbs filter=lfs diff=lfs merge=lfs -text
*.sbsar filter=lfs diff=lfs merge=lfs -text
*.spp filter=lfs diff=lfs merge=lfs -text
*.spp filter=lfs diff=lfs merge=lfs -text

20
.gitignore vendored
View File

@@ -83,11 +83,6 @@ desktop.ini
# *.aac
# *.ogg
# Archive files
*.zip
*.rar
*.tar.gz
# Log files
*.log
@@ -118,12 +113,12 @@ temp/
tmp/
cache/
# image sequences
seq/
Renders/
png/
Pr/3_background/RenderRegions/
Pr/3_background/background/
Renders/**
!Renders/_zipped/
!Renders/_zipped/**
!Renders/**/
!Renders/**/*.bat
!Renders/**/*.log
# Premiere
*.prlock
@@ -131,4 +126,7 @@ Adobe Premiere Pro Video Previews/
Adobe Premiere Pro Audio Previews/
Adobe Premiere Pro (Beta) Video Previews/
Adobe Premiere Pro (Beta) Audio Previews/
Pr/RnR/
Pr/3_background/RnR/

132
.specstory/.gitignore vendored
View File

@@ -1,2 +1,130 @@
# SpecStory explanation file
/.what-is-this.md
# Blender Studio Git Management - .gitignore
# Blender auto-save files
*.blend1
*.blend2
*.blend3
*.blend4
*.blend5
*.blend6
*.blend7
*.blend8
*.blend9
*.blend10
*.blend11
*.blend12
*.blend13
*.blend14
*.blend15
*.blend16
*.blend17
*.blend18
*.blend19
*.blend20
*.blend21
*.blend22
*.blend23
*.blend24
*.blend25
*.blend26
*.blend27
*.blend28
*.blend29
*.blend30
*.blend31
*.blend32
# Blender cache and temporary files
*.swp
*.swo
*~
*.tmp
*.temp
# Blender internal cache files
*.bpy
*.bmesh
*.bvh
*.bobj
*.bphys
# OS generated files
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db
desktop.ini
# Editor files
.vscode/
.idea/
*.swp
*.swo
*~
# Large media files (uncomment if you want to exclude rendered outputs)
# *.mp4
# *.avi
# *.mov
# *.png
# *.jpg
# *.jpeg
# *.exr
# *.hdr
# *.tiff
# *.tga
# Audio files (uncomment if excluding)
# *.wav
# *.mp3
# *.aac
# *.ogg
# Log files
*.log
# Python cache
__pycache__/
*.pyc
*.pyo
*.pyd
.Python
# Node modules (if using any web tools)
node_modules/
# Environment files
.env
.env.local
.env.development.local
.env.test.local
.env.production.local
# Backup files
*.bak
*.backup
*.old
# Temporary files
temp/
tmp/
cache/
Renders/**
!Renders/_zipped/
!Renders/_zipped/**
!Renders/**/
!Renders/**/*.bat
!Renders/**/*.log
# Premiere
*.prlock
Adobe Premiere Pro Video Previews/
Adobe Premiere Pro Audio Previews/
Adobe Premiere Pro (Beta) Video Previews/
Adobe Premiere Pro (Beta) Audio Previews/
Pr/RnR/

8
.specstory/.project.json Normal file
View File

@@ -0,0 +1,8 @@
{
"workspace_id": "1b0d-a83a-3ac5-e88c",
"workspace_id_at": "2025-11-01T15:50:13.343Z",
"project_name": "2025-02-BondingArtifact",
"cloud_sync": false,
"git_id": "5b73-3baf-2eb5-36fa",
"git_id_at": "2025-11-01T15:50:13.388Z"
}

View File

@@ -0,0 +1,69 @@
# SpecStory Artifacts Directory
This directory is automatically created and maintained by the SpecStory extension to preserve your AI chat history.
## What's Here?
- `.specstory/history`: Contains auto-saved markdown files of your AI coding sessions
- Each file represents a separate AI chat session
- If you enable auto-save, files are automatically updated as you work
- You can enable/disable the auto-save feature in the SpecStory settings, it is disabled by default
- `.specstory/.project.json`: Contains the persistent project identity for the current workspace
- This file is only present if you enable AI rules derivation
- This is used to provide consistent project identity of your project, even as the workspace is moved or renamed
- `.specstory/ai_rules_backups`: Contains backups of the `.cursor/rules/derived-cursor-rules.mdc` or the `.github/copilot-instructions.md` file
- Backups are automatically created each time the `.cursor/rules/derived-cursor-rules.mdc` or the `.github/copilot-instructions.md` file is updated
- You can enable/disable the AI Rules derivation feature in the SpecStory settings, it is disabled by default
- `.specstory/.gitignore`: Contains directives to exclude non-essential contents of the `.specstory` directory from version control
- Add `/history` to exclude the auto-saved chat history from version control
## Valuable Uses
- Capture: Keep your context window up-to-date when starting new Chat/Composer sessions via @ references
- Search: For previous prompts and code snippets
- Learn: Meta-analyze your patterns and learn from your past experiences
- Derive: Keep the AI on course with your past decisions by automatically deriving rules from your AI interactions
## Version Control
We recommend keeping this directory under version control to maintain a history of your AI interactions. However, if you prefer not to version these files, you can exclude them by adding this to your `.gitignore`:
```
.specstory/**
```
We recommend __not__ keeping the `.specstory/ai_rules_backups` directory under version control if you are already using git to version your AI rules, and committing regularly. You can exclude it by adding this to your `.gitignore`:
```
.specstory/ai_rules_backups
```
## Searching Your Codebase
When searching your codebase, search results may include your previous AI coding interactions. To focus solely on your actual code files, you can exclude the AI interaction history from search results.
To exclude AI interaction history:
1. Open the "Find in Files" search in Cursor or VSCode (Cmd/Ctrl + Shift + F)
2. Navigate to the "files to exclude" section
3. Add the following pattern:
```
.specstory/*
```
This will ensure your searches only return results from your working codebase files.
## Notes
- Auto-save only works when Cursor or VSCode flushes sqlite database data to disk. This results in a small delay after the AI response is complete before SpecStory can save the history.
## Settings
You can control auto-saving behavior in Cursor or VSCode:
1. Open Cursor/Code → Settings → VS Code Settings (Cmd/Ctrl + ,)
2. Search for "SpecStory"
3. Find "Auto Save" setting to enable/disable
Auto-save occurs when changes are detected in the sqlite database, or every 2 minutes as a safety net.

View File

@@ -0,0 +1,244 @@
<!-- Generated by SpecStory -->
<!-- cursor Session c94f4a58-8269-4d9c-960c-470f5a0bd4c7 (2025-10-09 18:12Z) -->
# Encode large image sequence with ffmpeg (2025-10-09 18:12Z)
_**User (2025-10-29 04:20Z)**_
losslessly encode the freakishly large image sequence in D:\2025-02 BondingArtifact\Renders\3_background\2025-10-26\3_background_dark to E:\3_background_dark using ffmpeg, nvidia nvenc with CQ=0. 60fps.
---
_**Assistant (default)**_
---
Checking the source image sequence to identify the format and frame range.
---
Tool use: **list_dir**
<details>
<summary>Listed directory undefined • **0** results</summary>
No results found
</details>
---
---
Tool use: **run_terminal_cmd**
```bash
dir "D:\2025-02 BondingArtifact\Renders\3_background\2025-10-26\3_background_dark" | Select-Object -First 5
```
```
Directory: D:\2025-02 BondingArtifact\Renders\3_background\2025-10-26\3_background_dark
Mode LastWriteTime Length Name
---- ------------- ------ ----
-a--- 10/26/2025 2:33 PM 49803735 3_background_dark_0000.png
-a--- 10/26/2025 12:17 PM 49803388 3_background_dark_0001.png
-a--- 10/26/2025 1:19 PM 49799657 3_background_dark_0002.png
-a--- 10/26/2025 9:56 AM 49800635 3_background_dark_0003.png
-a--- 10/26/2025 1:39 PM 49806719 3_background_dark_0004.png
```
---
---
Tool use: **run_terminal_cmd**
```bash
dir "D:\2025-02 BondingArtifact\Renders\3_background\2025-10-26\3_background_dark" | Select-Object -Last 5
```
```
Directory: D:\2025-02 BondingArtifact\Renders\3_background\2025-10-26\3_background_dark
Mode LastWriteTime Length Name
---- ------------- ------ ----
----- 10/27/2025 12:37 AM 49805765 3_background_dark_9995.png
----- 10/26/2025 11:21 PM 49805316 3_background_dark_9996.png
----- 10/26/2025 9:46 PM 49808654 3_background_dark_9997.png
----- 10/27/2025 12:13 AM 49806734 3_background_dark_9998.png
----- 10/26/2025 11:34 PM 49805629 3_background_dark_9999.png
```
---
---
Image sequence goes from `0000` to `9999` (10,000 frames); each frame is ~50MB.
---
Tool use: **create_plan**
---
_**User (2025-10-29 04:20Z)**_
No. There are 14,401 frames. I want nvenc hevc in an mp4 container. Is the preset slow, or is it 7? I forget what the exact arguments are.
---
_**Assistant (default)**_
---
Verifying the exact frame range and NVENC preset syntax. Updating the plan.
---
Tool use: **run_terminal_cmd**
```bash
Get-ChildItem "D:\2025-02 BondingArtifact\Renders\3_background\2025-10-26\3_background_dark\*.png" | Measure-Object | Select-Object -ExpandProperty Count
```
```
14401
```
---
---
Tool use: **run_terminal_cmd**
```bash
$frames = Get-ChildItem "D:\2025-02 BondingArtifact\Renders\3_background\2025-10-26\3_background_dark\*.png"; Write-Host "First: $($frames[0].BaseName)"; Write-Host "Last: $($frames[-1].BaseName)"
```
```
First: 3_background_dark_0000
Last: 3_background_dark_9999
```
---
---
Tool use: **run_terminal_cmd**
```bash
$frames = Get-ChildItem "D:\2025-02 BondingArtifact\Renders\3_background\2025-10-26\3_background_dark\*.png" | Sort-Object Name; Write-Host "Start: $($frames[0].Name)"; Write-Host "End: $($frames[-1].Name)"; Write-Host "Total count: $($frames.Count)"
```
```
Start: 3_background_dark_0000.png
End: 3_background_dark_9999.png
Total count: 14401
```
---
_**User (2025-10-29 04:20Z)**_
you might as well do all 14401 frames.
---
_**Assistant (default)**_
---
Checking ffmpeg NVENC HEVC preset syntax. Updating the plan to use the full 14,401 frames in an MP4 container.
---
Received invalid string to replace in plan
---
---
Tool use: **create_plan**
---
_**User (2025-10-29 04:20Z)**_
No, process all 14401 frames.
---
_**Assistant (default)**_
---
Command uses `%04d.png`, which only captures frames 0000–9999, missing the rest. Checking for five-digit frame numbers.
---
Tool use: **run_terminal_cmd**
```bash
Get-ChildItem "D:\2025-02 BondingArtifact\Renders\3_background\2025-10-26\3_background_dark\*.png" | Where-Object { $_.Name -match '_\d{5}\.' } | Select-Object Name -First 10
```
```
```
---
---
Tool use: **run_terminal_cmd**
```bash
$frames = Get-ChildItem "D:\2025-02 BondingArtifact\Renders\3_background\2025-10-26\3_background_dark\*.png" | Sort-Object Name; Write-Host "Unique count: $($frames | Select-Object -ExpandProperty BaseName -Unique | Measure-Object | Select-Object -ExpandProperty Count)"
```
---
---
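For reference, a command along these lines would match what the session was converging on: hevc_nvenc with the p7 preset and lossless tuning at 60 fps in an MP4 container. This is a sketch only; the pixel format, the `%04d` pattern (which, as noted above, only covers 0000–9999), and the exact paths need adjusting.

```bash
# Sketch only. For NVENC, "-cq 0" means "auto", so explicit lossless tuning is used instead.
# yuv444p keeps full chroma if the GPU supports 4:4:4 HEVC; drop it to fall back to 4:2:0.
ffmpeg -framerate 60 -start_number 0 \
  -i "D:/2025-02 BondingArtifact/Renders/3_background/2025-10-26/3_background_dark/3_background_dark_%04d.png" \
  -c:v hevc_nvenc -preset p7 -tune lossless -pix_fmt yuv444p \
  "E:/3_background_dark.mp4"
```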

File diff suppressed because it is too large

View File

@@ -0,0 +1,466 @@
<!-- Generated by SpecStory, Markdown v2.0.0 -->
<!-- cursor Session 4aeb7f9e-851f-4b2a-ae52-01c07b68e74f (2025-11-01 15:48Z) -->
# PNG rendering error troubleshooting (2025-11-01 15:48Z)
_**User (2025-11-01 15:48Z)**_
why is this happening to render
libpng error: IDAT: CRC error
ImageInput::read_image() failed: PNG read error: IDAT: CRC error
PNG library error
IMB_load_image_from_memory: unknown file-format (R:\Creative\artsy\maya\2025-02 BondingArtifact\Renders\3_background\2025-10-26\3_background_dark\3_background_dark_5489.png)
---
_**Agent (model default, mode Agent)**_
---
Reviewing the open file to understand the render configuration and identify why PNGs are being corrupted.
---
<tool-use data-tool-type="read" data-tool-name="read_file">
<details><summary>Tool use: **read_file** • Read file: c:\Program Files\Blender Foundation\Flamenco 3.7\scripts\TalkingHeads cycles_optix_gpu_NOBAT.js</summary>
</details>
</tool-use>
---
<tool-use data-tool-type="search" data-tool-name="codebase_search">
<details>
<summary>Tool use: **codebase_search** • Searched codebase "How are PNG images saved or written during rendering?" • **3** results</summary>
| File | Lines |
|------|-------|
| `.gitignore` | L134 |
| `.gitattributes` | L47 |
| `misc/checklist.txt` | L52 |
</details>
</tool-use>
---
---
Checking the render configuration and the corrupt file path for clues.
---
<tool-use data-tool-type="search" data-tool-name="codebase_search">
<details>
<summary>Tool use: **codebase_search** • Searched codebase "Where are render output files written or saved?" • **10** results</summary>
| File | Lines |
|------|-------|
| `.gitignore` | L134 |
| `assets/Objects/gems/tx/BlackOnyx/Designer/BlackOnyx.sbs` | L3573-3642 |
| `misc/checklist.txt` | L52 |
| `misc/metadata.bat` | L23 |
| `misc/filenames.bat` | L1 |
| `.cursorindexingignore` | L3 |
| `.gitattributes` | L47 |
| `assets/Objects/gems/tx/BlackOnyx/Designer/BlackOnyx.sbs` | L1475-1533 |
| `misc/BRANDING.txt` | L2 |
| `assets/Objects/gems/tx/BlackOnyx/Designer/BlackOnyx.sbs` | L1976-2034 |
</details>
</tool-use>
---
<tool-use data-tool-type="grep" data-tool-name="grep">
<details>
<summary>Tool use: **grep** • Grep for "3_background" • 0 matches</summary>
Output mode: content
_No matches found_
</details>
</tool-use>
---
---
This PNG CRC error indicates the file was corrupted during render. The path `R:\...` suggests a network drive, which is a common source of this kind of corruption.
Common causes:
- Network storage issues: timeouts, interruptions, or bad connections corrupt writes
- Concurrent access: another process (comp, preview, antivirus) reading while a worker writes
- Incomplete writes: file read before the write finishes
- Antivirus/backup: scanning or copying files while they're being written
Checking the render script for timing or synchronization issues:
---
<tool-use data-tool-type="read" data-tool-name="read_file">
<details><summary>Tool use: **read_file** • Read file: c:\Program Files\Blender Foundation\Flamenco 3.7\scripts\TalkingHeads cycles_optix_gpu_NOBAT.js</summary>
</details>
</tool-use>
---
---
## Solutions to fix this:
### **Immediate fixes:**
1. **Check if the file is actually corrupted**
- Verify frame `5489` on the R: drive (a scan sketch for the whole sequence follows this answer)
- If corrupted, re-render just that frame
2. **Render to local storage first, then copy**
- Change the render output to a local drive (C: or D:)
- After rendering completes, copy to the network drive
- Reduces corruption risk from network interruptions
3. **Increase chunk_size**
- In your Flamenco job settings, increase `chunk_size` (currently defaults to 1)
- Fewer concurrent writes = less contention
4. **Temporarily disable antivirus/backup scanning**
- Exclude the render output folder from real-time scanning
- Network drives are often scanned aggressively
### **Network storage specific:**
5. **Check network drive health**
- Test the R: drive connection stability
- Run `chkdsk R: /f` if it's accessible
- Check for network timeouts or packet loss
6. **Add file write delay/sync**
- If possible, configure Blender to flush writes completely before finishing
- Use `--python-expr` to add a small delay after rendering completes
### **For the current corrupted frame:**
The file `3_background_dark_5489.png` is corrupted. Either:
- Re-render frame 5489 specifically, or
- Skip it and continue (if it's not critical)
Should I help modify the Flamenco script to render to a local directory first, or focus on diagnosing the network storage issue?
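A minimal scan along these lines, not part of the original session, would list every frame in the sequence that no longer decodes (IDAT/CRC damage included). Pillow is an assumed dependency; the directory is the network copy from the error above.

```python
# Hedged sketch: flag PNG frames that fail to decode fully.
from pathlib import Path
from PIL import Image  # pip install Pillow

seq_dir = Path(r"R:\Creative\artsy\maya\2025-02 BondingArtifact\Renders\3_background\2025-10-26\3_background_dark")
for frame in sorted(seq_dir.glob("*.png")):
    try:
        with Image.open(frame) as im:
            im.load()  # full decode; raises on CRC/IDAT damage
    except Exception as exc:
        print(f"corrupt: {frame.name} ({exc})")
```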
---
_**User (2025-11-02 23:49Z)**_
How do I ensure my image can be denoised without rendering in future? Can saving as EXR fix it?
---
_**Agent (model default, mode Agent)**_
---
Checking how denoising and output formats are configured. Reviewing the render script for format/pass options and searching the codebase for denoising-related settings.
---
<tool-use data-tool-type="search" data-tool-name="codebase_search">
<details>
<summary>Tool use: **codebase_search** • Searched codebase "How are denoising passes or denoising data configured for rendering?" • **7** results</summary>
| File | Lines |
|------|-------|
| `misc/checklist.txt` | L52 |
| `assets/Objects/gems/tx/BlackOnyx/Designer/BlackOnyx.sbs` | L3573-3642 |
| `.gitignore` | L134 |
| `assets/Objects/gems/tx/BlackOnyx/Designer/BlackOnyx.sbs` | L1431-1479 |
| `assets/Objects/gems/tx/BlackOnyx/Designer/BlackOnyx.sbs` | L73 |
| `.gitattributes` | L47 |
| `assets/Objects/gems/tx/BlackOnyx/Designer/BlackOnyx.sbs` | L1858-1925 |
</details>
</tool-use>
---
<tool-use data-tool-type="grep" data-tool-name="grep">
<details>
<summary>Tool use: **grep** • Grep for "denois" • 0 matches</summary>
Output mode: content
_No matches found_
</details>
</tool-use>
---
<tool-use data-tool-type="grep" data-tool-name="grep">
<details>
<summary>Tool use: **grep** • Grep for "EXR|exr|OpenEXR" • 0 matches</summary>
Output mode: content
_No matches found_
</details>
</tool-use>
---
---
EXR can enable post-render denoising, but you need to enable the denoising passes when rendering.
- EXR stores multiple passes/layers in one file (denoising data, beauty, etc.)
- EXR stores 32-bit float data (vs 8-bit PNG), preserving detail
- PNG cannot store denoising passes, so denoising later isn't possible
Updating the render script to automatically enable denoising passes when EXR is selected:
---
<tool-use data-tool-type="read" data-tool-name="read_file">
<details><summary>Tool use: **read_file** • Read file: c:\Program Files\Blender Foundation\Flamenco 3.7\scripts\TalkingHeads cycles_optix_gpu_NOBAT.js</summary>
</details>
</tool-use>
---
---
Adding code to automatically enable denoising passes when rendering to EXR:
---
<tool-use data-tool-type="read" data-tool-name="read_file">
<details><summary>Tool use: **read_file** • Read file: c:\Program Files\Blender Foundation\Flamenco 3.7\scripts\TalkingHeads cycles_optix_gpu_NOBAT.js</summary>
</details>
</tool-use>
---
<tool-use data-tool-type="write" data-tool-name="search_replace">
<details><summary>Tool use: **code_edit** • Edit file: c:\Program Files\Blender Foundation\Flamenco 3.7\scripts\TalkingHeads cycles_optix_gpu_NOBAT.js</summary>
</details>
</tool-use>
---
_**User (2025-11-02 23:49Z)**_
Don't edit anything. just tell me what I should set my render settings to.
---
_**Agent (model default, mode Agent)**_
---
Here's what to set in Blender so you can denoise later (a bpy sketch of these settings follows this answer):
## Render settings
### 1. Output format
- Output Properties tab → Output section
- Set File Format to OpenEXR or OpenEXR Multilayer
- OpenEXR Multilayer is better if you need multiple passes later
### 2. Enable denoising passes
- Output Properties tab → View Layer Properties → Passes section
- Enable:
- Denoising Normal
- Denoising Albedo
- Denoising Shadow (optional, helps quality)
### 3. Cycles denoising (during render)
- Render Properties tab → Denoising section
- Denoiser: OpenImageDenoise (or OptiX if on NVIDIA)
- Enable Denoising
- If using Passes → Data:
- Enable Normal
- Enable Albedo (optional if not using denoising passes)
### 4. Render passes to save
- View Layer Properties → Passes → Data:
- Enable Normal
- Enable Albedo (if you want them as separate passes)
- With OpenEXR Multilayer, passes are stored as layers in the file
## Why this works
- EXR stores all passes in one file
- 32-bit float preserves detail
- Denoising Normal/Albedo passes let you re-denoise later
- You can denoise post-render in Compositor or with external tools
## After rendering
You can re-denoise later:
- In Blender Compositor: load the EXR, use Denoise node with the passes
- Or keep render denoising off during render and denoise entirely in post
Note: PNG cannot store denoising passes, so you can't re-denoise PNG renders later. EXR is required for this workflow.
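A minimal bpy sketch of the settings above, assuming a recent Blender/Cycles build (property names can differ slightly between versions):

```python
# Sketch only; run from Blender's Python console or via --python-expr.
import bpy

scene = bpy.context.scene
view_layer = bpy.context.view_layer

# 1. Output format: multilayer EXR, 32-bit float
scene.render.image_settings.file_format = 'OPEN_EXR_MULTILAYER'
scene.render.image_settings.color_depth = '32'

# 2. Store denoising data passes (Denoising Normal/Albedo) for post denoising
view_layer.cycles.denoising_store_passes = True

# 3. Cycles denoiser selection
scene.cycles.use_denoising = True
scene.cycles.denoiser = 'OPENIMAGEDENOISE'  # or 'OPTIX' on NVIDIA
```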
---
_**User (2025-11-02 23:49Z)**_
So, when I send to sheepit, denoising will be disabled if I use the tiling option (which splits the frame into 2x2, 3x3, 4x4, or 6x6 so multiple workers can render one frame simultaneously, then they are combined) but I need to denoise it because the results are so noisy.
---
_**Agent (model default, mode Agent)**_
---
For SheepIt with tiling, you can't denoise during rendering (denoising needs the full frame), but you can denoise the combined EXR frames afterward if you save the required passes.
## Render settings for SheepIt + tiling
### 1. Output format
- File Format: OpenEXR Multilayer (or OpenEXR)
- Required so denoising passes are saved per tile and combined
### 2. View Layer passes (critical)
- View Layer Properties → Passes → Data:
- Enable Denoising Normal
- Enable Denoising Albedo
- Enable Denoising Shadow (recommended)
- View Layer Properties → Passes → Combined:
- Enable Denoising Data (if available in your Blender version)
### 3. Cycles render settings
- Render Properties → Denoising:
- Denoiser: OpenImageDenoise (or OptiX if available)
- Keep Denoising enabled in settings
- Note: With tiling, denoising won't run during render, but passes will still be saved.
### 4. After SheepIt combines tiles
You'll have EXR files with:
- Combined beauty pass (noisy)
- Denoising Normal pass
- Denoising Albedo pass
- Denoising Shadow pass (if enabled)
## Denoise the combined frames
### Option A: Blender Compositor
1. Open a combined EXR in Image Editor or Compositor
2. Use Denoise node:
- Connect Combined (beauty)
- Connect Denoising Normal
- Connect Denoising Albedo
- Optionally connect Denoising Shadow
3. Export the denoised result (see the node-setup sketch after this answer)
### Option B: External tools
- OIDN (OpenImageDenoise CLI)
- GIMP (with EXR support)
- Natron or other compositing software
## Summary
- Render EXR with Denoising Normal, Denoising Albedo (and Shadow if available)
- Tiles will combine these passes into full-frame EXRs
- Denoise the combined EXRs in post using those passes
This way you get fast tiled rendering on SheepIt and clean denoised results afterward.
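A minimal compositor sketch of Option A, assuming the denoising passes were saved at render time. The file path is a placeholder, and the pass/socket names depend on what the combined EXR actually contains, so adjust both.

```python
# Hedged sketch: wire a Denoise node for one combined multilayer EXR.
import bpy

scene = bpy.context.scene
scene.use_nodes = True
nodes = scene.node_tree.nodes
links = scene.node_tree.links
nodes.clear()

img = nodes.new('CompositorNodeImage')
img.image = bpy.data.images.load(r"E:\combined\frame_0001.exr")  # hypothetical path

denoise = nodes.new('CompositorNodeDenoise')
out = nodes.new('CompositorNodeComposite')

# Beauty plus the denoising data passes stored in the EXR
links.new(img.outputs['Image'], denoise.inputs['Image'])
links.new(img.outputs['Denoising Normal'], denoise.inputs['Normal'])
links.new(img.outputs['Denoising Albedo'], denoise.inputs['Albedo'])
links.new(denoise.outputs['Image'], out.inputs['Image'])
```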
---
_**User (2025-11-03 07:09Z)**_
Ach, looks like I can't render splits. this makes splits kind of useless for me... so I can't send this blend with such high samples because it's so long that each machine doesn't get it, but if I split it, I'll just have to denoise it after anyway... is there a better solution for denoising than the compositing solution I devised?
---
_**Agent (model default, mode Agent)**_
---
Given SheepIt's EXR restriction (no splits), here are practical alternatives:
## Option 1: Render with splits in PNG + enable render-time denoising
- Render to PNG (splits allowed)
- In Cycles: enable Denoising (OpenImageDenoise or OptiX)
- Denoising should run per tile during render
- Result: already denoised when tiles combine
Note: Verify that SheepIt/Blender doesn't disable denoising with splits. Test a single split frame first.
## Option 2: Lower samples + better denoising (recommended)
Modern denoisers work well with fewer samples:
- Reduce samples (e.g., 128–256 instead of 1000+)
- Keep denoising enabled during render
- Render to PNG with splits enabled
- Benefits: faster renders, workers complete tasks, denoised output
## Option 3: AI denoisers that don't need passes
If render-time denoising doesn't work with splits, use standalone denoisers:
- Neat Video (plugin for Premiere/AE/Resolve)
- Topaz DeNoise AI
- Adobe Lightroom/Photoshop AI Denoise
Trade-off: No denoising passes needed, but single-pass input only.
## Option 4: Hybrid two-pass workflow
If you need high samples:
1. Pass 1: Render beauty only with splits (PNG), high samples
2. Pass 2: Render denoising passes (Normal/Albedo) full-frame, low samples, EXR
3. Composite: Combine passes in Blender Compositor and denoise
Downside: Two renders, more work.
## Recommendation
Try Option 1 first: PNG + splits + render-time denoising enabled. If that doesn't work or quality isn't enough, use Option 2 (lower samples + denoising). The quality difference with modern denoisers is often minimal, and render time drops significantly.
If you can share sample count and resolution, I can suggest specific settings.
---

View File

@@ -0,0 +1,43 @@
<!-- Generated by SpecStory, Markdown v2.0.0 -->
<!-- cursor Session b83f8a7d-e74a-40ee-90fd-e66cfc30956e (2025-11-02 20:29Z) -->
# Seamless video looping in Premiere (2025-11-02 20:29Z)
_**User (2025-11-02 20:29Z)**_
I'm trying to seamlessly loop a video in premiere. I have a morph cut that works really well, but I can't get it to work so that the final frame looks identical to the first frame. I can get it to loop on itself infinitely with morph cuts, but I want to get that in the video file itself so it loops properly in OBS.
---
_**Agent (model default, mode Agent)**_
---
To export a seamless loop from Premiere so OBS plays it back seamlessly:
1. Ensure the loop point frames match:
- Set an In/Out range around the morph cut.
- Trim to a frame boundary at both ends.
- Optionally add a 1-frame freeze (copy of first frame) at the end if needed.
2. Export settings:
- Set the In/Out range to include the full loop.
- Use an export format that preserves frames and avoids frame drops (e.g., H.264 with high quality, or ProRes if possible).
- Disable frame blending or other post-processing that could alter frames.
3. Verify frame match:
- In the Timeline, place the playhead at the start and end frames.
- Check in Program Monitor (step frame-by-frame) that the last frame matches the first frame (ffmpeg can also pull both frames for comparison; see the sketch after this answer)
If the morph cut in the preview looks seamless but the export doesn't:
- The transition may be interpolated: duplicate the first frame at the end, then export.
- Try a different export codec/settings that better preserves frames.
Should I help you verify the codec/settings or automate any trimming/freeze frame steps?
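A quick way to check the exported file itself, not part of the original session, is to pull the first and last frames with ffmpeg and compare them; `loop.mp4` stands in for the actual export name.

```bash
# First frame
ffmpeg -i loop.mp4 -frames:v 1 first.png
# Last frame: decode the final ~0.5 s and keep overwriting one image, ending on the last frame
ffmpeg -sseof -0.5 -i loop.mp4 -update 1 last.png
# first.png and last.png should be (near) identical for a seamless loop;
# compare them visually or with an image-diff tool.
```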
---

File diff suppressed because it is too large

BIN
Deliverable/3_background_dark.mp4 LFS Normal file

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

BIN
Pr/3_background/background.aep LFS Normal file

Binary file not shown.

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,16 @@
@echo off
setlocal
:: Get current date in YYYY-MM-DD format
for /f "tokens=2-4 delims=/ " %%a in ('date /t') do (
set mm=%%a
set dd=%%b
set yy=%%c
)
set yyyy=20%yy:~-2%
:: Create daily folder
mkdir "%yyyy%-%mm%-%dd%"
echo Daily folder created: %yyyy%-%mm%-%dd%
pause

View File

@@ -0,0 +1,37 @@
@echo off
setlocal EnableExtensions
set "script_dir=%~dp0"
for %%I in ("%script_dir%..\..") do set "PROJ_ROOT=%%~fI"
set "CONFIG_DIR=%PROJ_ROOT%\.config"
set "CONFIG_PATH=%CONFIG_DIR%\config.json"
set "GET_STRUCT_DIR=%CONFIG_DIR%\GetStructDir.ps1"
if not exist "%CONFIG_PATH%" (
echo [ERROR] config.json not found at %CONFIG_PATH%
echo Please run ConfigLoader.ps1 to deploy helper files.
exit /b 1
)
if not exist "%GET_STRUCT_DIR%" (
echo [ERROR] GetStructDir.ps1 not found at %GET_STRUCT_DIR%
echo Please run ConfigLoader.ps1 to deploy helper files.
exit /b 1
)
for /f "usebackq delims=" %%I in (`powershell -NoProfile -ExecutionPolicy Bypass -Command ^
"Set-StrictMode -Version Latest; $structDir = & '%GET_STRUCT_DIR%' -ProjectRoot '%PROJ_ROOT%'; if (-not $structDir) { throw \"Failed to get structDir from GetStructDir.ps1\" }; $ps1Path = Join-Path $structDir 'UpdateSequences.ps1'; if (-not (Test-Path -LiteralPath $ps1Path)) { throw \"UpdateSequences.ps1 not found at $ps1Path\" }; Write-Output $ps1Path"`) do set "ps1=%%I"
if not defined ps1 (
echo [ERROR] Unable to resolve UpdateSequences.ps1 path from config.
exit /b 1
)
echo Running PowerShell update script...
powershell -NoProfile -ExecutionPolicy Bypass -File "%ps1%"
set "rc=%errorlevel%"
echo PowerShell exited with RC=%rc%
echo Done.
pause >nul
exit /b %rc%

View File

@@ -0,0 +1,16 @@
@echo off
setlocal
:: Get current date in YYYY-MM-DD format
for /f "tokens=2-4 delims=/ " %%a in ('date /t') do (
set mm=%%a
set dd=%%b
set yy=%%c
)
set yyyy=20%yy:~-2%
:: Create daily folder
mkdir "%yyyy%-%mm%-%dd%"
echo Daily folder created: %yyyy%-%mm%-%dd%
pause

View File

@@ -0,0 +1,37 @@
@echo off
setlocal EnableExtensions
set "script_dir=%~dp0"
for %%I in ("%script_dir%..\..") do set "PROJ_ROOT=%%~fI"
set "CONFIG_DIR=%PROJ_ROOT%\.config"
set "CONFIG_PATH=%CONFIG_DIR%\config.json"
set "GET_STRUCT_DIR=%CONFIG_DIR%\GetStructDir.ps1"
if not exist "%CONFIG_PATH%" (
echo [ERROR] config.json not found at %CONFIG_PATH%
echo Please run ConfigLoader.ps1 to deploy helper files.
exit /b 1
)
if not exist "%GET_STRUCT_DIR%" (
echo [ERROR] GetStructDir.ps1 not found at %GET_STRUCT_DIR%
echo Please run ConfigLoader.ps1 to deploy helper files.
exit /b 1
)
for /f "usebackq delims=" %%I in (`powershell -NoProfile -ExecutionPolicy Bypass -Command ^
"Set-StrictMode -Version Latest; $structDir = & '%GET_STRUCT_DIR%' -ProjectRoot '%PROJ_ROOT%'; if (-not $structDir) { throw \"Failed to get structDir from GetStructDir.ps1\" }; $ps1Path = Join-Path $structDir 'UpdateSequences.ps1'; if (-not (Test-Path -LiteralPath $ps1Path)) { throw \"UpdateSequences.ps1 not found at $ps1Path\" }; Write-Output $ps1Path"`) do set "ps1=%%I"
if not defined ps1 (
echo [ERROR] Unable to resolve UpdateSequences.ps1 path from config.
exit /b 1
)
echo Running PowerShell update script...
powershell -NoProfile -ExecutionPolicy Bypass -File "%ps1%"
set "rc=%errorlevel%"
echo PowerShell exited with RC=%rc%
echo Done.
pause >nul
exit /b %rc%

View File

@@ -0,0 +1,16 @@
@echo off
setlocal
:: Get current date in YYYY-MM-DD format
for /f "tokens=2-4 delims=/ " %%a in ('date /t') do (
set mm=%%a
set dd=%%b
set yy=%%c
)
set yyyy=20%yy:~-2%
:: Create daily folder
mkdir "%yyyy%-%mm%-%dd%"
echo Daily folder created: %yyyy%-%mm%-%dd%
pause

View File

@@ -0,0 +1,37 @@
@echo off
setlocal EnableExtensions
set "script_dir=%~dp0"
for %%I in ("%script_dir%..\..") do set "PROJ_ROOT=%%~fI"
set "CONFIG_DIR=%PROJ_ROOT%\.config"
set "CONFIG_PATH=%CONFIG_DIR%\config.json"
set "GET_STRUCT_DIR=%CONFIG_DIR%\GetStructDir.ps1"
if not exist "%CONFIG_PATH%" (
echo [ERROR] config.json not found at %CONFIG_PATH%
echo Please run ConfigLoader.ps1 to deploy helper files.
exit /b 1
)
if not exist "%GET_STRUCT_DIR%" (
echo [ERROR] GetStructDir.ps1 not found at %GET_STRUCT_DIR%
echo Please run ConfigLoader.ps1 to deploy helper files.
exit /b 1
)
for /f "usebackq delims=" %%I in (`powershell -NoProfile -ExecutionPolicy Bypass -Command ^
"Set-StrictMode -Version Latest; $structDir = & '%GET_STRUCT_DIR%' -ProjectRoot '%PROJ_ROOT%'; if (-not $structDir) { throw \"Failed to get structDir from GetStructDir.ps1\" }; $ps1Path = Join-Path $structDir 'UpdateSequences.ps1'; if (-not (Test-Path -LiteralPath $ps1Path)) { throw \"UpdateSequences.ps1 not found at $ps1Path\" }; Write-Output $ps1Path"`) do set "ps1=%%I"
if not defined ps1 (
echo [ERROR] Unable to resolve UpdateSequences.ps1 path from config.
exit /b 1
)
echo Running PowerShell update script...
powershell -NoProfile -ExecutionPolicy Bypass -File "%ps1%"
set "rc=%errorlevel%"
echo PowerShell exited with RC=%rc%
echo Done.
pause >nul
exit /b %rc%

View File

@@ -0,0 +1,16 @@
@echo off
setlocal
:: Get current date in YYYY-MM-DD format
for /f "tokens=2-4 delims=/ " %%a in ('date /t') do (
set mm=%%a
set dd=%%b
set yy=%%c
)
set yyyy=20%yy:~-2%
:: Create daily folder
mkdir "%yyyy%-%mm%-%dd%"
echo Daily folder created: %yyyy%-%mm%-%dd%
pause

View File

@@ -0,0 +1,37 @@
@echo off
setlocal EnableExtensions
set "script_dir=%~dp0"
for %%I in ("%script_dir%..\..") do set "PROJ_ROOT=%%~fI"
set "CONFIG_DIR=%PROJ_ROOT%\.config"
set "CONFIG_PATH=%CONFIG_DIR%\config.json"
set "GET_STRUCT_DIR=%CONFIG_DIR%\GetStructDir.ps1"
if not exist "%CONFIG_PATH%" (
echo [ERROR] config.json not found at %CONFIG_PATH%
echo Please run ConfigLoader.ps1 to deploy helper files.
exit /b 1
)
if not exist "%GET_STRUCT_DIR%" (
echo [ERROR] GetStructDir.ps1 not found at %GET_STRUCT_DIR%
echo Please run ConfigLoader.ps1 to deploy helper files.
exit /b 1
)
for /f "usebackq delims=" %%I in (`powershell -NoProfile -ExecutionPolicy Bypass -Command ^
"Set-StrictMode -Version Latest; $structDir = & '%GET_STRUCT_DIR%' -ProjectRoot '%PROJ_ROOT%'; if (-not $structDir) { throw \"Failed to get structDir from GetStructDir.ps1\" }; $ps1Path = Join-Path $structDir 'UpdateSequences.ps1'; if (-not (Test-Path -LiteralPath $ps1Path)) { throw \"UpdateSequences.ps1 not found at $ps1Path\" }; Write-Output $ps1Path"`) do set "ps1=%%I"
if not defined ps1 (
echo [ERROR] Unable to resolve UpdateSequences.ps1 path from config.
exit /b 1
)
echo Running PowerShell update script...
powershell -NoProfile -ExecutionPolicy Bypass -File "%ps1%"
set "rc=%errorlevel%"
echo PowerShell exited with RC=%rc%
echo Done.
pause >nul
exit /b %rc%

File diff suppressed because it is too large

View File

@@ -0,0 +1,16 @@
@echo off
setlocal
:: Get current date in YYYY-MM-DD format
for /f "tokens=2-4 delims=/ " %%a in ('date /t') do (
set mm=%%a
set dd=%%b
set yy=%%c
)
set yyyy=20%yy:~-2%
:: Create daily folder
mkdir "%yyyy%-%mm%-%dd%"
echo Daily folder created: %yyyy%-%mm%-%dd%
pause

View File

@@ -0,0 +1,37 @@
@echo off
setlocal EnableExtensions
set "script_dir=%~dp0"
for %%I in ("%script_dir%..\..") do set "PROJ_ROOT=%%~fI"
set "CONFIG_DIR=%PROJ_ROOT%\.config"
set "CONFIG_PATH=%CONFIG_DIR%\config.json"
set "GET_STRUCT_DIR=%CONFIG_DIR%\GetStructDir.ps1"
if not exist "%CONFIG_PATH%" (
echo [ERROR] config.json not found at %CONFIG_PATH%
echo Please run ConfigLoader.ps1 to deploy helper files.
exit /b 1
)
if not exist "%GET_STRUCT_DIR%" (
echo [ERROR] GetStructDir.ps1 not found at %GET_STRUCT_DIR%
echo Please run ConfigLoader.ps1 to deploy helper files.
exit /b 1
)
for /f "usebackq delims=" %%I in (`powershell -NoProfile -ExecutionPolicy Bypass -Command ^
"Set-StrictMode -Version Latest; $structDir = & '%GET_STRUCT_DIR%' -ProjectRoot '%PROJ_ROOT%'; if (-not $structDir) { throw \"Failed to get structDir from GetStructDir.ps1\" }; $ps1Path = Join-Path $structDir 'UpdateSequences.ps1'; if (-not (Test-Path -LiteralPath $ps1Path)) { throw \"UpdateSequences.ps1 not found at $ps1Path\" }; Write-Output $ps1Path"`) do set "ps1=%%I"
if not defined ps1 (
echo [ERROR] Unable to resolve UpdateSequences.ps1 path from config.
exit /b 1
)
echo Running PowerShell update script...
powershell -NoProfile -ExecutionPolicy Bypass -File "%ps1%"
set "rc=%errorlevel%"
echo PowerShell exited with RC=%rc%
echo Done.
pause >nul
exit /b %rc%

File diff suppressed because it is too large

View File

@@ -0,0 +1,16 @@
@echo off
setlocal
:: Get current date in YYYY-MM-DD format
for /f "tokens=2-4 delims=/ " %%a in ('date /t') do (
set mm=%%a
set dd=%%b
set yy=%%c
)
set yyyy=20%yy:~-2%
:: Create daily folder
mkdir "%yyyy%-%mm%-%dd%"
echo Daily folder created: %yyyy%-%mm%-%dd%
pause

View File

@@ -0,0 +1,37 @@
@echo off
setlocal EnableExtensions
set "script_dir=%~dp0"
for %%I in ("%script_dir%..\..") do set "PROJ_ROOT=%%~fI"
set "CONFIG_DIR=%PROJ_ROOT%\.config"
set "CONFIG_PATH=%CONFIG_DIR%\config.json"
set "GET_STRUCT_DIR=%CONFIG_DIR%\GetStructDir.ps1"
if not exist "%CONFIG_PATH%" (
echo [ERROR] config.json not found at %CONFIG_PATH%
echo Please run ConfigLoader.ps1 to deploy helper files.
exit /b 1
)
if not exist "%GET_STRUCT_DIR%" (
echo [ERROR] GetStructDir.ps1 not found at %GET_STRUCT_DIR%
echo Please run ConfigLoader.ps1 to deploy helper files.
exit /b 1
)
for /f "usebackq delims=" %%I in (`powershell -NoProfile -ExecutionPolicy Bypass -Command ^
"Set-StrictMode -Version Latest; $structDir = & '%GET_STRUCT_DIR%' -ProjectRoot '%PROJ_ROOT%'; if (-not $structDir) { throw \"Failed to get structDir from GetStructDir.ps1\" }; $ps1Path = Join-Path $structDir 'UpdateSequences.ps1'; if (-not (Test-Path -LiteralPath $ps1Path)) { throw \"UpdateSequences.ps1 not found at $ps1Path\" }; Write-Output $ps1Path"`) do set "ps1=%%I"
if not defined ps1 (
echo [ERROR] Unable to resolve UpdateSequences.ps1 path from config.
exit /b 1
)
echo Running PowerShell update script...
powershell -NoProfile -ExecutionPolicy Bypass -File "%ps1%"
set "rc=%errorlevel%"
echo PowerShell exited with RC=%rc%
echo Done.
pause >nul
exit /b %rc%

42
Renders/UnzipSeqArchv.bat Normal file
View File

@@ -0,0 +1,42 @@
@echo off
setlocal EnableExtensions EnableDelayedExpansion
set "REN_DIR=%~dp0"
for %%I in ("%REN_DIR%..") do set "PROJ_ROOT=%%~fI"
set "CONFIG_DIR=%PROJ_ROOT%\.config"
set "CONFIG_PATH=%CONFIG_DIR%\config.json"
set "GET_STRUCT_DIR=%CONFIG_DIR%\GetStructDir.ps1"
if not exist "%CONFIG_PATH%" (
echo [ERROR] config.json not found at %CONFIG_PATH%
echo Please run ConfigLoader.ps1 to deploy helper files.
exit /b 1
)
if not exist "%GET_STRUCT_DIR%" (
echo [ERROR] GetStructDir.ps1 not found at %GET_STRUCT_DIR%
echo Please run ConfigLoader.ps1 to deploy helper files.
exit /b 1
)
for /f "usebackq delims=" %%I in (`powershell -NoProfile -ExecutionPolicy Bypass -Command ^
"Set-StrictMode -Version Latest; $structDir = & '%GET_STRUCT_DIR%' -ProjectRoot '%PROJ_ROOT%'; if (-not $structDir) { throw \"Failed to get structDir from GetStructDir.ps1\" }; $pyPath = Join-Path $structDir 'zip_sequences.py'; if (-not (Test-Path -LiteralPath $pyPath)) { throw \"zip_sequences.py not found at $pyPath\" }; Write-Output $pyPath"`) do set "PY_SCRIPT=%%I"
if not defined PY_SCRIPT (
echo [ERROR] Unable to resolve zip_sequences.py path from config.
exit /b 1
)
pushd "%PROJ_ROOT%" >nul 2>&1
python "%PY_SCRIPT%" --mode expand --verbose %*
set "ERR=!ERRORLEVEL!"
if not "!ERR!"=="0" (
echo Failed to expand render sequence archives (exit code !ERR!).
)
popd >nul 2>&1
exit /b !ERR!

View File

@@ -0,0 +1,58 @@
@echo off
setlocal enabledelayedexpansion
echo UpdateAllSequences - Running all UpdateSequences.bat files in submodule folders...
echo.
echo Searching for submodule folders and UpdateSequences.bat files...
echo.
REM Find all submodule folders and check for UpdateSequences.bat
set /a count=0
set /a found=0
echo Found the following submodule folders with UpdateSequences.bat:
for /d %%S in (*) do (
if exist "%%S\UpdateSequences.bat" (
echo - %%S
set /a found+=1
)
)
if !found!==0 (
echo No submodule folders with UpdateSequences.bat found!
echo.
echo Note: This script looks for UpdateSequences.bat files in immediate subdirectories.
echo Make sure you're running this from the project root where submodules are located.
pause
exit /b 1
)
echo.
echo Found !found! submodule folders with UpdateSequences.bat files.
echo.
echo Starting execution...
echo.
REM Execute each UpdateSequences.bat found in submodule folders
for /d %%S in (*) do (
if exist "%%S\UpdateSequences.bat" (
set /a count+=1
echo [!count!/!found!] Running UpdateSequences.bat in %%S...
pushd "%%S"
call "UpdateSequences.bat" < NUL
set "rc=!errorlevel!"
popd
if !rc!==0 (
echo Completed: %%S (SUCCESS)
) else (
echo Completed: %%S (FAILED - RC=!rc!)
)
echo.
)
)
echo.
echo Operation completed. Successfully executed !count! UpdateSequences.bat files.
echo.
echo Note: Check individual _CURRENT\_UpdateSequences.log files in each submodule for details.
pause

42
Renders/ZipSeqArchv.bat Normal file
View File

@@ -0,0 +1,42 @@
@echo off
setlocal EnableExtensions EnableDelayedExpansion
set "REN_DIR=%~dp0"
for %%I in ("%REN_DIR%..") do set "PROJ_ROOT=%%~fI"
set "CONFIG_DIR=%PROJ_ROOT%\.config"
set "CONFIG_PATH=%CONFIG_DIR%\config.json"
set "GET_STRUCT_DIR=%CONFIG_DIR%\GetStructDir.ps1"
if not exist "%CONFIG_PATH%" (
echo [ERROR] config.json not found at %CONFIG_PATH%
echo Please run ConfigLoader.ps1 to deploy helper files.
exit /b 1
)
if not exist "%GET_STRUCT_DIR%" (
echo [ERROR] GetStructDir.ps1 not found at %GET_STRUCT_DIR%
echo Please run ConfigLoader.ps1 to deploy helper files.
exit /b 1
)
for /f "usebackq delims=" %%I in (`powershell -NoProfile -ExecutionPolicy Bypass -Command ^
"Set-StrictMode -Version Latest; $structDir = & '%GET_STRUCT_DIR%' -ProjectRoot '%PROJ_ROOT%'; if (-not $structDir) { throw \"Failed to get structDir from GetStructDir.ps1\" }; $pyPath = Join-Path $structDir 'zip_sequences.py'; if (-not (Test-Path -LiteralPath $pyPath)) { throw \"zip_sequences.py not found at $pyPath\" }; Write-Output $pyPath"`) do set "PY_SCRIPT=%%I"
if not defined PY_SCRIPT (
echo [ERROR] Unable to resolve zip_sequences.py path from config.
exit /b 1
)
pushd "%PROJ_ROOT%" >nul 2>&1
python "%PY_SCRIPT%" --verbose %*
set "ERR=!ERRORLEVEL!"
if not "!ERR!"=="0" (
echo Failed to update render sequence archives (exit code !ERR!).
)
popd >nul 2>&1
exit /b !ERR!

211
Sheepit-Script.sh Normal file
View File

@@ -0,0 +1,211 @@
#!/usr/bin/bash
script_version=0.3.1
echo "Script Version: $script_version"
#default stuff
username=""
renderkey=""
autosettings=""
gpu_cores=""
client_name=""
run_cpu_client="n"
bypass_confirm="n"
while [[ $# -gt 0 ]]; do
key="$1"
case $key in
-username)
username="$2"
shift 2;;
-renderkey)
renderkey="$2"
shift 2;;
-autosettings)
autosettings="$2"
shift 2;;
-gpucores)
gpu_cores="$2"
shift 2;;
-clientname)
client_name="$2"
shift 2;;
-cores)
cpu_cores="$2"
shift 2;;
-cpu)
if [ "$2" == "y" ]; then
run_cpu_client="y"
else
run_cpu_client="n"
fi
shift 2;;
-y)
bypass_confirm="y"
shift 1;;
*)
echo "Unknown option: $1"
exit 1
;;
esac
done
line_thing="------------------------------------"
amount_of_gpus=$(nvidia-smi -L | wc -l)
amount_of_ram=$(free -h | awk '/^Mem:/ {print $7}' | sed 's/[^0-9]*//g')
cpu_cores_fixed=$(nproc)
echo "$line_thing"
echo "Welcome to the Vast.ai Sheepit setup script! Please follow the on-screen instructions to continue."
echo "$line_thing"
if [ -z "$username" ]; then
read -r -p "Enter your username: " username < /dev/tty
fi
if [ -z "$renderkey" ]; then
read -r -p "Enter your render key: " renderkey < /dev/tty
fi
if [ -z "$autosettings" ]; then
read -r -p "Do you want to use auto settings? (y/n): " autosettings < /dev/tty
fi
if [ -z "$client_name" ]; then
read -r -p "Enter client name (no spaces): " client_name < /dev/tty
fi
if [[ "$autosettings" =~ ^[Yy]$ ]]; then
echo "$line_thing"
echo "Auto settings enabled."
num_gpus=$amount_of_gpus
gpu_rec_amount_ram=$(( amount_of_ram / num_gpus ))
RAM=$gpu_rec_amount_ram
if [ -z "$gpu_cores" ]; then
gpu_cores=2
fi
else
echo "$line_thing"
echo "There are $amount_of_gpus NVIDIA GPUs in this machine."
while true; do
read -r -p "Enter the number of GPUs you would like to use for sheepit (1-$amount_of_gpus): " num_gpus < /dev/tty
if [[ "$num_gpus" =~ ^[1-9][0-9]*$ && "$num_gpus" -le "$amount_of_gpus" ]]; then
break
else
echo "Please enter a valid positive integer for the number of GPUs (1-$amount_of_gpus)."
fi
done
gpu_rec_amount_ram=$(( amount_of_ram / num_gpus ))
amount_of_gpus1=$(( num_gpus + 1 ))
cpu_rec_amount_ram=$(( amount_of_ram / amount_of_gpus1 ))
echo "$line_thing"
echo "This machine has ${amount_of_ram}GB RAM available."
echo "It is recommended to use ${gpu_rec_amount_ram}GB of RAM per GPU client, or ${cpu_rec_amount_ram}GB if also running a CPU client."
read -r -p "Enter amount of RAM for each client in GB: " RAM < /dev/tty
RAM="${RAM//[^0-9]/}"
while true; do
read -r -p "Enter number of cores for each GPU client: " gpu_cores < /dev/tty
if [[ "$gpu_cores" =~ ^[1-9][0-9]*$ ]]; then
break
else
echo "Please enter a valid positive integer."
fi
done
fi
if [[ "$run_cpu_client" == "y" ]]; then
WANT_CPU="y"
else
if [[ "$autosettings" =~ ^[Yy]$ ]]; then
WANT_CPU="n"
else
read -r -p "Do you want to run a CPU client as well? (y/n): " WANT_CPU < /dev/tty
fi
fi
if [[ "$WANT_CPU" =~ ^[Yy]$ ]]; then
cores_used_for_gpu=$(( num_gpus * gpu_cores ))
cores_left=$(( cpu_cores_fixed - cores_used_for_gpu ))
if [[ "$autosettings" =~ ^[Yy]$ ]]; then
cpu_cores=$cores_left
else
echo "This machine has $cpu_cores_fixed CPU cores."
echo "With GPU clients using $cores_used_for_gpu cores, there are $cores_left cores available for the CPU client."
while true; do
read -r -p "Enter number of cores for the CPU client: " cpu_cores < /dev/tty
if [[ "$cpu_cores" =~ ^[1-9][0-9]*$ ]]; then
break
else
echo "Please enter a valid positive integer."
fi
done
fi
fi
echo "$line_thing"
echo "These are the settings selected, is this ok? (Y/n)"
echo "Client name: $client_name"
echo "GPU Clients: $num_gpus"
echo "RAM per client: ${RAM}GB"
echo "Cores per GPU: ${gpu_cores}"
echo "Sheepit user: ${username}"
echo "Renderkey: ${renderkey}"
if [[ "$WANT_CPU" =~ ^[Yy]$ ]]; then
echo "Run a CPU client: Yes"
echo "CPU cores: $cpu_cores"
else
echo "Run a CPU client: No"
fi
if [[ "$bypass_confirm" != "y" ]]; then
read -r -p "(Y/n): " confirm < /dev/tty
if [[ "$confirm" =~ ^[Nn]$ ]]; then
echo "Aborted"
exit 0
fi
fi
echo "Starting sheepit..."
apt update
apt install -y sudo
sudo apt update && sudo apt upgrade -y
sudo apt install -y tmux default-jre blender wget xserver-xorg-video-dummy libboost-all-dev libgl1-mesa-dev libglu1-mesa libsm-dev libxkbcommon0 btop
sudo mkdir -p /sheepit/sheepitcache
sudo chown -R "$(whoami):$(id -gn)" /sheepit
wget https://www.sheepit-renderfarm.com/media/applet/client-latest.php -O /sheepit/client.jar
tmux new-session -d -s sheepit
for (( i = 0; i < num_gpus; i++ )); do
GPU_ID="OPTIX_$i"
CACHE_DIR="/sheepit/sheepitcache/$i"
HOSTNAME="${client_name}-$(printf "%03d" $(( i + 1 )))"
CMD="java -jar /sheepit/client.jar \
-login $username -password $renderkey \
-ui text --verbose --headless -cache-dir $CACHE_DIR \
-gpu $GPU_ID -cores $gpu_cores -memory ${RAM}G -hostname $HOSTNAME -shared-zip /sheepit/sheepitcache/shared"
if [ "$i" -eq 0 ]; then
tmux rename-window -t sheepit "gpu-$i"
tmux send-keys -t sheepit "$CMD" C-m
else
tmux new-window -t sheepit -n "gpu-$i"
tmux send-keys -t sheepit:"gpu-$i" "$CMD" C-m
fi
done
if [[ "$WANT_CPU" =~ ^[Yy]$ ]]; then
CPU_CACHE_DIR="/sheepit/sheepitcache/cpu"
CPU_HOSTNAME="${client_name}-cpu"
CPU_CMD="java -jar /sheepit/client.jar \
-login $username -password $renderkey \
-ui text --verbose --headless -cache-dir $CPU_CACHE_DIR \
-cores $cpu_cores -memory ${RAM}G -hostname $CPU_HOSTNAME -shared-zip /sheepit/sheepitcache/shared"
tmux new-window -t sheepit -n "cpu"
tmux send-keys -t sheepit:"cpu" "$CPU_CMD" C-m
fi
# Attach to the tmux session
tmux switch-client -t sheepit
tmux attach-session -t sheepit
echo "If it doesn't switch automatically, you can switch with 'tmux attach-session -t sheepit' or 'tmux switch-client -t sheepit'"

8
sheepitcommand.txt Normal file
View File

@@ -0,0 +1,8 @@
curl -s https://koolkid6958.dev/Sheepit-Script.sh | bash -s - \
-username RaincloudTheDragon \
-renderkey IfCOWBHFQpceG0601DmyrwOOJOAp2UJAQ0O0X0jF \
-autosettings y \
-gpucores 32 \
-clientname vast \
-cpu n \
-y