// Flamenco-Management/scripts/TalkingHeads Custom Render.js
// SPDX-License-Identifier: GPL-3.0-or-later
const JOB_TYPE = {
label: "TalkingHeads Custom Render",
description: "Render a sequence of frames, and create a preview video file",
settings: [
// Settings for artists to determine:
{
key: "frames",
type: "string",
required: true,
eval: "f'{C.scene.frame_start}-{C.scene.frame_end}'",
evalInfo: {
showLinkButton: true,
description: "Scene frame range",
},
description: "Frame range to render. Examples: '47', '1-30', '3, 5-10, 47-327'"
},
{
key: "chunk_size",
type: "int32",
default: 1,
description: "Number of frames to render in one Blender render task",
visible: "submission"
},
// Ensure blendfile is available for subsequent auto-evals
{
key: "blendfile",
type: "string",
required: true,
eval: "bpy.data.filepath",
description: "Path of the Blend file to render",
visible: "web"
},
{
key: "render_output_root",
type: "string",
subtype: "dir_path",
required: false,
visible: "submission",
eval: "__import__('os').path.normpath(__import__('os').path.join(((__import__('re').search(r'^(.*?)[\\/][Bb]lends[\\/]', bpy.data.filepath.replace('\\\\','/')) and __import__('re').search(r'^(.*?)[\\/][Bb]lends[\\/]', bpy.data.filepath.replace('\\\\','/')).group(1)) or __import__('os').path.dirname(bpy.data.filepath)), 'Renders'))",
evalInfo: {
showLinkButton: true,
description: "Auto-detect the project's Renders folder"
},
description: "Base path where renders are stored, typically the project's Renders folder. If empty, derived automatically."
},
{
key: "use_submodule",
label: "Use Submodule",
type: "bool",
required: false,
default: false,
visible: "submission",
description: "Include a submodule folder under Renders. Turn off to omit submodule entirely."
},
{
key: "submodule",
type: "string",
required: false,
visible: "submission",
eval: "(__import__('os').path.basename(__import__('os').path.dirname(bpy.data.filepath)) if settings.use_submodule else '')",
evalInfo: {
showLinkButton: true,
description: "Auto-fill with the current .blend file's parent folder"
},
description: "Optional submodule under Renders (e.g. 'Waterspider B'). If empty, omitted."
},
{
key: "render_output_path",
type: "string",
subtype: "file_path",
editable: false,
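// Composes <render_output_root>/<submodule>/<blendname>/<blendname>_######;
// the submodule segment is dropped when it is empty or use_submodule is off.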
eval: "((lambda Path, abspath, os_path, settings_obj, blend: str(Path(abspath(settings_obj.render_output_root or '//')) / (((str(settings_obj.submodule or '').strip()) if (settings_obj.use_submodule and str(settings_obj.submodule or '').strip()) else ((os_path.basename(os_path.dirname(bpy.data.filepath))) if settings_obj.use_submodule else ''))) / blend / (blend + '_######')))(__import__('pathlib').Path, __import__('os').path.abspath, __import__('os').path, settings, __import__('os').path.splitext(__import__('os').path.basename(bpy.data.filepath))[0]))",
description: "Final file path of where render output will be saved"
},
// Automatically evaluated settings:
{
key: "fps",
type: "float",
eval: "C.scene.render.fps / C.scene.render.fps_base",
visible: "hidden"
},
{
key: "format",
type: "string",
required: true,
eval: "C.scene.render.image_settings.file_format",
visible: "web"
},
{
key: "image_file_extension",
type: "string",
required: true,
eval: "C.scene.render.file_extension",
visible: "hidden",
description: "File extension used when rendering images"
},
{
key: "has_previews",
type: "bool",
required: false,
eval: "C.scene.render.image_settings.use_preview",
visible: "hidden",
description: "Whether Blender will render preview images."
},
{
key: "scene",
type: "string",
required: true,
eval: "C.scene.name",
visible: "web",
description: "Name of the scene to render."
},
]
};
// Set of scene.render.image_settings.file_format values that produce
// files which FFmpeg is known not to handle as input.
const ffmpegIncompatibleImageFormats = new Set([
"EXR",
"MULTILAYER", // Old CLI-style format indicators
"OPEN_EXR",
"OPEN_EXR_MULTILAYER", // DNA values for these formats.
]);
// File formats that would cause rendering to video.
// This is not supported by this job type.
const videoFormats = ['FFMPEG', 'AVI_RAW', 'AVI_JPEG'];
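// Entry point: fills in any auto-detected settings, validates the output format,
// then authors one Blender render task per frame chunk plus an optional
// preview-video task that depends on all of them.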
function compileJob(job) {
print("Blender Render job submitted");
print("job: ", job);
const settings = job.settings;
// Ensure auto-filled values are applied at submission time.
try {
if (settings.use_submodule) {
const detectedSubmodule = detectSubmodule(settings) || '';
if (!settings.submodule || String(settings.submodule).trim() === '') {
settings.submodule = detectedSubmodule;
}
} else {
settings.submodule = '';
}
if (!settings.render_output_root || String(settings.render_output_root).trim() === '') {
// Auto-detect project root and Renders folder similar to eval button
const projectRoot = findProjectRootFromBlendfile(settings.blendfile);
if (projectRoot) settings.render_output_root = path.join(projectRoot, 'Renders');
else settings.render_output_root = path.join(path.dirname(settings.blendfile), 'Renders');
}
const recomposed = computeAutoRenderOutputPath(job);
if (recomposed) settings.render_output_path = recomposed;
} catch (e) {
print("Auto-fill on submit failed:", e);
}
if (videoFormats.indexOf(settings.format) >= 0) {
throw `This job type only renders images, and not "${settings.format}"`;
}
const renderOutput = normalizePathSeparators(settings.render_output_path || renderOutputPath(job));
// Make sure that when the job is investigated later, it shows the
// actually-used render output:
settings.render_output_path = renderOutput;
const renderDir = path.dirname(renderOutput);
const renderTasks = authorRenderTasks(settings, renderDir, renderOutput);
const videoTask = authorCreateVideoTask(settings, renderDir);
for (const rt of renderTasks) {
job.addTask(rt);
}
if (videoTask) {
// If there is a video task, all other tasks have to be done first.
for (const rt of renderTasks) {
videoTask.addDependency(rt);
}
job.addTask(videoTask);
}
}
// Derive the project root and submodule from the blendfile path and compose the
// render output path: <render_output_root>/<submodule>/<blendname>/<blendname>_######.
function computeAutoRenderOutputPath(job) {
const settings = job.settings || {};
if (!settings.blendfile) return null;
const projectRoot = findProjectRootFromBlendfile(settings.blendfile);
const submodule = (settings.submodule && ("" + settings.submodule).trim()) ? ("" + settings.submodule).trim() : detectSubmodule(settings);
// Resolve render root
let renderRoot = null;
if (settings.render_output_root && ("" + settings.render_output_root).trim()) {
renderRoot = ("" + settings.render_output_root).trim();
} else if (projectRoot) {
renderRoot = path.join(projectRoot, 'Renders');
} else {
// Fallback to the blendfile's directory Renders sibling
renderRoot = path.join(path.dirname(settings.blendfile), 'Renders');
}
const blendname = path.stem(settings.blendfile).replace('.flamenco', '');
print('AutoPath: blendfile=', settings.blendfile);
print('AutoPath: projectRoot=', projectRoot);
print('AutoPath: renderRoot=', renderRoot);
print('AutoPath: submodule=', submodule);
print('AutoPath: blendname=', blendname);
const parts = [renderRoot];
if (submodule) parts.push(submodule);
parts.push(blendname, `${blendname}_######`);
const finalPath = path.join.apply(path, parts);
print('AutoPath: finalPath=', finalPath);
return finalPath;
}
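// Returns the part of the blendfile path before the first 'Blends' folder
// (case-insensitive), or null when the path contains no usable 'Blends' folder.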
function findProjectRootFromBlendfile(blendfilePath) {
const blendDir = path.dirname(blendfilePath);
const normalized = blendDir.replace(/\\/g, '/');
const parts = normalized.split('/');
let blendsIndex = -1;
for (let i = 0; i < parts.length; i++) {
if (parts[i].toLowerCase() === 'blends') {
blendsIndex = i;
break;
}
}
if (blendsIndex <= 0) return null;
const rootParts = parts.slice(0, blendsIndex);
if (rootParts.length === 0) return null;
return rootParts.join('/');
}
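// Fallback detection: expects a path like .../Blends/animations/<submodule>/...
// and returns the <submodule> folder name, or null when the pattern is absent.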
function detectSubmoduleFromBlendfile(blendfilePath) {
const blendDir = path.dirname(blendfilePath);
const normalized = blendDir.replace(/\\/g, '/');
const parts = normalized.split('/');
for (let i = 0; i < parts.length; i++) {
if (parts[i].toLowerCase() === 'blends') {
if (i + 1 < parts.length && parts[i + 1].toLowerCase() === 'animations') {
if (i + 2 < parts.length) return parts[i + 2];
}
break;
}
}
return null;
}
// Prefer explicit setting; else detect robustly from blendfile path.
function detectSubmodule(settings) {
if (!settings) return null;
if (settings.submodule && ("" + settings.submodule).trim()) {
return ("" + settings.submodule).trim();
}
const bf = settings.blendfile || '';
// Try regex first (case-insensitive): /Blends/animations/<name>/...
try {
const bfNorm = bf.replace(/\\/g, '/');
const m = bfNorm.match(/\/(?:[Bb]lends)\/(?:[Aa]nimations)\/([^\/]+)/);
print('detectSubmodule: bf=', bfNorm, ' match=', m && m[1]);
if (m && m[1]) return m[1];
} catch (_) {}
return detectSubmoduleFromBlendfile(bf);
}
// Do field replacement on the render output path.
function renderOutputPath(job) {
let path = job.settings.render_output_path;
if (!path) {
throw "no render_output_path setting!";
}
return path.replace(/{([^}]+)}/g, (match, group0) => {
switch (group0) {
case "timestamp":
return formatTimestampLocal(job.created);
default:
return match;
}
});
}
// Ensure consistent separators for server-side consumption.
function normalizePathSeparators(p) {
if (!p) return p;
const forward = p.replace(/\\/g, '/');
// Collapse multiple slashes but preserve drive letter paths like 'A:/'
return forward.replace(/([^:])\/+/g, '$1/');
}
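// Authors one Blender render task per frame chunk, rendering to the composed output path.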
function authorRenderTasks(settings, renderDir, renderOutput) {
print("authorRenderTasks(", renderDir, renderOutput, ")");
let renderTasks = [];
let chunks = frameChunker(settings.frames, settings.chunk_size);
let baseArgs = [];
if (settings.scene) {
baseArgs = baseArgs.concat(["--scene", settings.scene]);
}
for (let chunk of chunks) {
const task = author.Task(`render-${chunk}`, "blender");
const command = author.Command("blender-render", {
exe: "{blender}",
exeArgs: "{blenderArgs}",
argsBefore: [],
blendfile: settings.blendfile,
args: baseArgs.concat([
"--render-output", path.join(renderDir, path.basename(renderOutput)),
"--render-format", settings.format,
"--render-frame", chunk.replaceAll("-", ".."), // Convert to Blender frame range notation.
])
});
task.addCommand(command);
renderTasks.push(task);
}
return renderTasks;
}
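// Authors an FFmpeg task that assembles the rendered frames into an MP4 preview.
// Returns nothing (no task) when the image format cannot be fed to FFmpeg without
// previews, or when the scene FPS is unknown.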
function authorCreateVideoTask(settings, renderDir) {
const needsPreviews = ffmpegIncompatibleImageFormats.has(settings.format);
if (needsPreviews && !settings.has_previews) {
print("Not authoring video task, FFmpeg-incompatible render output")
return;
}
if (!settings.fps) {
print("Not authoring video task, no FPS known:", settings);
return;
}
var frames = `${settings.frames}`;
if (frames.search(',') != -1) {
// Get the first and last frame from the list
const chunks = frameChunker(settings.frames, 1);
const firstFrame = chunks[0];
const lastFrame = chunks.slice(-1)[0];
frames = `${firstFrame}-${lastFrame}`;
}
const stem = path.stem(settings.blendfile).replace('.flamenco', '');
const outfile = path.join(renderDir, `${stem}-${frames}.mp4`);
const outfileExt = needsPreviews ? ".jpg" : settings.image_file_extension;
const task = author.Task('preview-video', 'ffmpeg');
const command = author.Command("frames-to-video", {
exe: "ffmpeg",
fps: settings.fps,
inputGlob: path.join(renderDir, `*${outfileExt}`),
outputFile: outfile,
args: [
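// NVENC hardware H.264 encoding; assumes workers have an NVIDIA GPU and an FFmpeg build that includes h264_nvenc.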
'-c:v',
'h264_nvenc',
'-preset',
'medium',
'-rc',
'constqp',
'-qp',
'20',
'-g',
'18',
'-vf',
'pad=ceil(iw/2)*2:ceil(ih/2)*2',
'-pix_fmt',
'yuv420p',
'-r',
settings.fps,
'-y', // Be sure to always pass either "-n" or "-y".
],
});
task.addCommand(command);
print(`Creating output video for ${settings.format}`);
return task;
}