Sewmina 2025-10-14 21:10:03 +05:30
parent d2bbbc3f35
commit dce1e00af1
7 changed files with 334 additions and 230 deletions


@@ -1,125 +0,0 @@
#!/usr/bin/env python3
"""
Script to create a 4-panel video from front, side, and top view images.
Layout:
- Top Left: Front view
- Top Right: Side view
- Bottom Left: Top view
- Bottom Right: Empty (black) for future content
"""
import cv2
import numpy as np
import os
import glob
from pathlib import Path
def create_4panel_video():
    # Define paths
    base_path = Path("/home/warlock/Projects/ScfHeatmapGen/js/2025-10-06_18-00-42")
    front_dir = base_path / "front"
    side_dir = base_path / "side"
    top_dir = base_path / "top"
    output_path = base_path / "combined_4panel_video.mp4"

    # Get all image files from each directory
    front_images = sorted(glob.glob(str(front_dir / "*.png")))
    side_images = sorted(glob.glob(str(side_dir / "*.png")))
    top_images = sorted(glob.glob(str(top_dir / "*.png")))

    print(f"Found {len(front_images)} front images")
    print(f"Found {len(side_images)} side images")
    print(f"Found {len(top_images)} top images")

    # Check if all directories have the same number of images
    if not (len(front_images) == len(side_images) == len(top_images)):
        print("Warning: Different number of images in directories!")
        min_count = min(len(front_images), len(side_images), len(top_images))
        front_images = front_images[:min_count]
        side_images = side_images[:min_count]
        top_images = top_images[:min_count]
        print(f"Using {min_count} images from each directory")

    # Read first image to get dimensions
    first_img = cv2.imread(front_images[0])
    if first_img is None:
        print(f"Error: Could not read image {front_images[0]}")
        return

    img_height, img_width = first_img.shape[:2]
    print(f"Image dimensions: {img_width}x{img_height}")

    # Calculate panel dimensions (2x2 grid)
    panel_width = img_width
    panel_height = img_height

    # Create video writer
    fourcc = cv2.VideoWriter_fourcc(*'mp4v')
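    # Note: the 'mp4v' fourcc selects MPEG-4 Part 2; many OpenCV builds cannot
    # write H.264 ('avc1') without additional codec support.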
    fps = 30  # Adjust frame rate as needed

    # Create output video with 2x2 panel layout
    output_width = panel_width * 2
    output_height = panel_height * 2
    out = cv2.VideoWriter(str(output_path), fourcc, fps, (output_width, output_height))

    print(f"Creating video: {output_width}x{output_height} at {fps} FPS")
    print(f"Output file: {output_path}")

    # Process each frame
    for i, (front_img_path, side_img_path, top_img_path) in enumerate(zip(front_images, side_images, top_images)):
        # Read images
        front_img = cv2.imread(front_img_path)
        side_img = cv2.imread(side_img_path)
        top_img = cv2.imread(top_img_path)

        if front_img is None or side_img is None or top_img is None:
            print(f"Warning: Could not read images for frame {i+1}")
            continue

        # Resize images to panel size if needed
        front_img = cv2.resize(front_img, (panel_width, panel_height))
        side_img = cv2.resize(side_img, (panel_width, panel_height))
        top_img = cv2.resize(top_img, (panel_width, panel_height))

        # Create empty panel for bottom right
        empty_panel = np.zeros((panel_height, panel_width, 3), dtype=np.uint8)

        # Create 2x2 grid
        # Top row
        top_row = np.hstack([front_img, side_img])
        # Bottom row
        bottom_row = np.hstack([top_img, empty_panel])
        # Combine rows
        combined_frame = np.vstack([top_row, bottom_row])

        # Add labels to each panel
        font = cv2.FONT_HERSHEY_SIMPLEX
        font_scale = 1.0
        color = (255, 255, 255)  # White
        thickness = 2

        # Add labels
        cv2.putText(combined_frame, "Front View", (10, 30), font, font_scale, color, thickness)
        cv2.putText(combined_frame, "Side View", (panel_width + 10, 30), font, font_scale, color, thickness)
        cv2.putText(combined_frame, "Top View", (10, panel_height + 30), font, font_scale, color, thickness)
        cv2.putText(combined_frame, "Reserved", (panel_width + 10, panel_height + 30), font, font_scale, color, thickness)

        # Write frame
        out.write(combined_frame)

        # Progress indicator
        if (i + 1) % 10 == 0:
            print(f"Processed {i + 1}/{len(front_images)} frames")

    # Release everything
    out.release()
    cv2.destroyAllWindows()

    print(f"\nVideo creation completed!")
    print(f"Output saved to: {output_path}")
    print(f"Total frames: {len(front_images)}")
    print(f"Duration: {len(front_images)/fps:.2f} seconds")


if __name__ == "__main__":
    create_4panel_video()


@@ -1,96 +0,0 @@
#!/bin/bash
# Script to create a 4-panel video from front, side, and top view images
# Layout:
# - Top Left: Front view
# - Top Right: Side view
# - Bottom Left: Top view
# - Bottom Right: Empty (black) for future content
set -e
BASE_PATH="/home/warlock/Projects/ScfHeatmapGen/js/2025-10-06_18-00-42"
FRONT_DIR="$BASE_PATH/front"
SIDE_DIR="$BASE_PATH/side"
TOP_DIR="$BASE_PATH/top"
OUTPUT_DIR="$BASE_PATH/temp_frames"
OUTPUT_VIDEO="$BASE_PATH/combined_4panel_video.mp4"
echo "Creating 4-panel video from heatmap images..."
# Create temporary directory for combined frames
mkdir -p "$OUTPUT_DIR"
# Get the number of images (assuming all directories have the same count)
NUM_IMAGES=$(ls "$FRONT_DIR"/*.png | wc -l)
echo "Found $NUM_IMAGES images in each directory"
# Get dimensions of first image
FIRST_IMG=$(ls "$FRONT_DIR"/*.png | head -1)
IMG_INFO=$(identify "$FIRST_IMG")
IMG_WIDTH=$(echo "$IMG_INFO" | cut -d' ' -f3 | cut -d'x' -f1)
IMG_HEIGHT=$(echo "$IMG_INFO" | cut -d' ' -f3 | cut -d'x' -f2)
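# identify prints "<file> <format> <WxH> ..."; field 3 is the WxH geometry string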
echo "Image dimensions: ${IMG_WIDTH}x${IMG_HEIGHT}"
# Calculate output dimensions (2x2 grid)
OUTPUT_WIDTH=$((IMG_WIDTH * 2))
OUTPUT_HEIGHT=$((IMG_HEIGHT * 2))
echo "Output video dimensions: ${OUTPUT_WIDTH}x${OUTPUT_HEIGHT}"
# Process each frame
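# Note: seq -w pads to the width of NUM_IMAGES, so this assumes the source frames
# are named with matching zero-padding (e.g. 001.png when the count has 3 digits)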
for i in $(seq -w 1 $NUM_IMAGES); do
    FRONT_IMG="$FRONT_DIR/${i}.png"
    SIDE_IMG="$SIDE_DIR/${i}.png"
    TOP_IMG="$TOP_DIR/${i}.png"
    OUTPUT_FRAME="$OUTPUT_DIR/frame_${i}.png"

    echo "Processing frame $i/$NUM_IMAGES"

    # Create a black panel for the bottom right
    convert -size ${IMG_WIDTH}x${IMG_HEIGHT} xc:black "$OUTPUT_DIR/empty_${i}.png"

    # Create 2x2 grid using ImageMagick montage
    montage \
        "$FRONT_IMG" "$SIDE_IMG" \
        "$TOP_IMG" "$OUTPUT_DIR/empty_${i}.png" \
        -tile 2x2 \
        -geometry ${IMG_WIDTH}x${IMG_HEIGHT}+0+0 \
        -background black \
        "$OUTPUT_FRAME"

    # Add labels to each panel
    convert "$OUTPUT_FRAME" \
        -font DejaVu-Sans-Bold \
        -pointsize 24 \
        -fill white \
        -annotate +20+30 "Front View" \
        -annotate +$((IMG_WIDTH + 20))+30 "Side View" \
        -annotate +20+$((IMG_HEIGHT + 30)) "Top View" \
        -annotate +$((IMG_WIDTH + 20))+$((IMG_HEIGHT + 30)) "Reserved" \
        "$OUTPUT_FRAME"

    # Clean up temporary empty panel
    rm "$OUTPUT_DIR/empty_${i}.png"
done
echo "All frames processed. Creating video..."
# Create video from frames using FFmpeg
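# Note: -crf is a libx264/libx265 option; libopenh264 may ignore it, in which case
# quality falls back to the encoder's default rate control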
ffmpeg -y \
    -framerate 2 \
    -i "$OUTPUT_DIR/frame_%03d.png" \
    -c:v libopenh264 \
    -pix_fmt yuv420p \
    -crf 18 \
    "$OUTPUT_VIDEO"
echo "Video created: $OUTPUT_VIDEO"
# # Clean up temporary frames
# echo "Cleaning up temporary files..."
# rm -rf "$OUTPUT_DIR"
echo "Done! Video saved to: $OUTPUT_VIDEO"
echo "Duration: $(echo "scale=2; $NUM_IMAGES/30" | bc) seconds"


@@ -1,6 +1,7 @@
 import { Worker } from 'worker_threads';
 import { readReplay } from "./utils/replay_reader";
 import { getReplayDuration } from "./utils/drawer";
+import { create4PanelVideo } from "./utils/video_creator";
 import * as cliProgress from "cli-progress";
 import * as os from 'os';
 import * as fs from 'fs';
@@ -14,8 +15,9 @@ const replayFiles = fs.readdirSync(replayPath)
   .map((file: string) => path.join(replayPath, file));
 
 // Timelapse configuration
-const targetFPS = 60;
+const targetFPS = 30;
 const timelapseSpeed = 5;
+const skipSeconds = 30; // Skip first N seconds of replay
 
 // Create multibar container
 const multibar = new cliProgress.MultiBar({
@@ -52,7 +54,11 @@ async function prepareTasksForReplay(filePath: string, chunksPerView: number): P
   const baseFileName = path.basename(filePath, '.json');
   const duration = getReplayDuration(replayData);
   const framesPerSecond = targetFPS / timelapseSpeed;
-  const totalFrames = Math.ceil(duration * framesPerSecond);
+
+  // Calculate frames to skip and adjust total frames
+  const skipFrames = Math.ceil(skipSeconds * framesPerSecond);
+  const totalFramesRaw = Math.ceil(duration * framesPerSecond);
+  const totalFrames = Math.max(0, totalFramesRaw - skipFrames);
 
   // Create main directory for this replay
   const replayDir = baseFileName;
@@ -68,11 +74,12 @@ async function prepareTasksForReplay(filePath: string, chunksPerView: number): P
   const framesPerChunk = Math.ceil(totalFrames / chunksPerView);
 
   for (let chunkId = 0; chunkId < chunksPerView; chunkId++) {
-    const startFrame = chunkId * framesPerChunk;
-    const endFrame = Math.min(startFrame + framesPerChunk, totalFrames);
+    // Frames are 1-indexed, so add 1 to account for this
+    const startFrame = chunkId * framesPerChunk + skipFrames + 1;
+    const endFrame = Math.min(startFrame + framesPerChunk, totalFrames + skipFrames + 1);
 
     // Skip empty chunks
-    if (startFrame >= totalFrames) continue;
+    if (startFrame >= totalFrames + skipFrames + 1) continue;
 
     tasks.push({
       filePath,
@@ -228,11 +235,21 @@ async function processWithWorkerPool(allTasks: WorkerTask[]) {
     console.log(`Will split each view into ${chunksPerView} chunks to utilize ${maxWorkers} workers`);
 
     const allTasks: WorkerTask[] = [];
+    const replayMetadata = new Map<string, { replayDir: string, totalFrames: number, startFrame: number }>();
 
     // Prepare all tasks
     for (const filePath of replayFiles) {
      const tasks = await prepareTasksForReplay(filePath, chunksPerView);
      allTasks.push(...tasks);
+
+      // Store metadata for video creation
+      if (tasks.length > 0) {
+        replayMetadata.set(tasks[0].baseFileName, {
+          replayDir: tasks[0].replayDir,
+          totalFrames: tasks[0].totalFrames,
+          startFrame: tasks[0].startFrame
+        });
+      }
    }
 
     console.log(`Total tasks to process: ${allTasks.length} chunks (${replayFiles.length} replays × 3 views × ~${chunksPerView} chunks/view)`);
@@ -240,8 +257,23 @@ async function processWithWorkerPool(allTasks: WorkerTask[]) {
     // Process all tasks in parallel
     await processWithWorkerPool(allTasks);
+
+    console.log('\nAll heatmap generation complete!');
+
+    // Create 4-panel videos for each replay
+    console.log('\n=== Creating 4-panel videos ===\n');
+    for (const [baseFileName, metadata] of replayMetadata) {
+      await create4PanelVideo(
+        metadata.replayDir,
+        metadata.totalFrames,
+        targetFPS,
+        timelapseSpeed,
+        metadata.startFrame,
+        multibar
+      );
+    }
 
     multibar.stop();
-    console.log('\nAll processing complete!');
+    console.log('\n=== All processing complete! ===');
   } catch (error) {
     multibar.stop();
     console.error('Error during processing:', error);

src/utils/video_creator.ts Normal file

@@ -0,0 +1,194 @@
import * as fs from 'fs';
import * as path from 'path';
import { loadImage } from 'canvas';
import { exec } from 'child_process';
import { promisify } from 'util';
import * as cliProgress from 'cli-progress';
import { Worker } from 'worker_threads';
import * as os from 'os';
const execAsync = promisify(exec);
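// Note: exec buffers the child's stdout/stderr in memory; a very chatty ffmpeg run
// can exceed the default maxBuffer, in which case a larger maxBuffer would be needed.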
interface VideoWorkerTask {
  frontDir: string;
  sideDir: string;
  topDir: string;
  tempDir: string;
  imgWidth: number;
  imgHeight: number;
  startFrame: number;
  endFrame: number;
  chunkId: number;
  firstFrameNum: number;
}

function createVideoWorker(task: VideoWorkerTask, progressBar: cliProgress.SingleBar, progressTracker: Map<string, number>): Promise<void> {
  return new Promise((resolve, reject) => {
    const workerPath = path.join(__dirname, '..', 'video_worker.js');
    const worker = new Worker(workerPath, {
      workerData: task
    });

    worker.on('message', (message) => {
      if (message.type === 'progress') {
        // Update shared progress tracker
        const currentFrame = message.frame;
        const currentProgress = progressTracker.get('video') || 0;
        if (currentFrame > currentProgress) {
          progressTracker.set('video', currentFrame);
          progressBar.update(currentFrame);
        }
      } else if (message.type === 'complete') {
        resolve();
      } else if (message.type === 'error') {
        reject(new Error(message.error));
      }
    });

    worker.on('error', reject);
    worker.on('exit', (code) => {
      if (code !== 0) {
        reject(new Error(`Video worker stopped with exit code ${code}`));
      }
    });
  });
}

export async function create4PanelVideo(
  replayDir: string,
  totalFrames: number,
  targetFPS: number,
  timelapseSpeed: number,
  startFrame: number,
  multibar: cliProgress.MultiBar
) {
  console.log(`\nCreating 4-panel video for ${replayDir}...`);

  const frontDir = path.join(replayDir, 'front');
  const sideDir = path.join(replayDir, 'side');
  const topDir = path.join(replayDir, 'top');
  const tempDir = path.join(replayDir, 'temp_frames');
  const outputVideo = path.join(replayDir, 'combined_4panel_video.mp4');

  // Create temp directory
  if (!fs.existsSync(tempDir)) {
    fs.mkdirSync(tempDir);
  }

  // Get first image to determine dimensions (use the actual starting frame)
  const firstFrameNum = String(startFrame).padStart(3, '0');
  const firstImgPath = path.join(frontDir, `${firstFrameNum}.png`);
  const firstImg = await loadImage(firstImgPath);
  const imgWidth = firstImg.width;
  const imgHeight = firstImg.height;

  console.log(`Image dimensions: ${imgWidth}x${imgHeight}`);
  console.log(`Output video dimensions: ${imgWidth * 2}x${imgHeight * 2}`);

  // Calculate chunks for parallel processing (smaller chunks to prevent memory buildup)
  const numCPUs = os.cpus().length;
  const framesPerChunk = 50; // Process 50 frames per chunk to keep memory usage low
  const numChunks = Math.ceil(totalFrames / framesPerChunk);

  console.log(`Processing ${totalFrames} frames in ${numChunks} chunks (${framesPerChunk} frames each)`);

  // Create progress bar for video frame generation
  const videoBar = multibar.create(totalFrames, 0, {
    filename: 'Creating 4-panel frames',
    view: ''
  });

  // Create tasks for workers
  const tasks: VideoWorkerTask[] = [];
  const endFrame = startFrame + totalFrames;
  for (let chunkId = 0; chunkId < numChunks; chunkId++) {
    const chunkStart = startFrame + chunkId * framesPerChunk;
    const chunkEnd = Math.min(chunkStart + framesPerChunk, endFrame);
    if (chunkStart >= endFrame) continue;

    tasks.push({
      frontDir,
      sideDir,
      topDir,
      tempDir,
      imgWidth,
      imgHeight,
      startFrame: chunkStart,
      endFrame: chunkEnd,
      chunkId,
      firstFrameNum: startFrame
    });
  }

  // Process frames with worker pool (limit concurrent workers to avoid memory issues)
  const progressTracker = new Map<string, number>();
  const maxConcurrentWorkers = Math.min(8, numCPUs); // Limit to 8 workers max
  const activeWorkers = new Map<number, Promise<void>>();
  let taskIndex = 0;
  let workerId = 0;

  console.log(`Running with ${maxConcurrentWorkers} concurrent workers to prevent memory issues`);

  while (taskIndex < tasks.length || activeWorkers.size > 0) {
    // Fill up to max concurrent workers
    while (activeWorkers.size < maxConcurrentWorkers && taskIndex < tasks.length) {
      const task = tasks[taskIndex++];
      const currentWorkerId = workerId++;

      const workerPromise = createVideoWorker(task, videoBar, progressTracker).then(() => {
        activeWorkers.delete(currentWorkerId);
      }).catch((error) => {
        activeWorkers.delete(currentWorkerId);
        throw error;
      });

      activeWorkers.set(currentWorkerId, workerPromise);
    }

    // Wait for at least one worker to complete
    if (activeWorkers.size > 0) {
      await Promise.race(Array.from(activeWorkers.values()));
    }
  }

  videoBar.stop();
  console.log('All frames processed. Creating video...');

  // Calculate output framerate
  const outputFPS = targetFPS;

  // Create video using FFmpeg with high quality settings
  // Try NVIDIA hardware encoder first, fallback to high-quality software encoder
  // -c:v h264_nvenc: NVIDIA hardware H.264 encoder
  // -preset slow: High quality preset (p1-p7, slow = better quality)
  // -cq 18: Constant quality mode (0-51, lower is better)
  // -b:v 10M: Target bitrate 10 Mbps
  // -pix_fmt yuv420p: Standard pixel format for compatibility
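  // Note: supplying a nonzero -b:v alongside -cq keeps a bitrate target in effect;
  // for a purely quality-driven NVENC encode, -b:v 0 is commonly paired with -cq.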
  let ffmpegCmd = `ffmpeg -y -framerate ${outputFPS} -i "${tempDir}/frame_%03d.png" -c:v h264_nvenc -preset slow -cq 18 -b:v 10M -pix_fmt yuv420p "${outputVideo}"`;

  // Fallback command if NVIDIA encoder fails (software encoder with high bitrate)
  const fallbackCmd = `ffmpeg -y -framerate ${outputFPS} -i "${tempDir}/frame_%03d.png" -c:v mpeg4 -q:v 2 -b:v 15M -pix_fmt yuv420p "${outputVideo}"`;

  try {
    try {
      console.log('Trying NVIDIA hardware encoder...');
      await execAsync(ffmpegCmd);
    } catch (nvencError) {
      console.log('NVIDIA encoder failed, falling back to software encoder...');
      await execAsync(fallbackCmd);
    }

    console.log(`Video created: ${outputVideo}`);
    const duration = totalFrames / outputFPS;
    console.log(`Duration: ${duration.toFixed(2)} seconds at ${outputFPS} FPS`);

    // Clean up temporary frames
    console.log('Cleaning up temporary files...');
    fs.rmSync(tempDir, { recursive: true, force: true });
  } catch (error) {
    console.error('Error creating video:', error);
    throw error;
  }
}

src/video_worker.ts Normal file

@@ -0,0 +1,99 @@
import { parentPort, workerData } from 'worker_threads';
import { createCanvas, loadImage } from 'canvas';
import * as path from 'path';
import * as fs from 'fs';
interface VideoWorkerTask {
  frontDir: string;
  sideDir: string;
  topDir: string;
  tempDir: string;
  imgWidth: number;
  imgHeight: number;
  startFrame: number;
  endFrame: number;
  firstFrameNum: number; // The first frame number in the entire video sequence
}

async function processFrames(task: VideoWorkerTask) {
  // Process assigned frame range
  for (let frameNum = task.startFrame; frameNum < task.endFrame; frameNum++) {
    const frameStr = frameNum.toString().padStart(3, '0');
    // Calculate output frame number (sequential starting from 1)
    const outputFrameNum = frameNum - task.firstFrameNum + 1;
    const outputFrameStr = outputFrameNum.toString().padStart(3, '0');

    const frontImgPath = path.join(task.frontDir, `${frameStr}.png`);
    const sideImgPath = path.join(task.sideDir, `${frameStr}.png`);
    const topImgPath = path.join(task.topDir, `${frameStr}.png`);
    const outputFrame = path.join(task.tempDir, `frame_${outputFrameStr}.png`);

    // Load images
    const [frontImg, sideImg, topImg] = await Promise.all([
      loadImage(frontImgPath),
      loadImage(sideImgPath),
      loadImage(topImgPath)
    ]);

    // Create 2x2 canvas
    const canvas = createCanvas(task.imgWidth * 2, task.imgHeight * 2);
    const ctx = canvas.getContext('2d');

    // Fill with black background
    ctx.fillStyle = 'black';
    ctx.fillRect(0, 0, canvas.width, canvas.height);

    // Draw images in grid
    // Top Left: Front view
    ctx.drawImage(frontImg, 0, 0, task.imgWidth, task.imgHeight);
    // Top Right: Side view
    ctx.drawImage(sideImg, task.imgWidth, 0, task.imgWidth, task.imgHeight);
    // Bottom Left: Top view
    ctx.drawImage(topImg, 0, task.imgHeight, task.imgWidth, task.imgHeight);
    // Bottom Right: Empty (already black)

    // Add labels
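    // Note: 'DejaVu Sans' is resolved through the system font configuration; if the
    // family is missing, node-canvas substitutes a default font.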
    ctx.font = 'bold 24px DejaVu Sans';
    ctx.fillStyle = 'white';
    ctx.strokeStyle = 'black';
    ctx.lineWidth = 3;

    // Helper function to draw outlined text
    const drawLabel = (text: string, x: number, y: number) => {
      ctx.strokeText(text, x, y);
      ctx.fillText(text, x, y);
    };

    drawLabel('Front View', 20, 30);
    drawLabel('Side View', task.imgWidth + 20, 30);
    drawLabel('Top View', 20, task.imgHeight + 30);
    drawLabel('Reserved', task.imgWidth + 20, task.imgHeight + 30);

    // Save frame
    const buffer = canvas.toBuffer('image/png');
    fs.writeFileSync(outputFrame, buffer);

    // Send progress update
    if (parentPort) {
      parentPort.postMessage({
        type: 'progress',
        frame: frameNum
      });
    }
  }

  // Send completion message
  if (parentPort) {
    parentPort.postMessage({ type: 'complete' });
  }
}

if (parentPort && workerData) {
  processFrames(workerData as VideoWorkerTask).catch(error => {
    if (parentPort) {
      parentPort.postMessage({ type: 'error', error: error.message });
    }
  });
}


@@ -41,7 +41,7 @@ async function processView(task: WorkerTask) {
   // Generate heatmaps for the assigned frame range
   for (let frameNumber = task.startFrame; frameNumber < task.endFrame; frameNumber++) {
-    const timePoint = frameNumber / framesPerSecond;
+    const timePoint = (frameNumber - 1) / framesPerSecond;
 
     await drawTimeBasedHeatmap(canvas, replayData, timePoint, {
       width: 200,
@@ -53,14 +53,14 @@
       preloadedBackground: preloadedBackground
     });
 
-    const outputPath = path.join(viewDir, `${(frameNumber + 1).toString().padStart(3, '0')}.png`);
+    const outputPath = path.join(viewDir, `${frameNumber.toString().padStart(3, '0')}.png`);
     fs.writeFileSync(outputPath, canvas.toBuffer("image/png"));
 
     // Send progress update
     if (parentPort) {
       parentPort.postMessage({
         type: 'progress',
-        frame: frameNumber + 1
+        frame: frameNumber
       });
     }
   }