blender-mask-peoples/core/batch_processor.py
2026-02-22 04:36:28 +09:00

281 lines
10 KiB
Python
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

"""
Batch processor for sequential Generate+Bake across multiple VSE strips.
Uses timer-based async chaining so Blender's UI stays responsive.
"""
from typing import List, Optional, Callable, Any
# Lazy-imported inside Blender
bpy = None
class _DummyOperator:
"""Dummy operator object for _start_bake_impl calls."""
def report(self, level, msg):
print(f"[FaceMask] Batch: {msg}")
class BatchProcessor:
    """Manages sequential Generate Detection Cache → Bake across a list of strips.

    The queue is driven by one-shot ``bpy.app.timers`` callbacks so Blender's
    UI stays responsive: each finished item schedules the next step via a new
    timer registration or an async completion callback, never a blocking loop.
    """

    def __init__(self):
        # Public flag: True while a batch is in flight (set in start/_finish).
        self.is_running: bool = False
        self._mode: str = "full"  # "full" or "mask_only"
        # Strip names (not strip references) so deletions mid-batch are survivable.
        self._strip_names: List[str] = []
        # Index into _strip_names of the item currently being processed.
        self._current_idx: int = 0
        # Blender context captured at start(); used by all subsequent steps.
        self._context: Any = None
        self._cancelled: bool = False
        # One {"strip": name, "status": ...} dict per processed item.
        self._results: List[dict] = []
        self._on_item_complete: Optional[Callable] = None  # (idx, total, name, status)
        self._on_all_complete: Optional[Callable] = None  # (results)

    # ------------------------------------------------------------------
    # Public API
    # ------------------------------------------------------------------
    def start(self, context, strips, on_item_complete=None, on_all_complete=None, mode="full"):
        """Start batch processing for the given strips.

        mode:
            "full"      - generate the mask cache (only if missing), then bake
            "mask_only" - ignore any existing cache and regenerate masks; no bake

        Raises:
            RuntimeError: if a batch is already running.
        """
        # bpy is imported lazily so this module can be imported outside Blender.
        global bpy
        import bpy as _bpy
        bpy = _bpy
        if self.is_running:
            raise RuntimeError("Batch already running")
        self.is_running = True
        self._mode = mode
        # Snapshot names up front; strips are re-resolved by name at each step.
        self._strip_names = [s.name for s in strips]
        self._current_idx = 0
        self._context = context
        self._cancelled = False
        self._results = []
        self._on_item_complete = on_item_complete
        self._on_all_complete = on_all_complete
        # Window-manager properties drive the progress display in the UI panel.
        wm = context.window_manager
        wm.batch_current = 0
        wm.batch_total = len(self._strip_names)
        wm.batch_current_name = ""
        # Kick off the queue asynchronously so start() returns immediately.
        bpy.app.timers.register(self._process_next, first_interval=0.0)

    def cancel(self):
        """Cancel batch. Stops currently running mask gen / bake."""
        self._cancelled = True
        from .async_generator import get_generator
        from .async_bake_generator import get_bake_generator
        gen = get_generator()
        bake_gen = get_bake_generator()
        # Cancelling a running sub-generator makes it fire its completion
        # callback with status "cancelled", which routes into _finish().
        if gen.is_running:
            gen.cancel()
        if bake_gen.is_running:
            bake_gen.cancel()

    # ------------------------------------------------------------------
    # Internal: queue stepping
    # ------------------------------------------------------------------
    def _process_next(self):
        """Process the next strip in the queue (called via timer).

        Always returns None so the timer stays one-shot; continuation happens
        through fresh timer registrations or the async completion callbacks.
        """
        if self._cancelled:
            self._finish()
            return None
        if self._current_idx >= len(self._strip_names):
            # Queue exhausted: every strip has been handled.
            self._finish()
            return None
        strip_name = self._strip_names[self._current_idx]
        seq_editor = self._context.scene.sequence_editor
        # NOTE(review): `.strips` assumes the newer SequenceEditor API (older
        # Blender versions exposed `.sequences`) — confirm minimum version.
        strip = seq_editor.strips.get(strip_name)
        if strip is None:
            # Strip was removed after the batch started: record a skip, move on.
            print(f"[FaceMask] Batch: strip not found, skipping: {strip_name}")
            self._results.append({"strip": strip_name, "status": "skipped"})
            if self._on_item_complete:
                self._on_item_complete(self._current_idx, len(self._strip_names), strip_name, "skipped")
            self._current_idx += 1
            bpy.app.timers.register(self._process_next, first_interval=0.0)
            return None
        # Update wm progress labels
        wm = self._context.window_manager
        wm.batch_current = self._current_idx + 1
        wm.batch_current_name = strip_name
        for area in self._context.screen.areas:
            if area.type == "SEQUENCE_EDITOR":
                area.tag_redraw()
        if self._mode == "mask_only":
            # Ignore any existing cache and always regenerate the mask; no bake.
            self._start_mask_gen(strip)
        else:
            from .utils import check_detection_cache
            if not check_detection_cache(strip.name):
                self._start_mask_gen(strip)
            else:
                # Cache already present: skip generation, go straight to bake.
                self._start_bake(strip)
        return None  # one-shot timer

    def _schedule_next(self):
        # Defer the next queue step to a timer tick so the UI can repaint.
        bpy.app.timers.register(self._process_next, first_interval=0.0)

    # ------------------------------------------------------------------
    # Mask generation
    # ------------------------------------------------------------------
    def _start_mask_gen(self, strip):
        """Begin async mask generation for *strip*; completion routes to _on_mask_done."""
        from ..operators.generate_mask import start_mask_gen_for_strip
        # Capture the name (not the strip reference) so the closures below
        # remain valid even if the strip is deleted mid-generation.
        strip_name = strip.name
        def on_complete(status, data):
            self._on_mask_done(strip_name, status, data)
        def on_progress(current, total):
            # Mirror per-frame progress into the window manager for the UI.
            wm = self._context.window_manager
            wm.mask_progress = current
            wm.mask_total = max(total, 1)  # guard against a zero denominator in the UI
            for area in self._context.screen.areas:
                if area.type == "SEQUENCE_EDITOR":
                    area.tag_redraw()
        try:
            start_mask_gen_for_strip(self._context, strip, on_complete, on_progress)
            print(f"[FaceMask] Batch: started mask gen for {strip_name}")
        except Exception as e:
            print(f"[FaceMask] Batch: failed to start mask gen for {strip_name}: {e}")
            # Route the startup failure through the normal completion path.
            self._on_mask_done(strip_name, "error", str(e))

    def _on_mask_done(self, strip_name, status, data):
        """Handle mask-generation completion: record the result, then bake or advance."""
        if self._cancelled or status == "cancelled":
            # A cancel aborts the whole batch, not just the current item.
            self._results.append({"strip": strip_name, "status": "cancelled"})
            self._finish()
            return
        if status == "error":
            print(f"[FaceMask] Batch: mask gen failed for {strip_name}: {data}")
            self._results.append({"strip": strip_name, "status": "error", "reason": str(data)})
            if self._on_item_complete:
                self._on_item_complete(self._current_idx, len(self._strip_names), strip_name, "error")
            self._current_idx += 1
            self._schedule_next()
            return
        # Mask gen succeeded
        if self._mode == "mask_only":
            # This mode never bakes: record the result and move to the next strip.
            self._results.append({"strip": strip_name, "status": "done"})
            if self._on_item_complete:
                self._on_item_complete(self._current_idx, len(self._strip_names), strip_name, "done")
            self._current_idx += 1
            self._schedule_next()
            return
        # full mode: proceed to bake
        # Re-resolve the strip by name — it may have been deleted meanwhile.
        seq_editor = self._context.scene.sequence_editor
        strip = seq_editor.strips.get(strip_name)
        if strip is None:
            print(f"[FaceMask] Batch: strip removed after mask gen: {strip_name}")
            self._results.append({"strip": strip_name, "status": "skipped"})
            if self._on_item_complete:
                self._on_item_complete(self._current_idx, len(self._strip_names), strip_name, "skipped")
            self._current_idx += 1
            self._schedule_next()
            return
        self._start_bake(strip)

    # ------------------------------------------------------------------
    # Bake
    # ------------------------------------------------------------------
    def _start_bake(self, strip):
        """Begin async bake for *strip*; completion routes to _on_bake_done."""
        from .async_bake_generator import get_bake_generator
        from ..operators.apply_blur import _start_bake_impl
        strip_name = strip.name
        def on_complete_extra(status, data):
            self._on_bake_done(strip_name, status, data)
        bake_gen = get_bake_generator()
        result = _start_bake_impl(
            _DummyOperator(),  # stands in for the invoking operator's report()
            self._context,
            force=False,
            strip=strip,
            on_complete_extra=on_complete_extra,
        )
        if result == {"CANCELLED"}:
            # Error starting bake
            print(f"[FaceMask] Batch: bake failed to start for {strip_name}")
            self._results.append({"strip": strip_name, "status": "error", "reason": "bake failed to start"})
            if self._on_item_complete:
                self._on_item_complete(self._current_idx, len(self._strip_names), strip_name, "error")
            self._current_idx += 1
            self._schedule_next()
        elif not bake_gen.is_running:
            # Cache hit: on_complete_extra was NOT called by _start_bake_impl,
            # so synthesize the completion here to keep the queue advancing.
            print(f"[FaceMask] Batch: bake cache hit for {strip_name}")
            self._on_bake_done(strip_name, "done", None)

    def _on_bake_done(self, strip_name, status, data):
        """Record the bake outcome for *strip_name* and advance the queue."""
        if self._cancelled or status == "cancelled":
            self._results.append({"strip": strip_name, "status": "cancelled"})
            self._finish()
            return
        if status == "error":
            print(f"[FaceMask] Batch: bake failed for {strip_name}: {data}")
            self._results.append({"strip": strip_name, "status": "error", "reason": str(data)})
        else:
            self._results.append({"strip": strip_name, "status": "done"})
            print(f"[FaceMask] Batch: completed {strip_name}")
        if self._on_item_complete:
            self._on_item_complete(self._current_idx, len(self._strip_names), strip_name, status)
        self._current_idx += 1
        self._schedule_next()

    # ------------------------------------------------------------------
    # Finish
    # ------------------------------------------------------------------
    def _finish(self):
        """Tear down batch state, reset the progress UI, and fire the final callback."""
        self.is_running = False
        wm = self._context.window_manager
        wm.batch_current = 0
        wm.batch_total = 0
        wm.batch_current_name = ""
        print(f"[FaceMask] Batch: all done. Results: {self._results}")
        if self._on_all_complete:
            self._on_all_complete(self._results)
        for area in self._context.screen.areas:
            if area.type == "SEQUENCE_EDITOR":
                area.tag_redraw()
# Singleton
# Lazily-created module-wide singleton.
_batch_processor: Optional[BatchProcessor] = None


def get_batch_processor() -> BatchProcessor:
    """Return the shared BatchProcessor, instantiating it on first use."""
    global _batch_processor
    processor = _batch_processor
    if processor is None:
        processor = BatchProcessor()
        _batch_processor = processor
    return processor