# EG/ssbo_component/ssbo_controller.py
#
# NOTE(review): the header lines here ("1393 lines / 54 KiB / Python /
# Raw Blame History" and the ambiguous-Unicode warning) were web-viewer
# scraping residue, not source code. Kept as comments so the file parses.

import math
from panda3d.core import (
GeomVertexFormat, GeomVertexWriter, GeomVertexReader, GeomVertexRewriter,
InternalName, Vec3, Vec4, LMatrix4f, ShaderBuffer, GeomEnums,
BoundingSphere, NodePath, GeomNode, Texture, SamplerState,
Point3, BoundingBox, Quat
)
import time
class ObjectController:
"""
混合架构控制器 (Chunked Static + Dynamic Editing)
================================================
- 默认: 每个 chunk 使用 flatten 后的静态表示
- 编辑: 被选中对象所属 chunk 切换为动态表示,直接改 NodePath 变换
- 提交: 离开 chunk 时仅重建该 chunk 的静态表示
"""
def __init__(self, chunk_size=64, chunk_world_size=40.0):
self.chunk_size = max(8, int(chunk_size))
self.chunk_world_size = max(8.0, float(chunk_world_size))
self._reset_state()
def _reset_state(self):
self.name_to_ids = {}
self.id_to_name = {}
self.key_to_node = {}
self.node_list = []
self.display_names = {}
self.global_transforms = []
self.position_offsets = {}
self.vertex_index = {}
self.original_positions = {}
self.local_to_global_id = {}
self.local_transform_state = {}
self.local_transform_base_positions = {}
self.pick_vertex_index = {}
self.virtual_tree = None
self.virtual_tree_meta = None
self.model = None
self.pick_model = None
self.lightweight_flat_mode = False
self.supports_gpu_picking = True
self.id_to_chunk = {} # global_id -> chunk_id
self.id_to_object_np = {} # global_id -> dynamic object nodepath
self.id_to_pick_np = {} # global_id -> pick-scene nodepath
self.id_to_geom_index = {} # global_id -> owner GeomNode geom index
# chunk_id -> {
# "dynamic_np": NodePath,
# "static_np": NodePath or None,
# "members": [global_id],
# "dirty": bool,
# "dynamic_enabled": bool
# }
self.chunks = {}
self.active_chunks = set()
self._next_chunk_id = 0
# spatial cell key -> [chunk_id, ...]
self._cell_to_chunks = {}
# UI hierarchy metadata (matches source model parent/child structure)
self.tree_root_key = None
self.tree_nodes = {}
self._path_to_tree_key = {}
def _register_tree_node(self, key, display_name, parent_key):
self.tree_nodes[key] = {
"name": display_name,
"parent": parent_key,
"children": [],
"local_ids": [],
}
self.display_names[key] = display_name
self.name_to_ids[key] = []
if parent_key is not None and parent_key in self.tree_nodes:
self.tree_nodes[parent_key]["children"].append(key)
def _build_scene_tree(self, root_np):
"""Capture source model hierarchy for UI (independent from render batching)."""
self.tree_root_key = "0"
def walk(np, parent_key, key):
display_name = np.get_name() or "Unnamed"
self._register_tree_node(key, display_name, parent_key)
self._path_to_tree_key[str(np)] = key
children = list(np.get_children())
for i, child in enumerate(children):
walk(child, key, f"{key}/{i}")
walk(root_np, None, self.tree_root_key)
def _get_model_world_mat(self):
    """Return current model net transform matrix (to top/root).

    Tries several Panda3D API spellings in order — camelCase
    getNetTransform, snake_case get_net_transform, then getMat relative
    to the scene top, then the local getMat — and falls back to identity
    whenever the model is missing/empty or every call fails.
    """
    if not self.model:
        return LMatrix4f.ident_mat()
    try:
        if self.model.isEmpty():
            return LMatrix4f.ident_mat()
    except Exception:
        try:
            if self.model.is_empty():
                return LMatrix4f.ident_mat()
        except Exception:
            pass
    try:
        return LMatrix4f(self.model.getNetTransform().getMat())
    except Exception:
        try:
            # snake_case fallback in newer Panda3D bindings
            return LMatrix4f(self.model.get_net_transform().get_mat())
        except Exception:
            pass
    try:
        top = self.model.getTop()
        if top and not top.isEmpty():
            return LMatrix4f(self.model.getMat(top))
    except Exception:
        pass
    try:
        return LMatrix4f(self.model.getMat())
    except Exception:
        return LMatrix4f.ident_mat()
def get_model_world_mat(self):
    """Public accessor for current model net transform matrix."""
    # Thin wrapper kept so external callers do not depend on the
    # private fallback-chain implementation.
    return self._get_model_world_mat()
def _local_point_to_world(self, local_pos):
    """Convert a local-space point to world-space based on model net transform."""
    mat = self._get_model_world_mat()
    p = Point3(float(local_pos.x), float(local_pos.y), float(local_pos.z))
    # xformPoint applies the full affine transform (rotation + translation).
    wp = mat.xformPoint(p)
    return Vec3(wp.x, wp.y, wp.z)
def _world_vec_to_local(self, world_vec):
    """Convert a world-space vector to model-local space.

    Inverts the net matrix and uses xformVec (direction transform, no
    translation). If inversion fails under both API spellings, the
    input vector is returned unchanged as a best-effort fallback.
    """
    mat = self._get_model_world_mat()
    inv = LMatrix4f(mat)
    try:
        inv.invertInPlace()
    except Exception:
        try:
            # snake_case spelling on newer Panda3D bindings
            inv.invert_in_place()
        except Exception:
            return Vec3(world_vec)
    v = Vec3(world_vec)
    lv = inv.xformVec(v)
    return Vec3(lv.x, lv.y, lv.z)
def world_vector_to_model_local(self, world_vec):
    """Public converter from world delta vector to model-local delta vector."""
    # Delegates to the private helper; kept as the stable external API.
    return self._world_vec_to_local(world_vec)
def get_model_world_quat(self):
    """Return current model world quaternion.

    Prefers the rotation relative to the scene top; falls back to the
    node-local quaternion, then to identity when the model is missing,
    empty, or every API call fails.
    """
    if not self.model:
        return Quat.identQuat()
    try:
        if self.model.isEmpty():
            return Quat.identQuat()
    except Exception:
        pass
    try:
        top = self.model.getTop()
        if top and not top.isEmpty():
            return Quat(self.model.getQuat(top))
    except Exception:
        pass
    try:
        return Quat(self.model.getQuat())
    except Exception:
        return Quat.identQuat()
def world_quat_delta_to_model_local(self, delta_quat_world):
    """
    Convert world-space delta quaternion to model-local delta quaternion.
    local = inv(model_world_rot) * world_delta * model_world_rot
    Returns identity when no delta is supplied.
    """
    if delta_quat_world is None:
        return Quat.identQuat()
    model_q = self.get_model_world_quat()
    inv_model_q = Quat(model_q)
    inv_model_q.invertInPlace()
    # Conjugation re-expresses the delta's rotation axis in model space.
    local_q = inv_model_q * Quat(delta_quat_world) * model_q
    local_q.normalize()
    return local_q
def _build_original_hierarchy_key(self, np, model_root):
"""Capture hierarchy path before flatten/reparent."""
parts = []
cur = np
while cur and not cur.is_empty() and cur != model_root:
name = cur.get_name() or ""
if name:
parts.append(name)
cur = cur.get_parent()
parts.reverse()
if not parts:
return np.get_name() or "Unnamed"
return "/".join(parts)
def _aggregate_tree_ids(self, key):
node = self.tree_nodes[key]
agg_ids = list(node["local_ids"])
for child_key in node["children"]:
agg_ids.extend(self._aggregate_tree_ids(child_key))
self.name_to_ids[key] = agg_ids
return agg_ids
def _build_tree_preorder(self, key, out):
out.append(key)
for child_key in self.tree_nodes[key]["children"]:
self._build_tree_preorder(child_key, out)
def should_hide_tree_node(self, key):
"""
Hide redundant wrapper nodes like ROOT so the UI shows the imported
model hierarchy instead of source-format packaging nodes.
"""
node = self.tree_nodes.get(key)
if not node:
return False
name = (node["name"] or "").strip().lower()
if name == "root":
return len(node["children"]) > 0
# Hide scene-package/runtime wrappers in virtual tree:
# - scene.bam container
# - chunk_* dynamic/static batching nodes
# - modelCollision_* helper nodes
if name.endswith(".bam") and len(node["children"]) > 0:
return True
if name.startswith("chunk_"):
return True
if name.startswith("modelcollision_"):
return True
return False
def get_runtime_structure_stats(self):
    """Summarize hybrid runtime structure to diagnose idle-state regressions.

    Returns a dict of counters: chunk totals, how many chunks are in
    dynamic edit mode, visibility (stashed vs visible) of each chunk's
    static/dynamic representation, and node/GeomNode counts under the
    render and pick roots. Every probe is wrapped in try/except so a
    half-built or torn-down scene can still be inspected safely.
    """
    stats = {
        "chunks_total": 0,
        "chunks_dynamic_enabled": 0,
        "chunk_static_nodes": 0,
        "chunk_dynamic_nodes": 0,
        "chunk_static_visible": 0,
        "chunk_static_stashed": 0,
        "chunk_dynamic_visible": 0,
        "chunk_dynamic_stashed": 0,
        "dynamic_object_nodes": 0,
        "pick_nodes": 0,
        "model_descendants": 0,
        "pick_descendants": 0,
        "model_geom_nodes": 0,
        "pick_geom_nodes": 0,
    }
    chunks = getattr(self, "chunks", {}) or {}
    stats["chunks_total"] = len(chunks)
    for chunk in chunks.values():
        # Flat-mode chunk records are not dicts of NodePaths — skip them.
        if not isinstance(chunk, dict):
            continue
        if chunk.get("dynamic_enabled"):
            stats["chunks_dynamic_enabled"] += 1
        dynamic_np = chunk.get("dynamic_np")
        static_np = chunk.get("static_np")
        try:
            if dynamic_np and not dynamic_np.is_empty():
                stats["chunk_dynamic_nodes"] += 1
                try:
                    if dynamic_np.is_stashed():
                        stats["chunk_dynamic_stashed"] += 1
                    elif not dynamic_np.is_hidden():
                        stats["chunk_dynamic_visible"] += 1
                except Exception:
                    pass
                stats["dynamic_object_nodes"] += len(list(dynamic_np.get_children()))
        except Exception:
            pass
        try:
            if static_np and not static_np.is_empty():
                stats["chunk_static_nodes"] += 1
                try:
                    if static_np.is_stashed():
                        stats["chunk_static_stashed"] += 1
                    elif not static_np.is_hidden():
                        stats["chunk_static_visible"] += 1
                except Exception:
                    pass
        except Exception:
            pass
    model = getattr(self, "model", None)
    if model:
        try:
            if not model.is_empty():
                stats["model_descendants"] = model.find_all_matches("**").get_num_paths()
                stats["model_geom_nodes"] = model.find_all_matches("**/+GeomNode").get_num_paths()
        except Exception:
            pass
    pick_model = getattr(self, "pick_model", None)
    if pick_model:
        try:
            if not pick_model.is_empty():
                stats["pick_descendants"] = pick_model.find_all_matches("**").get_num_paths()
                stats["pick_geom_nodes"] = pick_model.find_all_matches("**/+GeomNode").get_num_paths()
                stats["pick_nodes"] = len(list(pick_model.get_children()))
        except Exception:
            pass
    return stats
def get_preferred_selection_ids(self, key):
"""Return the IDs that should be selected when a tree node is clicked.
Prefer the node's own local geometry so parent transforms remain directly
selectable. Only fall back to the aggregated subtree when the node itself
has no local renderable geometry.
"""
node = self.tree_nodes.get(key)
if node:
local_ids = list(node.get("local_ids", []) or [])
if local_ids:
return local_ids
return list(self.name_to_ids.get(key, []) or [])
def _encode_id_color(self, vdata, object_id):
    """Encode a 16-bit object id into the R/G vertex-color channels.

    R holds the low byte, G the high byte (each as unorm 0..1), so the
    GPU picking pass can recover the id from the rendered color. Adds a
    color column to the vertex format when none exists.
    """
    if not vdata.has_column("color"):
        new_fmt = vdata.get_format().get_union_format(GeomVertexFormat.get_v3c4())
        vdata.set_format(new_fmt)
    low = object_id & 0xFF
    high = (object_id >> 8) & 0xFF
    r = low / 255.0
    g = high / 255.0
    writer = GeomVertexWriter(vdata, InternalName.make("color"))
    for row in range(vdata.get_num_rows()):
        writer.set_row(row)
        writer.set_data4f(r, g, 0.0, 1.0)
def _ensure_chunk(self, root_np, chunk_id):
    """Return the chunk record for *chunk_id*, creating it on demand.

    A new chunk gets a stashed dynamic root under *root_np*; it stays
    hidden until the chunk is promoted to dynamic edit mode.
    """
    if chunk_id in self.chunks:
        return self.chunks[chunk_id]
    dynamic_np = root_np.attach_new_node(f"chunk_{chunk_id:04d}_dynamic")
    dynamic_np.stash()
    chunk_data = {
        "dynamic_np": dynamic_np,
        "static_np": None,        # flattened copy, built by _rebuild_static_chunk
        "members": [],            # global ids assigned to this chunk
        "dirty": False,           # static copy out of date
        "dynamic_enabled": False,
    }
    self.chunks[chunk_id] = chunk_data
    return chunk_data
def _is_wrapper_segment(self, segment):
s = (segment or "").strip().lower()
if not s:
return True
if s in ("root",):
return True
if self._source_model_name and s == self._source_model_name:
return True
if self._source_model_stem and s == self._source_model_stem:
return True
return False
def _copy_node_tags(self, src_np, dst_np):
    """Copy all string tags from source node to rebuilt runtime node.

    Tries the snake_case tag API first, then the camelCase spelling for
    older bindings. Individual tag failures are skipped so one bad tag
    cannot abort the whole copy; missing nodes are a silent no-op.
    """
    if not src_np or not dst_np:
        return
    try:
        for tag_name in src_np.get_tag_keys():
            try:
                dst_np.set_tag(tag_name, src_np.get_tag(tag_name))
            except Exception:
                continue
        return
    except Exception:
        pass
    try:
        for tag_name in src_np.getTagKeys():
            try:
                dst_np.setTag(tag_name, src_np.getTag(tag_name))
            except Exception:
                continue
    except Exception:
        pass
def bake_ids_and_collect(self, model, lightweight=False):
    """
    Bake IDs into vertex colors, flatten, then build vertex index.
    NO transform reset — vertices keep world-space positions.
    NO SSBO — uses RP default rendering.

    With lightweight=True the model is only flattened and the UI tree
    built (no per-object ID baking; GPU picking disabled); returns the
    GeomNode count. Otherwise returns the number of baked objects.
    """
    t0 = time.time()
    geom_nodes = list(model.find_all_matches("**/+GeomNode"))
    print(f"[控制器] 找到 {len(geom_nodes)} 个 GeomNode")
    # Clear per-scene registries explicitly (this path predates the
    # chunk-related fields handled by _reset_state).
    self.name_to_ids = {}
    self.id_to_name = {}
    self.key_to_node = {}
    self.node_list = []
    self.display_names = {}
    self.global_transforms = []
    self.id_to_chunk = {}
    self.chunks = {}
    self.vertex_index = {}
    self.original_positions = {}
    self.position_offsets = {}
    self.local_to_global_id = {}
    self.local_transform_state = {}
    self.local_transform_base_positions = {}
    self.pick_vertex_index = {}
    self.virtual_tree = None
    self.virtual_tree_meta = None
    self.pick_model = None
    # Remember the source model name/stem so wrapper path segments can
    # be filtered out of the virtual hierarchy later.
    model_name = (model.get_name() or "").strip()
    self._source_model_name = model_name.lower()
    self._source_model_stem = model_name.rsplit(".", 1)[0].lower() if "." in model_name else model_name.lower()
    # Build source hierarchy metadata first so flat mode keeps the same
    # selection/tree semantics as hybrid mode.
    self._build_scene_tree(model)
    if lightweight:
        self.lightweight_flat_mode = True
        self.supports_gpu_picking = False
        self.model = model
        self.chunk_node = model
        chunk_key = model.get_name() or "default"
        self.chunks[chunk_key] = {'node': model, 'base_id': 0}
        self.key_to_node[self.tree_root_key] = model
        try:
            # For very large models (> 8000 nodes) use the milder
            # flatten_medium instead of flatten_strong, which can cause
            # long stalls or memory spikes.
            node_count = model.find_all_matches("**").get_num_paths()
            if node_count > 8000:
                print(f"[控制器] 超大场景({node_count} 节点),使用 flatten_medium 代替 flatten_strong")
                model.flatten_medium()
            else:
                model.flatten_strong()
        except Exception as e:
            print(f"[控制器] Flatten 失败: {e}")
            pass
        self._aggregate_tree_ids(self.tree_root_key)
        self.node_list = []
        self._build_tree_preorder(self.tree_root_key, self.node_list)
        t1 = time.time()
        print(f"[控制器] Flatten took {(t1-t0)*1000:.0f}ms")
        print(f"[控制器] Lightweight flat tree built: {len(self.tree_nodes)} nodes")
        return len(geom_nodes)
    global_id_counter = 0
    chunk_key = model.get_name() or "default"
    # No chunk wrapper — flatten directly on model (same as load_jyc_flatten.py)
    self.chunk_node = model
    self.chunks[chunk_key] = {'node': model, 'base_id': 0}
    # Cache original hierarchy path BEFORE flatten/reparent.
    original_keys = {}
    original_owner_keys = {}
    for np in geom_nodes:
        original_keys[id(np)] = self._build_original_hierarchy_key(np, model)
        original_owner_keys[id(np)] = self._path_to_tree_key.get(str(np), self.tree_root_key)
    # Flatten hierarchy
    for np in geom_nodes:
        np.wrt_reparent_to(model)
    local_idx = 0
    for np in geom_nodes:
        gnode = np.node()
        # A GeomNode instanced under several parents must be copied so
        # each instance can get its own ID bake.
        if gnode.get_num_parents() > 1:
            parent = np.get_parent()
            if not parent.is_empty():
                new_np = np.copy_to(parent)
                np.detach_node()
                np = new_np
                gnode = np.node()
        unique_key = original_keys.get(id(np), str(np))
        display_name = np.get_name() or f"Object_{global_id_counter}"
        if unique_key not in self.name_to_ids:
            self.name_to_ids[unique_key] = []
        self.key_to_node[unique_key] = np
        self.node_list.append(unique_key)
        self.display_names[unique_key] = display_name
        owner_key = original_owner_keys.get(id(np), self.tree_root_key)
        if owner_key not in self.tree_nodes:
            owner_key = self.tree_root_key
        if owner_key not in self.key_to_node:
            self.key_to_node[owner_key] = np
        # Save original transform
        mat_double = np.get_mat()
        original_transform = LMatrix4f(mat_double)
        for i in range(gnode.get_num_geoms()):
            geom = gnode.modify_geom(i)
            vdata = geom.modify_vertex_data()
            if not vdata.has_column("color"):
                new_format = vdata.get_format().get_union_format(GeomVertexFormat.get_v3c4())
                vdata.set_format(new_format)
            # Encode Local ID in R/G
            low = local_idx % 256
            high = local_idx // 256
            r = low / 255.0
            g = high / 255.0
            writer = GeomVertexWriter(vdata, InternalName.make("color"))
            for row in range(vdata.get_num_rows()):
                writer.set_row(row)
                writer.set_data4f(r, g, 0.0, 1.0)
        self.global_transforms.append(original_transform)
        self.id_to_chunk[global_id_counter] = (chunk_key, local_idx)
        self.name_to_ids[unique_key].append(global_id_counter)
        self.id_to_name[global_id_counter] = owner_key
        self.tree_nodes[owner_key]["local_ids"].append(global_id_counter)
        self.local_to_global_id[local_idx] = global_id_counter
        self.position_offsets[local_idx] = Vec3(0, 0, 0)
        global_id_counter += 1
        local_idx += 1
    # DO NOT reset transform — keep world-space positions
    # Flatten directly on model — NO set_final, allows per-geom frustum culling
    model.flatten_strong()
    t1 = time.time()
    print(f"[控制器] Flatten took {(t1-t0)*1000:.0f}ms")
    # Build vertex index AFTER flatten
    self._build_vertex_index(model)
    self._init_local_transform_state()
    self._aggregate_tree_ids(self.tree_root_key)
    self.node_list = []
    self._build_tree_preorder(self.tree_root_key, self.node_list)
    # Keep ID colors only in picking clone to avoid affecting visible shading.
    self.pick_model = model.copy_to(NodePath("ssbo_pick_root"))
    self._build_pick_vertex_index(self.pick_model)
    self._set_uniform_vertex_color(model, 1.0, 1.0, 1.0, 1.0)
    t2 = time.time()
    print(f"[控制器] Vertex index built in {(t2-t1)*1000:.0f}ms, "
          f"{len(self.vertex_index)} unique IDs indexed")
    print(f"[控制器] Flat tree built: {len(self.tree_nodes)} nodes")
    self.model = model
    return global_id_counter
def _set_uniform_vertex_color(self, root_np, r, g, b, a):
    """
    Force vertex color to a uniform value on visible model to avoid
    ID-encoding colors tinting the final render output.

    Only geoms that already carry a color column are rewritten; geoms
    without one never had IDs baked and are left untouched.
    """
    for gn_np in root_np.find_all_matches("**/+GeomNode"):
        gnode = gn_np.node()
        for gi in range(gnode.get_num_geoms()):
            geom = gnode.modify_geom(gi)
            vdata = geom.modify_vertex_data()
            if not vdata.has_column("color"):
                continue
            writer = GeomVertexWriter(vdata, InternalName.make("color"))
            for row in range(vdata.get_num_rows()):
                writer.set_row(row)
                writer.set_data4f(r, g, b, a)
def _build_tree_preorder(self, key, out):
    """Append *key* and its descendants to *out* in preorder (recursive).

    NOTE(review): duplicate of the earlier _build_tree_preorder
    definition (merge artifact); Python keeps this later, behaviorally
    identical copy.
    """
    out.append(key)
    for child_key in self.tree_nodes[key]["children"]:
        self._build_tree_preorder(child_key, out)
def should_hide_tree_node(self, key):
"""
Hide redundant wrapper nodes like ROOT so the UI shows the imported
model hierarchy instead of source-format packaging nodes.
"""
node = self.tree_nodes.get(key)
if not node:
return False
name = (node["name"] or "").strip().lower()
if name != "root":
return False
return len(node["children"]) > 0
def _encode_id_color(self, vdata, object_id):
    """Encode a 16-bit object id into the R/G vertex-color channels.

    NOTE(review): duplicate of the earlier _encode_id_color definition
    (merge artifact); Python keeps this later, identical copy. R holds
    the low byte, G the high byte (each as unorm 0..1).
    """
    if not vdata.has_column("color"):
        new_fmt = vdata.get_format().get_union_format(GeomVertexFormat.get_v3c4())
        vdata.set_format(new_fmt)
    low = object_id & 0xFF
    high = (object_id >> 8) & 0xFF
    r = low / 255.0
    g = high / 255.0
    writer = GeomVertexWriter(vdata, InternalName.make("color"))
    for row in range(vdata.get_num_rows()):
        writer.set_row(row)
        writer.set_data4f(r, g, 0.0, 1.0)
def _ensure_chunk(self, root_np, chunk_id):
    """Return the chunk record for *chunk_id*, creating it on demand.

    NOTE(review): duplicate of the earlier _ensure_chunk definition
    (merge artifact); Python keeps this later, identical copy. New
    chunks get a stashed dynamic root under *root_np*.
    """
    if chunk_id in self.chunks:
        return self.chunks[chunk_id]
    dynamic_np = root_np.attach_new_node(f"chunk_{chunk_id:04d}_dynamic")
    dynamic_np.stash()
    chunk_data = {
        "dynamic_np": dynamic_np,
        "static_np": None,
        "members": [],
        "dirty": False,
        "dynamic_enabled": False,
    }
    self.chunks[chunk_id] = chunk_data
    return chunk_data
def _get_cell_key_from_pos(self, pos):
inv = 1.0 / self.chunk_world_size
return (
int(math.floor(pos.x * inv)),
int(math.floor(pos.y * inv)),
int(math.floor(pos.z * inv)),
)
def _allocate_spatial_chunk(self, root_np, world_pos):
"""
Allocate object into a spatially-local chunk for better frustum culling.
Objects in the same world cell are grouped together, and overflow creates
another chunk for that same cell.
"""
cell_key = self._get_cell_key_from_pos(world_pos)
chunk_ids = self._cell_to_chunks.setdefault(cell_key, [])
for chunk_id in chunk_ids:
chunk = self.chunks.get(chunk_id)
if chunk and len(chunk["members"]) < self.chunk_size:
return chunk_id, chunk
chunk_id = self._next_chunk_id
self._next_chunk_id += 1
chunk_ids.append(chunk_id)
return chunk_id, self._ensure_chunk(root_np, chunk_id)
def _rebuild_static_chunk(self, chunk_id):
    """Re-bake one chunk's flattened static copy from its dynamic objects.

    Removes the stale static NodePath, copies the dynamic subtree under
    self.model, flattens it, and records it as the chunk's static
    representation. Visibility is re-synced with the chunk's current
    mode afterwards.
    """
    chunk = self.chunks.get(chunk_id)
    if not chunk:
        return
    old_static = chunk.get("static_np")
    if old_static and not old_static.is_empty():
        old_static.remove_node()
    static_np = chunk["dynamic_np"].copy_to(self.model)
    static_np.set_name(f"chunk_{chunk_id:04d}_static")
    static_np.unstash()
    # Editor idle performance depends on the static chunk being actually batched.
    # The merged material/state preservation changes above keep per-geom render
    # state intact, so we can restore the pre-merge flattening behavior here.
    static_np.flatten_strong()
    chunk["static_np"] = static_np
    chunk["dirty"] = False
    # Keep visibility coherent with current mode after rebuild.
    if chunk["dynamic_enabled"]:
        static_np.stash()
    else:
        static_np.unstash()
def build_virtual_hierarchy(self):
    """Build a readonly virtual tree from node_list path keys.

    Splits each key on "/", strips importer wrapper roots, and builds
    nested dicts of {name, path, children, leaf_key, display_name,
    group_key, aggregate_ids}. Non-leaf nodes get a
    "__group__::<path>" entry in name_to_ids so selecting a parent
    selects every descendant id. The result (plus depth/leaf metadata)
    is cached on self.virtual_tree / self.virtual_tree_meta.
    """
    root = {
        "name": "",
        "path": "",
        "children": {},
        "leaf_key": None,
        "display_name": "",
        "group_key": None,
        "aggregate_ids": [],
    }
    max_depth = 0
    leaf_count = 0
    for key in self.node_list:
        if not key:
            continue
        parts = [p for p in str(key).split("/") if p]
        # Hide importer wrapper roots (e.g. model filename / ROOT) but keep real object hierarchy.
        while len(parts) > 1 and self._is_wrapper_segment(parts[0]):
            parts = parts[1:]
        if not parts:
            continue
        max_depth = max(max_depth, len(parts))
        cursor = root
        path_acc = ""
        for i, part in enumerate(parts):
            path_acc = f"{path_acc}/{part}" if path_acc else part
            child = cursor["children"].get(part)
            if child is None:
                child = {
                    "name": part,
                    "path": path_acc,
                    "children": {},
                    "leaf_key": None,
                    "display_name": part,
                    "group_key": None,
                    "aggregate_ids": [],
                }
                cursor["children"][part] = child
            cursor = child
            if i == len(parts) - 1:
                cursor["leaf_key"] = key
                cursor["display_name"] = self.display_names.get(key, part)
                leaf_count += 1
    # Build aggregate id groups for non-leaf selection (parent moves children).
    def _aggregate(node):
        agg = []
        leaf_key = node.get("leaf_key")
        if leaf_key:
            agg.extend(self.name_to_ids.get(leaf_key, []))
        for child in node.get("children", {}).values():
            agg.extend(_aggregate(child))
        # Stable unique ids
        uniq = []
        seen = set()
        for gid in agg:
            if gid in seen:
                continue
            seen.add(gid)
            uniq.append(gid)
        node["aggregate_ids"] = uniq
        if node.get("path") and uniq:
            group_key = f"__group__::{node['path']}"
            node["group_key"] = group_key
            self.name_to_ids[group_key] = uniq
            self.display_names[group_key] = node.get("display_name", node.get("name", ""))
        return uniq
    _aggregate(root)
    self.virtual_tree = root
    self.virtual_tree_meta = {"max_depth": max_depth, "leaf_count": leaf_count}
    return root
def get_virtual_hierarchy(self):
"""Return cached virtual tree; build on demand."""
if self.virtual_tree is None:
return self.build_virtual_hierarchy()
return self.virtual_tree
def _build_vertex_index(self, chunk_root):
    """
    After flatten, batch-read all vertex data with numpy to build:
    local_id -> [(geom_node_np, geom_idx, row_indices_array)]
    Also stores original vertex positions per object (as numpy arrays).

    Reads the raw vertex arrays through buffer handles, decodes the
    baked R/G color id (float32 or unorm8 color columns), and groups
    rows per id via a single argsort pass.
    """
    import numpy as np
    for gn_np in chunk_root.find_all_matches("**/+GeomNode"):
        gnode = gn_np.node()
        for gi in range(gnode.get_num_geoms()):
            geom = gnode.get_geom(gi)
            vdata = geom.get_vertex_data()
            num_rows = vdata.get_num_rows()
            if num_rows == 0:
                continue
            # Find vertex and color column info
            fmt = vdata.get_format()
            # Get position column
            pos_col = fmt.get_column(InternalName.get_vertex())
            if pos_col is None:
                continue
            pos_array_idx = fmt.get_array_with(InternalName.get_vertex())
            pos_start = pos_col.get_start()
            # Get color column
            color_col = fmt.get_column(InternalName.make("color"))
            if color_col is None:
                continue
            color_array_idx = fmt.get_array_with(InternalName.make("color"))
            color_start = color_col.get_start()
            # Read raw position array
            pos_array_format = fmt.get_array(pos_array_idx)
            pos_stride = pos_array_format.get_stride()
            pos_handle = vdata.get_array(pos_array_idx).get_handle()
            pos_raw = bytes(pos_handle.get_data())
            pos_buf = np.frombuffer(pos_raw, dtype=np.uint8).reshape(num_rows, pos_stride)
            # Extract xyz positions (3 floats starting at pos_start)
            positions = np.ndarray((num_rows, 3), dtype=np.float32,
                                   buffer=pos_buf[:, pos_start:pos_start+12].tobytes())
            # Read raw color array
            color_array_format = fmt.get_array(color_array_idx)
            color_stride = color_array_format.get_stride()
            if color_array_idx == pos_array_idx:
                # Interleaved format: reuse the already-read buffer.
                color_buf = pos_buf
            else:
                color_handle = vdata.get_array(color_array_idx).get_handle()
                color_raw = bytes(color_handle.get_data())
                color_buf = np.frombuffer(color_raw, dtype=np.uint8).reshape(num_rows, color_stride)
            # Decode color format to get ID
            # Color can be stored as float32 RGBA or unorm8 RGBA
            num_components = color_col.get_num_components()
            component_bytes = color_col.get_component_bytes()
            if component_bytes == 4:  # float32 per component
                color_data = np.ndarray((num_rows, num_components), dtype=np.float32,
                                        buffer=color_buf[:, color_start:color_start+num_components*4].tobytes())
                r_vals = (color_data[:, 0] * 255.0 + 0.5).astype(np.int32)
                g_vals = (color_data[:, 1] * 255.0 + 0.5).astype(np.int32)
            elif component_bytes == 1:  # uint8 per component
                color_bytes = color_buf[:, color_start:color_start+num_components].copy()
                r_vals = color_bytes[:, 0].astype(np.int32)
                g_vals = color_bytes[:, 1].astype(np.int32)
            else:
                # Fallback: skip this geom
                continue
            local_ids = r_vals + (g_vals << 8)
            # Group rows by local_id using argsort (O(N log N) instead of O(N×K))
            sort_idx = np.argsort(local_ids)
            sorted_ids = local_ids[sort_idx]
            sorted_positions = positions[sort_idx]
            # Find group boundaries
            boundaries = np.where(np.diff(sorted_ids) != 0)[0] + 1
            # Split into groups
            id_groups = np.split(sort_idx, boundaries)
            pos_groups = np.split(sorted_positions, boundaries)
            group_ids = sorted_ids[np.concatenate([[0], boundaries])]
            for k in range(len(group_ids)):
                uid = int(group_ids[k])
                rows = id_groups[k]
                pos = pos_groups[k]
                if uid not in self.vertex_index:
                    self.vertex_index[uid] = []
                    self.original_positions[uid] = []
                self.vertex_index[uid].append((gn_np, gi, rows))
                self.original_positions[uid].append(pos.copy())
def _set_chunk_dynamic(self, chunk_id, enabled):
    """Toggle one chunk between dynamic (editable) and static (batched).

    Exactly one representation is visible at a time: enabling unstashes
    the dynamic subtree and stashes the static copy; disabling does the
    reverse. active_chunks tracks which chunks are currently dynamic.
    Unknown chunk ids and no-op transitions return early.
    """
    chunk = self.chunks.get(chunk_id)
    if not chunk:
        return
    if enabled:
        if chunk["dynamic_enabled"]:
            return  # already dynamic
        chunk["dynamic_np"].unstash()
        if chunk["static_np"] and not chunk["static_np"].is_empty():
            chunk["static_np"].stash()
        chunk["dynamic_enabled"] = True
        self.active_chunks.add(chunk_id)
        return
    if not chunk["dynamic_enabled"]:
        return  # already static
    if chunk["static_np"] and not chunk["static_np"].is_empty():
        chunk["static_np"].unstash()
    chunk["dynamic_np"].stash()
    chunk["dynamic_enabled"] = False
    self.active_chunks.discard(chunk_id)
def _resolve_chunk_and_local_idx(self, global_id):
"""
Compatibility helper for merged branches:
- legacy: id_to_chunk[gid] -> (chunk_id, local_idx)
- current: id_to_chunk[gid] -> chunk_id (local_idx defaults to gid)
"""
mapping = self.id_to_chunk.get(global_id)
if mapping is None:
return None, None
if isinstance(mapping, (tuple, list)):
if not mapping:
return None, None
chunk_id = mapping[0]
local_idx = mapping[1] if len(mapping) > 1 else global_id
return chunk_id, local_idx
return mapping, global_id
def set_active_ids(self, active_ids):
"""切换编辑激活集合,仅保留 active_ids 对应 chunk 为动态模式。"""
target_chunks = set()
for obj_id in active_ids:
chunk_id, _ = self._resolve_chunk_and_local_idx(obj_id)
if chunk_id is not None:
target_chunks.add(chunk_id)
# Demote no-longer-active chunks. Dirty chunks are re-baked before demotion.
for chunk_id in list(self.active_chunks):
if chunk_id in target_chunks:
continue
# Always rebuild static chunk when leaving dynamic edit mode.
# Material/texture edits may not set `dirty`, but still need to
# propagate from dynamic objects to static representation.
self._rebuild_static_chunk(chunk_id)
self._set_chunk_dynamic(chunk_id, False)
# Promote target chunks.
for chunk_id in target_chunks:
self._set_chunk_dynamic(chunk_id, True)
def bake_ids_and_collect_hybrid(self, model):
    """
    Build the hybrid structure:
    1) split every geom into an independently editable dynamic object
    2) generate a flattened static copy per chunk

    Also builds a parallel pick scene whose vertex colors encode object
    ids for GPU picking. The source model is consumed (remove_node) and
    replaced by the freshly built scene_root. Returns the object count.
    """
    t0 = time.time()
    self._reset_state()
    geom_nodes = list(model.find_all_matches("**/+GeomNode"))
    print(f"[控制器] 找到 {len(geom_nodes)} 个 GeomNode")
    # Build hierarchy metadata first so UI can mirror source model tree.
    self._build_scene_tree(model)
    root_name = model.get_name() or "scene"
    scene_root = NodePath(root_name)
    pick_root = NodePath(root_name + "_pick")
    self.model = scene_root
    self.pick_model = pick_root
    global_id = 0
    for np in geom_nodes:
        gnode = np.node()
        owner_key = self._path_to_tree_key.get(str(np), self.tree_root_key)
        world_mat = LMatrix4f(np.get_mat(model))
        # Preserve the inherited render state, not just the local node state.
        # Scene/package reload often stores material textures/effects on parent
        # nodes; using only local state drops those bindings and makes rebuilt
        # chunk_* runtime objects render black after reopening a project.
        try:
            node_state = np.get_net_state()
        except Exception:
            try:
                node_state = np.getNetState()
            except Exception:
                try:
                    node_state = np.get_state()
                except Exception:
                    try:
                        node_state = np.getState()
                    except Exception:
                        node_state = None
        for gi in range(gnode.get_num_geoms()):
            # Render geometry stays untouched (keep original material/color behavior).
            render_geom = gnode.get_geom(gi).make_copy()
            render_gnode = GeomNode(f"obj_{global_id}")
            geom_state = gnode.get_geom_state(gi)
            try:
                merged_state = node_state.compose(geom_state) if node_state is not None else geom_state
            except Exception:
                merged_state = geom_state
            render_gnode.add_geom(render_geom, merged_state)
            # Picking geometry gets encoded ID in vertex color.
            pick_geom = gnode.get_geom(gi).make_copy()
            pick_vdata = pick_geom.modify_vertex_data()
            self._encode_id_color(pick_vdata, global_id)
            pick_gnode = GeomNode(f"pick_{global_id}")
            pick_gnode.add_geom(pick_geom, merged_state)
            world_pos = world_mat.get_row3(3)
            chunk_id, chunk = self._allocate_spatial_chunk(scene_root, world_pos)
            obj_np = chunk["dynamic_np"].attach_new_node(render_gnode)
            obj_np.set_mat(world_mat)
            self._copy_node_tags(np, obj_np)
            pick_np = pick_root.attach_new_node(pick_gnode)
            pick_np.set_mat(world_mat)
            chunk["members"].append(global_id)
            self.id_to_chunk[global_id] = chunk_id
            self.id_to_object_np[global_id] = obj_np
            self.id_to_pick_np[global_id] = pick_np
            self.id_to_geom_index[global_id] = gi
            self.tree_nodes[owner_key]["local_ids"].append(global_id)
            self.id_to_name[global_id] = owner_key
            self.global_transforms.append(LMatrix4f(world_mat))
            self.position_offsets[global_id] = Vec3(0, 0, 0)
            global_id += 1
    t1 = time.time()
    print(f"[控制器] Dynamic object build took {(t1 - t0) * 1000:.0f}ms")
    # Every chunk starts in static (batched) mode.
    for chunk_id in sorted(self.chunks):
        self._rebuild_static_chunk(chunk_id)
        self._set_chunk_dynamic(chunk_id, False)
    t2 = time.time()
    print(f"[控制器] Static chunk flatten took {(t2 - t1) * 1000:.0f}ms")
    print(f"[控制器] Built {len(self.chunks)} chunks, {global_id} objects")
    print(f"[控制器] Spatial chunking: cell={self.chunk_world_size:.1f}, max_members={self.chunk_size}")
    # Fill per-node aggregate IDs and build deterministic preorder list for UI.
    self._aggregate_tree_ids(self.tree_root_key)
    self.node_list = []
    self._build_tree_preorder(self.tree_root_key, self.node_list)
    model.remove_node()
    return global_id
def _build_pick_vertex_index(self, pick_root):
    """
    Build local_id -> [(geom_node_np, geom_idx, row_indices_array)] for pick model.
    This keeps GPU-picking geometry writable in sync with visible geometry edits.

    Same id-decoding scheme as _build_vertex_index (R low byte, G high
    byte; float32 or unorm8 color columns), but positions are not
    captured here — only the row groups.
    """
    import numpy as np
    self.pick_vertex_index = {}
    if not pick_root:
        return
    for gn_np in pick_root.find_all_matches("**/+GeomNode"):
        gnode = gn_np.node()
        for gi in range(gnode.get_num_geoms()):
            geom = gnode.get_geom(gi)
            vdata = geom.get_vertex_data()
            num_rows = vdata.get_num_rows()
            if num_rows == 0:
                continue
            fmt = vdata.get_format()
            color_col = fmt.get_column(InternalName.make("color"))
            if color_col is None:
                continue
            color_array_idx = fmt.get_array_with(InternalName.make("color"))
            color_start = color_col.get_start()
            color_array_format = fmt.get_array(color_array_idx)
            color_stride = color_array_format.get_stride()
            color_handle = vdata.get_array(color_array_idx).get_handle()
            color_raw = bytes(color_handle.get_data())
            color_buf = np.frombuffer(color_raw, dtype=np.uint8).reshape(num_rows, color_stride)
            num_components = color_col.get_num_components()
            component_bytes = color_col.get_component_bytes()
            if component_bytes == 4:
                # float32 RGBA components
                color_data = np.ndarray(
                    (num_rows, num_components),
                    dtype=np.float32,
                    buffer=color_buf[:, color_start:color_start + num_components * 4].tobytes()
                )
                r_vals = (color_data[:, 0] * 255.0 + 0.5).astype(np.int32)
                g_vals = (color_data[:, 1] * 255.0 + 0.5).astype(np.int32)
            elif component_bytes == 1:
                # unorm8 RGBA components
                color_bytes = color_buf[:, color_start:color_start + num_components].copy()
                r_vals = color_bytes[:, 0].astype(np.int32)
                g_vals = color_bytes[:, 1].astype(np.int32)
            else:
                continue
            local_ids = r_vals + (g_vals << 8)
            # Group rows by id in one argsort pass.
            sort_idx = np.argsort(local_ids)
            sorted_ids = local_ids[sort_idx]
            boundaries = np.where(np.diff(sorted_ids) != 0)[0] + 1
            id_groups = np.split(sort_idx, boundaries)
            group_ids = sorted_ids[np.concatenate([[0], boundaries])]
            for k in range(len(group_ids)):
                uid = int(group_ids[k])
                rows = id_groups[k]
                if uid not in self.pick_vertex_index:
                    self.pick_vertex_index[uid] = []
                self.pick_vertex_index[uid].append((gn_np, gi, rows))
def _apply_vertices_to_pick(self, local_idx, entry_idx, new_pos):
    """Copy one group's rewritten vertex positions into the pick geometry.

    local_idx/entry_idx select the (geom_node, geom, rows) group recorded in
    pick_vertex_index; new_pos is the (N, 3) array of updated positions.
    Silently does nothing when no matching pick entry exists.
    """
    entries = self.pick_vertex_index.get(local_idx)
    if not entries or entry_idx >= len(entries):
        return
    gn_np, gi, rows = entries[entry_idx]
    vdata = gn_np.node().modify_geom(gi).modify_vertex_data()
    writer = GeomVertexWriter(vdata, "vertex")
    # Guard against row/position count mismatch between the two meshes.
    count = min(len(rows), len(new_pos))
    for j in range(count):
        writer.set_row(int(rows[j]))
        writer.set_data3f(float(new_pos[j, 0]), float(new_pos[j, 1]), float(new_pos[j, 2]))
def _init_local_transform_state(self):
    """Reset per-object transform bookkeeping once vertex_index is populated.

    For every known local index, record its baseline vertex positions and an
    identity transform state (offset/quat/scale) anchored at its pivot.
    """
    self.local_transform_state = {}
    self.local_transform_base_positions = {}
    for idx in self.vertex_index:
        self.local_transform_base_positions[idx] = self.original_positions.get(idx, [])
        state = {
            "offset": Vec3(0, 0, 0),
            "quat": Quat.identQuat(),
            "scale": Vec3(1, 1, 1),
            "pivot": self.get_local_pivot(idx),
        }
        self.local_transform_state[idx] = state
def get_local_indices_from_global_ids(self, global_ids):
"""Map global ids to unique local indices."""
local_indices = []
if not global_ids:
return local_indices
seen = set()
for global_id in global_ids:
_, local_idx = self._resolve_chunk_and_local_idx(global_id)
if local_idx is None:
continue
if local_idx in seen:
continue
if local_idx not in self.vertex_index:
continue
seen.add(local_idx)
local_indices.append(local_idx)
return local_indices
def get_local_pivot(self, local_idx):
"""Get pivot for one local object (model-local center)."""
global_id = self.local_to_global_id.get(local_idx)
if global_id is None:
return Vec3(0, 0, 0)
return self.get_object_center(global_id)
def get_selection_center(self, local_indices):
    """World-space centroid of a multi-object selection.

    Averages each object's pivot plus its current offset in model space,
    then converts the result to world coordinates. Returns the origin when
    the selection is empty or no index has transform state.
    """
    if not local_indices:
        return Vec3(0, 0, 0)
    total = Vec3(0, 0, 0)
    count = 0
    for idx in local_indices:
        state = self.local_transform_state.get(idx)
        if not state:
            continue
        total += state.get("pivot", Vec3(0, 0, 0)) + state.get("offset", Vec3(0, 0, 0))
        count += 1
    if count == 0:
        return Vec3(0, 0, 0)
    return self._local_point_to_world(total / float(count))
def begin_transform_session(self, local_indices):
    """Snapshot the current transform state of *local_indices* for a drag.

    Returns {"locals": {idx: {...}}} where each entry holds copies of the
    offset/quat/scale/pivot (so the baseline stays immutable while deltas
    are applied on top) plus references to the vertex entries and their
    baseline positions.
    """
    snapshot = {}
    if local_indices:
        for idx in local_indices:
            state = self.local_transform_state.get(idx)
            if not state:
                continue
            snapshot[idx] = {
                "offset": Vec3(state["offset"]),
                "quat": Quat(state["quat"]),
                "scale": Vec3(state["scale"]),
                "pivot": Vec3(state["pivot"]),
                "entries": self.vertex_index.get(idx, []),
                "base_positions": self.local_transform_base_positions.get(idx, []),
            }
    return {"locals": snapshot}
def apply_transform_session(self, snapshot, delta_pos, delta_quat, delta_scale):
    """Apply transform delta to all local indices in snapshot and rewrite vertices.

    snapshot: baseline dict produced by begin_transform_session; treated as
        immutable for the whole drag session.
    delta_pos / delta_quat / delta_scale: incremental transform relative to
        the snapshot baseline; None means identity for that component.

    For each object the composed transform (scale about the pivot, rotate,
    then translate) is applied to the baseline vertex positions and written
    back into both the visible geometry and the mirrored pick geometry.
    Also persists the composed state into local_transform_state /
    position_offsets so later sessions start from the new pose.
    """
    import numpy as np
    if not snapshot or "locals" not in snapshot:
        return
    # Normalize missing deltas to identity components.
    if delta_pos is None:
        delta_pos = Vec3(0, 0, 0)
    if delta_quat is None:
        delta_quat = Quat.identQuat()
    if delta_scale is None:
        delta_scale = Vec3(1, 1, 1)
    dscale = np.array([delta_scale.x, delta_scale.y, delta_scale.z], dtype=np.float32)
    dpos = np.array([delta_pos.x, delta_pos.y, delta_pos.z], dtype=np.float32)
    for local_idx, local_data in snapshot["locals"].items():
        base_offset = local_data["offset"]
        base_quat = local_data["quat"]
        base_scale = local_data["scale"]
        pivot = local_data["pivot"]
        # Compose baseline state with the session delta (component-wise scale).
        final_offset = Vec3(base_offset) + delta_pos
        final_quat = Quat(delta_quat * base_quat)
        final_scale = Vec3(
            base_scale.x * delta_scale.x,
            base_scale.y * delta_scale.y,
            base_scale.z * delta_scale.z,
        )
        rot_mat = self._quat_to_np_mat3(final_quat)
        # Persist the composed state so subsequent sessions build on it.
        self.local_transform_state[local_idx]["offset"] = final_offset
        self.local_transform_state[local_idx]["quat"] = final_quat
        self.local_transform_state[local_idx]["scale"] = final_scale
        self.position_offsets[local_idx] = final_offset
        pivot_np = np.array([pivot.x, pivot.y, pivot.z], dtype=np.float32)
        base_s = np.array([base_scale.x, base_scale.y, base_scale.z], dtype=np.float32)
        total_scale = base_s * dscale
        total_offset = np.array([base_offset.x, base_offset.y, base_offset.z], dtype=np.float32) + dpos
        entries = local_data["entries"]
        base_positions = local_data["base_positions"]
        for i, (gn_np, gi, rows) in enumerate(entries):
            if i >= len(base_positions):
                continue
            orig_pos = base_positions[i]
            if orig_pos is None or len(orig_pos) == 0:
                continue
            # Scale about the pivot, rotate, then move back + translate.
            centered = orig_pos - pivot_np
            scaled = centered * total_scale
            rotated = scaled @ rot_mat.T
            new_pos = rotated + pivot_np + total_offset
            gnode = gn_np.node()
            geom = gnode.modify_geom(gi)
            vdata = geom.modify_vertex_data()
            writer = GeomVertexWriter(vdata, "vertex")
            for j in range(len(rows)):
                writer.set_row(int(rows[j]))
                writer.set_data3f(float(new_pos[j, 0]), float(new_pos[j, 1]), float(new_pos[j, 2]))
            # Keep the GPU-picking geometry in sync with the visible mesh.
            self._apply_vertices_to_pick(local_idx, i, new_pos)
def _quat_to_np_mat3(self, quat):
    """Convert a Panda3D Quat to a 3x3 numpy rotation matrix (float32).

    Works on a normalized copy so the caller's quaternion is untouched.
    """
    import numpy as np
    q = Quat(quat)
    q.normalize()
    w = float(q.getR())
    x = float(q.getI())
    y = float(q.getJ())
    z = float(q.getK())
    # Standard unit-quaternion -> rotation-matrix expansion.
    two_x, two_y, two_z = 2.0 * x, 2.0 * y, 2.0 * z
    wx, wy, wz = two_x * w, two_y * w, two_z * w
    xx, xy, xz = two_x * x, two_y * x, two_z * x
    yy, yz, zz = two_y * y, two_z * y, two_z * z
    return np.array([
        [1.0 - (yy + zz), xy - wz, xz + wy],
        [xy + wz, 1.0 - (xx + zz), yz - wx],
        [xz - wy, yz + wx, 1.0 - (xx + yy)],
    ], dtype=np.float32)
def create_ssbo(self):
    """Hybrid chunk mode keeps geometry in NodePaths, so no SSBO is built."""
    return None
def move_object(self, global_id, delta):
    """
    Move an object by modifying vertex positions directly.

    global_id: id of the object to move; unknown ids are ignored.
    delta: Vec3 translation to apply; accumulated into position_offsets.
    Uses numpy for batch vertex updates when vertex data is indexed.
    """
    import numpy as np
    if global_id not in self.id_to_chunk:
        return
    chunk_id, local_idx = self._resolve_chunk_and_local_idx(global_id)
    if local_idx is None:
        return
    # Hybrid chunk mode (current) may move NodePaths directly without
    # vertex_index/original_positions populated.
    if local_idx not in self.vertex_index or local_idx not in self.original_positions:
        obj_np = self.id_to_object_np.get(global_id)
        if not obj_np or obj_np.is_empty():
            return
        next_pos = obj_np.get_pos() + delta
        # Prefer fluid movement when available (NOTE(review): presumably
        # for continuous-collision friendliness -- confirm).
        if hasattr(obj_np, "set_fluid_pos"):
            obj_np.set_fluid_pos(next_pos)
        else:
            obj_np.set_pos(next_pos)
        # Mirror the visible transform onto the pick-scene node.
        pick_np = self.id_to_pick_np.get(global_id)
        if pick_np and not pick_np.is_empty():
            pick_np.set_mat(self.model, obj_np.get_mat(self.model))
        # Mark the chunk dirty so its static representation gets rebuilt.
        if chunk_id is not None and chunk_id in self.chunks:
            self.chunks[chunk_id]["dirty"] = True
        self.position_offsets[local_idx] = self.position_offsets.get(local_idx, Vec3(0)) + delta
        return
    # Accumulate offset; vertices are always recomputed from originals,
    # never incrementally, to avoid drift.
    self.position_offsets[local_idx] = self.position_offsets.get(local_idx, Vec3(0)) + delta
    offset = self.position_offsets[local_idx]
    offset_arr = np.array([offset.x, offset.y, offset.z], dtype=np.float32)
    # Update each (geom_node, geom_idx, rows) group
    entries = self.vertex_index[local_idx]
    originals = self.original_positions[local_idx]
    for i, (gn_np, gi, rows) in enumerate(entries):
        orig_pos = originals[i]  # numpy array (N, 3)
        new_pos = orig_pos + offset_arr  # vectorized add
        gnode = gn_np.node()
        geom = gnode.modify_geom(gi)
        vdata = geom.modify_vertex_data()
        writer = GeomVertexWriter(vdata, "vertex")
        for j in range(len(rows)):
            writer.set_row(int(rows[j]))
            writer.set_data3f(float(new_pos[j, 0]), float(new_pos[j, 1]), float(new_pos[j, 2]))
        # Keep the GPU-picking mirror geometry consistent.
        self._apply_vertices_to_pick(local_idx, i, new_pos)
def get_world_pos(self, global_id):
    """Return the current world-space position of *global_id*.

    Prefers the live dynamic NodePath when the object's chunk is in editing
    mode; otherwise reconstructs the position from the recorded original
    transform plus any accumulated vertex offset. Returns the origin when
    the model is missing or the id cannot be resolved.
    """
    if not self.model:
        return Vec3(0, 0, 0)
    obj_np = self.id_to_object_np.get(global_id)
    if obj_np and not obj_np.is_empty():
        p = obj_np.get_pos(self.model)
        return self._local_point_to_world(Vec3(p))
    _, local_idx = self._resolve_chunk_and_local_idx(global_id)
    if local_idx is None:
        return Vec3(0, 0, 0)
    # Bounds guard for stale/out-of-range ids, matching the checks in
    # get_object_center / get_transform (previously raised IndexError).
    if global_id >= len(self.global_transforms):
        return Vec3(0, 0, 0)
    original_mat = self.global_transforms[global_id]
    original_pos = original_mat.get_row3(3)
    offset = self.position_offsets.get(local_idx, Vec3(0))
    local_pos = Vec3(original_pos) + offset
    return self._local_point_to_world(local_pos)
def get_object_center(self, global_id):
    """Model-local center of *global_id*: the translation row of its
    original transform, or the origin for out-of-range ids."""
    if global_id < len(self.global_transforms):
        return Vec3(self.global_transforms[global_id].get_row3(3))
    return Vec3(0, 0, 0)
def get_transform(self, global_id):
if global_id >= len(self.global_transforms):
return LMatrix4f.ident_mat()
return self.global_transforms[global_id]
@property
def transforms(self):
return self.global_transforms