Fix gizmo selection: GPU picking, group proxy, parent-child transform, pick sync
This commit is contained in:
parent
eeb5dd193b
commit
53e6a829e4
40
imgui.ini
40
imgui.ini
@ -24,26 +24,26 @@ Size=832,45
|
||||
Collapsed=0
|
||||
|
||||
[Window][工具栏]
|
||||
Pos=327,20
|
||||
Size=1862,32
|
||||
Pos=278,20
|
||||
Size=1295,32
|
||||
Collapsed=0
|
||||
DockId=0x0000000D,0
|
||||
|
||||
[Window][场景树]
|
||||
Pos=0,20
|
||||
Size=325,854
|
||||
Size=276,622
|
||||
Collapsed=0
|
||||
DockId=0x00000007,0
|
||||
|
||||
[Window][属性面板]
|
||||
Pos=2191,20
|
||||
Size=369,1331
|
||||
Pos=1575,20
|
||||
Size=345,971
|
||||
Collapsed=0
|
||||
DockId=0x00000003,0
|
||||
|
||||
[Window][控制台]
|
||||
Pos=0,876
|
||||
Size=325,475
|
||||
Pos=0,644
|
||||
Size=276,347
|
||||
Collapsed=0
|
||||
DockId=0x00000008,0
|
||||
|
||||
@ -60,7 +60,7 @@ Collapsed=0
|
||||
|
||||
[Window][WindowOverViewport_11111111]
|
||||
Pos=0,20
|
||||
Size=2560,1331
|
||||
Size=1920,971
|
||||
Collapsed=0
|
||||
|
||||
[Window][测试窗口1]
|
||||
@ -84,7 +84,7 @@ Size=400,300
|
||||
Collapsed=0
|
||||
|
||||
[Window][选择路径]
|
||||
Pos=660,254
|
||||
Pos=660,245
|
||||
Size=600,500
|
||||
Collapsed=0
|
||||
|
||||
@ -94,13 +94,13 @@ Size=500,400
|
||||
Collapsed=0
|
||||
|
||||
[Window][导入模型]
|
||||
Pos=660,254
|
||||
Pos=660,245
|
||||
Size=600,500
|
||||
Collapsed=0
|
||||
|
||||
[Window][资源管理器]
|
||||
Pos=327,1013
|
||||
Size=1862,338
|
||||
Pos=278,657
|
||||
Size=1295,334
|
||||
Collapsed=0
|
||||
DockId=0x00000006,0
|
||||
|
||||
@ -201,17 +201,17 @@ Size=600,400
|
||||
Collapsed=0
|
||||
|
||||
[Docking][Data]
|
||||
DockSpace ID=0x08BD597D Window=0x1BBC0F80 Pos=0,20 Size=2560,1331 Split=X
|
||||
DockNode ID=0x00000001 Parent=0x08BD597D SizeRef=1549,989 Split=X
|
||||
DockNode ID=0x00000009 Parent=0x00000001 SizeRef=325,989 Split=Y Selected=0xE0015051
|
||||
DockSpace ID=0x08BD597D Window=0x1BBC0F80 Pos=0,20 Size=1920,971 Split=X
|
||||
DockNode ID=0x00000001 Parent=0x08BD597D SizeRef=1573,989 Split=X
|
||||
DockNode ID=0x00000009 Parent=0x00000001 SizeRef=276,989 Split=Y Selected=0xE0015051
|
||||
DockNode ID=0x00000007 Parent=0x00000009 SizeRef=271,634 Selected=0xE0015051
|
||||
DockNode ID=0x00000008 Parent=0x00000009 SizeRef=271,353 Selected=0x5428E753
|
||||
DockNode ID=0x0000000A Parent=0x00000001 SizeRef=1862,989 Split=Y
|
||||
DockNode ID=0x0000000A Parent=0x00000001 SizeRef=755,989 Split=Y
|
||||
DockNode ID=0x0000000D Parent=0x0000000A SizeRef=1318,32 HiddenTabBar=1 Selected=0x43A39006
|
||||
DockNode ID=0x0000000E Parent=0x0000000A SizeRef=1318,955 Split=Y
|
||||
DockNode ID=0x00000005 Parent=0x0000000E SizeRef=1341,957 CentralNode=1
|
||||
DockNode ID=0x00000006 Parent=0x0000000E SizeRef=1341,338 Selected=0x3A2E05C3
|
||||
DockNode ID=0x00000002 Parent=0x08BD597D SizeRef=369,989 Split=Y Selected=0x3188AB8D
|
||||
DockNode ID=0x0000000E Parent=0x0000000A SizeRef=1318,937 Split=Y
|
||||
DockNode ID=0x00000005 Parent=0x0000000E SizeRef=1341,601 CentralNode=1
|
||||
DockNode ID=0x00000006 Parent=0x0000000E SizeRef=1341,334 Selected=0x3A2E05C3
|
||||
DockNode ID=0x00000002 Parent=0x08BD597D SizeRef=345,989 Split=Y Selected=0x3188AB8D
|
||||
DockNode ID=0x00000003 Parent=0x00000002 SizeRef=351,390 Selected=0x5DB6FF37
|
||||
DockNode ID=0x00000004 Parent=0x00000002 SizeRef=351,597 Selected=0x1EB923B7
|
||||
|
||||
|
||||
@ -1,638 +1,327 @@
|
||||
|
||||
from panda3d.core import (
|
||||
GeomVertexFormat, GeomVertexWriter, GeomVertexReader, GeomVertexRewriter,
|
||||
InternalName, Vec3, Vec4, LMatrix4f, ShaderBuffer, GeomEnums,
|
||||
BoundingSphere, NodePath, GeomNode, Texture, SamplerState,
|
||||
Point3, BoundingBox, Quat
|
||||
)
|
||||
import struct
|
||||
from panda3d.core import GeomNode, GeomVertexFormat, GeomVertexWriter
|
||||
from panda3d.core import InternalName, LMatrix4f, NodePath, Vec3
|
||||
import time
|
||||
|
||||
class ObjectController:
|
||||
"""
|
||||
物体控制器 (No Custom Shader Mode)
|
||||
====================================
|
||||
Uses RP's default rendering (no rp.set_effect) for maximum FPS.
|
||||
Vertex colors baked for picking. Movement modifies vertex data directly.
|
||||
Stores original vertex positions per object for rotation/translation.
|
||||
混合架构控制器 (Chunked Static + Dynamic Editing)
|
||||
================================================
|
||||
- 默认: 每个 chunk 使用 flatten 后的静态表示
|
||||
- 编辑: 被选中对象所属 chunk 切换为动态表示,直接改 NodePath 变换
|
||||
- 提交: 离开 chunk 时仅重建该 chunk 的静态表示
|
||||
"""
|
||||
def __init__(self):
|
||||
|
||||
def __init__(self, chunk_size=64):
|
||||
self.chunk_size = max(8, int(chunk_size))
|
||||
self._reset_state()
|
||||
|
||||
def _reset_state(self):
|
||||
self.name_to_ids = {}
|
||||
self.id_to_name = {}
|
||||
self.key_to_node = {}
|
||||
self.node_list = []
|
||||
self.display_names = {}
|
||||
self.global_transforms = [] # Original transforms (for center/position)
|
||||
|
||||
self.id_to_chunk = {} # global_id -> (chunk_key, local_idx)
|
||||
self.chunks = {} # chunk_key -> dict with 'node' key
|
||||
|
||||
# Vertex index: local_id -> list of (geom_node_np, geom_idx, [row_indices])
|
||||
self.vertex_index = {}
|
||||
|
||||
# Original vertex positions: local_id -> list of (Vec3,) matching row order
|
||||
self.original_positions = {}
|
||||
|
||||
# Current position offsets: local_id -> Vec3 delta
|
||||
self.position_offsets = {}
|
||||
self.local_to_global_id = {}
|
||||
self.local_transform_state = {}
|
||||
self.local_transform_base_positions = {}
|
||||
self.virtual_tree = None
|
||||
self.virtual_tree_meta = None
|
||||
|
||||
self.model = None
|
||||
self.pick_model = None
|
||||
self.chunk_node = None # Single chunk node
|
||||
self._source_model_name = ""
|
||||
self._source_model_stem = ""
|
||||
|
||||
def _build_original_hierarchy_key(self, np, model_root):
|
||||
"""Capture hierarchy path before flatten/reparent."""
|
||||
parts = []
|
||||
cur = np
|
||||
while cur and not cur.is_empty() and cur != model_root:
|
||||
name = cur.get_name() or ""
|
||||
if name:
|
||||
parts.append(name)
|
||||
cur = cur.get_parent()
|
||||
parts.reverse()
|
||||
if not parts:
|
||||
return np.get_name() or "Unnamed"
|
||||
return "/".join(parts)
|
||||
|
||||
def _is_wrapper_segment(self, segment):
|
||||
s = (segment or "").strip().lower()
|
||||
if not s:
|
||||
return True
|
||||
if s in ("root",):
|
||||
return True
|
||||
if self._source_model_name and s == self._source_model_name:
|
||||
return True
|
||||
if self._source_model_stem and s == self._source_model_stem:
|
||||
return True
|
||||
return False
|
||||
|
||||
def bake_ids_and_collect(self, model):
|
||||
"""
|
||||
Bake IDs into vertex colors, flatten, then build vertex index.
|
||||
|
||||
NO transform reset — vertices keep world-space positions.
|
||||
NO SSBO — uses RP default rendering.
|
||||
"""
|
||||
t0 = time.time()
|
||||
|
||||
geom_nodes = list(model.find_all_matches("**/+GeomNode"))
|
||||
print(f"[控制器] 找到 {len(geom_nodes)} 个 GeomNode")
|
||||
|
||||
self.name_to_ids = {}
|
||||
self.id_to_name = {}
|
||||
self.key_to_node = {}
|
||||
self.node_list = []
|
||||
self.display_names = {}
|
||||
self.global_transforms = []
|
||||
self.id_to_chunk = {}
|
||||
self.chunks = {}
|
||||
self.vertex_index = {}
|
||||
self.original_positions = {}
|
||||
self.position_offsets = {}
|
||||
self.local_to_global_id = {}
|
||||
self.local_transform_state = {}
|
||||
self.local_transform_base_positions = {}
|
||||
self.virtual_tree = None
|
||||
self.virtual_tree_meta = None
|
||||
self.pick_model = None
|
||||
model_name = (model.get_name() or "").strip()
|
||||
self._source_model_name = model_name.lower()
|
||||
self._source_model_stem = model_name.rsplit(".", 1)[0].lower() if "." in model_name else model_name.lower()
|
||||
|
||||
global_id_counter = 0
|
||||
chunk_key = model.get_name() or "default"
|
||||
|
||||
# No chunk wrapper — flatten directly on model (same as load_jyc_flatten.py)
|
||||
self.chunk_node = model
|
||||
self.chunks[chunk_key] = {'node': model, 'base_id': 0}
|
||||
|
||||
# Cache original hierarchy path BEFORE flatten/reparent.
|
||||
original_keys = {}
|
||||
for np in geom_nodes:
|
||||
original_keys[id(np)] = self._build_original_hierarchy_key(np, model)
|
||||
|
||||
# Flatten hierarchy
|
||||
for np in geom_nodes:
|
||||
np.wrt_reparent_to(model)
|
||||
|
||||
local_idx = 0
|
||||
|
||||
self.model = None
|
||||
self.pick_model = None
|
||||
self.id_to_chunk = {} # global_id -> chunk_id
|
||||
self.id_to_object_np = {} # global_id -> dynamic object nodepath
|
||||
self.id_to_pick_np = {} # global_id -> pick-scene nodepath
|
||||
|
||||
# chunk_id -> {
|
||||
# "dynamic_np": NodePath,
|
||||
# "static_np": NodePath or None,
|
||||
# "members": [global_id],
|
||||
# "dirty": bool,
|
||||
# "dynamic_enabled": bool
|
||||
# }
|
||||
self.chunks = {}
|
||||
self.active_chunks = set()
|
||||
|
||||
# UI hierarchy metadata (matches source model parent/child structure)
|
||||
self.tree_root_key = None
|
||||
self.tree_nodes = {}
|
||||
self._path_to_tree_key = {}
|
||||
|
||||
def _register_tree_node(self, key, display_name, parent_key):
|
||||
self.tree_nodes[key] = {
|
||||
"name": display_name,
|
||||
"parent": parent_key,
|
||||
"children": [],
|
||||
"local_ids": [],
|
||||
}
|
||||
self.display_names[key] = display_name
|
||||
self.name_to_ids[key] = []
|
||||
if parent_key is not None and parent_key in self.tree_nodes:
|
||||
self.tree_nodes[parent_key]["children"].append(key)
|
||||
|
||||
def _build_scene_tree(self, root_np):
    """Capture source model hierarchy for UI (independent from render batching).

    Assigns slash-joined child-index keys ("0", "0/1", "0/1/2", ...) rooted
    at "0" and registers one tree record per scene-graph node.
    """
    self.tree_root_key = "0"

    def walk(np, parent_key, key):
        # Register this node under its structural key.
        display_name = np.get_name() or "Unnamed"
        self._register_tree_node(key, display_name, parent_key)
        # Remember the NodePath's string form so geoms can later be
        # attributed back to their owning tree node.
        self._path_to_tree_key[str(np)] = key

        # Recurse depth-first; the child's key encodes its sibling index.
        children = list(np.get_children())
        for i, child in enumerate(children):
            walk(child, key, f"{key}/{i}")

    walk(root_np, None, self.tree_root_key)
|
||||
|
||||
def _aggregate_tree_ids(self, key):
|
||||
node = self.tree_nodes[key]
|
||||
agg_ids = list(node["local_ids"])
|
||||
for child_key in node["children"]:
|
||||
agg_ids.extend(self._aggregate_tree_ids(child_key))
|
||||
self.name_to_ids[key] = agg_ids
|
||||
return agg_ids
|
||||
|
||||
def _build_tree_preorder(self, key, out):
|
||||
out.append(key)
|
||||
for child_key in self.tree_nodes[key]["children"]:
|
||||
self._build_tree_preorder(child_key, out)
|
||||
|
||||
def should_hide_tree_node(self, key):
    """
    Hide a redundant wrapper node directly below the file root, e.g. ROOT.
    This keeps the model file name as the visible root in the UI.
    """
    record = self.tree_nodes.get(key)
    if not record:
        return False

    # Only a direct child of the tree root can be a hidden wrapper.
    is_top_level = record["parent"] == self.tree_root_key
    if not is_top_level:
        return False

    is_root_named = (record["name"] or "").strip().lower() == "root"
    # Nodes carrying direct geoms stay visible; wrappers with no children
    # are harmless and also stay visible.
    carries_geoms = bool(record["local_ids"])
    has_children = bool(record["children"])
    return is_root_named and not carries_geoms and has_children
|
||||
|
||||
def _encode_id_color(self, vdata, object_id):
    """Bake *object_id* into every vertex color of *vdata*.

    Low byte goes to R, high byte to G (16-bit id total). The GPU picking
    pass reads these channels back to reconstruct the id.
    """
    # Ensure the vertex format carries a color column to write into.
    if not vdata.has_column("color"):
        new_fmt = vdata.get_format().get_union_format(GeomVertexFormat.get_v3c4())
        vdata.set_format(new_fmt)

    # Split id into two bytes, normalized to the [0, 1] color range.
    low = object_id & 0xFF
    high = (object_id >> 8) & 0xFF
    r = low / 255.0
    g = high / 255.0

    # Stamp the same encoded color onto every vertex row.
    writer = GeomVertexWriter(vdata, InternalName.make("color"))
    for row in range(vdata.get_num_rows()):
        writer.set_row(row)
        writer.set_data4f(r, g, 0.0, 1.0)
|
||||
|
||||
def _ensure_chunk(self, root_np, chunk_id):
    """Return the bookkeeping dict for *chunk_id*, creating it on demand.

    A freshly created chunk gets a stashed (hidden) dynamic node under
    *root_np*; the flattened static copy is built later.
    """
    if chunk_id in self.chunks:
        return self.chunks[chunk_id]

    # New chunks start hidden: the static representation is shown by default.
    dynamic_np = root_np.attach_new_node(f"chunk_{chunk_id:04d}_dynamic")
    dynamic_np.stash()

    chunk_data = {
        "dynamic_np": dynamic_np,   # per-object editable nodes
        "static_np": None,          # flattened copy, built by _rebuild_static_chunk
        "members": [],              # global ids belonging to this chunk
        "dirty": False,             # True when dynamic edits haven't been re-baked
        "dynamic_enabled": False,   # which representation is currently visible
    }
    self.chunks[chunk_id] = chunk_data
    return chunk_data
|
||||
|
||||
def _rebuild_static_chunk(self, chunk_id):
    """Re-bake the flattened static representation of one chunk from its
    dynamic nodes, clearing the chunk's dirty flag."""
    chunk = self.chunks.get(chunk_id)
    if not chunk:
        return

    # Drop the stale static copy before rebuilding.
    old_static = chunk.get("static_np")
    if old_static and not old_static.is_empty():
        old_static.remove_node()

    # Copy the dynamic subtree and flatten it into a single static batch.
    # unstash() before flatten so the copy participates in the scene graph.
    static_np = chunk["dynamic_np"].copy_to(self.model)
    static_np.set_name(f"chunk_{chunk_id:04d}_static")
    static_np.unstash()
    static_np.flatten_strong()

    chunk["static_np"] = static_np
    chunk["dirty"] = False

    # Keep visibility coherent with current mode after rebuild.
    if chunk["dynamic_enabled"]:
        static_np.stash()
    else:
        static_np.unstash()
|
||||
|
||||
def _set_chunk_dynamic(self, chunk_id, enabled):
    """Toggle one chunk between dynamic (editable) and static (flattened)
    display, keeping ``active_chunks`` in sync. No-op when already in the
    requested mode or when the chunk is unknown."""
    chunk = self.chunks.get(chunk_id)
    if not chunk:
        return

    if enabled:
        if chunk["dynamic_enabled"]:
            # Already dynamic: nothing to do.
            return
        # Show editable nodes, hide the flattened batch.
        chunk["dynamic_np"].unstash()
        if chunk["static_np"] and not chunk["static_np"].is_empty():
            chunk["static_np"].stash()
        chunk["dynamic_enabled"] = True
        self.active_chunks.add(chunk_id)
        return

    if not chunk["dynamic_enabled"]:
        # Already static: nothing to do.
        return
    # Show the flattened batch, hide editable nodes.
    if chunk["static_np"] and not chunk["static_np"].is_empty():
        chunk["static_np"].unstash()
    chunk["dynamic_np"].stash()
    chunk["dynamic_enabled"] = False
    self.active_chunks.discard(chunk_id)
|
||||
|
||||
def set_active_ids(self, active_ids):
    """Switch the edit-active set: only chunks containing *active_ids* stay
    in dynamic mode.

    Chunks leaving the active set are re-baked first when dirty, then
    demoted back to their static representation.
    """
    wanted = set()
    for obj_id in active_ids:
        chunk_id = self.id_to_chunk.get(obj_id)
        if chunk_id is not None:
            wanted.add(chunk_id)

    # Demote chunks that are no longer edited; re-bake dirty ones first.
    for chunk_id in [c for c in self.active_chunks if c not in wanted]:
        if self.chunks[chunk_id]["dirty"]:
            self._rebuild_static_chunk(chunk_id)
        self._set_chunk_dynamic(chunk_id, False)

    # Promote every chunk that now contains an edited object.
    for chunk_id in wanted:
        self._set_chunk_dynamic(chunk_id, True)
|
||||
|
||||
def bake_ids_and_collect(self, model):
|
||||
"""
|
||||
构建混合架构:
|
||||
1) 把每个 geom 拆成可独立编辑的动态对象
|
||||
2) 按 chunk 生成 flatten 后的静态副本
|
||||
"""
|
||||
t0 = time.time()
|
||||
self._reset_state()
|
||||
|
||||
geom_nodes = list(model.find_all_matches("**/+GeomNode"))
|
||||
print(f"[控制器] 找到 {len(geom_nodes)} 个 GeomNode")
|
||||
|
||||
# Build hierarchy metadata first so UI can mirror source model tree.
|
||||
self._build_scene_tree(model)
|
||||
|
||||
root_name = (model.get_name() or "scene") + "_hybrid"
|
||||
scene_root = NodePath(root_name)
|
||||
pick_root = NodePath(root_name + "_pick")
|
||||
self.model = scene_root
|
||||
self.pick_model = pick_root
|
||||
|
||||
global_id = 0
|
||||
for np in geom_nodes:
|
||||
gnode = np.node()
|
||||
|
||||
if gnode.get_num_parents() > 1:
|
||||
parent = np.get_parent()
|
||||
if not parent.is_empty():
|
||||
new_np = np.copy_to(parent)
|
||||
np.detach_node()
|
||||
np = new_np
|
||||
gnode = np.node()
|
||||
|
||||
unique_key = original_keys.get(id(np), str(np))
|
||||
display_name = np.get_name() or f"Object_{global_id_counter}"
|
||||
|
||||
if unique_key not in self.name_to_ids:
|
||||
self.name_to_ids[unique_key] = []
|
||||
self.key_to_node[unique_key] = np
|
||||
self.node_list.append(unique_key)
|
||||
self.display_names[unique_key] = display_name
|
||||
|
||||
# Save original transform
|
||||
mat_double = np.get_mat()
|
||||
original_transform = LMatrix4f(mat_double)
|
||||
|
||||
for i in range(gnode.get_num_geoms()):
|
||||
geom = gnode.modify_geom(i)
|
||||
vdata = geom.modify_vertex_data()
|
||||
|
||||
if not vdata.has_column("color"):
|
||||
new_format = vdata.get_format().get_union_format(GeomVertexFormat.get_v3c4())
|
||||
vdata.set_format(new_format)
|
||||
|
||||
# Encode Local ID in R/G
|
||||
low = local_idx % 256
|
||||
high = local_idx // 256
|
||||
r = low / 255.0
|
||||
g = high / 255.0
|
||||
|
||||
writer = GeomVertexWriter(vdata, InternalName.make("color"))
|
||||
for row in range(vdata.get_num_rows()):
|
||||
writer.set_row(row)
|
||||
writer.set_data4f(r, g, 0.0, 1.0)
|
||||
|
||||
self.global_transforms.append(original_transform)
|
||||
self.id_to_chunk[global_id_counter] = (chunk_key, local_idx)
|
||||
self.name_to_ids[unique_key].append(global_id_counter)
|
||||
self.id_to_name[global_id_counter] = unique_key
|
||||
self.local_to_global_id[local_idx] = global_id_counter
|
||||
self.position_offsets[local_idx] = Vec3(0, 0, 0)
|
||||
|
||||
global_id_counter += 1
|
||||
local_idx += 1
|
||||
|
||||
# DO NOT reset transform — keep world-space positions
|
||||
|
||||
# Flatten directly on model — NO set_final, allows per-geom frustum culling
|
||||
model.flatten_strong()
|
||||
|
||||
owner_key = self._path_to_tree_key.get(str(np), self.tree_root_key)
|
||||
|
||||
world_mat = LMatrix4f(np.get_mat(model))
|
||||
|
||||
for gi in range(gnode.get_num_geoms()):
|
||||
# Render geometry stays untouched (keep original material/color behavior).
|
||||
render_geom = gnode.get_geom(gi).make_copy()
|
||||
render_gnode = GeomNode(f"obj_{global_id}")
|
||||
render_gnode.add_geom(render_geom, gnode.get_geom_state(gi))
|
||||
|
||||
# Picking geometry gets encoded ID in vertex color.
|
||||
pick_geom = gnode.get_geom(gi).make_copy()
|
||||
pick_vdata = pick_geom.modify_vertex_data()
|
||||
self._encode_id_color(pick_vdata, global_id)
|
||||
pick_gnode = GeomNode(f"pick_{global_id}")
|
||||
pick_gnode.add_geom(pick_geom, gnode.get_geom_state(gi))
|
||||
|
||||
chunk_id = global_id // self.chunk_size
|
||||
chunk = self._ensure_chunk(scene_root, chunk_id)
|
||||
|
||||
obj_np = chunk["dynamic_np"].attach_new_node(render_gnode)
|
||||
obj_np.set_mat(world_mat)
|
||||
pick_np = pick_root.attach_new_node(pick_gnode)
|
||||
pick_np.set_mat(world_mat)
|
||||
|
||||
chunk["members"].append(global_id)
|
||||
self.id_to_chunk[global_id] = chunk_id
|
||||
self.id_to_object_np[global_id] = obj_np
|
||||
self.id_to_pick_np[global_id] = pick_np
|
||||
self.tree_nodes[owner_key]["local_ids"].append(global_id)
|
||||
self.id_to_name[global_id] = owner_key
|
||||
self.global_transforms.append(LMatrix4f(world_mat))
|
||||
self.position_offsets[global_id] = Vec3(0, 0, 0)
|
||||
global_id += 1
|
||||
|
||||
t1 = time.time()
|
||||
print(f"[控制器] Flatten took {(t1-t0)*1000:.0f}ms")
|
||||
|
||||
# Build vertex index AFTER flatten
|
||||
self._build_vertex_index(model)
|
||||
self._init_local_transform_state()
|
||||
self.build_virtual_hierarchy()
|
||||
print(f"[控制器] Dynamic object build took {(t1 - t0) * 1000:.0f}ms")
|
||||
|
||||
for chunk_id in sorted(self.chunks):
|
||||
self._rebuild_static_chunk(chunk_id)
|
||||
self._set_chunk_dynamic(chunk_id, False)
|
||||
|
||||
# Keep ID colors only in picking clone to avoid affecting visible shading.
|
||||
self.pick_model = model.copy_to(NodePath("ssbo_pick_root"))
|
||||
self._set_uniform_vertex_color(model, 1.0, 1.0, 1.0, 1.0)
|
||||
|
||||
t2 = time.time()
|
||||
print(f"[控制器] Vertex index built in {(t2-t1)*1000:.0f}ms, "
|
||||
f"{len(self.vertex_index)} unique IDs indexed")
|
||||
|
||||
self.model = model
|
||||
self.node_list.sort()
|
||||
return global_id_counter
|
||||
print(f"[控制器] Static chunk flatten took {(t2 - t1) * 1000:.0f}ms")
|
||||
print(f"[控制器] Built {len(self.chunks)} chunks, {global_id} objects")
|
||||
|
||||
def _set_uniform_vertex_color(self, root_np, r, g, b, a):
|
||||
"""
|
||||
Force vertex color to a uniform value on visible model to avoid
|
||||
ID-encoding colors tinting the final render output.
|
||||
"""
|
||||
for gn_np in root_np.find_all_matches("**/+GeomNode"):
|
||||
gnode = gn_np.node()
|
||||
for gi in range(gnode.get_num_geoms()):
|
||||
geom = gnode.modify_geom(gi)
|
||||
vdata = geom.modify_vertex_data()
|
||||
if not vdata.has_column("color"):
|
||||
continue
|
||||
writer = GeomVertexWriter(vdata, InternalName.make("color"))
|
||||
for row in range(vdata.get_num_rows()):
|
||||
writer.set_row(row)
|
||||
writer.set_data4f(r, g, b, a)
|
||||
# Fill per-node aggregate IDs and build deterministic preorder list for UI.
|
||||
self._aggregate_tree_ids(self.tree_root_key)
|
||||
self.node_list = []
|
||||
self._build_tree_preorder(self.tree_root_key, self.node_list)
|
||||
|
||||
def build_virtual_hierarchy(self):
    """Build a readonly virtual tree from node_list path keys.

    Each node dict carries: name, full path, children (by segment name),
    leaf_key (the original node_list key, leaves only), display_name,
    group_key (synthetic key for selecting a whole subtree) and
    aggregate_ids (all global ids under the node). Caches the result in
    ``self.virtual_tree`` and summary stats in ``self.virtual_tree_meta``.
    """
    root = {
        "name": "",
        "path": "",
        "children": {},
        "leaf_key": None,
        "display_name": "",
        "group_key": None,
        "aggregate_ids": [],
    }
    max_depth = 0
    leaf_count = 0

    for key in self.node_list:
        if not key:
            continue
        parts = [p for p in str(key).split("/") if p]
        # Hide importer wrapper roots (e.g. model filename / ROOT) but keep real object hierarchy.
        while len(parts) > 1 and self._is_wrapper_segment(parts[0]):
            parts = parts[1:]
        if not parts:
            continue
        max_depth = max(max_depth, len(parts))
        # Descend/create one node per path segment.
        cursor = root
        path_acc = ""
        for i, part in enumerate(parts):
            path_acc = f"{path_acc}/{part}" if path_acc else part
            child = cursor["children"].get(part)
            if child is None:
                child = {
                    "name": part,
                    "path": path_acc,
                    "children": {},
                    "leaf_key": None,
                    "display_name": part,
                    "group_key": None,
                    "aggregate_ids": [],
                }
                cursor["children"][part] = child
            cursor = child
            # The last segment is the leaf carrying the original key.
            if i == len(parts) - 1:
                cursor["leaf_key"] = key
                cursor["display_name"] = self.display_names.get(key, part)
                leaf_count += 1

    # Build aggregate id groups for non-leaf selection (parent moves children).
    def _aggregate(node):
        agg = []
        leaf_key = node.get("leaf_key")
        if leaf_key:
            agg.extend(self.name_to_ids.get(leaf_key, []))
        for child in node.get("children", {}).values():
            agg.extend(_aggregate(child))
        # Stable unique ids (first occurrence wins, order preserved)
        uniq = []
        seen = set()
        for gid in agg:
            if gid in seen:
                continue
            seen.add(gid)
            uniq.append(gid)
        node["aggregate_ids"] = uniq
        # Register a synthetic group key so selecting a parent selects all
        # of its descendants' ids.
        if node.get("path") and uniq:
            group_key = f"__group__::{node['path']}"
            node["group_key"] = group_key
            self.name_to_ids[group_key] = uniq
            self.display_names[group_key] = node.get("display_name", node.get("name", ""))
        return uniq

    _aggregate(root)

    self.virtual_tree = root
    self.virtual_tree_meta = {"max_depth": max_depth, "leaf_count": leaf_count}
    return root
|
||||
|
||||
def get_virtual_hierarchy(self):
    """Return the cached virtual tree, building it lazily on first access."""
    if self.virtual_tree is not None:
        return self.virtual_tree
    return self.build_virtual_hierarchy()
|
||||
|
||||
def _build_vertex_index(self, chunk_root):
    """
    After flatten, batch-read all vertex data with numpy to build:
        local_id -> [(geom_node_np, geom_idx, row_indices_array)]
    Also stores original vertex positions per object (as numpy arrays).

    Reads raw vertex buffers once per geom and decodes the object id from
    the R/G color channels (low byte in R, high byte in G).
    """
    import numpy as np

    for gn_np in chunk_root.find_all_matches("**/+GeomNode"):
        gnode = gn_np.node()
        for gi in range(gnode.get_num_geoms()):
            geom = gnode.get_geom(gi)
            vdata = geom.get_vertex_data()
            num_rows = vdata.get_num_rows()

            if num_rows == 0:
                continue

            # Find vertex and color column info
            fmt = vdata.get_format()

            # Get position column
            pos_col = fmt.get_column(InternalName.get_vertex())
            if pos_col is None:
                continue
            pos_array_idx = fmt.get_array_with(InternalName.get_vertex())
            pos_start = pos_col.get_start()

            # Get color column (carries the encoded object id)
            color_col = fmt.get_column(InternalName.make("color"))
            if color_col is None:
                continue
            color_array_idx = fmt.get_array_with(InternalName.make("color"))
            color_start = color_col.get_start()

            # Read raw position array as one byte matrix (rows x stride)
            pos_array_format = fmt.get_array(pos_array_idx)
            pos_stride = pos_array_format.get_stride()
            pos_handle = vdata.get_array(pos_array_idx).get_handle()
            pos_raw = bytes(pos_handle.get_data())
            pos_buf = np.frombuffer(pos_raw, dtype=np.uint8).reshape(num_rows, pos_stride)

            # Extract xyz positions (3 floats starting at pos_start)
            positions = np.ndarray((num_rows, 3), dtype=np.float32,
                                   buffer=pos_buf[:, pos_start:pos_start+12].tobytes())

            # Read raw color array (may share the position array)
            color_array_format = fmt.get_array(color_array_idx)
            color_stride = color_array_format.get_stride()

            if color_array_idx == pos_array_idx:
                color_buf = pos_buf
            else:
                color_handle = vdata.get_array(color_array_idx).get_handle()
                color_raw = bytes(color_handle.get_data())
                color_buf = np.frombuffer(color_raw, dtype=np.uint8).reshape(num_rows, color_stride)

            # Decode color format to get ID
            # Color can be stored as float32 RGBA or unorm8 RGBA
            num_components = color_col.get_num_components()
            component_bytes = color_col.get_component_bytes()

            if component_bytes == 4:  # float32 per component
                color_data = np.ndarray((num_rows, num_components), dtype=np.float32,
                                        buffer=color_buf[:, color_start:color_start+num_components*4].tobytes())
                # +0.5 before truncation rounds to the nearest byte value.
                r_vals = (color_data[:, 0] * 255.0 + 0.5).astype(np.int32)
                g_vals = (color_data[:, 1] * 255.0 + 0.5).astype(np.int32)
            elif component_bytes == 1:  # uint8 per component
                color_bytes = color_buf[:, color_start:color_start+num_components].copy()
                r_vals = color_bytes[:, 0].astype(np.int32)
                g_vals = color_bytes[:, 1].astype(np.int32)
            else:
                # Fallback: skip this geom
                continue

            # Reassemble the 16-bit id from the two color bytes.
            local_ids = r_vals + (g_vals << 8)

            # Group rows by local_id using argsort (O(N log N) instead of O(N×K))
            sort_idx = np.argsort(local_ids)
            sorted_ids = local_ids[sort_idx]
            sorted_positions = positions[sort_idx]

            # Find group boundaries
            boundaries = np.where(np.diff(sorted_ids) != 0)[0] + 1

            # Split into groups
            id_groups = np.split(sort_idx, boundaries)
            pos_groups = np.split(sorted_positions, boundaries)
            group_ids = sorted_ids[np.concatenate([[0], boundaries])]

            for k in range(len(group_ids)):
                uid = int(group_ids[k])
                rows = id_groups[k]
                pos = pos_groups[k]

                if uid not in self.vertex_index:
                    self.vertex_index[uid] = []
                    self.original_positions[uid] = []

                # One geom can contribute several row groups per object.
                self.vertex_index[uid].append((gn_np, gi, rows))
                self.original_positions[uid].append(pos.copy())
|
||||
|
||||
def _init_local_transform_state(self):
    """Initialize transform state for each local_idx after vertex index is ready.

    Each object starts at identity (zero offset, identity quat, unit scale)
    with its pivot at the object's world-space center.
    """
    self.local_transform_state = {}
    self.local_transform_base_positions = {}

    for local_idx in self.vertex_index.keys():
        # Baseline vertex positions: the reference for all future transforms.
        self.local_transform_base_positions[local_idx] = self.original_positions.get(local_idx, [])
        self.local_transform_state[local_idx] = {
            "offset": Vec3(0, 0, 0),
            "quat": Quat.identQuat(),
            "scale": Vec3(1, 1, 1),
            "pivot": self.get_local_pivot(local_idx),
        }
|
||||
|
||||
def get_local_indices_from_global_ids(self, global_ids):
    """Map global ids to the unique local indices that have vertex data,
    preserving first-seen order and skipping unknown ids."""
    if not global_ids:
        return []

    result = []
    seen = set()
    for gid in global_ids:
        mapping = self.id_to_chunk.get(gid)
        if not mapping:
            continue
        local_idx = mapping[1]
        # Deduplicate and keep only indices with indexed vertex data.
        if local_idx not in seen and local_idx in self.vertex_index:
            seen.add(local_idx)
            result.append(local_idx)
    return result
|
||||
|
||||
def get_local_pivot(self, local_idx):
    """Return the world-space pivot (object center) for one local object,
    or the origin when the index is unknown."""
    mapped = self.local_to_global_id.get(local_idx)
    return Vec3(0, 0, 0) if mapped is None else self.get_object_center(mapped)
|
||||
|
||||
def get_selection_center(self, local_indices):
    """Get center point for a multi-object selection.

    Averages each selected object's current position (pivot + accumulated
    offset); returns the origin when nothing valid is selected.
    """
    if not local_indices:
        return Vec3(0, 0, 0)
    acc = Vec3(0, 0, 0)
    valid = 0
    for local_idx in local_indices:
        state = self.local_transform_state.get(local_idx)
        if not state:
            # Skip indices with no transform state (e.g. stale selection).
            continue
        acc += state.get("pivot", Vec3(0, 0, 0)) + state.get("offset", Vec3(0, 0, 0))
        valid += 1
    if valid == 0:
        return Vec3(0, 0, 0)
    return acc / float(valid)
|
||||
|
||||
def begin_transform_session(self, local_indices):
    """Create immutable baseline snapshot for one gizmo drag session.

    Copies each object's current offset/quat/scale/pivot so that deltas
    applied during the drag are always relative to the drag start, not to
    intermediate states. Entries and base positions are referenced, not
    copied — they are treated as read-only during the session.
    """
    if not local_indices:
        return {"locals": {}}

    locals_snapshot = {}
    for local_idx in local_indices:
        base_state = self.local_transform_state.get(local_idx)
        if not base_state:
            continue
        entries = self.vertex_index.get(local_idx, [])
        base_positions = self.local_transform_base_positions.get(local_idx, [])
        locals_snapshot[local_idx] = {
            # Value copies: the live state mutates while the drag proceeds.
            "offset": Vec3(base_state["offset"]),
            "quat": Quat(base_state["quat"]),
            "scale": Vec3(base_state["scale"]),
            "pivot": Vec3(base_state["pivot"]),
            "entries": entries,
            "base_positions": base_positions,
        }
    return {"locals": locals_snapshot}
|
||||
|
||||
def apply_transform_session(self, snapshot, delta_pos, delta_quat, delta_scale):
    """Apply transform delta to all local indices in snapshot and rewrite vertices.

    For each object: new = R * ((orig - pivot) * scale) + pivot + offset,
    where R/scale/offset combine the snapshot baseline with the drag delta.
    Updates ``local_transform_state`` and ``position_offsets`` as a side
    effect, then writes the transformed positions back into vertex data.
    """
    import numpy as np

    if not snapshot or "locals" not in snapshot:
        return
    # Missing deltas default to the identity transform.
    if delta_pos is None:
        delta_pos = Vec3(0, 0, 0)
    if delta_quat is None:
        delta_quat = Quat.identQuat()
    if delta_scale is None:
        delta_scale = Vec3(1, 1, 1)

    dscale = np.array([delta_scale.x, delta_scale.y, delta_scale.z], dtype=np.float32)
    dpos = np.array([delta_pos.x, delta_pos.y, delta_pos.z], dtype=np.float32)

    for local_idx, local_data in snapshot["locals"].items():
        base_offset = local_data["offset"]
        base_quat = local_data["quat"]
        base_scale = local_data["scale"]
        pivot = local_data["pivot"]

        # Compose drag delta onto the session baseline.
        final_offset = Vec3(base_offset) + delta_pos
        final_quat = Quat(delta_quat * base_quat)
        final_scale = Vec3(
            base_scale.x * delta_scale.x,
            base_scale.y * delta_scale.y,
            base_scale.z * delta_scale.z,
        )
        rot_mat = self._quat_to_np_mat3(final_quat)

        # Persist the composed state so the next session snapshots it.
        self.local_transform_state[local_idx]["offset"] = final_offset
        self.local_transform_state[local_idx]["quat"] = final_quat
        self.local_transform_state[local_idx]["scale"] = final_scale
        self.position_offsets[local_idx] = final_offset

        pivot_np = np.array([pivot.x, pivot.y, pivot.z], dtype=np.float32)
        base_s = np.array([base_scale.x, base_scale.y, base_scale.z], dtype=np.float32)
        total_scale = base_s * dscale
        total_offset = np.array([base_offset.x, base_offset.y, base_offset.z], dtype=np.float32) + dpos

        entries = local_data["entries"]
        base_positions = local_data["base_positions"]
        for i, (gn_np, gi, rows) in enumerate(entries):
            if i >= len(base_positions):
                continue
            orig_pos = base_positions[i]
            if orig_pos is None or len(orig_pos) == 0:
                continue
            # Scale and rotate about the pivot, then translate.
            centered = orig_pos - pivot_np
            scaled = centered * total_scale
            rotated = scaled @ rot_mat.T
            new_pos = rotated + pivot_np + total_offset

            # Write transformed positions back into the geom's vertex data.
            gnode = gn_np.node()
            geom = gnode.modify_geom(gi)
            vdata = geom.modify_vertex_data()
            writer = GeomVertexWriter(vdata, "vertex")

            for j in range(len(rows)):
                writer.set_row(int(rows[j]))
                writer.set_data3f(float(new_pos[j, 0]), float(new_pos[j, 1]), float(new_pos[j, 2]))
|
||||
|
||||
def _quat_to_np_mat3(self, quat):
|
||||
"""Convert Panda3D Quat to 3x3 numpy rotation matrix."""
|
||||
import numpy as np
|
||||
q = Quat(quat)
|
||||
q.normalize()
|
||||
w = float(q.getR())
|
||||
x = float(q.getI())
|
||||
y = float(q.getJ())
|
||||
z = float(q.getK())
|
||||
|
||||
xx = x * x
|
||||
yy = y * y
|
||||
zz = z * z
|
||||
xy = x * y
|
||||
xz = x * z
|
||||
yz = y * z
|
||||
wx = w * x
|
||||
wy = w * y
|
||||
wz = w * z
|
||||
|
||||
return np.array([
|
||||
[1.0 - 2.0 * (yy + zz), 2.0 * (xy - wz), 2.0 * (xz + wy)],
|
||||
[2.0 * (xy + wz), 1.0 - 2.0 * (xx + zz), 2.0 * (yz - wx)],
|
||||
[2.0 * (xz - wy), 2.0 * (yz + wx), 1.0 - 2.0 * (xx + yy)],
|
||||
], dtype=np.float32)
|
||||
model.remove_node()
|
||||
return global_id
|
||||
|
||||
def create_ssbo(self):
|
||||
"""No SSBO needed — using RP default rendering."""
|
||||
"""No SSBO needed in hybrid mode."""
|
||||
return None
|
||||
|
||||
def move_object(self, global_id, delta):
|
||||
"""
|
||||
Move an object by modifying vertex positions directly.
|
||||
delta: Vec3 translation to apply.
|
||||
Uses numpy for batch vertex updates.
|
||||
动态编辑路径: 仅改 NodePath 变换,不改顶点数据。
|
||||
"""
|
||||
import numpy as np
|
||||
|
||||
if global_id not in self.id_to_chunk:
|
||||
if global_id not in self.id_to_object_np:
|
||||
return
|
||||
|
||||
_, local_idx = self.id_to_chunk[global_id]
|
||||
|
||||
if local_idx not in self.vertex_index:
|
||||
return
|
||||
|
||||
# Accumulate offset
|
||||
self.position_offsets[local_idx] = self.position_offsets.get(local_idx, Vec3(0)) + delta
|
||||
offset = self.position_offsets[local_idx]
|
||||
offset_arr = np.array([offset.x, offset.y, offset.z], dtype=np.float32)
|
||||
|
||||
# Update each (geom_node, geom_idx, rows) group
|
||||
entries = self.vertex_index[local_idx]
|
||||
originals = self.original_positions[local_idx]
|
||||
|
||||
for i, (gn_np, gi, rows) in enumerate(entries):
|
||||
orig_pos = originals[i] # numpy array (N, 3)
|
||||
new_pos = orig_pos + offset_arr # vectorized add
|
||||
|
||||
gnode = gn_np.node()
|
||||
geom = gnode.modify_geom(gi)
|
||||
vdata = geom.modify_vertex_data()
|
||||
writer = GeomVertexWriter(vdata, "vertex")
|
||||
|
||||
for j in range(len(rows)):
|
||||
writer.set_row(int(rows[j]))
|
||||
writer.set_data3f(float(new_pos[j, 0]), float(new_pos[j, 1]), float(new_pos[j, 2]))
|
||||
|
||||
chunk_id = self.id_to_chunk[global_id]
|
||||
self._set_chunk_dynamic(chunk_id, True)
|
||||
|
||||
obj_np = self.id_to_object_np[global_id]
|
||||
next_pos = obj_np.get_pos() + delta
|
||||
# Fluid transform helps reduce visible one-frame transform lag in some pipelines.
|
||||
if hasattr(obj_np, "set_fluid_pos"):
|
||||
obj_np.set_fluid_pos(next_pos)
|
||||
else:
|
||||
obj_np.set_pos(next_pos)
|
||||
|
||||
pick_np = self.id_to_pick_np.get(global_id)
|
||||
if pick_np and not pick_np.is_empty():
|
||||
if hasattr(pick_np, "set_fluid_pos"):
|
||||
pick_np.set_fluid_pos(next_pos)
|
||||
else:
|
||||
pick_np.set_pos(next_pos)
|
||||
|
||||
self.position_offsets[global_id] = self.position_offsets.get(global_id, Vec3(0, 0, 0)) + delta
|
||||
self.chunks[chunk_id]["dirty"] = True
|
||||
|
||||
def get_world_pos(self, global_id):
|
||||
"""Get current world position of an object."""
|
||||
if global_id not in self.id_to_chunk:
|
||||
if global_id not in self.id_to_object_np or not self.model:
|
||||
return Vec3(0, 0, 0)
|
||||
_, local_idx = self.id_to_chunk[global_id]
|
||||
|
||||
original_mat = self.global_transforms[global_id]
|
||||
original_pos = original_mat.get_row3(3)
|
||||
offset = self.position_offsets.get(local_idx, Vec3(0))
|
||||
|
||||
return Vec3(original_pos) + offset
|
||||
return self.id_to_object_np[global_id].get_pos(self.model)
|
||||
|
||||
def get_object_center(self, global_id):
|
||||
"""Get the original center position of an object (for rotation pivot)."""
|
||||
if global_id >= len(self.global_transforms):
|
||||
return Vec3(0, 0, 0)
|
||||
mat = self.global_transforms[global_id]
|
||||
return Vec3(mat.get_row3(3))
|
||||
|
||||
def get_transform(self, global_id):
|
||||
"""Get original transform."""
|
||||
if global_id >= len(self.global_transforms):
|
||||
return LMatrix4f.ident_mat()
|
||||
return self.global_transforms[global_id]
|
||||
|
||||
@ -3,30 +3,51 @@ import sys
|
||||
import os
|
||||
import struct
|
||||
import time
|
||||
import types
|
||||
from panda3d.core import (
|
||||
Filename, loadPrcFileData, GeomVertexFormat,
|
||||
GeomVertexWriter, InternalName, Shader, Texture, SamplerState,
|
||||
Vec3, Vec4, Point2, Point3, LMatrix4f, ShaderBuffer, GeomEnums, OmniBoundingVolume, Quat,
|
||||
Vec3, Vec4, Point2, Point3, LMatrix4f, ShaderBuffer, GeomEnums, OmniBoundingVolume,
|
||||
TransparencyAttrib, BoundingSphere, NodePath,
|
||||
GraphicsEngine, WindowProperties, FrameBufferProperties,
|
||||
GraphicsPipe, GraphicsOutput, Camera, DisplayRegion, OrthographicLens,
|
||||
BoundingBox
|
||||
BoundingBox, BitMask32
|
||||
)
|
||||
|
||||
import p3dimgui.backend as p3dimgui_backend
|
||||
import p3dimgui.shaders as p3dimgui_shaders
|
||||
# p3dimgui.backend first tries `from shaders import *`, which can be shadowed by
|
||||
# project folders named `shaders/` and leave VERT_SHADER/FRAG_SHADER undefined.
|
||||
# Seed a valid fallback module before importing p3dimgui.
|
||||
_shaders_mod = sys.modules.get("shaders")
|
||||
if not (_shaders_mod and hasattr(_shaders_mod, "VERT_SHADER") and hasattr(_shaders_mod, "FRAG_SHADER")):
|
||||
_shaders_mod = types.ModuleType("shaders")
|
||||
_shaders_mod.FRAG_SHADER = """
|
||||
#version 120
|
||||
varying vec2 texcoord;
|
||||
varying vec4 color;
|
||||
uniform sampler2D p3d_Texture0;
|
||||
void main() {
|
||||
gl_FragColor = color * texture2D(p3d_Texture0, texcoord);
|
||||
}
|
||||
"""
|
||||
_shaders_mod.VERT_SHADER = """
|
||||
#version 120
|
||||
attribute vec4 p3d_Vertex;
|
||||
attribute vec4 p3d_Color;
|
||||
varying vec2 texcoord;
|
||||
varying vec4 color;
|
||||
uniform mat4 p3d_ModelViewProjectionMatrix;
|
||||
void main() {
|
||||
texcoord = p3d_Vertex.zw;
|
||||
color = p3d_Color.bgra;
|
||||
gl_Position = p3d_ModelViewProjectionMatrix * vec4(p3d_Vertex.x, 0.0, -p3d_Vertex.y, 1.0);
|
||||
}
|
||||
"""
|
||||
sys.modules["shaders"] = _shaders_mod
|
||||
|
||||
from p3dimgui import ImGuiBackend
|
||||
from imgui_bundle import imgui
|
||||
from rpcore.effect import Effect
|
||||
|
||||
# Work around p3dimgui import-order issue where backend may import an unrelated
|
||||
# top-level "shaders" module and miss these globals.
|
||||
if not hasattr(p3dimgui_backend, "VERT_SHADER"):
|
||||
p3dimgui_backend.VERT_SHADER = p3dimgui_shaders.VERT_SHADER
|
||||
if not hasattr(p3dimgui_backend, "FRAG_SHADER"):
|
||||
p3dimgui_backend.FRAG_SHADER = p3dimgui_shaders.FRAG_SHADER
|
||||
|
||||
ImGuiBackend = p3dimgui_backend.ImGuiBackend
|
||||
|
||||
from .ssbo_controller import ObjectController
|
||||
|
||||
class SSBOEditor:
|
||||
@ -44,12 +65,7 @@ class SSBOEditor:
|
||||
self.model = None
|
||||
self.ssbo = None
|
||||
self.font_path = font_path
|
||||
# Picking resources may be created later when a model is loaded.
|
||||
self.pick_buffer = None
|
||||
self.pick_texture = None
|
||||
self.pick_cam = None
|
||||
self.pick_cam_np = None
|
||||
self.pick_lens = None
|
||||
self._transform_gizmo = None
|
||||
|
||||
# Internal State
|
||||
self.selected_name = None
|
||||
@ -59,12 +75,8 @@ class SSBOEditor:
|
||||
self.filtered_nodes = []
|
||||
self.debug_mode = False
|
||||
self.keys = {}
|
||||
self._ssbo_transform_active = False
|
||||
self._ssbo_selected_local_indices = []
|
||||
self._ssbo_transform_snapshot = None
|
||||
self._ssbo_gizmo_proxy = None
|
||||
self._ssbo_proxy_start = {"pos": None, "quat": None, "scale": None}
|
||||
self._bound_transform_gizmo = None
|
||||
self.pick_mask = BitMask32.bit(29)
|
||||
self.pick_buffer = None
|
||||
|
||||
# Initialize ImGui Backend if not already present
|
||||
if not hasattr(self.base, 'imgui_backend'):
|
||||
@ -91,6 +103,32 @@ class SSBOEditor:
|
||||
if model_path:
|
||||
self.load_model(model_path)
|
||||
|
||||
def _enable_realtime_shadow_tasks(self):
|
||||
"""
|
||||
Force PSSM-related scheduled tasks to run every frame to avoid visible
|
||||
shadow lag/ghosting while editing moving objects.
|
||||
"""
|
||||
scheduler = getattr(self.rp, "task_scheduler", None)
|
||||
if not scheduler or not hasattr(scheduler, "_tasks"):
|
||||
return
|
||||
|
||||
required = {
|
||||
"pssm_scene_shadows",
|
||||
"pssm_distant_shadows",
|
||||
"pssm_convert_distant_to_esm",
|
||||
"pssm_blur_distant_vert",
|
||||
"pssm_blur_distant_horiz",
|
||||
}
|
||||
changed = False
|
||||
for frame_tasks in scheduler._tasks:
|
||||
for task_name in required:
|
||||
if task_name not in frame_tasks:
|
||||
frame_tasks.append(task_name)
|
||||
changed = True
|
||||
|
||||
if changed:
|
||||
print("[SSBOEditor] Realtime shadow tasks enabled (PSSM updates every frame).")
|
||||
|
||||
def load_font(self):
|
||||
"""Load custom font for ImGui"""
|
||||
io = imgui.get_io()
|
||||
@ -115,7 +153,7 @@ class SSBOEditor:
|
||||
io.fonts.add_font_from_file_ttf(self.font_path, 18.0)
|
||||
else:
|
||||
# Fallback to default or common font
|
||||
default_font = os.path.join(os.path.dirname(os.path.dirname(__file__)), "font", "msyh.ttc")
|
||||
default_font = "d:/renderpipeline/font/msyh.ttc"
|
||||
if os.path.exists(default_font):
|
||||
io.fonts.clear()
|
||||
io.fonts.add_font_from_file_ttf(default_font, 18.0, glyph_ranges=glyph_ranges)
|
||||
@ -128,19 +166,22 @@ class SSBOEditor:
|
||||
io.fonts.add_font_default()
|
||||
|
||||
def load_model(self, model_path):
|
||||
"""Load and process a model — NO custom shader, uses RP default rendering."""
|
||||
"""Load and process a model using hybrid static/dynamic chunks."""
|
||||
print(f"[SSBOEditor] Loading model: {model_path}")
|
||||
fn = Filename.fromOsSpecific(model_path)
|
||||
self.model = self.base.loader.loadModel(fn)
|
||||
source_model = self.base.loader.loadModel(fn)
|
||||
model_name = os.path.basename(model_path)
|
||||
if model_name:
|
||||
source_model.set_name(model_name)
|
||||
|
||||
self.controller = ObjectController()
|
||||
count = self.controller.bake_ids_and_collect(self.model)
|
||||
self._ssbo_transform_active = False
|
||||
self._ssbo_selected_local_indices = []
|
||||
self._ssbo_transform_snapshot = None
|
||||
self._cleanup_ssbo_proxy()
|
||||
count = self.controller.bake_ids_and_collect(source_model)
|
||||
self.model = self.controller.model
|
||||
|
||||
self.model.reparent_to(self.base.render)
|
||||
|
||||
# Keep shadow feedback responsive during interactive edits.
|
||||
self._enable_realtime_shadow_tasks()
|
||||
|
||||
# NO rp.set_effect() — use RP default rendering for max FPS
|
||||
# NO SSBO creation — vertex positions are baked
|
||||
@ -209,6 +250,7 @@ class SSBOEditor:
|
||||
self.pick_buffer.add_render_texture(self.pick_texture, GraphicsOutput.RTM_copy_ram)
|
||||
|
||||
self.pick_cam = Camera("pick_camera")
|
||||
self.pick_cam.set_camera_mask(self.pick_mask)
|
||||
self.pick_cam_np = self.base.cam.attach_new_node(self.pick_cam)
|
||||
self.pick_lens = self.base.camLens.make_copy()
|
||||
self.pick_cam.set_lens(self.pick_lens)
|
||||
@ -218,21 +260,19 @@ class SSBOEditor:
|
||||
|
||||
# Load pick shader
|
||||
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
pick_vert = os.path.join(current_dir, "shaders", "pick_id.vert")
|
||||
pick_frag = os.path.join(current_dir, "shaders", "pick_id.frag")
|
||||
|
||||
pick_vert = Filename.fromOsSpecific(pick_vert).getFullpath()
|
||||
pick_frag = Filename.fromOsSpecific(pick_frag).getFullpath()
|
||||
pick_vert_path = os.path.join(current_dir, "shaders", "pick_id.vert")
|
||||
pick_frag_path = os.path.join(current_dir, "shaders", "pick_id.frag")
|
||||
|
||||
try:
|
||||
pick_shader = Shader.load(
|
||||
Shader.SL_GLSL,
|
||||
pick_vert,
|
||||
pick_frag
|
||||
)
|
||||
pick_scene = getattr(self.controller, "pick_model", None) if self.controller else None
|
||||
if pick_scene is None:
|
||||
pick_scene = self.model
|
||||
# Read shader source directly from OS filesystem to avoid
|
||||
# Panda3D VFS case-mismatch issues on Windows.
|
||||
with open(pick_vert_path, 'r', encoding='utf-8') as f:
|
||||
vert_src = f.read().replace('\r', '')
|
||||
with open(pick_frag_path, 'r', encoding='utf-8') as f:
|
||||
frag_src = f.read().replace('\r', '')
|
||||
pick_shader = Shader.make(Shader.SL_GLSL, vert_src, frag_src)
|
||||
pick_scene = getattr(self.controller, "pick_model", None) or self.model
|
||||
pick_scene.show(self.pick_mask)
|
||||
self.pick_cam.set_scene(pick_scene)
|
||||
initial_state = NodePath("initial")
|
||||
initial_state.set_shader(pick_shader, 100)
|
||||
@ -249,9 +289,7 @@ class SSBOEditor:
|
||||
self.pick_buffer.set_clear_color_active(True)
|
||||
|
||||
def pick_object(self, mx, my):
|
||||
if (not self.pick_buffer or not self.pick_texture or not self.pick_lens or
|
||||
not self.controller or not self.model):
|
||||
return False
|
||||
if not self.pick_buffer: return
|
||||
|
||||
self.pick_lens.set_fov(0.1)
|
||||
self.pick_lens.set_film_offset(0, 0)
|
||||
@ -264,281 +302,254 @@ class SSBOEditor:
|
||||
self.pick_cam_np.set_pos(0, 0, 0)
|
||||
self.pick_cam_np.look_at(far_point)
|
||||
|
||||
# Ensure pick transforms are up-to-date before rendering the pick buffer.
|
||||
# The per-frame sync task may not have run yet for this frame.
|
||||
self._sync_pick_transforms()
|
||||
|
||||
# DEBUG: Compare obj vs pick positions
|
||||
if self.selected_ids:
|
||||
gid = self.selected_ids[0]
|
||||
obj = self.controller.id_to_object_np.get(gid)
|
||||
pick = self.controller.id_to_pick_np.get(gid)
|
||||
pm = self.controller.pick_model
|
||||
if obj and pick:
|
||||
print(f"[PICK DBG] obj[{gid}] world={obj.get_pos(self.base.render)}")
|
||||
print(f"[PICK DBG] pick[{gid}] in_pick_model={pick.get_pos(pm)} local={pick.get_pos()}")
|
||||
pp = getattr(self, '_pick_proxy', None)
|
||||
gp = getattr(self, '_group_proxy', None)
|
||||
if pp:
|
||||
print(f"[PICK DBG] pick_proxy pos={pp.get_pos()} mat row3={pp.get_mat().get_row3(3)}")
|
||||
if gp:
|
||||
print(f"[PICK DBG] group_proxy pos_render={gp.get_pos(self.base.render)}")
|
||||
|
||||
self.pick_buffer.set_active(True)
|
||||
self.base.graphicsEngine.render_frame()
|
||||
self.pick_buffer.set_active(False)
|
||||
self.base.graphicsEngine.extract_texture_data(
|
||||
self.pick_texture, self.base.win.get_gsg()
|
||||
)
|
||||
|
||||
ram_image = self.pick_texture.get_ram_image_as("RGBA")
|
||||
if ram_image:
|
||||
data = memoryview(ram_image)
|
||||
if len(data) >= 4:
|
||||
r, g, b, a = data[0], data[1], data[2], data[3]
|
||||
if a > 0:
|
||||
if a > 0 and b == 0:
|
||||
hit_id = r + (g << 8)
|
||||
node_key = self.controller.id_to_name.get(hit_id)
|
||||
if node_key:
|
||||
print(f"[Pick] Hit: ID={hit_id} -> {node_key}")
|
||||
self.select_node(node_key)
|
||||
return True
|
||||
return
|
||||
|
||||
self.selected_name = None
|
||||
self.selected_ids = []
|
||||
return False
|
||||
self.clear_selection()
|
||||
|
||||
|
||||
def on_mouse_click(self):
|
||||
io = imgui.get_io()
|
||||
if io.want_capture_mouse:
|
||||
if io.want_capture_mouse: return
|
||||
# Skip SSBO picking when user is interacting with the TransformGizmo,
|
||||
# otherwise pick_object would clear the selection and detach the gizmo
|
||||
# before the gizmo's own mouse handler fires.
|
||||
if self._transform_gizmo and self._transform_gizmo.is_hovering:
|
||||
return
|
||||
if self.base.mouseWatcherNode.has_mouse():
|
||||
mpos = self.base.mouseWatcherNode.get_mouse()
|
||||
# If clicking gizmo, skip SSBO pick.
|
||||
if self._try_start_gizmo_drag(mpos.x, mpos.y):
|
||||
return
|
||||
prev_selected = self.selected_name
|
||||
hit = self.pick_object(mpos.x, mpos.y)
|
||||
# SSBO miss must clear current selection.
|
||||
if not hit:
|
||||
self._sync_selection_none()
|
||||
# Always fallback to legacy ray pick when SSBO misses.
|
||||
# This keeps scene selection usable if SSBO ID mapping is incomplete.
|
||||
self._fallback_legacy_pick(mpos.x, mpos.y)
|
||||
elif prev_selected != self.selected_name:
|
||||
# Ensure selection visuals refresh when SSBO selection changes.
|
||||
self._sync_selection_from_key(self.selected_name)
|
||||
self.pick_object(mpos.x, mpos.y)
|
||||
|
||||
def toggle_debug(self):
|
||||
self.debug_mode = not self.debug_mode
|
||||
|
||||
def bind_transform_gizmo(self, gizmo):
|
||||
"""Bind a TransformGizmo so it follows SSBO selection."""
|
||||
self._transform_gizmo = gizmo
|
||||
|
||||
def _start_pick_sync_task(self):
|
||||
"""Start a per-frame task that syncs pick proxy with render proxy."""
|
||||
self.base.task_mgr.remove("ssbo_pick_sync")
|
||||
self.base.task_mgr.add(self._pick_sync_task, "ssbo_pick_sync")
|
||||
|
||||
def _stop_pick_sync_task(self):
|
||||
"""Stop the per-frame pick sync task."""
|
||||
self.base.task_mgr.remove("ssbo_pick_sync")
|
||||
|
||||
def _pick_sync_task(self, task):
|
||||
"""Per-frame: keep pick proxy transform in sync with render proxy."""
|
||||
self._sync_pick_transforms()
|
||||
return task.cont
|
||||
|
||||
def _sync_pick_transforms(self):
|
||||
"""Sync pick scene transforms to match render scene."""
|
||||
# Group proxy: sync the single pick proxy to the render proxy
|
||||
proxy = getattr(self, '_group_proxy', None)
|
||||
pick_proxy = getattr(self, '_pick_proxy', None)
|
||||
if proxy and pick_proxy and not proxy.is_empty() and not pick_proxy.is_empty():
|
||||
pick_proxy.set_mat(proxy.get_mat(self.base.render))
|
||||
return
|
||||
# Single object: sync individual pick_np
|
||||
if not self.controller:
|
||||
return
|
||||
for gid in self.selected_ids:
|
||||
obj_np = self.controller.id_to_object_np.get(gid)
|
||||
pick_np = self.controller.id_to_pick_np.get(gid)
|
||||
if obj_np and pick_np and not obj_np.is_empty() and not pick_np.is_empty():
|
||||
pick_np.set_mat(obj_np.get_mat(self.base.render))
|
||||
|
||||
def clear_selection(self):
|
||||
pass # No selection mask texture needed without custom shader
|
||||
self._stop_pick_sync_task()
|
||||
self._cleanup_group_proxy()
|
||||
self.selected_name = None
|
||||
self.selected_ids = []
|
||||
if self.controller:
|
||||
self.controller.set_active_ids([])
|
||||
if self._transform_gizmo:
|
||||
self._transform_gizmo.detach()
|
||||
|
||||
def _cleanup_group_proxy(self):
|
||||
"""Reparent objects back to their chunks and clean up proxies."""
|
||||
proxy = getattr(self, '_group_proxy', None)
|
||||
if not proxy:
|
||||
return
|
||||
originals = getattr(self, '_group_original_parents', {})
|
||||
pick_originals = getattr(self, '_pick_original_parents', {})
|
||||
|
||||
# Reparent render objects back to their original chunk parents
|
||||
for gid, parent_np in originals.items():
|
||||
obj_np = self.controller.id_to_object_np.get(gid)
|
||||
if obj_np and not obj_np.is_empty() and parent_np and not parent_np.is_empty():
|
||||
obj_np.wrt_reparent_to(parent_np)
|
||||
chunk_id = self.controller.id_to_chunk.get(gid)
|
||||
if chunk_id is not None and chunk_id in self.controller.chunks:
|
||||
self.controller.chunks[chunk_id]["dirty"] = True
|
||||
|
||||
# Reparent pick objects back to pick_model
|
||||
for gid, parent_np in pick_originals.items():
|
||||
pick_np = self.controller.id_to_pick_np.get(gid)
|
||||
if pick_np and not pick_np.is_empty() and parent_np and not parent_np.is_empty():
|
||||
pick_np.wrt_reparent_to(parent_np)
|
||||
|
||||
if not proxy.is_empty():
|
||||
proxy.remove_node()
|
||||
pick_proxy = getattr(self, '_pick_proxy', None)
|
||||
if pick_proxy and not pick_proxy.is_empty():
|
||||
pick_proxy.remove_node()
|
||||
|
||||
self._group_proxy = None
|
||||
self._pick_proxy = None
|
||||
self._group_original_parents = {}
|
||||
self._pick_original_parents = {}
|
||||
|
||||
def update_selection_mask(self):
|
||||
pass # No selection mask texture needed without custom shader
|
||||
|
||||
def select_node(self, key):
|
||||
if not self.controller or key not in self.controller.name_to_ids:
|
||||
return
|
||||
# Clean up previous group proxy before changing selection
|
||||
self._cleanup_group_proxy()
|
||||
self._stop_pick_sync_task()
|
||||
|
||||
self.selected_name = key
|
||||
self.selected_ids = self.controller.name_to_ids.get(key, [])
|
||||
self._sync_selection_from_key(key)
|
||||
self.controller.set_active_ids(self.selected_ids)
|
||||
|
||||
def _sync_selection_from_key(self, key):
|
||||
"""Sync SSBO picked key to legacy SelectionSystem."""
|
||||
try:
|
||||
if hasattr(self.base, "selection") and self.base.selection:
|
||||
kind, target = self._resolve_ssbo_selection_target(key)
|
||||
if kind == "proxy":
|
||||
target_np = target
|
||||
else:
|
||||
target_np = target if target is not None else self.model
|
||||
if target_np is None or target_np.isEmpty():
|
||||
target_np = self.model
|
||||
self.base.selection.updateSelection(target_np)
|
||||
except Exception as e:
|
||||
print(f"[SSBOEditor] selection sync failed: {e}")
|
||||
|
||||
def _sync_selection_none(self):
|
||||
"""Clear legacy SelectionSystem selection."""
|
||||
try:
|
||||
self._ssbo_transform_active = False
|
||||
self._ssbo_selected_local_indices = []
|
||||
self._ssbo_transform_snapshot = None
|
||||
self._cleanup_ssbo_proxy()
|
||||
if hasattr(self.base, "selection") and self.base.selection:
|
||||
self.base.selection.updateSelection(None)
|
||||
except Exception as e:
|
||||
print(f"[SSBOEditor] clear selection sync failed: {e}")
|
||||
|
||||
def bind_transform_gizmo(self, transform_gizmo):
|
||||
"""Bind TransformGizmo drag hooks so SSBO sub-object transforms can follow gizmo."""
|
||||
self._bound_transform_gizmo = transform_gizmo
|
||||
if not transform_gizmo:
|
||||
if not self._transform_gizmo or not self.selected_ids:
|
||||
if self._transform_gizmo:
|
||||
self._transform_gizmo.detach()
|
||||
return
|
||||
hooks = {
|
||||
"move": {
|
||||
"drag_start": [self._on_ssbo_gizmo_drag_start],
|
||||
"drag_move": [self._on_ssbo_gizmo_drag_move],
|
||||
"drag_end": [self._on_ssbo_gizmo_drag_end],
|
||||
},
|
||||
"rotate": {
|
||||
"drag_start": [self._on_ssbo_gizmo_drag_start],
|
||||
"drag_move": [self._on_ssbo_gizmo_drag_move],
|
||||
"drag_end": [self._on_ssbo_gizmo_drag_end],
|
||||
},
|
||||
"scale": {
|
||||
"drag_start": [self._on_ssbo_gizmo_drag_start],
|
||||
"drag_move": [self._on_ssbo_gizmo_drag_move],
|
||||
"drag_end": [self._on_ssbo_gizmo_drag_end],
|
||||
},
|
||||
}
|
||||
try:
|
||||
if hasattr(transform_gizmo, "set_event_hooks"):
|
||||
transform_gizmo.set_event_hooks(hooks, replace=False)
|
||||
print("[SSBOEditor] TransformGizmo hooks bound")
|
||||
except Exception as e:
|
||||
print(f"[SSBOEditor] bind transform gizmo failed: {e}")
|
||||
|
||||
def _resolve_ssbo_selection_target(self, key):
|
||||
"""Resolve selected SSBO key to proxy node (preferred) or regular node."""
|
||||
self._ssbo_transform_active = False
|
||||
self._ssbo_transform_snapshot = None
|
||||
self._ssbo_selected_local_indices = []
|
||||
if len(self.selected_ids) == 1:
|
||||
# Single object: attach gizmo directly
|
||||
obj_np = self.controller.id_to_object_np.get(self.selected_ids[0])
|
||||
if obj_np and not obj_np.is_empty():
|
||||
self._transform_gizmo.attach(obj_np)
|
||||
self._start_pick_sync_task()
|
||||
return
|
||||
|
||||
if not self.controller or not key:
|
||||
return "node", self.model
|
||||
global_ids = self.controller.name_to_ids.get(key, [])
|
||||
local_indices = self.controller.get_local_indices_from_global_ids(global_ids)
|
||||
self._ssbo_selected_local_indices = local_indices
|
||||
if local_indices:
|
||||
print(f"[SSBOEditor] selection locals={len(local_indices)} key={key}")
|
||||
center = self.controller.get_selection_center(local_indices)
|
||||
proxy = self._ensure_ssbo_proxy(center)
|
||||
return "proxy", proxy
|
||||
target_np = self.controller.key_to_node.get(key)
|
||||
if target_np is None or target_np.isEmpty():
|
||||
target_np = self.model
|
||||
return "node", target_np
|
||||
# Multiple objects (parent node): create MIRRORED proxies in both
|
||||
# render and pick scenes so transforms stay synchronized naturally.
|
||||
from panda3d.core import Vec3
|
||||
proxy = self.base.render.attach_new_node("ssbo_group_proxy")
|
||||
pick_proxy = self.controller.pick_model.attach_new_node("ssbo_pick_proxy")
|
||||
|
||||
def _ensure_ssbo_proxy(self, center):
|
||||
if self._ssbo_gizmo_proxy is None or self._ssbo_gizmo_proxy.isEmpty():
|
||||
self._ssbo_gizmo_proxy = self.base.render.attach_new_node("ssbo_transform_proxy")
|
||||
self._ssbo_gizmo_proxy.setTag("is_ssbo_proxy", "1")
|
||||
self._ssbo_gizmo_proxy.set_pos(center)
|
||||
self._ssbo_gizmo_proxy.set_hpr(0, 0, 0)
|
||||
self._ssbo_gizmo_proxy.set_scale(1, 1, 1)
|
||||
return self._ssbo_gizmo_proxy
|
||||
center = Vec3(0, 0, 0)
|
||||
valid = []
|
||||
for gid in self.selected_ids:
|
||||
obj_np = self.controller.id_to_object_np.get(gid)
|
||||
if obj_np and not obj_np.is_empty():
|
||||
center += obj_np.get_pos(self.base.render)
|
||||
valid.append(gid)
|
||||
if not valid:
|
||||
proxy.remove_node()
|
||||
pick_proxy.remove_node()
|
||||
return
|
||||
|
||||
def _cleanup_ssbo_proxy(self):
|
||||
if self._ssbo_gizmo_proxy and not self._ssbo_gizmo_proxy.isEmpty():
|
||||
self._ssbo_gizmo_proxy.removeNode()
|
||||
self._ssbo_gizmo_proxy = None
|
||||
center /= len(valid)
|
||||
proxy.set_pos(self.base.render, center)
|
||||
pick_proxy.set_pos(center) # pick_model has identity transform
|
||||
|
||||
def _on_ssbo_gizmo_drag_start(self, payload):
|
||||
try:
|
||||
target = payload.get("target") if payload else None
|
||||
if not target or target != self._ssbo_gizmo_proxy:
|
||||
self._ssbo_transform_active = False
|
||||
return
|
||||
if not self.controller or not self._ssbo_selected_local_indices:
|
||||
self._ssbo_transform_active = False
|
||||
return
|
||||
self._ssbo_transform_snapshot = self.controller.begin_transform_session(
|
||||
self._ssbo_selected_local_indices
|
||||
)
|
||||
self._ssbo_proxy_start = {
|
||||
"pos": Vec3(target.getPos(self.base.render)),
|
||||
"quat": Quat(target.getQuat(self.base.render)),
|
||||
"scale": Vec3(target.getScale()),
|
||||
}
|
||||
self._ssbo_transform_active = True
|
||||
print(f"[SSBOEditor] drag_start locals={len(self._ssbo_selected_local_indices)}")
|
||||
except Exception as e:
|
||||
self._ssbo_transform_active = False
|
||||
print(f"[SSBOEditor] drag_start bridge failed: {e}")
|
||||
self._group_proxy = proxy
|
||||
self._pick_proxy = pick_proxy
|
||||
self._group_original_parents = {}
|
||||
self._pick_original_parents = {}
|
||||
|
||||
def _on_ssbo_gizmo_drag_move(self, payload):
|
||||
try:
|
||||
if not self._ssbo_transform_active:
|
||||
return
|
||||
target = payload.get("target") if payload else None
|
||||
if not target or target != self._ssbo_gizmo_proxy:
|
||||
return
|
||||
start_pos = self._ssbo_proxy_start.get("pos")
|
||||
start_quat = self._ssbo_proxy_start.get("quat")
|
||||
start_scale = self._ssbo_proxy_start.get("scale")
|
||||
if start_pos is None or start_quat is None or start_scale is None:
|
||||
return
|
||||
for gid in valid:
|
||||
# Reparent render object under render proxy
|
||||
obj_np = self.controller.id_to_object_np[gid]
|
||||
self._group_original_parents[gid] = obj_np.get_parent()
|
||||
obj_np.wrt_reparent_to(proxy)
|
||||
# Reparent pick object under pick proxy (mirrored structure)
|
||||
pick_np = self.controller.id_to_pick_np[gid]
|
||||
self._pick_original_parents[gid] = pick_np.get_parent()
|
||||
pick_np.wrt_reparent_to(pick_proxy)
|
||||
|
||||
curr_pos = Vec3(target.getPos(self.base.render))
|
||||
curr_quat = Quat(target.getQuat(self.base.render))
|
||||
curr_scale = Vec3(target.getScale())
|
||||
self._transform_gizmo.attach(proxy)
|
||||
self._start_pick_sync_task()
|
||||
|
||||
delta_pos = curr_pos - start_pos
|
||||
inv_start_quat = Quat(start_quat)
|
||||
inv_start_quat.invertInPlace()
|
||||
delta_quat = curr_quat * inv_start_quat
|
||||
delta_scale = Vec3(
|
||||
curr_scale.x / start_scale.x if abs(start_scale.x) > 1e-8 else 1.0,
|
||||
curr_scale.y / start_scale.y if abs(start_scale.y) > 1e-8 else 1.0,
|
||||
curr_scale.z / start_scale.z if abs(start_scale.z) > 1e-8 else 1.0,
|
||||
)
|
||||
def _rebuild_filtered_tree_rows(self):
|
||||
"""
|
||||
Build a flattened tree-row list with depth info for rendering in ImGui,
|
||||
while preserving source-model parent/child hierarchy.
|
||||
"""
|
||||
self.filtered_nodes = []
|
||||
if not self.controller or not self.controller.tree_root_key:
|
||||
return
|
||||
|
||||
self.controller.apply_transform_session(
|
||||
self._ssbo_transform_snapshot,
|
||||
delta_pos,
|
||||
delta_quat,
|
||||
delta_scale,
|
||||
)
|
||||
except Exception as e:
|
||||
print(f"[SSBOEditor] drag_move bridge failed: {e}")
|
||||
search_lower = self.search_text.strip().lower()
|
||||
|
||||
def _on_ssbo_gizmo_drag_end(self, payload):
|
||||
try:
|
||||
if self._ssbo_transform_active:
|
||||
print(f"[SSBOEditor] drag_end locals={len(self._ssbo_selected_local_indices)}")
|
||||
self._ssbo_transform_active = False
|
||||
self._ssbo_transform_snapshot = None
|
||||
except Exception as e:
|
||||
print(f"[SSBOEditor] drag_end bridge failed: {e}")
|
||||
def walk(key, depth):
|
||||
node = self.controller.tree_nodes.get(key)
|
||||
if not node:
|
||||
return False, []
|
||||
|
||||
def _fallback_legacy_pick(self, mx, my):
|
||||
"""Fallback to legacy ray picking when SSBO misses."""
|
||||
try:
|
||||
if not hasattr(self.base, "event_handler") or not self.base.event_handler:
|
||||
return
|
||||
win_w, win_h = self.base.win.getSize()
|
||||
x = (mx + 1.0) * 0.5 * win_w
|
||||
y = (1.0 - my) * 0.5 * win_h
|
||||
self.base.event_handler.mousePressEventLeft({"x": x, "y": y})
|
||||
except Exception as e:
|
||||
print(f"[SSBOEditor] legacy fallback pick failed: {e}")
|
||||
# Skip redundant wrapper nodes (e.g. ROOT under model file root),
|
||||
# while preserving child hierarchy and selection mapping.
|
||||
if self.controller.should_hide_tree_node(key):
|
||||
merged_rows = []
|
||||
merged_visible = False
|
||||
for child_key in node["children"]:
|
||||
visible, rows = walk(child_key, depth)
|
||||
if visible:
|
||||
merged_visible = True
|
||||
merged_rows.extend(rows)
|
||||
return merged_visible, merged_rows
|
||||
|
||||
def _try_start_gizmo_drag(self, mouse_x=None, mouse_y=None):
|
||||
"""Try to start gizmo drag using the existing SelectionSystem pipeline."""
|
||||
try:
|
||||
new_transform = getattr(self.base, "newTransform", None)
|
||||
if (
|
||||
new_transform is not None and
|
||||
mouse_x is not None and
|
||||
mouse_y is not None and
|
||||
self._is_mouse_on_new_gizmo(new_transform, mouse_x, mouse_y)
|
||||
):
|
||||
return True
|
||||
selection = getattr(self.base, "selection", None)
|
||||
if not selection or not selection.gizmo:
|
||||
return False
|
||||
win_w, win_h = self.base.win.getSize()
|
||||
mpos = self.base.mouseWatcherNode.get_mouse()
|
||||
x = (mpos.x + 1.0) * 0.5 * win_w
|
||||
y = (1.0 - mpos.y) * 0.5 * win_h
|
||||
display = self.controller.display_names.get(key, key)
|
||||
obj_count = len(self.controller.name_to_ids.get(key, []))
|
||||
name_match = (not search_lower) or (search_lower in display.lower())
|
||||
|
||||
axis = selection.gizmoHighlightAxis or selection.checkGizmoClick(x, y)
|
||||
if axis:
|
||||
selection.startGizmoDrag(axis, x, y)
|
||||
return True
|
||||
except Exception as e:
|
||||
print(f"[SSBOEditor] gizmo drag start failed: {e}")
|
||||
return False
|
||||
child_rows = []
|
||||
child_match = False
|
||||
for child_key in node["children"]:
|
||||
visible, rows = walk(child_key, depth + 1)
|
||||
if visible:
|
||||
child_match = True
|
||||
child_rows.extend(rows)
|
||||
|
||||
def _is_mouse_on_new_gizmo(self, new_transform, mouse_x, mouse_y):
|
||||
"""Refresh and query hover state for TransformGizmo on current click position."""
|
||||
try:
|
||||
mouse_pos = Point3(mouse_x, mouse_y, 0.0)
|
||||
for gizmo_name in ("move_gizmo", "rotate_gizmo", "scale_gizmo"):
|
||||
gizmo = getattr(new_transform, gizmo_name, None)
|
||||
if not gizmo or not getattr(gizmo, "attached", False):
|
||||
continue
|
||||
hover_updater = getattr(gizmo, "_update_hover_highlight", None)
|
||||
if callable(hover_updater):
|
||||
hover_updater(mouse_pos)
|
||||
return bool(getattr(new_transform, "is_hovering", False))
|
||||
except Exception as e:
|
||||
print(f"[SSBOEditor] new gizmo hover check failed: {e}")
|
||||
return False
|
||||
visible = (not search_lower) or name_match or child_match
|
||||
if not visible:
|
||||
return False, []
|
||||
|
||||
row = (key, depth, display, obj_count)
|
||||
return True, [row] + child_rows
|
||||
|
||||
_, rows = walk(self.controller.tree_root_key, 0)
|
||||
self.filtered_nodes = rows
|
||||
|
||||
def focus_on_selected(self):
|
||||
if self.selected_name and self.selected_ids:
|
||||
@ -562,28 +573,18 @@ class SSBOEditor:
|
||||
changed, self.search_text = imgui.input_text("Search", self.search_text, 256)
|
||||
|
||||
if imgui.begin_child("ObjectList", (0, 380), child_flags=imgui.ChildFlags_.borders):
|
||||
if self.search_text != self.last_search_text:
|
||||
if self.search_text != self.last_search_text or not self.filtered_nodes:
|
||||
self.last_search_text = self.search_text
|
||||
search_lower = self.search_text.lower()
|
||||
self.filtered_nodes = []
|
||||
for key in self.controller.node_list:
|
||||
display = self.controller.display_names.get(key, key.split('/')[-1])
|
||||
if not search_lower or (search_lower in display.lower() or search_lower in key.lower()):
|
||||
geom_count = len(self.controller.name_to_ids.get(key, []))
|
||||
self.filtered_nodes.append((key, display, geom_count))
|
||||
|
||||
# If list is empty initially (no search), show all
|
||||
if not self.search_text and not self.filtered_nodes:
|
||||
if len(self.filtered_nodes) != len(self.controller.node_list):
|
||||
self.filtered_nodes = [(k, self.controller.display_names.get(k, k), len(self.controller.name_to_ids.get(k,[]))) for k in self.controller.node_list]
|
||||
self._rebuild_filtered_tree_rows()
|
||||
|
||||
count = len(self.filtered_nodes)
|
||||
clipper = imgui.ListClipper()
|
||||
clipper.begin(count)
|
||||
while clipper.step():
|
||||
for i in range(clipper.display_start, clipper.display_end):
|
||||
key, display, geom_count = self.filtered_nodes[i]
|
||||
label = f"{display} ({geom_count})"
|
||||
key, depth, display, geom_count = self.filtered_nodes[i]
|
||||
indent = " " * depth
|
||||
label = f"{indent}{display} ({geom_count})##{key}"
|
||||
is_selected = (key == self.selected_name)
|
||||
if imgui.selectable(label, is_selected)[0]:
|
||||
self.select_node(key)
|
||||
@ -591,7 +592,8 @@ class SSBOEditor:
|
||||
|
||||
imgui.separator()
|
||||
if self.selected_name:
|
||||
imgui.text_colored((1, 0.8, 0.2, 1), f"Selected: {self.selected_name}")
|
||||
selected_display = self.controller.display_names.get(self.selected_name, self.selected_name)
|
||||
imgui.text_colored((1, 0.8, 0.2, 1), f"Selected: {selected_display}")
|
||||
if imgui.button("Focus (F)"): self.focus_on_selected()
|
||||
imgui.end()
|
||||
|
||||
|
||||
@ -1027,14 +1027,9 @@ class AppActions:
|
||||
|
||||
def _import_model_for_runtime(self, file_path, prefer_scene_manager=False):
|
||||
"""Import model through the active runtime path.
|
||||
SSBO mode: load via SSBOEditor only (avoid duplicate SceneManager model).
|
||||
SSBO mode: load via SSBOEditor always (regardless of prefer_scene_manager).
|
||||
Legacy mode: load via SceneManager.
|
||||
"""
|
||||
if prefer_scene_manager:
|
||||
if hasattr(self, 'scene_manager') and self.scene_manager:
|
||||
return self.scene_manager.importModel(file_path)
|
||||
return None
|
||||
|
||||
if self.use_ssbo_mouse_picking and getattr(self, 'ssbo_editor', None):
|
||||
try:
|
||||
# Clear selection/gizmo first to avoid dangling references to soon-to-be removed nodes.
|
||||
|
||||
@ -513,7 +513,7 @@ class EditorPanels:
|
||||
imgui.pop_style_color()
|
||||
|
||||
def _draw_ssbo_virtual_children(self, node):
|
||||
"""Draw SSBO controller nodes as virtual children for scene tree."""
|
||||
"""Draw SSBO controller tree_nodes as virtual children for scene tree."""
|
||||
ssbo_editor = getattr(self.app, "ssbo_editor", None)
|
||||
if not ssbo_editor:
|
||||
return False
|
||||
@ -522,66 +522,59 @@ class EditorPanels:
|
||||
if not model or model != node or not controller:
|
||||
return False
|
||||
|
||||
tree_root = controller.get_virtual_hierarchy() if hasattr(controller, "get_virtual_hierarchy") else None
|
||||
if not tree_root or not tree_root.get("children"):
|
||||
root_key = getattr(controller, "tree_root_key", None)
|
||||
if not root_key or root_key not in controller.tree_nodes:
|
||||
imgui.text_disabled("(无可用子节点)")
|
||||
return True
|
||||
|
||||
for name in sorted(tree_root["children"].keys()):
|
||||
child = tree_root["children"][name]
|
||||
self._draw_ssbo_virtual_tree_node(ssbo_editor, child, "ssbo_root")
|
||||
root_node = controller.tree_nodes[root_key]
|
||||
if not root_node["children"]:
|
||||
imgui.text_disabled("(无可用子节点)")
|
||||
return True
|
||||
|
||||
for child_key in root_node["children"]:
|
||||
self._draw_ssbo_virtual_tree_node(ssbo_editor, controller, child_key)
|
||||
return True
|
||||
|
||||
def _draw_ssbo_virtual_tree_node(self, ssbo_editor, tree_node, unique_id_prefix, depth=0):
|
||||
"""Recursively draw virtual SSBO hierarchy in scene tree."""
|
||||
if not tree_node:
|
||||
def _draw_ssbo_virtual_tree_node(self, ssbo_editor, controller, key):
|
||||
"""Recursively draw SSBO tree_nodes hierarchy in scene tree."""
|
||||
node_data = controller.tree_nodes.get(key)
|
||||
if not node_data:
|
||||
return
|
||||
path = tree_node.get("path", "")
|
||||
display = tree_node.get("display_name") or tree_node.get("name") or path
|
||||
leaf_key = tree_node.get("leaf_key")
|
||||
group_key = tree_node.get("group_key")
|
||||
children = tree_node.get("children", {}) or {}
|
||||
|
||||
label = f"{display}##{unique_id_prefix}_{path}"
|
||||
# Skip redundant wrapper nodes (e.g. ROOT), show their children instead.
|
||||
if controller.should_hide_tree_node(key):
|
||||
for child_key in node_data["children"]:
|
||||
self._draw_ssbo_virtual_tree_node(ssbo_editor, controller, child_key)
|
||||
return
|
||||
|
||||
# Leaf: selectable to trigger SSBO selection.
|
||||
if not children and leaf_key:
|
||||
is_selected = (getattr(ssbo_editor, "selected_name", None) == leaf_key)
|
||||
display = controller.display_names.get(key, key)
|
||||
obj_count = len(controller.name_to_ids.get(key, []))
|
||||
children = node_data["children"]
|
||||
is_selected = (getattr(ssbo_editor, "selected_name", None) == key)
|
||||
|
||||
if not children:
|
||||
# Leaf node: selectable
|
||||
label = f"{display} ({obj_count})##{key}"
|
||||
if imgui.selectable(label, is_selected)[0]:
|
||||
ssbo_editor.select_node(leaf_key)
|
||||
ssbo_editor.select_node(key)
|
||||
if hasattr(self.app, "lui_manager"):
|
||||
self.app.lui_manager.selected_index = -1
|
||||
return
|
||||
|
||||
# Non-leaf: tree node only for hierarchy display.
|
||||
opened = imgui.tree_node(label)
|
||||
# Clicking non-leaf row selects its aggregate group so parent transform affects children.
|
||||
if group_key and imgui.is_item_clicked(0):
|
||||
ssbo_editor.select_node(group_key)
|
||||
if hasattr(self.app, "lui_manager"):
|
||||
self.app.lui_manager.selected_index = -1
|
||||
if opened:
|
||||
# If this node is also a selectable leaf, render selectable entry first.
|
||||
if group_key:
|
||||
is_group_selected = (getattr(ssbo_editor, "selected_name", None) == group_key)
|
||||
if imgui.selectable(f"[整体] {display}##group_{unique_id_prefix}_{path}", is_group_selected)[0]:
|
||||
ssbo_editor.select_node(group_key)
|
||||
if hasattr(self.app, "lui_manager"):
|
||||
self.app.lui_manager.selected_index = -1
|
||||
if leaf_key:
|
||||
is_selected = (getattr(ssbo_editor, "selected_name", None) == leaf_key)
|
||||
if imgui.selectable(f"[节点] {display}##leaf_{unique_id_prefix}_{path}", is_selected)[0]:
|
||||
ssbo_editor.select_node(leaf_key)
|
||||
if hasattr(self.app, "lui_manager"):
|
||||
self.app.lui_manager.selected_index = -1
|
||||
for child_name in sorted(children.keys()):
|
||||
self._draw_ssbo_virtual_tree_node(
|
||||
ssbo_editor,
|
||||
children[child_name],
|
||||
unique_id_prefix,
|
||||
depth + 1,
|
||||
)
|
||||
imgui.tree_pop()
|
||||
else:
|
||||
# Branch node: tree node
|
||||
flags = imgui.TreeNodeFlags_.open_on_arrow
|
||||
if is_selected:
|
||||
flags |= imgui.TreeNodeFlags_.selected
|
||||
label = f"{display} ({obj_count})##{key}"
|
||||
opened = imgui.tree_node_ex(label, flags)
|
||||
if imgui.is_item_clicked(0):
|
||||
ssbo_editor.select_node(key)
|
||||
if hasattr(self.app, "lui_manager"):
|
||||
self.app.lui_manager.selected_index = -1
|
||||
if opened:
|
||||
for child_key in children:
|
||||
self._draw_ssbo_virtual_tree_node(ssbo_editor, controller, child_key)
|
||||
imgui.tree_pop()
|
||||
def _show_node_context_menu(self, node, name, node_type):
|
||||
"""显示节点右键菜单"""
|
||||
self.app._context_menu_node = True
|
||||
|
||||
Loading…
Reference in New Issue
Block a user