"""Helpers for lightweight glTF metadata probing and static BAM caching."""
|
||
|
||
from __future__ import annotations
|
||
|
||
import hashlib
|
||
import json
|
||
import os
|
||
import struct
|
||
import tempfile
|
||
|
||
|
||
# Memoized probe results keyed by (abspath, mtime_ns, size); populated and read
# by probe_gltf_metadata() so unchanged files are not re-parsed.
_GLTF_METADATA_CACHE = {}
|
||
|
||
|
||
def is_gltf_path(file_path: str) -> bool:
    """Return True when *file_path* ends in a .gltf or .glb extension (case-insensitive)."""
    _, extension = os.path.splitext(str(file_path or ""))
    return extension.lower() in (".gltf", ".glb")
|
||
|
||
|
||
def _to_os_specific_path(file_path: str) -> str:
    """Best-effort conversion of a possibly Panda-style path to an OS path.

    Tries, in order: the path as given, Panda3D ``Filename`` conversions
    (several constructor spellings across Panda3D builds), and a
    ``/c/...`` -> ``c:/...`` drive heuristic — preferring the first
    candidate that exists on disk. Empty input is returned unchanged.
    """
    raw = os.fspath(file_path or "")
    if not raw:
        return raw
    if os.path.exists(raw):
        return os.path.normpath(raw)

    try:
        from panda3d.core import Filename

        # Constructor name differs across Panda3D versions/builds.
        ctor_names = (
            "fromOsSpecificW",
            "from_os_specific_w",
            "fromOsSpecific",
            "from_os_specific",
        )
        for ctor_name in ctor_names:
            ctor = getattr(Filename, ctor_name, None)
            if not ctor:
                continue
            try:
                candidate = ctor(raw).to_os_specific()
            except Exception:
                continue
            if candidate and os.path.exists(candidate):
                return os.path.normpath(candidate)

        fallback = Filename(raw).to_os_specific()
        if fallback and os.path.exists(fallback):
            return os.path.normpath(fallback)
    except Exception:
        # panda3d unavailable or conversion failed entirely; keep going.
        pass

    # Heuristic for POSIX-style drive paths: "/c/foo" or "\c\foo" -> "c:/foo".
    if len(raw) >= 3 and raw[0] in ("/", "\\") and raw[1].isalpha() and raw[2] in ("/", "\\"):
        drive_candidate = os.path.normpath(f"{raw[1]}:{raw[2:]}")
        if os.path.exists(drive_candidate):
            return drive_candidate

    return os.path.normpath(raw)
|
||
|
||
|
||
def _load_gltf_json_payload(file_path: str):
    """Parse and return the JSON payload of a .gltf/.glb file, or None.

    ``.gltf`` files are read directly as UTF-8 JSON; ``.glb`` containers
    are scanned chunk by chunk until the JSON chunk is found. Any I/O or
    parse failure yields None instead of raising.
    """
    file_path = _to_os_specific_path(file_path)
    if not file_path or not os.path.exists(file_path):
        return None

    extension = os.path.splitext(file_path)[1].lower()
    try:
        if extension == ".gltf":
            with open(file_path, "r", encoding="utf-8") as stream:
                return json.load(stream)

        if extension != ".glb":
            return None

        with open(file_path, "rb") as stream:
            # GLB header: 4-byte magic, uint32 version, uint32 total length.
            header = stream.read(12)
            if len(header) != 12:
                return None
            magic, _version, total_length = struct.unpack("<4sII", header)
            if magic != b"glTF":
                return None

            # Walk the chunk list until the JSON chunk (or EOF) is found.
            while stream.tell() < total_length:
                chunk_header = stream.read(8)
                if len(chunk_header) != 8:
                    break
                chunk_length, chunk_type = struct.unpack("<I4s", chunk_header)
                chunk_data = stream.read(chunk_length)
                if chunk_type != b"JSON":
                    continue
                text = chunk_data.decode("utf-8", errors="ignore")
                # The JSON chunk may be padded with NULs/spaces to 4 bytes.
                return json.loads(text.rstrip("\x00 \t\r\n"))
    except Exception:
        return None
    return None
|
||
|
||
|
||
def probe_gltf_metadata(file_path: str) -> dict:
    """Return lightweight metadata about a glTF/GLB file without full loading.

    Returns a dict with keys:
        is_gltf (bool): True when the file parsed as glTF JSON.
        has_animations (bool): True when the payload lists any animations.
        animation_count (int): Number of entries in the "animations" array.

    Results are memoized per (abspath, mtime_ns, size) so repeated probes of
    an unchanged file skip re-parsing; a changed file gets a new cache key.
    """
    file_path = _to_os_specific_path(file_path)
    if not file_path or not os.path.exists(file_path):
        return {
            "is_gltf": False,
            "has_animations": False,
            "animation_count": 0,
        }

    try:
        stat_info = os.stat(file_path)
        cache_key = (
            os.path.abspath(file_path),
            int(stat_info.st_mtime_ns),
            int(stat_info.st_size),
        )
        cached = _GLTF_METADATA_CACHE.get(cache_key)
        if cached is not None:
            # Return a copy so callers cannot mutate the cached entry.
            return dict(cached)
    except Exception:
        # stat failed (e.g. file vanished); probe without caching.
        cache_key = None

    payload = _load_gltf_json_payload(file_path)
    if not payload:
        result = {
            "is_gltf": False,
            "has_animations": False,
            "animation_count": 0,
        }
    else:
        animations = payload.get("animations") or []
        result = {
            "is_gltf": True,
            "has_animations": bool(animations),
            "animation_count": len(animations),
        }

    if cache_key is not None:
        # Bug fix: the original called _GLTF_METADATA_CACHE.clear() before
        # every insert, which kept at most ONE entry and defeated caching for
        # scenes containing several glTF files. Keep a bounded cache instead,
        # evicting the oldest insertion (dicts preserve insertion order).
        if len(_GLTF_METADATA_CACHE) >= 256:
            _GLTF_METADATA_CACHE.pop(next(iter(_GLTF_METADATA_CACHE)))
        _GLTF_METADATA_CACHE[cache_key] = dict(result)
    return result
|
||
|
||
|
||
def _resolve_cache_root(project_root: str = "") -> str:
    """Return a writable directory for cached visual BAM files.

    Prefers a cache folder inside the project's imported-assets tree when a
    valid *project_root* is given and the project layout helpers import
    cleanly; otherwise falls back to a shared folder under the system temp
    directory. The chosen directory is created if missing.
    """
    # Bug fix: only normalize a non-empty value. os.path.normpath("") is "."
    # which made an empty project_root silently pass the isdir() check and
    # treat the current working directory as the project root.
    project_root = os.path.normpath(project_root) if project_root else ""
    if project_root and os.path.isdir(project_root):
        try:
            from project.project_schema import ProjectLayout, ensure_project_directories

            layout = ProjectLayout(project_root)
            ensure_project_directories(layout)
            cache_root = os.path.join(layout.imported_root, "__gltf_visual_cache__")
            os.makedirs(cache_root, exist_ok=True)
            return cache_root
        except Exception:
            # Project helpers unavailable or layout invalid; use the fallback.
            pass

    cache_root = os.path.join(tempfile.gettempdir(), "EG_gltf_visual_cache")
    os.makedirs(cache_root, exist_ok=True)
    return cache_root
|
||
|
||
|
||
def _choose_visual_cache_builder(file_path: str, *, skip_animations: bool) -> str:
    """Pick the BAM cache builder: "fluent" for static glTF when animations
    are being skipped, otherwise the full "gltf" converter."""
    meta = probe_gltf_metadata(file_path)
    use_fluent = (
        bool(skip_animations)
        and meta.get("is_gltf")
        and not meta.get("has_animations")
    )
    return "fluent" if use_fluent else "gltf"
|
||
|
||
|
||
def _build_cache_key(file_path: str, *, skip_animations: bool, flatten_nodes: bool, builder: str) -> str:
    """Derive a stable SHA-1 hex cache key from the file identity and build options.

    The key changes whenever the file's path, mtime, size, any build flag,
    the chosen builder, or the cache-format version changes.
    """
    stat_info = os.stat(file_path)
    parts = [
        os.path.abspath(file_path),
        str(int(stat_info.st_mtime_ns)),
        str(int(stat_info.st_size)),
        f"skip_anim={int(bool(skip_animations))}",
        f"flatten_nodes={int(bool(flatten_nodes))}",
        f"builder={builder}",
        "visual_cache_v=3",  # bump to invalidate every previously cached BAM
    ]
    fingerprint = "|".join(parts).encode("utf-8", errors="ignore")
    return hashlib.sha1(fingerprint).hexdigest()
|
||
|
||
|
||
def get_gltf_visual_bam_path(
    file_path: str,
    *,
    project_root: str = "",
    skip_animations: bool = True,
    flatten_nodes: bool = False,
) -> str:
    """Return the cache BAM path for *file_path*, creating its directory.

    Non-glTF or missing inputs are returned unchanged so callers can fall
    back to loading the original file directly. The returned path may or
    may not exist yet; see ensure_gltf_visual_bam() to actually build it.
    """
    if not is_gltf_path(file_path):
        return file_path
    file_path = _to_os_specific_path(file_path)
    if not file_path or not os.path.exists(file_path):
        return file_path

    builder = _choose_visual_cache_builder(file_path, skip_animations=skip_animations)
    key = _build_cache_key(
        file_path,
        skip_animations=skip_animations,
        flatten_nodes=flatten_nodes,
        builder=builder,
    )
    target_dir = os.path.join(_resolve_cache_root(project_root), key)
    os.makedirs(target_dir, exist_ok=True)
    return os.path.join(target_dir, "visual_scene.bam")
|
||
|
||
|
||
def ensure_gltf_visual_bam(
    file_path: str,
    *,
    project_root: str = "",
    skip_animations: bool = True,
    flatten_nodes: bool = False,
) -> str:
    """Build (or reuse) a cached visual-only BAM for a glTF/GLB file.

    Returns the cached BAM path on success; on any failure the original
    *file_path* is returned so callers can fall back to loading it directly.

    The BAM is written to a ``.tmp`` sibling first and moved into place with
    ``os.replace`` so a concurrent reader never observes a half-written cache
    file. Two builders are tried: "fluent" (bypasses panda3d-gltf for static
    scenes via load_model_fluent) and "gltf" (full panda3d-gltf conversion).
    """
    if not is_gltf_path(file_path):
        return file_path
    file_path = _to_os_specific_path(file_path)
    if not file_path or not os.path.exists(file_path):
        return file_path

    cache_bam = get_gltf_visual_bam_path(
        file_path,
        project_root=project_root,
        skip_animations=skip_animations,
        flatten_nodes=flatten_nodes,
    )
    if os.path.exists(cache_bam):
        return cache_bam

    temp_bam = f"{cache_bam}.tmp"
    model_np = None
    builder = _choose_visual_cache_builder(file_path, skip_animations=skip_animations)
    try:
        from panda3d.core import Filename

        if builder == "fluent":
            # Static scene: try the fast path that skips the panda3d-gltf plugin.
            try:
                from direct.showbase import ShowBaseGlobal
                from panda3d.core import LoaderOptions

                base = getattr(ShowBaseGlobal, "base", None)
                if base and getattr(base, "loader", None):
                    loader_options = LoaderOptions()
                    # Disable the model cache; the flag's spelling varies
                    # across Panda3D builds.
                    if hasattr(loader_options, "LF_no_cache"):
                        loader_options.setFlags(loader_options.getFlags() | loader_options.LF_no_cache)
                    elif hasattr(loader_options, "LFNoCache"):
                        loader_options.setFlags(loader_options.getFlags() | loader_options.LFNoCache)
                    model_np = load_model_fluent(
                        base.loader,
                        Filename.from_os_specific(file_path),
                        loader_options,
                    )
                    if model_np and not model_np.is_empty():
                        print(f"[glTF缓存] 静态glTF使用流畅加载器构建可见BAM: {file_path}")
                if not model_np or model_np.is_empty():
                    # Fluent load produced nothing; fall through to the plugin.
                    builder = "gltf"
            except Exception as exc:
                print(f"[glTF缓存] 流畅模式构建BAM失败,回退 gltf: {exc}")
                builder = "gltf"

        if builder == "gltf":
            # Full panda3d-gltf conversion, honoring the animation/flatten flags.
            import gltf
            from panda3d.core import NodePath

            settings = gltf.GltfSettings(
                skip_animations=bool(skip_animations),
                flatten_nodes=bool(flatten_nodes),
            )
            model_root = gltf.load_model(os.path.abspath(file_path), settings)
            if not model_root:
                return file_path

            model_np = NodePath(model_root)
            if model_np.is_empty():
                return file_path

        temp_filename = Filename.from_os_specific(temp_bam)
        if not model_np.write_bam_file(temp_filename):
            return file_path

        # Atomic publish: readers see either no cache file or a complete one.
        os.replace(temp_bam, cache_bam)
        return cache_bam
    except Exception as exc:
        print(f"[glTF缓存] 构建可见BAM缓存失败: {exc}")
        return file_path
    finally:
        # Release the loaded scene graph; consistency fix: use the snake_case
        # API (is_empty/remove_node) like the rest of this function instead of
        # the camelCase aliases (isEmpty/removeNode) the original mixed in.
        try:
            if model_np and not model_np.is_empty():
                model_np.remove_node()
        except Exception:
            pass
        # Remove any leftover temp file (no-op after a successful os.replace).
        try:
            if os.path.exists(temp_bam):
                os.remove(temp_bam)
        except Exception:
            pass
|
||
|
||
|
||
def load_model_fluent(loader, file_path, *args, **kwargs):
    """Load a glTF/glb model with the panda3d-gltf plugin temporarily disabled.

    Removing the plugin's "load-file-type" registrations forces Panda3D to
    fall back to the next available loader (e.g. Assimp or the built-in
    lightweight loader), which noticeably improves post-load smoothness for
    large static scenes. The registrations are restored afterwards so later
    loads that need animation support can still use the plugin.

    Args:
        loader: A Panda3D Loader instance (e.g. ``base.loader``).
        file_path: Path or ``Filename`` forwarded to ``loader.loadModel``.
        *args, **kwargs: Forwarded to ``loader.loadModel`` unchanged.

    Returns:
        Whatever ``loader.loadModel`` returns (typically a NodePath).
    """
    from panda3d.core import ConfigVariableList

    cvl = ConfigVariableList("load-file-type")
    removed_entries = []

    # Scan and remove every registration that involves panda3d-gltf.
    # The index only advances when nothing was removed, because
    # remove_value(i) shifts later entries down into slot i.
    i = 0
    while i < cvl.get_num_values():
        val = cvl.get_string_value(i)
        if "panda3d-gltf" in val.lower():
            removed_entries.append(val)
            cvl.remove_value(i)
        else:
            i += 1

    try:
        # Invoke the real loader with the plugin out of the way.
        print(f"[glTF智能加载] 正在以流畅模式加载 (跳过 panda3d-gltf): {file_path}")
        return loader.loadModel(file_path, *args, **kwargs)
    finally:
        # Restore the registrations so subsequent loads that need the plugin
        # (e.g. animated models) still work.
        for entry in removed_entries:
            cvl.add_value(entry)
|