2637 lines
95 KiB
Python
2637 lines
95 KiB
Python
"""WebGL project packager for EG editor (Three.js static scene export)."""
|
||
|
||
from __future__ import annotations
|
||
|
||
import datetime
|
||
import json
|
||
import os
|
||
import re
|
||
import shutil
|
||
import stat
|
||
import subprocess
|
||
import tempfile
|
||
from pathlib import Path
|
||
from typing import Any, Dict, List, Optional, Tuple
|
||
|
||
|
||
class WebGLPackager:
|
||
"""Export current EG scene into a static WebGL package directory."""
|
||
|
||
BASIS_MATRIX_ROW_MAJOR = [
|
||
1.0,
|
||
0.0,
|
||
0.0,
|
||
0.0,
|
||
0.0,
|
||
0.0,
|
||
1.0,
|
||
0.0,
|
||
0.0,
|
||
-1.0,
|
||
0.0,
|
||
0.0,
|
||
0.0,
|
||
0.0,
|
||
0.0,
|
||
1.0,
|
||
]
|
||
|
||
ENERGY_TO_INTENSITY_SCALE = 0.001
|
||
|
||
def __init__(self, world):
|
||
self.world = world
|
||
self.scene_manager = getattr(world, "scene_manager", None)
|
||
|
||
self._project_path = ""
|
||
self._output_root = ""
|
||
self._assets_model_dir = ""
|
||
self._assets_texture_dir = ""
|
||
|
||
self._copied_source_to_uri: Dict[str, str] = {}
|
||
self._name_counter: Dict[str, int] = {}
|
||
self._node_id_by_pointer: Dict[int, str] = {}
|
||
self._public_id_by_pointer: Dict[int, str] = {}
|
||
self._node_path_by_pointer: Dict[int, str] = {}
|
||
self._lookup_by_pointer: Dict[int, Dict[str, Any]] = {}
|
||
self._export_kind_hint_by_pointer: Dict[int, str] = {}
|
||
self._baseline_subnode_cache: Dict[str, Dict[str, Any]] = {}
|
||
|
||
self.report: Dict[str, Any] = {
|
||
"status": "failed",
|
||
"warnings": [],
|
||
"missing_assets": [],
|
||
"unsupported_assets": [],
|
||
"converted_assets": [],
|
||
"copied_assets": [],
|
||
"output_dir": "",
|
||
}
|
||
|
||
def package(self, project_path: str, output_dir: str) -> Dict[str, Any]:
|
||
"""Export project as WebGL static site and return export report."""
|
||
self._project_path = os.path.normpath(project_path)
|
||
|
||
project_name = os.path.basename(self._project_path.rstrip(os.sep)) or "project"
|
||
self._output_root = os.path.normpath(os.path.join(output_dir, f"{project_name}_webgl"))
|
||
self._assets_model_dir = os.path.join(self._output_root, "assets", "models")
|
||
self._assets_texture_dir = os.path.join(self._output_root, "assets", "textures")
|
||
self.report["output_dir"] = self._output_root
|
||
|
||
try:
|
||
if not os.path.isdir(self._project_path):
|
||
self._fail(f"项目路径不存在: {self._project_path}")
|
||
return self.report
|
||
|
||
self._prepare_output_dir()
|
||
self._copy_templates()
|
||
|
||
scene_manifest = self._build_scene_manifest(project_name)
|
||
self._write_json(
|
||
os.path.join(self._output_root, "scene", "scene_webgl.json"),
|
||
scene_manifest,
|
||
)
|
||
|
||
self._write_preview_scripts()
|
||
|
||
status = "success"
|
||
if self.report["missing_assets"] or self.report["unsupported_assets"]:
|
||
status = "partial"
|
||
self.report["status"] = status
|
||
|
||
except Exception as exc:
|
||
self._fail(f"WebGL打包失败: {exc}")
|
||
|
||
finally:
|
||
self._write_json(
|
||
os.path.join(self._output_root, "reports", "export_report.json"),
|
||
self.report,
|
||
)
|
||
|
||
return self.report
|
||
|
||
def _prepare_output_dir(self) -> None:
|
||
if os.path.isdir(self._output_root):
|
||
shutil.rmtree(self._output_root)
|
||
|
||
os.makedirs(os.path.join(self._output_root, "js"), exist_ok=True)
|
||
os.makedirs(os.path.join(self._output_root, "vendor"), exist_ok=True)
|
||
os.makedirs(self._assets_model_dir, exist_ok=True)
|
||
os.makedirs(self._assets_texture_dir, exist_ok=True)
|
||
os.makedirs(os.path.join(self._output_root, "scene"), exist_ok=True)
|
||
os.makedirs(os.path.join(self._output_root, "reports"), exist_ok=True)
|
||
|
||
    def _copy_templates(self) -> None:
        """Copy the static WebGL site template (HTML/CSS/JS + vendor libs) into the output tree.

        Raises:
            FileNotFoundError: When the template directory or one of the
                required template files is missing.
        """
        # Templates are expected at <repo>/templates/webgl relative to this module.
        repo_root = Path(__file__).resolve().parent.parent
        template_root = repo_root / "templates" / "webgl"
        if not template_root.exists():
            raise FileNotFoundError(f"模板目录不存在: {template_root}")

        # Required template files; destinations are relative to the output root.
        file_mapping = {
            "index.html": "index.html",
            "frontend_demo.html": "frontend_demo.html",
            "style.css": "style.css",
            "viewer.js": os.path.join("js", "viewer.js"),
        }
        for src_name, dst_rel in file_mapping.items():
            src = template_root / src_name
            dst = Path(self._output_root) / dst_rel
            if not src.exists():
                raise FileNotFoundError(f"模板文件不存在: {src}")
            dst.parent.mkdir(parents=True, exist_ok=True)
            shutil.copy2(str(src), str(dst))

        # Copy bundled vendor JS (Three.js modules or placeholders); top-level
        # files only, subdirectories are ignored.
        vendor_src = template_root / "vendor"
        vendor_dst = Path(self._output_root) / "vendor"
        if vendor_src.exists():
            for entry in vendor_src.iterdir():
                if entry.is_file():
                    shutil.copy2(str(entry), str(vendor_dst / entry.name))

        # Post-process vendor files: swap placeholders for system-installed
        # modules, add optional postprocessing modules, localize module
        # imports, and strip sourcemap hints.
        self._try_resolve_vendor_files(vendor_dst)
        self._copy_optional_postprocess_vendor_files(vendor_dst)
        self._rewrite_vendor_module_imports(vendor_dst)
        self._strip_vendor_source_mapping_urls(vendor_dst)

        # Warn when the main Three.js module is still the shipped placeholder.
        placeholder_file = vendor_dst / "three.module.min.js"
        if placeholder_file.exists():
            content = placeholder_file.read_text(encoding="utf-8", errors="ignore")
            if "EG_VENDOR_PLACEHOLDER" in content:
                self.report["warnings"].append(
                    "当前 vendor 为占位文件,请替换为官方 three.module.min.js / OrbitControls.js / GLTFLoader.js 后再预览。"
                )
|
||
|
||
def _try_resolve_vendor_files(self, vendor_dst: Path) -> None:
|
||
"""Try to replace template placeholders with system-installed Three.js modules."""
|
||
lookup_roots = [
|
||
Path("/usr/share/javascript/three"),
|
||
Path("/usr/share/nodejs/three"),
|
||
Path("/usr/lib/node_modules/three"),
|
||
Path.home() / ".local/lib/node_modules/three",
|
||
]
|
||
|
||
targets = {
|
||
"three.module.min.js": [
|
||
"build/three.module.min.js",
|
||
"build/three.module.js",
|
||
],
|
||
"OrbitControls.js": [
|
||
"examples/jsm/controls/OrbitControls.js",
|
||
],
|
||
"GLTFLoader.js": [
|
||
"examples/jsm/loaders/GLTFLoader.js",
|
||
],
|
||
}
|
||
|
||
for dst_name, rel_candidates in targets.items():
|
||
dst_path = vendor_dst / dst_name
|
||
if not dst_path.exists():
|
||
continue
|
||
|
||
try:
|
||
content = dst_path.read_text(encoding="utf-8", errors="ignore")
|
||
except Exception:
|
||
content = ""
|
||
|
||
# Only replace placeholders.
|
||
if "EG_VENDOR_PLACEHOLDER" not in content:
|
||
continue
|
||
|
||
found_source = None
|
||
for root in lookup_roots:
|
||
if not root.exists():
|
||
continue
|
||
for rel in rel_candidates:
|
||
candidate = root / rel
|
||
if candidate.exists() and candidate.is_file():
|
||
found_source = candidate
|
||
break
|
||
if found_source:
|
||
break
|
||
|
||
if found_source:
|
||
shutil.copy2(str(found_source), str(dst_path))
|
||
|
||
def _copy_optional_postprocess_vendor_files(self, vendor_dst: Path) -> None:
|
||
"""Try to copy optional postprocessing modules used by viewer approximation."""
|
||
lookup_roots = [
|
||
Path("/usr/share/javascript/three"),
|
||
Path("/usr/share/nodejs/three"),
|
||
Path("/usr/lib/node_modules/three"),
|
||
Path.home() / ".local/lib/node_modules/three",
|
||
]
|
||
|
||
optional_targets = {
|
||
"EffectComposer.js": [
|
||
"examples/jsm/postprocessing/EffectComposer.js",
|
||
],
|
||
"RenderPass.js": [
|
||
"examples/jsm/postprocessing/RenderPass.js",
|
||
],
|
||
"UnrealBloomPass.js": [
|
||
"examples/jsm/postprocessing/UnrealBloomPass.js",
|
||
],
|
||
"ShaderPass.js": [
|
||
"examples/jsm/postprocessing/ShaderPass.js",
|
||
],
|
||
"OutputPass.js": [
|
||
"examples/jsm/postprocessing/OutputPass.js",
|
||
],
|
||
"CopyShader.js": [
|
||
"examples/jsm/shaders/CopyShader.js",
|
||
],
|
||
"LuminosityHighPassShader.js": [
|
||
"examples/jsm/shaders/LuminosityHighPassShader.js",
|
||
],
|
||
}
|
||
|
||
for dst_name, rel_candidates in optional_targets.items():
|
||
dst_path = vendor_dst / dst_name
|
||
if dst_path.exists():
|
||
continue
|
||
|
||
found_source = None
|
||
for root in lookup_roots:
|
||
if not root.exists():
|
||
continue
|
||
for rel in rel_candidates:
|
||
candidate = root / rel
|
||
if candidate.exists() and candidate.is_file():
|
||
found_source = candidate
|
||
break
|
||
if found_source:
|
||
break
|
||
|
||
if found_source:
|
||
try:
|
||
shutil.copy2(str(found_source), str(dst_path))
|
||
except Exception:
|
||
continue
|
||
|
||
    def _rewrite_vendor_module_imports(self, vendor_dst: Path) -> None:
        """Rewrite bare/relative Three.js addon imports to local vendor paths.

        Scans every vendored ``*.js`` file (except the core module itself)
        and rewrites ES-module ``from "<spec>"`` specifiers so the files
        resolve against each other offline, without an import map.
        """
        js_files = [p for p in vendor_dst.glob("*.js") if p.is_file()]
        if not js_files:
            return

        # Names we can legitimately redirect "./<name>" imports to.
        available_names = {p.name for p in js_files}
        # Matches the module specifier in `from "<spec>"` / `from '<spec>'`.
        import_re = re.compile(
            r"""(?P<prefix>\bfrom\s+)(?P<quote>['"])(?P<spec>[^'"]+)(?P=quote)"""
        )

        def rewrite_spec(spec: str) -> str:
            # Map one import specifier to its local vendor equivalent;
            # returns the input unchanged when no rule applies.
            raw = str(spec or "").strip()
            if not raw:
                return raw

            # Bare "three" (and friends) -> the local core module.
            if raw in {"three", "three.module.js", "three.module.min.js"}:
                return "./three.module.min.js"

            # Addon paths -> flat local file of the same basename.
            if raw.startswith("three/addons/") or raw.startswith("three/examples/jsm/"):
                return "./" + raw.split("/")[-1]

            # Any other .js path whose basename we vendored -> local file.
            if raw.endswith(".js"):
                basename = os.path.basename(raw)
                if basename in available_names:
                    return "./" + basename

            return raw

        for js_file in js_files:
            # The core module has no imports we want to touch.
            if js_file.name == "three.module.min.js":
                continue
            try:
                content = js_file.read_text(encoding="utf-8", errors="ignore")
            except Exception:
                continue

            replaced = False

            def _replace(match):
                # re.sub callback; flags `replaced` so unchanged files are
                # not rewritten on disk.
                nonlocal replaced
                old_spec = match.group("spec")
                new_spec = rewrite_spec(old_spec)
                if new_spec == old_spec:
                    return match.group(0)
                replaced = True
                return f"{match.group('prefix')}{match.group('quote')}{new_spec}{match.group('quote')}"

            updated = import_re.sub(_replace, content)
            if not replaced:
                continue
            try:
                js_file.write_text(updated, encoding="utf-8")
            except Exception:
                continue
|
||
|
||
def _strip_vendor_source_mapping_urls(self, vendor_dst: Path) -> None:
|
||
"""Remove sourceMappingURL hints to avoid noisy 404s in offline preview."""
|
||
line_patterns = (
|
||
re.compile(r"^\s*//[#@]\s*sourceMappingURL=.*$", re.IGNORECASE),
|
||
re.compile(r"^\s*/\*[#@]\s*sourceMappingURL=.*\*/\s*$", re.IGNORECASE),
|
||
)
|
||
|
||
for js_file in vendor_dst.glob("*.js"):
|
||
try:
|
||
content = js_file.read_text(encoding="utf-8", errors="ignore")
|
||
except Exception:
|
||
continue
|
||
|
||
lines = content.splitlines()
|
||
filtered = [
|
||
line for line in lines
|
||
if not any(pattern.match(line) for pattern in line_patterns)
|
||
]
|
||
if len(filtered) == len(lines):
|
||
continue
|
||
|
||
text = "\n".join(filtered)
|
||
if content.endswith("\n"):
|
||
text += "\n"
|
||
try:
|
||
js_file.write_text(text, encoding="utf-8")
|
||
self.report["warnings"].append(f"已移除 sourceMappingURL: vendor/{js_file.name}")
|
||
except Exception:
|
||
continue
|
||
|
||
    def _build_scene_manifest(self, project_name: str) -> Dict[str, Any]:
        """Build the ``scene_webgl.json`` manifest dict for the viewer.

        Collects model roots, spot/point lights and the default ground,
        assigns stable per-export node ids and kind hints, then serializes
        each node plus camera/environment/coordinate metadata.

        Raises:
            RuntimeError: When ``world.render`` is unavailable.
        """
        render = getattr(self.world, "render", None)
        if not render:
            raise RuntimeError("world.render 不可用")

        export_nodes: List[Any] = []

        model_nodes = self._collect_model_nodes()
        spot_nodes = self._collect_valid_nodes(getattr(self.scene_manager, "Spotlight", []))
        point_nodes = self._collect_valid_nodes(getattr(self.scene_manager, "Pointlight", []))
        ground_node = self._get_default_ground_node()

        export_nodes.extend(model_nodes)
        export_nodes.extend(spot_nodes)
        export_nodes.extend(point_nodes)
        if ground_node is not None:
            export_nodes.append(ground_node)

        # De-duplicate by object pointer while preserving first-seen order
        # (a node may appear in more than one source list).
        uniq: Dict[int, Any] = {}
        for node in export_nodes:
            uniq[id(node)] = node
        export_nodes = list(uniq.values())

        # Stable sequential ids ("node_0001", ...) keyed by pointer.
        for index, node in enumerate(export_nodes, start=1):
            self._node_id_by_pointer[id(node)] = f"node_{index:04d}"
        # Kind hints; later assignments win for nodes in multiple lists
        # (ground overrides lights which override models).
        for node in model_nodes:
            self._export_kind_hint_by_pointer[id(node)] = "model"
        for node in point_nodes:
            self._export_kind_hint_by_pointer[id(node)] = "point_light"
        for node in spot_nodes:
            self._export_kind_hint_by_pointer[id(node)] = "spot_light"
        if ground_node is not None:
            self._export_kind_hint_by_pointer[id(ground_node)] = "ground"

        self._assign_export_lookup_metadata(export_nodes)

        # Serialize each category; builders returning a falsy entry are skipped.
        nodes_json: List[Dict[str, Any]] = []

        for node in model_nodes:
            entry = self._build_model_node_entry(node)
            if entry:
                nodes_json.append(entry)

        for node in point_nodes:
            entry = self._build_light_node_entry(node, kind="point_light")
            if entry:
                nodes_json.append(entry)

        for node in spot_nodes:
            entry = self._build_light_node_entry(node, kind="spot_light")
            if entry:
                nodes_json.append(entry)

        if ground_node is not None:
            entry = self._build_ground_node_entry(ground_node)
            if entry:
                nodes_json.append(entry)

        manifest = {
            "meta": {
                "format_version": "1.0",
                "api_version": 1,
                "project_name": project_name,
                "exported_at": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                "transports": ["js_api", "postMessage"],
            },
            # Conversion data from Panda3D Z-up to Three.js Y-up space.
            "coordinate": {
                "source": "panda3d_zup",
                "target": "threejs_yup",
                "matrix_convention": "panda_row_vector_row_major",
                "basis_matrix": self.BASIS_MATRIX_ROW_MAJOR,
            },
            "camera": self._build_camera_entry(),
            "environment": self._build_environment_entry(),
            "nodes": nodes_json,
        }
        return manifest
|
||
|
||
    def _collect_model_nodes(self) -> List[Any]:
        """Collect the model-root NodePaths to export.

        Prefers the ssbo_editor's source-model root (after snapshotting the
        runtime transforms/materials back into it); falls back to
        ``scene_manager.models``. Lights, engine nodes and non-root children
        are filtered out.
        """
        if not self.scene_manager:
            return []

        all_models: List[Any] = []
        ssbo_editor = getattr(self.world, "ssbo_editor", None)
        source_model_root = getattr(ssbo_editor, "source_model_root", None) if ssbo_editor else None
        if source_model_root:
            try:
                if not source_model_root.isEmpty():
                    # Sync current editor state into the source root before
                    # reading its children; both snapshots are best-effort.
                    snapshot_fn = getattr(ssbo_editor, "_snapshot_top_level_transforms_to_source_root", None)
                    snapshot_material_fn = getattr(ssbo_editor, "_snapshot_runtime_materials_to_source_root", None)
                    if callable(snapshot_fn):
                        try:
                            snapshot_fn()
                        except Exception:
                            pass
                    if callable(snapshot_material_fn):
                        try:
                            snapshot_material_fn()
                        except Exception:
                            pass
                    all_models = self._collect_valid_nodes(list(source_model_root.getChildren()))
            except Exception:
                all_models = []

        # Fallback when no source root is available (or it had no children).
        if not all_models:
            all_models = self._collect_valid_nodes(getattr(self.scene_manager, "models", []))

        # Pointers of all light nodes so they are excluded from the model list.
        light_ptrs = {id(n) for n in self._collect_valid_nodes(getattr(self.scene_manager, "Spotlight", []))}
        light_ptrs |= {id(n) for n in self._collect_valid_nodes(getattr(self.scene_manager, "Pointlight", []))}

        models: List[Any] = []
        for node in all_models:
            if id(node) in light_ptrs:
                continue
            if node.hasTag("light_type"):
                continue
            if node.getName() in {"render", "camera", "cam"}:
                continue
            # Only export true model roots. Scene traversal may put many child nodes
            # into scene_manager.models, but they should be replayed via subnode overrides.
            is_model_root = False
            try:
                is_model_root = node.hasTag("is_model_root")
            except Exception:
                is_model_root = False
            # A node carrying any known source-path tag also counts as a root.
            has_model_path_tag = False
            for tag_name in ("model_path", "saved_model_path", "original_path", "asset_path", "file"):
                try:
                    if node.hasTag(tag_name):
                        has_model_path_tag = True
                        break
                except Exception:
                    continue
            if not is_model_root and not has_model_path_tag:
                continue
            models.append(node)
        return models
|
||
|
||
@staticmethod
|
||
def _collect_valid_nodes(nodes: List[Any]) -> List[Any]:
|
||
valid = []
|
||
for node in nodes or []:
|
||
if not node:
|
||
continue
|
||
try:
|
||
if node.isEmpty():
|
||
continue
|
||
except Exception:
|
||
continue
|
||
valid.append(node)
|
||
return valid
|
||
|
||
def _get_default_ground_node(self):
|
||
ground = getattr(self.world, "ground", None)
|
||
if not ground:
|
||
return None
|
||
try:
|
||
if ground.isEmpty():
|
||
return None
|
||
return ground
|
||
except Exception:
|
||
return None
|
||
|
||
    def _build_camera_entry(self) -> Dict[str, Any]:
        """Export the editor camera (render-space matrix, FOV, clip planes).

        Falls back to a fixed default view when the camera, render root or
        lens cannot be queried.
        """
        render = getattr(self.world, "render", None)
        cam = getattr(self.world, "cam", None) or getattr(self.world, "camera", None)

        # Default camera: identity basis at (0, -50, 20) in Panda coordinates
        # (row-vector/row-major layout: translation lives in the fourth row).
        default = {
            "matrix_local_row_major": [
                1.0,
                0.0,
                0.0,
                0.0,
                0.0,
                1.0,
                0.0,
                0.0,
                0.0,
                0.0,
                1.0,
                0.0,
                0.0,
                -50.0,
                20.0,
                1.0,
            ],
            "fov_deg": 80.0,
            "near": 0.1,
            "far": 10000.0,
        }

        if not cam or not render:
            return default

        try:
            cam_mat = cam.getMat(render)
            fov = 80.0
            near = 0.1
            far = 10000.0
            lens = cam.node().getLens() if cam.node() else None
            if lens:
                # Each lens query is independently best-effort so one failed
                # getter does not discard the others.
                try:
                    fov = float(lens.getFov()[0])  # first (horizontal) FOV component
                except Exception:
                    pass
                try:
                    near = float(lens.getNear())
                except Exception:
                    pass
                try:
                    far = float(lens.getFar())
                except Exception:
                    pass
            return {
                "matrix_local_row_major": self._mat4_to_row_major_list(cam_mat),
                "fov_deg": fov,
                "near": near,
                "far": far,
            }
        except Exception:
            return default
|
||
|
||
    def _build_environment_entry(self) -> Dict[str, Any]:
        """Export ambient/directional lighting plus optional skybox and
        render-pipeline sections for the viewer environment."""
        ambient = getattr(self.world, "ambient_light", None)
        directional = getattr(self.world, "directional_light", None)

        # Defaults used when the editor lights cannot be read.
        ambient_color = [0.2, 0.2, 0.2]
        directional_color = [0.8, 0.8, 0.8]
        directional_dir = [0.0, 0.0, -1.0]

        if ambient and not ambient.isEmpty():
            try:
                c = ambient.node().getColor()
                ambient_color = [float(c[0]), float(c[1]), float(c[2])]
            except Exception:
                pass

        if directional and not directional.isEmpty():
            try:
                c = directional.node().getColor()
                directional_color = [float(c[0]), float(c[1]), float(c[2])]
            except Exception:
                pass
            try:
                # Light direction = forward vector of the light's rotation,
                # taken in render space when available.
                q = directional.getQuat(getattr(self.world, "render", directional.getParent()))
                fwd = q.getForward()
                directional_dir = [float(fwd[0]), float(fwd[1]), float(fwd[2])]
            except Exception:
                pass

        entry = {
            "include_default_ground": True,
            "ambient_light": {
                "color": ambient_color,
                "intensity": 1.0,
            },
            "directional_light": {
                "color": directional_color,
                "intensity": 1.0,
                "direction": directional_dir,
            },
        }
        # Optional sections: only attached when data is actually available.
        skybox_entry = self._extract_skybox_entry()
        if skybox_entry:
            entry["skybox"] = skybox_entry
        render_pipeline = self._extract_render_pipeline_settings()
        if render_pipeline:
            entry["render_pipeline"] = render_pipeline
        return entry
|
||
|
||
    def _extract_skybox_entry(self) -> Dict[str, Any]:
        """Export skybox texture config for viewer background.

        Returns ``{}`` when no usable skybox texture is found, the format is
        unsupported, or copying the asset fails (a warning is recorded in
        the latter two cases).
        """
        # Prefer the texture actually bound in the running scene, then
        # well-known fallback file locations.
        skybox_source = self._find_runtime_skybox_texture_source()
        if not skybox_source:
            skybox_source = self._find_fallback_skybox_source()
        if not skybox_source:
            return {}

        if not self._is_supported_skybox_image(skybox_source):
            self.report["warnings"].append(f"天空盒格式暂不支持,已跳过: {skybox_source}")
            return {}

        uri = self._copy_asset_to_textures(skybox_source)
        if not uri:
            self.report["warnings"].append(f"天空盒资源复制失败: {skybox_source}")
            return {}

        projection = self._guess_skybox_projection(skybox_source)
        lower_color = self._extract_skybox_lower_hemisphere_color(skybox_source)
        # Skydome textures only cover the upper hemisphere: the viewer clips
        # the lower half and fills it with a solid color, and such textures
        # are not applied as environment maps.
        return {
            "enabled": True,
            "type": projection,
            "uri": uri,
            "apply_environment": projection != "skydome",
            "clip_lower_hemisphere": projection == "skydome",
            "lower_hemisphere_color": lower_color,
            "horizon_blend": 0.06,
            "horizon_sample_v": 0.01,
            "lower_tint_strength": 0.65,
        }
|
||
|
||
def _extract_skybox_lower_hemisphere_color(self, skybox_source: str) -> List[float]:
|
||
"""Pick lower hemisphere fill color, prefer horizon-adjacent sky tone."""
|
||
horizon_rgb = self._estimate_skybox_horizon_rgb(skybox_source)
|
||
if horizon_rgb:
|
||
return horizon_rgb
|
||
|
||
clear_rgb = self._extract_world_clear_rgb()
|
||
if clear_rgb and max(clear_rgb) > 0.02:
|
||
return clear_rgb
|
||
|
||
# Final fallback: dark bluish neutral.
|
||
return [0.08, 0.10, 0.13]
|
||
|
||
def _extract_world_clear_rgb(self) -> List[float]:
|
||
"""Read active clear/background color from world/showbase if available."""
|
||
|
||
def _to_rgb(value: Any) -> List[float]:
|
||
if value is None:
|
||
return []
|
||
try:
|
||
if hasattr(value, "__len__") and len(value) >= 3: # type: ignore[arg-type]
|
||
r = float(value[0]) # type: ignore[index]
|
||
g = float(value[1]) # type: ignore[index]
|
||
b = float(value[2]) # type: ignore[index]
|
||
return [max(0.0, min(1.0, r)), max(0.0, min(1.0, g)), max(0.0, min(1.0, b))]
|
||
except Exception:
|
||
return []
|
||
return []
|
||
|
||
world_getters = ("getBackgroundColor", "get_background_color")
|
||
for name in world_getters:
|
||
getter = getattr(self.world, name, None)
|
||
if callable(getter):
|
||
try:
|
||
rgb = _to_rgb(getter())
|
||
except Exception:
|
||
rgb = []
|
||
if rgb:
|
||
return rgb
|
||
|
||
win = getattr(self.world, "win", None)
|
||
if win and hasattr(win, "getClearColor"):
|
||
try:
|
||
rgb = _to_rgb(win.getClearColor())
|
||
except Exception:
|
||
rgb = []
|
||
if rgb:
|
||
return rgb
|
||
|
||
rp = getattr(self.world, "render_pipeline", None)
|
||
showbase = getattr(rp, "_showbase", None) if rp else None
|
||
rp_win = getattr(showbase, "win", None) if showbase else None
|
||
if rp_win and hasattr(rp_win, "getClearColor"):
|
||
try:
|
||
rgb = _to_rgb(rp_win.getClearColor())
|
||
except Exception:
|
||
rgb = []
|
||
if rgb:
|
||
return rgb
|
||
|
||
return []
|
||
|
||
@staticmethod
|
||
def _estimate_skybox_horizon_rgb(path: str) -> List[float]:
|
||
"""Estimate horizon color from skydome texture bottom band."""
|
||
try:
|
||
from PIL import Image, ImageStat # type: ignore
|
||
except Exception:
|
||
return []
|
||
|
||
try:
|
||
with Image.open(path) as im:
|
||
rgb_img = im.convert("RGB")
|
||
width, height = rgb_img.size
|
||
if width <= 0 or height <= 0:
|
||
return []
|
||
band_h = max(1, int(height // 64))
|
||
band = rgb_img.crop((0, height - band_h, width, height))
|
||
stat = ImageStat.Stat(band)
|
||
mean = stat.mean[:3] if stat.mean else [0.0, 0.0, 0.0]
|
||
return [
|
||
max(0.0, min(1.0, float(mean[0]) / 255.0)),
|
||
max(0.0, min(1.0, float(mean[1]) / 255.0)),
|
||
max(0.0, min(1.0, float(mean[2]) / 255.0)),
|
||
]
|
||
except Exception:
|
||
return []
|
||
|
||
    def _find_runtime_skybox_texture_source(self) -> str:
        """Locate the texture file backing the skybox currently in the scene.

        Checks ``world.skybox`` first, then any render-graph nodes matching
        ``**/skybox*``; returns the first candidate path whose format is
        web-supported, or ``""`` when none is found.
        """
        candidates: List[str] = []
        skybox_np = getattr(self.world, "skybox", None)
        if skybox_np and not self._is_np_empty(skybox_np):
            candidates.extend(self._extract_texture_sources_from_node(skybox_np))

        render = getattr(self.world, "render", None)
        if render:
            try:
                skyboxes = render.findAllMatches("**/skybox*")
            except Exception:
                skyboxes = None
            if skyboxes:
                # NodePathCollection access is best-effort per path.
                try:
                    count = skyboxes.getNumPaths()
                except Exception:
                    count = 0
                for i in range(count):
                    try:
                        np = skyboxes.getPath(i)
                    except Exception:
                        continue
                    if self._is_np_empty(np):
                        continue
                    candidates.extend(self._extract_texture_sources_from_node(np))

        # First candidate with a supported image format wins.
        for source in candidates:
            if source and self._is_supported_skybox_image(source):
                return source
        return ""
|
||
|
||
def _extract_texture_sources_from_node(self, node) -> List[str]:
|
||
out: List[str] = []
|
||
for _, tex_hint in self._extract_texture_stage_and_paths(node):
|
||
resolved = self._resolve_skybox_path(tex_hint)
|
||
if resolved:
|
||
out.append(resolved)
|
||
return out
|
||
|
||
def _resolve_skybox_path(self, path_hint: str) -> str:
|
||
text = str(path_hint or "").strip()
|
||
if not text:
|
||
return ""
|
||
|
||
if os.path.isabs(text) and os.path.exists(text):
|
||
return os.path.normpath(text)
|
||
|
||
repo_root = str(Path(__file__).resolve().parent.parent)
|
||
roots = [
|
||
self._project_path,
|
||
os.path.join(self._project_path, "Resources"),
|
||
os.path.join(self._project_path, "scenes", "resources"),
|
||
repo_root,
|
||
os.path.join(repo_root, "RenderPipelineFile"),
|
||
os.path.join(repo_root, "RenderPipelineFile", "data"),
|
||
os.path.join(repo_root, "RenderPipelineFile", "data", "builtin_models", "skybox"),
|
||
os.getcwd(),
|
||
]
|
||
for root in roots:
|
||
if not root:
|
||
continue
|
||
full = os.path.normpath(os.path.join(root, text))
|
||
if os.path.exists(full):
|
||
return full
|
||
return ""
|
||
|
||
def _find_fallback_skybox_source(self) -> str:
|
||
repo_root = str(Path(__file__).resolve().parent.parent)
|
||
candidates = [
|
||
os.path.join(self._project_path, "Resources", "skybox.jpg"),
|
||
os.path.join(self._project_path, "Resources", "skybox.png"),
|
||
os.path.join(self._project_path, "skybox.jpg"),
|
||
os.path.join(self._project_path, "skybox.png"),
|
||
os.path.join(repo_root, "RenderPipelineFile", "data", "builtin_models", "skybox", "skybox.jpg"),
|
||
os.path.join(repo_root, "RenderPipelineFile", "data", "builtin_models", "skybox", "skybox-2.jpg"),
|
||
]
|
||
for path in candidates:
|
||
if path and os.path.exists(path) and self._is_supported_skybox_image(path):
|
||
return os.path.normpath(path)
|
||
return ""
|
||
|
||
@staticmethod
|
||
def _is_supported_skybox_image(path: str) -> bool:
|
||
ext = os.path.splitext(str(path or ""))[1].lower()
|
||
return ext in {".jpg", ".jpeg", ".png", ".webp", ".avif"}
|
||
|
||
@staticmethod
|
||
def _guess_skybox_projection(path: str) -> str:
|
||
text = str(path or "").replace("\\", "/").lower()
|
||
if "/builtin_models/skybox/skybox" in text:
|
||
# RenderPipeline default assets are skydome textures.
|
||
return "skydome"
|
||
|
||
width = 0
|
||
height = 0
|
||
try:
|
||
from PIL import Image # type: ignore
|
||
with Image.open(path) as im:
|
||
width, height = im.size
|
||
except Exception:
|
||
width, height = 0, 0
|
||
|
||
if width > 0 and height > 0:
|
||
ratio = float(width) / float(max(1, height))
|
||
if 3.2 <= ratio <= 4.8:
|
||
return "skydome"
|
||
if 1.8 <= ratio <= 2.2:
|
||
return "equirectangular"
|
||
return "equirectangular"
|
||
|
||
    def _extract_render_pipeline_settings(self) -> Dict[str, Any]:
        """Snapshot RenderPipeline plugin settings into a web-viewer config.

        Returns ``{}`` when no pipeline/plugin manager is available. The
        returned dict approximates tone mapping, bloom, shadows, AO, fog and
        AA for the Three.js viewer; several ``web_*`` fields are viewer-side
        overrides rather than pipeline values.
        """
        rp = getattr(self.world, "render_pipeline", None)
        plugin_mgr = getattr(rp, "plugin_mgr", None) if rp else None
        if not plugin_mgr:
            return {}

        enabled_plugins = set(getattr(plugin_mgr, "enabled_plugins", set()) or set())

        # --- Tone mapping (color_correction plugin) -----------------------
        tone_operator = str(
            self._get_rp_plugin_setting(plugin_mgr, "color_correction", "tonemap_operator", "optimized")
        ).strip().lower()
        if not tone_operator:
            tone_operator = "optimized"

        exposure_scale = self._safe_float(
            self._get_rp_plugin_setting(plugin_mgr, "color_correction", "exposure_scale", 1.0),
            1.0,
        )
        min_exposure = self._safe_float(
            self._get_rp_plugin_setting(plugin_mgr, "color_correction", "min_exposure_value", 0.01),
            0.01,
        )
        max_exposure = self._safe_float(
            self._get_rp_plugin_setting(plugin_mgr, "color_correction", "max_exposure_value", 1.0),
            1.0,
        )
        # Keep the exposure range ordered even if the settings are inverted.
        if max_exposure < min_exposure:
            min_exposure, max_exposure = max_exposure, min_exposure

        manual_camera = bool(
            self._get_rp_plugin_setting(plugin_mgr, "color_correction", "manual_camera_parameters", False)
        )
        # Empirical brightness compensation per operator for the web viewer.
        web_exposure_boost = 1.0
        if tone_operator in {"optimized", "uncharted2"}:
            web_exposure_boost = 1.6
        elif tone_operator == "reinhard":
            web_exposure_boost = 1.25
        elif tone_operator in {"exponential", "exponential2"}:
            web_exposure_boost = 1.15

        # --- Bloom --------------------------------------------------------
        bloom_enabled = "bloom" in enabled_plugins
        bloom_strength = self._safe_float(
            self._get_rp_plugin_setting(plugin_mgr, "bloom", "bloom_strength", 0.0),
            0.0,
        )
        bloom_mips = int(self._safe_float(
            self._get_rp_plugin_setting(plugin_mgr, "bloom", "num_mipmaps", 6),
            6.0,
        ))
        bloom_lens_dirt = self._safe_float(
            self._get_rp_plugin_setting(plugin_mgr, "bloom", "lens_dirt_factor", 0.0),
            0.0,
        )

        # --- Shadows (pssm plugin) ----------------------------------------
        pssm_enabled = "pssm" in enabled_plugins
        shadow_resolution = int(self._safe_float(
            self._get_rp_plugin_setting(plugin_mgr, "pssm", "resolution", 1024),
            1024.0,
        ))
        shadow_resolution = max(256, min(4096, shadow_resolution))
        shadow_max_distance = self._safe_float(
            self._get_rp_plugin_setting(plugin_mgr, "pssm", "max_distance", 50.0),
            50.0,
        )
        shadow_use_pcf = bool(
            self._get_rp_plugin_setting(plugin_mgr, "pssm", "use_pcf", True)
        )

        # --- Ambient occlusion --------------------------------------------
        ao_enabled = "ao" in enabled_plugins
        ao_strength = self._safe_float(
            self._get_rp_plugin_setting(plugin_mgr, "ao", "occlusion_strength", 1.0),
            1.0,
        )
        ao_technique = str(
            self._get_rp_plugin_setting(plugin_mgr, "ao", "technique", "SSAO")
        ).strip().upper() or "SSAO"

        # --- Fog (volumetrics plugin, sampled at current daytime) ---------
        daytime_value = self._safe_float(getattr(getattr(rp, "daytime_mgr", None), "time", 12.0), 12.0)
        fog_enabled = "volumetrics" in enabled_plugins
        fog_ramp = self._safe_float(
            self._get_rp_day_setting(plugin_mgr, rp, "volumetrics", "fog_ramp_size", 2000.0),
            2000.0,
        )
        fog_intensity = self._safe_float(
            self._get_rp_day_setting(plugin_mgr, rp, "volumetrics", "fog_intensity", 0.0),
            0.0,
        )
        fog_color_raw = self._get_rp_day_setting(plugin_mgr, rp, "volumetrics", "fog_color", (140.0, 150.0, 165.0))
        fog_color = self._normalize_daytime_color(fog_color_raw)

        # Viewer side optional postprocessing modules: bloom only works when
        # all three vendor files were copied to the output.
        has_bloom_vendor = all(
            os.path.exists(os.path.join(self._output_root, "vendor", name))
            for name in ("EffectComposer.js", "RenderPass.js", "UnrealBloomPass.js")
        )
        if bloom_enabled and not has_bloom_vendor:
            self.report["warnings"].append(
                "Bloom 已启用,但未找到 EffectComposer/RenderPass/UnrealBloomPass,本次 Web 导出将退化为无 Bloom。"
            )

        return {
            "source": "render_pipeline",
            "daytime": daytime_value,
            "enabled_plugins": sorted(str(v) for v in enabled_plugins),
            "tone_mapping": {
                "operator": tone_operator,
                "exposure_scale": exposure_scale,
                "web_exposure_boost": web_exposure_boost,
                "web_use_tone_mapping": False,
                "manual_camera_parameters": manual_camera,
                "min_exposure": min_exposure,
                "max_exposure": max_exposure,
            },
            "bloom": {
                "enabled": bloom_enabled and bloom_strength > 1e-5,
                "strength": max(0.0, bloom_strength),
                "mipmaps": max(2, bloom_mips),
                "lens_dirt_factor": max(0.0, min(1.0, bloom_lens_dirt)),
                "vendor_available": has_bloom_vendor,
            },
            "shadows": {
                "enabled": pssm_enabled,
                "resolution": shadow_resolution,
                "max_distance": max(1.0, shadow_max_distance),
                "use_pcf": shadow_use_pcf,
                "web_enable": False,
            },
            "ao": {
                "enabled": ao_enabled,
                "technique": ao_technique,
                "occlusion_strength": max(0.0, ao_strength),
            },
            "fog": {
                "enabled": fog_enabled and fog_intensity > 1e-6,
                "color": fog_color,
                "intensity": max(0.0, fog_intensity),
                "ramp_size": max(1.0, fog_ramp),
            },
            "aa": {
                "smaa_enabled": "smaa" in enabled_plugins,
                "fxaa_enabled": "fxaa" in enabled_plugins,
            },
        }
|
||
|
||
@staticmethod
|
||
def _get_rp_plugin_setting(plugin_mgr, plugin_id: str, setting_id: str, default: Any) -> Any:
|
||
try:
|
||
plugin_settings = getattr(plugin_mgr, "settings", {}).get(plugin_id)
|
||
if plugin_settings and setting_id in plugin_settings:
|
||
handle = plugin_settings[setting_id]
|
||
return getattr(handle, "value", default)
|
||
except Exception:
|
||
pass
|
||
return default
|
||
|
||
    def _get_rp_day_setting(self, plugin_mgr, rp, plugin_id: str, setting_id: str, default: Any) -> Any:
        """Sample a render-pipeline day-cycle setting at the current daytime.

        Returns *default* when the plugin has no such day setting, when the
        handle cannot be sampled, or when any lookup raises.
        """
        try:
            day_settings = getattr(plugin_mgr, "day_settings", {}).get(plugin_id)
            if not day_settings or setting_id not in day_settings:
                return default
            handle = day_settings[setting_id]
            if not hasattr(handle, "get_scaled_value_at"):
                return default
            # Sample at the pipeline's current time of day (fallback: noon).
            daytime = self._safe_float(getattr(getattr(rp, "daytime_mgr", None), "time", 12.0), 12.0)
            return handle.get_scaled_value_at(daytime)
        except Exception:
            return default
|
||
|
||
@staticmethod
|
||
def _normalize_daytime_color(value: Any) -> List[float]:
|
||
if isinstance(value, (list, tuple)) and len(value) >= 3:
|
||
out = [
|
||
float(value[0]),
|
||
float(value[1]),
|
||
float(value[2]),
|
||
]
|
||
else:
|
||
out = [0.55, 0.6, 0.65]
|
||
if any(v > 1.0 for v in out):
|
||
out = [v / 255.0 for v in out]
|
||
return [
|
||
max(0.0, min(1.0, out[0])),
|
||
max(0.0, min(1.0, out[1])),
|
||
max(0.0, min(1.0, out[2])),
|
||
]
|
||
|
||
    def _assign_export_lookup_metadata(self, export_nodes: List[Any]) -> None:
        """Assign stable public ids, hierarchical paths and lookup records.

        Populates ``_public_id_by_pointer``, ``_node_path_by_pointer`` and
        ``_lookup_by_pointer`` for every exported node. Path segments use
        sanitized names with a ``#occurrence`` suffix so same-named siblings
        stay distinguishable; duplicate public-id bases get a ``#index``
        suffix the same way.
        """
        self._public_id_by_pointer.clear()
        self._node_path_by_pointer.clear()
        self._lookup_by_pointer.clear()

        export_nodes = self._collect_valid_nodes(export_nodes)
        if not export_nodes:
            return

        # Pass 1: capture each node's exported parent and group siblings.
        parent_by_pointer: Dict[int, Optional[Any]] = {}
        children_by_parent_pointer: Dict[Optional[int], List[Any]] = {}

        for node in export_nodes:
            parent = self._get_export_parent(node)
            parent_by_pointer[id(node)] = parent
            parent_ptr = id(parent) if parent else None
            children_by_parent_pointer.setdefault(parent_ptr, []).append(node)

        # Pass 2: deterministic per-sibling segments ("name#occurrence").
        segment_by_pointer: Dict[int, str] = {}
        for parent_ptr, children in children_by_parent_pointer.items():
            del parent_ptr
            children.sort(key=self._get_export_sibling_sort_key)
            occurrence_by_name: Dict[str, int] = {}
            for node in children:
                segment_base = self._build_public_path_segment(node)
                occurrence = occurrence_by_name.get(segment_base, 0)
                occurrence_by_name[segment_base] = occurrence + 1
                segment_by_pointer[id(node)] = f"{segment_base}#{occurrence}"

        path_cache: Dict[int, str] = {}

        def build_path(node) -> str:
            # Memoized parent-path concatenation; only exported parents
            # extend the path, otherwise the node is a root segment.
            node_ptr = id(node)
            cached = path_cache.get(node_ptr)
            if cached is not None:
                return cached

            parent = parent_by_pointer.get(node_ptr)
            segment = segment_by_pointer.get(node_ptr, f"{self._build_public_path_segment(node)}#0")
            if parent and id(parent) in self._node_id_by_pointer:
                parent_path = build_path(parent)
                path = f"{parent_path}/{segment}" if parent_path else segment
            else:
                path = segment
            path_cache[node_ptr] = path
            return path

        public_id_base_by_pointer: Dict[int, str] = {}
        duplicate_counts: Dict[str, int] = {}

        # Pass 3: derive public ids, suffixing duplicates with "#index".
        for node in export_nodes:
            node_ptr = id(node)
            path = build_path(node)
            self._node_path_by_pointer[node_ptr] = path
            public_base = self._build_public_id_base(node, path)
            duplicate_index = duplicate_counts.get(public_base, 0)
            duplicate_counts[public_base] = duplicate_index + 1
            public_id = public_base if duplicate_index == 0 else f"{public_base}#{duplicate_index}"
            public_id_base_by_pointer[node_ptr] = public_base
            self._public_id_by_pointer[node_ptr] = public_id

        # Pass 4: materialize the per-node lookup record.
        for node in export_nodes:
            node_ptr = id(node)
            node_id = self._node_id_by_pointer.get(node_ptr, "")
            node_name = self._safe_node_name(node) or node_id or "node"
            kind = self._export_kind_hint_by_pointer.get(node_ptr, "node")
            parent = parent_by_pointer.get(node_ptr)
            parent_id = self._node_id_by_pointer.get(id(parent)) if parent else None
            parent_public_id = self._public_id_by_pointer.get(id(parent)) if parent else None
            asset_guid = self._safe_get_tag_value(node, "asset_guid")
            imported_node_key = self._resolve_imported_node_key_for_public_id(node)

            self._lookup_by_pointer[node_ptr] = {
                "id": node_id,
                "public_id": self._public_id_by_pointer.get(node_ptr, ""),
                "name": node_name,
                "kind": kind,
                "path": self._node_path_by_pointer.get(node_ptr, ""),
                "parent_id": parent_id,
                "parent_public_id": parent_public_id,
                "asset_guid": asset_guid or "",
                "imported_node_key": imported_node_key or "",
            }
|
||
|
||
def _get_export_parent(self, node):
|
||
try:
|
||
parent = node.getParent()
|
||
except Exception:
|
||
parent = None
|
||
|
||
if parent and not parent.isEmpty() and id(parent) in self._node_id_by_pointer:
|
||
return parent
|
||
return None
|
||
|
||
def _get_export_sibling_sort_key(self, node) -> Tuple[int, str, str]:
|
||
sibling_index = self._get_node_sibling_index(node)
|
||
return (
|
||
sibling_index,
|
||
self._safe_node_name(node).lower(),
|
||
self._node_id_by_pointer.get(id(node), ""),
|
||
)
|
||
|
||
@staticmethod
|
||
def _get_node_sibling_index(node) -> int:
|
||
try:
|
||
parent = node.getParent()
|
||
except Exception:
|
||
parent = None
|
||
|
||
if not parent or parent.isEmpty():
|
||
return 0
|
||
|
||
try:
|
||
children = list(parent.getChildren())
|
||
except Exception:
|
||
return 0
|
||
|
||
target_ptr = id(node)
|
||
for index, child in enumerate(children):
|
||
if id(child) == target_ptr:
|
||
return index
|
||
return 0
|
||
|
||
def _build_public_path_segment(self, node) -> str:
|
||
kind = self._export_kind_hint_by_pointer.get(id(node), "node")
|
||
base_name = self._safe_node_name(node)
|
||
if not base_name:
|
||
base_name = kind
|
||
sanitized = self._sanitize_public_id_segment(base_name)
|
||
return sanitized or kind
|
||
|
||
def _build_public_id_base(self, node, path: str) -> str:
|
||
kind = self._export_kind_hint_by_pointer.get(id(node), "node")
|
||
if kind == "model":
|
||
asset_guid = self._safe_get_tag_value(node, "asset_guid")
|
||
if asset_guid:
|
||
imported_node_key = self._resolve_imported_node_key_for_public_id(node) or "root"
|
||
return f"model:{asset_guid}:{imported_node_key}"
|
||
return f"scene:{kind}:{path}"
|
||
|
||
def _resolve_imported_node_key_for_public_id(self, node) -> str:
|
||
for tag_name in ("imported_node_key", "source_model_node_key", "ssbo_tree_key", "tree_item_key"):
|
||
value = self._safe_get_tag_value(node, tag_name)
|
||
if value:
|
||
return value
|
||
return ""
|
||
|
||
@staticmethod
|
||
def _sanitize_public_id_segment(value: str) -> str:
|
||
text = str(value or "").strip()
|
||
if not text:
|
||
return ""
|
||
text = text.replace("\\", "/")
|
||
text = re.sub(r"\s+", "_", text)
|
||
text = re.sub(r"[/:]+", "_", text)
|
||
text = re.sub(r"_+", "_", text)
|
||
return text.strip("._")
|
||
|
||
@staticmethod
|
||
def _safe_node_name(node) -> str:
|
||
try:
|
||
return str(node.getName() or "").strip()
|
||
except Exception:
|
||
return ""
|
||
|
||
def _attach_lookup_metadata(self, node, entry: Dict[str, Any]) -> Dict[str, Any]:
|
||
node_ptr = id(node)
|
||
lookup = dict(self._lookup_by_pointer.get(node_ptr, {}) or {})
|
||
entry["public_id"] = self._public_id_by_pointer.get(node_ptr, "")
|
||
entry["path"] = self._node_path_by_pointer.get(node_ptr, "")
|
||
entry["parent_public_id"] = lookup.get("parent_public_id")
|
||
entry["lookup"] = lookup
|
||
return entry
|
||
|
||
    def _build_model_node_entry(self, node) -> Optional[Dict[str, Any]]:
        """Build the export manifest entry for one model node.

        Resolves the model's source file, converts/copies it into the
        package, and collects material / texture / subnode / animation /
        script overrides. Returns None (and records the failure in
        ``self.report``) when the source cannot be resolved or converted.
        """
        node_id = self._node_id_by_pointer.get(id(node))
        if not node_id:
            return None

        parent_id, mat = self._get_parent_and_matrix(node)
        node_name = node.getName() or node_id

        model_source, source_tag = self._resolve_model_source(node)
        if not model_source:
            self.report["missing_assets"].append(
                {
                    "node": node_name,
                    "reason": "model_path_not_found",
                    "tags_checked": ["model_path", "saved_model_path", "original_path", "asset_path", "file"],
                }
            )
            return None

        model_uri = self._prepare_model_asset(model_source, node_name)
        if not model_uri:
            self.report["unsupported_assets"].append(
                {
                    "node": node_name,
                    "source": model_source,
                    "reason": "model_conversion_failed",
                }
            )
            return None

        material_override = self._extract_material_override(node)

        textures = self._collect_and_copy_texture_overrides(node, model_source)
        # SSBO runtime overrides take precedence over plain hierarchy diffs.
        subnode_overrides = self._collect_ssbo_subnode_transform_overrides(node)
        if not subnode_overrides:
            subnode_overrides = self._collect_subnode_transform_overrides(node, model_source)

        entry: Dict[str, Any] = {
            "id": node_id,
            "name": node_name,
            "kind": "model",
            "parent_id": parent_id,
            "matrix_local_row_major": self._mat4_to_row_major_list(mat),
            "model": {"uri": model_uri},
            "material_override": material_override,
            "source_model_tag": source_tag,
        }
        # Optional sections are only attached when non-empty.
        animation = self._extract_animation_settings(node, model_source)
        if animation:
            entry["animation"] = animation
        scripts = self._extract_script_settings(node)
        if scripts:
            entry["scripts"] = scripts
        if textures:
            entry["texture_overrides"] = textures
        if subnode_overrides:
            entry["subnode_overrides"] = subnode_overrides
        return self._attach_lookup_metadata(node, entry)
|
||
|
||
    def _extract_animation_settings(self, node, model_source: str) -> Dict[str, Any]:
        """Collect animation playback settings stored on *node*.

        Reads runtime python tags first, then persisted string tags.
        Returns {} when the node neither hints at animations nor names a
        clip. (``model_source`` is currently unused by this method.)
        """
        has_animation_hint = False
        try:
            if node.hasTag("has_animations"):
                has_animation_hint = (node.getTag("has_animations").strip().lower() == "true")
        except Exception:
            has_animation_hint = False

        # Explicit runtime python-tag True also counts as an animation hint.
        python_animation_flag = self._safe_get_python_tag(node, "animation")
        if python_animation_flag is True:
            has_animation_hint = True

        # Priority: runtime python tag, then "saved_" tag, then plain tag.
        clip_name = (
            self._safe_get_python_tag(node, "selected_animation")
            or self._safe_get_tag_value(node, "saved_selected_animation")
            or self._safe_get_tag_value(node, "selected_animation")
        )
        clip_name = str(clip_name).strip() if clip_name is not None else ""

        speed_value = (
            self._safe_get_python_tag(node, "anim_speed")
            or self._safe_get_tag_value(node, "saved_anim_speed")
            or self._safe_get_tag_value(node, "anim_speed")
        )
        speed = self._safe_float(speed_value, 1.0)
        # Guard against an effectively zero speed freezing playback.
        if abs(speed) < 1e-4:
            speed = 1.0

        mode = (
            self._safe_get_python_tag(node, "anim_play_mode")
            or self._safe_get_tag_value(node, "saved_anim_play_mode")
            or self._safe_get_tag_value(node, "anim_play_mode")
            or ""
        )
        mode = str(mode).strip().lower()
        if mode not in {"play", "loop", "pause", "stop"}:
            mode = "loop"

        # No hint and no clip -> nothing to export for this node.
        if not (has_animation_hint or clip_name):
            return {}

        return {
            "enabled": True,
            "clip_name": clip_name,
            "speed": speed,
            "mode": mode,
            "autoplay": mode != "stop",
        }
|
||
|
||
def _extract_script_settings(self, node) -> List[Dict[str, Any]]:
|
||
runtime_entries = self._extract_runtime_script_entries(node)
|
||
tag_entries = self._extract_tag_script_entries(node)
|
||
|
||
if not runtime_entries and not tag_entries:
|
||
return []
|
||
|
||
merged_entries: List[Dict[str, Any]] = []
|
||
merged_index: Dict[str, int] = {}
|
||
|
||
for entry in runtime_entries:
|
||
name = str(entry.get("name", "")).strip()
|
||
key = self._normalize_script_name_key(name)
|
||
if not key:
|
||
continue
|
||
merged_index[key] = len(merged_entries)
|
||
merged_entries.append(dict(entry))
|
||
|
||
for entry in tag_entries:
|
||
name = str(entry.get("name", "")).strip()
|
||
key = self._normalize_script_name_key(name)
|
||
if not key:
|
||
continue
|
||
if key in merged_index:
|
||
runtime_entry = merged_entries[merged_index[key]]
|
||
if entry.get("file") and not runtime_entry.get("file"):
|
||
runtime_entry["file"] = entry.get("file")
|
||
continue
|
||
merged_index[key] = len(merged_entries)
|
||
merged_entries.append(dict(entry))
|
||
|
||
return merged_entries
|
||
|
||
    def _extract_runtime_script_entries(self, node) -> List[Dict[str, Any]]:
        """Collect script entries from live script components attached to *node*.

        Uses the world's script manager (when available) to enumerate
        components, resolve their source files, and serialize their public
        parameters. Returns [] whenever any part of the chain is missing
        or fails.
        """
        script_manager = getattr(self.world, "script_manager", None)
        if not script_manager:
            return []

        get_scripts = getattr(script_manager, "get_scripts_on_object", None)
        if not callable(get_scripts):
            return []

        try:
            script_components = get_scripts(node) or []
        except Exception:
            script_components = []

        if not script_components:
            return []

        out: List[Dict[str, Any]] = []
        loader = getattr(script_manager, "loader", None)
        find_script_file = getattr(loader, "find_script_file", None) if loader else None
        build_script_reference = getattr(script_manager, "build_script_reference", None)

        for component in script_components:
            script_instance = getattr(component, "script_instance", None)
            script_name = str(getattr(component, "script_name", "")).strip()
            # Fall back to the instance's class name when no name is set.
            if not script_name and script_instance is not None:
                script_name = script_instance.__class__.__name__
            script_name = str(script_name or "").strip()
            if not script_name:
                continue

            script_file = ""
            if callable(find_script_file):
                try:
                    script_file = str(find_script_file(script_name) or "").strip()
                except Exception:
                    script_file = ""

            entry: Dict[str, Any] = {
                "name": script_name,
                "enabled": bool(getattr(component, "enabled", True)),
            }
            # Prefer a full reference record (file/guid/paths) when available;
            # otherwise fall back to the bare resolved file path.
            if callable(build_script_reference):
                try:
                    reference = build_script_reference(script_name, script_file) or {}
                except Exception:
                    reference = {}
                if isinstance(reference, dict):
                    for key in ("file", "project_relative_path", "script_guid", "relative_path", "path"):
                        value = str(reference.get(key, "") or "").strip()
                        if value:
                            entry[key] = value
            elif script_file:
                entry["file"] = script_file

            params = self._serialize_script_instance_params(script_instance)
            if params:
                entry["params"] = params

            out.append(entry)

        return out
|
||
|
||
def _extract_tag_script_entries(self, node) -> List[Dict[str, Any]]:
|
||
has_scripts = self._safe_get_tag_value(node, "has_scripts").lower() == "true"
|
||
raw = self._safe_get_tag_value(node, "scripts_info")
|
||
if not has_scripts and not raw:
|
||
return []
|
||
if not raw:
|
||
return []
|
||
|
||
try:
|
||
info_list = json.loads(raw)
|
||
except Exception:
|
||
self.report["warnings"].append(
|
||
f"scripts_info 解析失败,已忽略: {node.getName()}"
|
||
)
|
||
return []
|
||
|
||
if not isinstance(info_list, list):
|
||
return []
|
||
|
||
out: List[Dict[str, Any]] = []
|
||
for item in info_list:
|
||
if not isinstance(item, dict):
|
||
continue
|
||
script_name = str(item.get("name", "")).strip()
|
||
if not script_name:
|
||
continue
|
||
entry: Dict[str, Any] = {
|
||
"name": script_name,
|
||
"enabled": bool(item.get("enabled", True)),
|
||
}
|
||
for key in ("file", "project_relative_path", "script_guid", "relative_path", "path"):
|
||
value = str(item.get(key, "") or "").strip()
|
||
if value:
|
||
entry[key] = value
|
||
params = item.get("params")
|
||
if isinstance(params, dict) and params:
|
||
entry["params"] = params
|
||
out.append(entry)
|
||
|
||
return out
|
||
|
||
@staticmethod
|
||
def _normalize_script_name_key(script_name: str) -> str:
|
||
normalized = str(script_name or "").strip().lower()
|
||
if normalized.endswith(".py"):
|
||
normalized = normalized[:-3]
|
||
normalized = re.sub(r"[^a-z0-9]+", "", normalized)
|
||
return normalized
|
||
|
||
def _serialize_script_instance_params(self, script_instance) -> Dict[str, Any]:
|
||
if not script_instance:
|
||
return {}
|
||
|
||
blocked_keys = {
|
||
"enabled",
|
||
"gameObject",
|
||
"transform",
|
||
"world",
|
||
}
|
||
out: Dict[str, Any] = {}
|
||
instance_vars = getattr(script_instance, "__dict__", {}) or {}
|
||
|
||
for key, value in instance_vars.items():
|
||
key_str = str(key).strip()
|
||
if not key_str:
|
||
continue
|
||
if key_str.startswith("_") or key_str in blocked_keys:
|
||
continue
|
||
serialized = self._serialize_script_value(value, depth=0)
|
||
if serialized is None and value is not None:
|
||
continue
|
||
out[key_str] = serialized
|
||
|
||
return out
|
||
|
||
    def _serialize_script_value(self, value: Any, depth: int = 0) -> Any:
        """Recursively convert a script attribute into JSON-friendly data.

        Supports primitives, NodePath references (as ``{"__node_ref__": …}``),
        vector-like objects, lists/tuples and dicts, up to a recursion depth
        of 4. Returns None for unsupported values; callers treat "None for a
        non-None input" as "drop this value".
        """
        if depth > 4:
            return None
        if value is None:
            return None
        if isinstance(value, (bool, int, float, str)):
            return value

        if self._is_nodepath_like(value):
            ref = self._serialize_node_reference(value)
            return {"__node_ref__": ref} if ref else None

        vec_value = self._serialize_vec_like(value)
        if vec_value is not None:
            return vec_value

        if isinstance(value, (list, tuple)):
            out = []
            for item in value:
                serialized = self._serialize_script_value(item, depth + 1)
                # Unsupported items are skipped rather than stored as None.
                if serialized is None and item is not None:
                    continue
                out.append(serialized)
            return out

        if isinstance(value, dict):
            out = {}
            for k, v in value.items():
                key = str(k)
                if not key:
                    continue
                serialized = self._serialize_script_value(v, depth + 1)
                if serialized is None and v is not None:
                    continue
                out[key] = serialized
            return out

        # Any other type is unsupported.
        return None
|
||
|
||
@staticmethod
|
||
def _is_nodepath_like(value: Any) -> bool:
|
||
return (
|
||
value is not None
|
||
and hasattr(value, "getName")
|
||
and hasattr(value, "getParent")
|
||
and hasattr(value, "isEmpty")
|
||
)
|
||
|
||
    def _serialize_node_reference(self, node) -> Dict[str, Any]:
        """Describe a NodePath so the web runtime can re-find it.

        Prefers the exported node id; otherwise walks up the parents to find
        the nearest exported ancestor and records the name chain from that
        ancestor down to *node*. Empty nodes yield {}.
        """
        if self._is_np_empty(node):
            return {}

        ref: Dict[str, Any] = {}

        node_id = self._node_id_by_pointer.get(id(node))
        if node_id:
            ref["node_id"] = node_id
        try:
            node_name = str(node.getName() or "").strip()
        except Exception:
            node_name = ""
        if node_name:
            ref["node_name"] = node_name

        # Direct hit on an exported node: id (+ name) is enough.
        if node_id:
            return ref

        # Fallback: track ancestor exported node + child name chain.
        chain: List[str] = []
        current = node
        for _ in range(64):  # hard cap against pathological hierarchies
            if self._is_np_empty(current):
                break
            current_id = self._node_id_by_pointer.get(id(current))
            if current_id:
                ref["ancestor_node_id"] = current_id
                # Names were appended bottom-up; flip to top-down order.
                chain.reverse()
                if chain:
                    ref["child_name_chain"] = chain
                break
            try:
                current_name = str(current.getName() or "").strip()
            except Exception:
                current_name = ""
            if current_name:
                chain.append(current_name)
            try:
                current = current.getParent()
            except Exception:
                break

        return ref
|
||
|
||
@staticmethod
|
||
def _serialize_vec_like(value: Any) -> Optional[List[float]]:
|
||
get_components = []
|
||
for attr in ("getX", "getY", "getZ", "getW"):
|
||
getter = getattr(value, attr, None)
|
||
if callable(getter):
|
||
get_components.append(getter)
|
||
if not get_components:
|
||
return None
|
||
|
||
out: List[float] = []
|
||
for getter in get_components:
|
||
try:
|
||
out.append(float(getter()))
|
||
except Exception:
|
||
return None
|
||
return out
|
||
|
||
    def _collect_ssbo_subnode_transform_overrides(self, model_node) -> Dict[str, Any]:
        """
        Collect changed subnode transforms from SSBO controller runtime state.
        This path is required because SSBO edits may not exist in model_node's
        ordinary child hierarchy.

        Returns {} when there is no applicable controller or nothing changed;
        otherwise a payload with "by_index" (and optionally "by_name") override
        lists whose matrices are expressed in model-root space.
        """
        ssbo_editor = getattr(self.world, "ssbo_editor", None)
        controller = getattr(ssbo_editor, "controller", None) if ssbo_editor else None
        controller_model = getattr(controller, "model", None) if controller else None

        if not controller or self._is_np_empty(controller_model) or self._is_np_empty(model_node):
            return {}

        # The controller may wrap a different NodePath instance than the node
        # being exported; accept it when both point at the same source asset
        # or at least share the node name.
        if not self._nodepath_equivalent(controller_model, model_node):
            model_source_key = self._nodepath_source_key(model_node)
            controller_source_key = self._nodepath_source_key(controller_model)
            same_source = bool(model_source_key and controller_source_key and model_source_key == controller_source_key)
            same_name = False
            try:
                same_name = (controller_model.getName() == model_node.getName())
            except Exception:
                same_name = False
            if not same_source and not same_name:
                return {}

        id_to_object_np = getattr(controller, "id_to_object_np", {})
        id_to_name = getattr(controller, "id_to_name", {})
        global_transforms = getattr(controller, "global_transforms", [])
        tree_root_key = str(getattr(controller, "tree_root_key", "0") or "0")

        if not isinstance(id_to_object_np, dict) or not isinstance(id_to_name, dict):
            return {}

        # Changed matrices keyed by child-index path and by name path; each
        # may carry a baseline matrix for delta replay on the web side.
        by_index_map: Dict[Tuple[int, ...], List[float]] = {}
        by_index_base_map: Dict[Tuple[int, ...], List[float]] = {}
        by_name_map: Dict[Tuple[str, ...], List[float]] = {}
        by_name_base_map: Dict[Tuple[str, ...], List[float]] = {}

        for gid, obj_np in id_to_object_np.items():
            if self._is_np_empty(obj_np):
                continue

            key = id_to_name.get(gid)
            if key is None:
                continue
            key = str(key)
            if not key:
                continue

            index_path = self._parse_ssbo_tree_key_to_index_path(key, tree_root_key)
            if not index_path:
                continue

            # Current matrix in model-root space; try both naming conventions.
            try:
                cur_mat = obj_np.getMat(controller_model)
            except Exception:
                try:
                    cur_mat = obj_np.get_mat(controller_model)
                except Exception:
                    continue

            base_mat = None
            try:
                if int(gid) < len(global_transforms):
                    base_mat = global_transforms[int(gid)]
            except Exception:
                base_mat = None

            # Unchanged against the recorded baseline -> nothing to export.
            if base_mat is not None and self._mat4_objects_close(cur_mat, base_mat):
                continue

            mat_values = self._mat4_to_row_major_list(cur_mat)
            if not mat_values or self._is_identity_matrix_list(mat_values):
                continue

            base_values = None
            if base_mat is not None:
                try:
                    base_values = self._mat4_to_row_major_list(base_mat)
                except Exception:
                    base_values = None
                if base_values and len(base_values) != 16:
                    base_values = None

            existing = by_index_map.get(index_path)
            if existing and not self._matrix_list_close(existing, mat_values):
                # Multiple geoms mapped to same tree node with different mats:
                # keep first one and warn once.
                self.report["warnings"].append(
                    f"SSBO 子节点矩阵冲突,保留首个值: {model_node.getName()} path={list(index_path)}"
                )
                continue
            if not existing:
                by_index_map[index_path] = mat_values
                if base_values:
                    by_index_base_map[index_path] = base_values

            name_path = self._ssbo_index_path_to_name_path(controller, index_path, tree_root_key)
            if name_path:
                # Export both the raw and the normalized name path when they
                # differ, so the web runtime can match either form.
                raw_name_path = tuple(str(v).strip() for v in name_path if str(v).strip())
                normalized_name_path = self._normalize_name_path(name_path)
                export_paths = []
                if raw_name_path:
                    export_paths.append(raw_name_path)
                if normalized_name_path and normalized_name_path != raw_name_path:
                    export_paths.append(normalized_name_path)

                for p in export_paths:
                    by_name_map[p] = mat_values
                    if base_values and (p not in by_name_base_map):
                        by_name_base_map[p] = base_values

        if not by_index_map:
            return {}

        # Deterministic output order: shallow paths first, then lexicographic.
        by_index = []
        for path, mat in sorted(by_index_map.items(), key=lambda x: (len(x[0]), x[0])):
            item = {
                "path": [int(v) for v in path],
                "matrix_local_row_major": mat,
            }
            base_values = by_index_base_map.get(path)
            if base_values:
                item["base_matrix_model_root_row_major"] = base_values
            by_index.append(item)

        by_name = []
        for path, mat in sorted(by_name_map.items(), key=lambda x: (len(x[0]), x[0])):
            item = {
                "path": [str(v) for v in path],
                "matrix_local_row_major": mat,
            }
            base_values = by_name_base_map.get(path)
            if base_values:
                item["base_matrix_model_root_row_major"] = base_values
            by_name.append(item)

        payload = {
            "source": "ssbo",
            # SSBO runtime stores matrices in model-root space
            # (obj_np.getMat(controller.model)).
            "matrix_space": "model_root",
            "by_index": by_index,
        }
        if by_name:
            payload["by_name"] = by_name
        self.report["warnings"].append(
            f"SSBO子节点变换已导出: {model_node.getName()} ({len(by_index)} items)"
        )
        return payload
|
||
|
||
@staticmethod
|
||
def _is_np_empty(np) -> bool:
|
||
if not np:
|
||
return True
|
||
try:
|
||
return np.isEmpty()
|
||
except Exception:
|
||
try:
|
||
return np.is_empty()
|
||
except Exception:
|
||
return True
|
||
|
||
@staticmethod
|
||
def _nodepath_equivalent(a, b) -> bool:
|
||
try:
|
||
if a == b:
|
||
return True
|
||
except Exception:
|
||
pass
|
||
try:
|
||
return a.node() == b.node()
|
||
except Exception:
|
||
return False
|
||
|
||
@staticmethod
|
||
def _nodepath_source_key(np) -> str:
|
||
if not np:
|
||
return ""
|
||
for tag_name in ("saved_model_path", "model_path", "original_path", "file"):
|
||
try:
|
||
if np.hasTag(tag_name):
|
||
value = (np.getTag(tag_name) or "").strip()
|
||
if value:
|
||
return value.replace("\\", "/").lower()
|
||
except Exception:
|
||
continue
|
||
return ""
|
||
|
||
@staticmethod
|
||
def _parse_ssbo_tree_key_to_index_path(key: str, tree_root_key: str) -> Tuple[int, ...]:
|
||
text = str(key).strip()
|
||
if not text:
|
||
return tuple()
|
||
parts = [p for p in text.split("/") if p != ""]
|
||
if not parts:
|
||
return tuple()
|
||
# keys are like "0/1/2"; drop tree root segment.
|
||
if parts and parts[0] == str(tree_root_key):
|
||
parts = parts[1:]
|
||
out: List[int] = []
|
||
for p in parts:
|
||
try:
|
||
out.append(int(p))
|
||
except Exception:
|
||
return tuple()
|
||
return tuple(out)
|
||
|
||
def _ssbo_index_path_to_name_path(self, controller, index_path: Tuple[int, ...], tree_root_key: str) -> Tuple[str, ...]:
|
||
tree_nodes = getattr(controller, "tree_nodes", {}) or {}
|
||
if str(tree_root_key) not in tree_nodes:
|
||
return tuple()
|
||
|
||
parent_key = str(tree_root_key)
|
||
out: List[str] = []
|
||
for child_slot in index_path:
|
||
parent = tree_nodes.get(parent_key)
|
||
if not isinstance(parent, dict):
|
||
return tuple()
|
||
children = parent.get("children", [])
|
||
if not isinstance(children, list):
|
||
return tuple()
|
||
if child_slot < 0 or child_slot >= len(children):
|
||
return tuple()
|
||
|
||
child_key = children[child_slot]
|
||
child = tree_nodes.get(child_key, {})
|
||
child_name = str(child.get("name", ""))
|
||
|
||
# Build occurrence index among same-name siblings.
|
||
occur = 0
|
||
for i in range(child_slot):
|
||
prev_key = children[i]
|
||
prev_name = str(tree_nodes.get(prev_key, {}).get("name", ""))
|
||
if prev_name == child_name:
|
||
occur += 1
|
||
out.append(f"{child_name}#{occur}")
|
||
parent_key = child_key
|
||
|
||
return tuple(out)
|
||
|
||
@staticmethod
|
||
def _normalize_name_path(path: Tuple[str, ...]) -> Tuple[str, ...]:
|
||
"""Drop empty segments only; keep duplicate segments to preserve depth."""
|
||
normalized: List[str] = []
|
||
for seg in path:
|
||
s = str(seg).strip()
|
||
if not s:
|
||
continue
|
||
normalized.append(s)
|
||
return tuple(normalized)
|
||
|
||
    def _collect_subnode_transform_overrides(self, root_node, model_source: str) -> Dict[str, Any]:
        """
        Collect changed local transforms for descendants under a model root.
        Only export deltas against a baseline loaded from model_source to avoid
        replaying all intrinsic node transforms (which can cause double transforms).

        Returns {} when there is nothing to export or no reliable baseline;
        otherwise parallel "by_index"/"by_name" override lists in
        parent-local matrix space.
        """
        try:
            if not root_node or root_node.isEmpty():
                return {}
        except Exception:
            return {}

        current_snapshot = self._snapshot_subnode_local_matrices(root_node)
        if not current_snapshot["by_index"]:
            return {}

        baseline_snapshot = self._load_baseline_subnode_snapshot(model_source)
        if not baseline_snapshot["by_index"] and not baseline_snapshot["by_name"]:
            # No reliable baseline -> skip overrides to avoid corrupt scale/rotation.
            self.report["warnings"].append(
                f"跳过子节点变换导出(缺少基线): {root_node.getName() if hasattr(root_node, 'getName') else 'unknown'}"
            )
            return {}

        changed_by_index: List[Dict[str, Any]] = []
        changed_by_name: List[Dict[str, Any]] = []

        for index_path, payload in current_snapshot["by_index"].items():
            current_mat = payload["matrix_local_row_major"]
            name_path = payload["name_path"]

            # Match the baseline by index path first, then by name path.
            baseline_payload = baseline_snapshot["by_index"].get(index_path)
            if baseline_payload is None:
                baseline_payload = baseline_snapshot["by_name"].get(name_path)

            baseline_mat = baseline_payload["matrix_local_row_major"] if baseline_payload else None
            if baseline_mat is not None and self._matrix_list_close(current_mat, baseline_mat):
                continue
            # Safety: identity overrides often indicate hierarchy mismatch and can
            # destroy source glTF built-in transforms when replayed in web runtime.
            if self._is_identity_matrix_list(current_mat):
                continue

            changed_by_index.append(
                {
                    "path": [int(v) for v in index_path],
                    "matrix_local_row_major": current_mat,
                }
            )
            changed_by_name.append(
                {
                    "path": [str(v) for v in name_path],
                    "matrix_local_row_major": current_mat,
                }
            )

        if not changed_by_index and not changed_by_name:
            return {}

        return {
            "matrix_space": "local_parent",
            "by_index": changed_by_index,
            "by_name": changed_by_name,
        }
|
||
|
||
    def _snapshot_subnode_local_matrices(self, root_node) -> Dict[str, Dict[Tuple[Any, ...], Dict[str, Any]]]:
        """Record every descendant's parent-relative matrix.

        Returns ``{"by_index": {...}, "by_name": {...}}`` where both maps
        share the same payload objects; keys are child-index tuples and
        "name#occurrence" tuples respectively. The root itself (empty path)
        is not recorded.
        """
        snapshot: Dict[str, Dict[Tuple[Any, ...], Dict[str, Any]]] = {
            "by_index": {},
            "by_name": {},
        }

        try:
            if not root_node or root_node.isEmpty():
                return snapshot
        except Exception:
            return snapshot

        def walk(node, index_path: Tuple[int, ...], name_path: Tuple[str, ...]) -> None:
            # Record every node except the root (root has an empty index path).
            if index_path:
                parent = None
                try:
                    parent = node.getParent()
                except Exception:
                    parent = None

                # Matrix relative to the parent when one exists.
                if parent and not parent.isEmpty():
                    local_mat = node.getMat(parent)
                else:
                    local_mat = node.getMat()

                matrix_row_major = self._mat4_to_row_major_list(local_mat)
                payload = {
                    "index_path": index_path,
                    "name_path": name_path,
                    "matrix_local_row_major": matrix_row_major,
                }
                snapshot["by_index"][index_path] = payload
                snapshot["by_name"][name_path] = payload

            # Recurse with "name#occurrence" segments so duplicate-named
            # siblings stay distinguishable.
            name_count: Dict[str, int] = {}
            for child_index, child in enumerate(node.getChildren()):
                child_name = ""
                try:
                    child_name = child.getName() or ""
                except Exception:
                    child_name = ""
                occur = name_count.get(child_name, 0)
                name_count[child_name] = occur + 1
                walk(
                    child,
                    index_path + (child_index,),
                    name_path + (f"{child_name}#{occur}",),
                )

        try:
            walk(root_node, tuple(), tuple())
        except Exception as exc:
            # Partial snapshots are still returned; the failure is reported.
            self.report["warnings"].append(
                f"采集子节点矩阵失败: {root_node.getName() if hasattr(root_node, 'getName') else 'unknown'} ({exc})"
            )

        return snapshot
|
||
|
||
    def _load_baseline_subnode_snapshot(self, model_source: str) -> Dict[str, Dict[Tuple[Any, ...], Dict[str, Any]]]:
        """Load *model_source* fresh and snapshot its intrinsic subnode matrices.

        Results are cached per normalized path, and the temporarily loaded
        model is always removed again. Failures yield an empty snapshot (and
        a warning) so callers skip delta export instead of corrupting
        transforms.
        """
        key = os.path.normpath(model_source or "")
        if not key:
            return {"by_index": {}, "by_name": {}}
        if key in self._baseline_subnode_cache:
            return self._baseline_subnode_cache[key]

        snapshot: Dict[str, Dict[Tuple[Any, ...], Dict[str, Any]]] = {"by_index": {}, "by_name": {}}
        loader = getattr(self.world, "loader", None)
        if loader is None:
            # Cache the empty result so the missing loader is only probed once.
            self._baseline_subnode_cache[key] = snapshot
            return snapshot

        baseline_root = None
        try:
            baseline_root = loader.loadModel(key)
            if baseline_root and not baseline_root.isEmpty():
                snapshot = self._snapshot_subnode_local_matrices(baseline_root)
        except Exception as exc:
            self.report["warnings"].append(f"加载基线模型失败: {key} ({exc})")
            snapshot = {"by_index": {}, "by_name": {}}
        finally:
            # Always drop the temporary baseline model from the scene graph.
            try:
                if baseline_root and not baseline_root.isEmpty():
                    baseline_root.removeNode()
            except Exception:
                pass

        self._baseline_subnode_cache[key] = snapshot
        return snapshot
|
||
|
||
@staticmethod
|
||
def _matrix_list_close(a: List[float], b: List[float], eps: float = 1e-5) -> bool:
|
||
if (not isinstance(a, list)) or (not isinstance(b, list)) or len(a) != 16 or len(b) != 16:
|
||
return False
|
||
for i in range(16):
|
||
try:
|
||
if abs(float(a[i]) - float(b[i])) > eps:
|
||
return False
|
||
except Exception:
|
||
return False
|
||
return True
|
||
|
||
@staticmethod
|
||
def _mat4_objects_close(a, b, eps: float = 1e-5) -> bool:
|
||
if a is None or b is None:
|
||
return False
|
||
try:
|
||
for r in range(4):
|
||
for c in range(4):
|
||
if abs(float(a.getCell(r, c)) - float(b.getCell(r, c))) > eps:
|
||
return False
|
||
return True
|
||
except Exception:
|
||
return False
|
||
|
||
@staticmethod
|
||
def _is_identity_matrix_list(values: List[float], eps: float = 1e-6) -> bool:
|
||
if not isinstance(values, list) or len(values) != 16:
|
||
return False
|
||
identity = [
|
||
1.0, 0.0, 0.0, 0.0,
|
||
0.0, 1.0, 0.0, 0.0,
|
||
0.0, 0.0, 1.0, 0.0,
|
||
0.0, 0.0, 0.0, 1.0,
|
||
]
|
||
for i in range(16):
|
||
try:
|
||
if abs(float(values[i]) - identity[i]) > eps:
|
||
return False
|
||
except Exception:
|
||
return False
|
||
return True
|
||
|
||
    def _build_light_node_entry(self, node, kind: str) -> Optional[Dict[str, Any]]:
        """Build a scene-manifest entry for a light node.

        Args:
            node: Scene-graph node previously registered in ``_node_id_by_pointer``.
            kind: Exported light kind string stored verbatim in the entry.

        Returns:
            The manifest dict (via ``_attach_lookup_metadata``), or None when
            the node was never registered for this export.
        """
        node_id = self._node_id_by_pointer.get(id(node))
        if not node_id:
            return None

        parent_id, mat = self._get_parent_and_matrix(node)
        node_name = node.getName() or node_id

        # Optional runtime light object; when attached it overrides the tag values.
        light_obj = node.getPythonTag("rp_light_object") if node.hasPythonTag("rp_light_object") else None

        # Defaults, possibly replaced by string tags on the node.
        color = [1.0, 1.0, 1.0]
        energy = self._safe_float(node.getTag("light_energy"), 5000.0) if node.hasTag("light_energy") else 5000.0
        radius = self._safe_float(node.getTag("light_radius"), 30.0) if node.hasTag("light_radius") else 30.0
        spot_fov = self._safe_float(node.getTag("light_fov"), 45.0) if node.hasTag("light_fov") else 45.0
        inner_ratio = 0.4

        if light_obj is not None:
            # Each attribute probe is isolated so one unreadable field cannot
            # discard the remaining readable parameters.
            try:
                c = getattr(light_obj, "color", None)
                if c is not None:
                    color = [float(c[0]), float(c[1]), float(c[2])]
            except Exception:
                pass
            try:
                energy = float(getattr(light_obj, "energy", energy))
            except Exception:
                pass
            try:
                radius = float(getattr(light_obj, "radius", radius))
            except Exception:
                pass
            try:
                # fov / inner_radius only exist on spot-like lights.
                if hasattr(light_obj, "fov"):
                    spot_fov = float(getattr(light_obj, "fov", spot_fov))
            except Exception:
                pass
            try:
                if hasattr(light_obj, "inner_radius"):
                    inner_ratio = float(getattr(light_obj, "inner_radius", inner_ratio))
            except Exception:
                pass

        # Editor energy units -> WebGL intensity (scale constant on the class).
        intensity = max(0.0, energy * self.ENERGY_TO_INTENSITY_SCALE)

        entry = {
            "id": node_id,
            "name": node_name,
            "kind": kind,
            "parent_id": parent_id,
            "matrix_local_row_major": self._mat4_to_row_major_list(mat),
            "light": {
                "color": color,
                "intensity": intensity,
                # Clamp to sane ranges expected by the WebGL runtime.
                "range": max(0.0, radius),
                "spot_angle_deg": max(1.0, spot_fov),
                "inner_cone_ratio": max(0.0, min(1.0, inner_ratio)),
            },
        }
        return self._attach_lookup_metadata(node, entry)
|
||
|
||
    def _build_ground_node_entry(self, node) -> Optional[Dict[str, Any]]:
        """Build a scene-manifest entry for the ground-plane node.

        Returns the manifest dict (via ``_attach_lookup_metadata``), or None
        when the node was never registered in ``_node_id_by_pointer``.
        """
        node_id = self._node_id_by_pointer.get(id(node))
        if not node_id:
            return None

        parent_id, mat = self._get_parent_and_matrix(node)
        node_name = node.getName() or "ground"

        # Defaults used when the node carries no readable material.
        color = [0.8, 0.8, 0.8, 1.0]
        roughness = 1.0
        metallic = 0.0

        material = None
        try:
            material = node.getMaterial()
        except Exception:
            material = None

        if material:
            # Probes are isolated: a failing getter keeps the default value.
            try:
                if material.hasBaseColor():
                    c = material.getBaseColor()
                    color = [float(c[0]), float(c[1]), float(c[2]), float(c[3])]
            except Exception:
                pass
            try:
                roughness = float(material.getRoughness())
            except Exception:
                pass
            try:
                metallic = float(material.getMetallic())
            except Exception:
                pass

        entry = {
            "id": node_id,
            "name": node_name,
            "kind": "ground",
            "parent_id": parent_id,
            "matrix_local_row_major": self._mat4_to_row_major_list(mat),
            "ground": {
                # NOTE(review): extent is hard-coded to 100x100 — presumably the
                # editor's default ground size; confirm it matches the source node.
                "width": 100.0,
                "height": 100.0,
            },
            "material_override": {
                # Ground alpha is forced opaque regardless of the material color.
                "base_color": [color[0], color[1], color[2], 1.0],
                "roughness": roughness,
                "metallic": metallic,
                "opacity": 1.0,
            },
        }
        return self._attach_lookup_metadata(node, entry)
|
||
|
||
def _resolve_model_source(self, node) -> Tuple[Optional[str], str]:
|
||
tags = ["model_path", "saved_model_path", "original_path", "asset_path", "file"]
|
||
for tag in tags:
|
||
if not node.hasTag(tag):
|
||
continue
|
||
value = (node.getTag(tag) or "").strip()
|
||
if not value:
|
||
continue
|
||
resolved = self._resolve_asset_path(value)
|
||
if resolved:
|
||
return resolved, tag
|
||
return None, ""
|
||
|
||
def _resolve_asset_path(self, candidate: str) -> Optional[str]:
|
||
candidate = (candidate or "").strip()
|
||
if not candidate:
|
||
return None
|
||
|
||
if candidate.startswith(("http://", "https://")):
|
||
return None
|
||
|
||
# Absolute path: use directly.
|
||
if os.path.isabs(candidate):
|
||
abs_candidate = os.path.normpath(candidate)
|
||
if os.path.exists(abs_candidate):
|
||
return abs_candidate
|
||
return None
|
||
|
||
# Relative path resolution policy.
|
||
search_roots = [
|
||
self._project_path,
|
||
os.path.join(self._project_path, "models"),
|
||
os.path.join(self._project_path, "Resources"),
|
||
os.path.join(self._project_path, "scenes", "resources"),
|
||
os.getcwd(),
|
||
]
|
||
|
||
for root in search_roots:
|
||
full = os.path.normpath(os.path.join(root, candidate))
|
||
if os.path.exists(full):
|
||
return full
|
||
|
||
return None
|
||
|
||
    def _prepare_model_asset(self, source_path: str, node_name: str) -> Optional[str]:
        """Copy or convert a model file into the export's models directory.

        glTF/GLB files are copied verbatim; Panda3D formats (.bam/.egg) are
        first converted to OBJ via external CLI tools, then everything else is
        converted to GLB via the scene manager's converters.

        Returns the export-relative URI of the resulting model, or None when
        any conversion step fails (failures are recorded in the report).
        """
        source_path = os.path.normpath(source_path)
        ext = os.path.splitext(source_path)[1].lower()

        # Already web-ready: copy without conversion.
        if ext in {".gltf", ".glb"}:
            return self._copy_asset_to_models(source_path)

        # Intermediates live in a temp dir that is removed when the block exits;
        # _convert_to_glb must therefore run while the dir still exists.
        with tempfile.TemporaryDirectory(prefix="eg_webgl_conv_") as temp_dir:
            conversion_source = source_path

            # NOTE(review): relies on external `bam2egg` / `egg2obj` CLI tools
            # being on PATH — _run_tool_command records a warning when absent.
            if ext == ".bam":
                # .bam -> .egg -> .obj pipeline.
                temp_egg = os.path.join(temp_dir, self._sanitize_filename(Path(source_path).stem) + ".egg")
                if not self._run_tool_command(["bam2egg", source_path, temp_egg], timeout=120):
                    return None

                temp_obj = os.path.join(temp_dir, self._sanitize_filename(Path(source_path).stem) + ".obj")
                if not self._run_tool_command(["egg2obj", temp_egg, temp_obj], timeout=120):
                    return None
                conversion_source = temp_obj

            elif ext == ".egg":
                # .egg -> .obj, single step.
                temp_obj = os.path.join(temp_dir, self._sanitize_filename(Path(source_path).stem) + ".obj")
                if not self._run_tool_command(["egg2obj", source_path, temp_obj], timeout=120):
                    return None
                conversion_source = temp_obj

            target_filename = self._unique_filename(node_name or Path(source_path).stem, ".glb")
            target_abs = os.path.join(self._assets_model_dir, target_filename)

            converter = self._convert_to_glb(conversion_source, target_abs)
            if not converter:
                return None

            # Record provenance; "converted_from" may point at a (now deleted)
            # temp intermediate for .bam/.egg sources.
            self.report["converted_assets"].append(
                {
                    "source": source_path,
                    "converted_from": conversion_source,
                    "target": os.path.relpath(target_abs, self._output_root).replace("\\", "/"),
                    "converter": converter,
                }
            )
            return os.path.relpath(target_abs, self._output_root).replace("\\", "/")
|
||
|
||
def _convert_to_glb(self, source_path: str, target_path: str) -> str:
|
||
scene_manager = self.scene_manager
|
||
if scene_manager is None:
|
||
return ""
|
||
|
||
conversion_order = [
|
||
"_convertWithBlender",
|
||
"_convertWithFBX2glTF",
|
||
"_convertWithAssimp",
|
||
]
|
||
|
||
for method_name in conversion_order:
|
||
method = getattr(scene_manager, method_name, None)
|
||
if not callable(method):
|
||
continue
|
||
try:
|
||
ok = method(source_path, target_path, None)
|
||
except TypeError:
|
||
ok = method(source_path, target_path)
|
||
except Exception:
|
||
ok = False
|
||
|
||
if ok and os.path.exists(target_path):
|
||
return method_name
|
||
|
||
return ""
|
||
|
||
def _run_tool_command(self, args: List[str], timeout: int) -> bool:
|
||
executable = shutil.which(args[0])
|
||
if not executable:
|
||
self.report["warnings"].append(f"缺少转换工具: {args[0]}")
|
||
return False
|
||
|
||
try:
|
||
result = subprocess.run(
|
||
args,
|
||
check=False,
|
||
capture_output=True,
|
||
text=True,
|
||
timeout=timeout,
|
||
)
|
||
except Exception as exc:
|
||
self.report["warnings"].append(f"执行命令失败: {' '.join(args)} ({exc})")
|
||
return False
|
||
|
||
if result.returncode != 0:
|
||
stderr = (result.stderr or "").strip()
|
||
stdout = (result.stdout or "").strip()
|
||
detail = stderr or stdout or f"exit={result.returncode}"
|
||
self.report["warnings"].append(f"命令失败: {' '.join(args)} -> {detail}")
|
||
return False
|
||
return True
|
||
|
||
def _copy_asset_to_models(self, source_path: str) -> str:
|
||
source_path = os.path.normpath(source_path)
|
||
if source_path in self._copied_source_to_uri:
|
||
return self._copied_source_to_uri[source_path]
|
||
|
||
ext = os.path.splitext(source_path)[1].lower() or ".bin"
|
||
safe_name = self._unique_filename(Path(source_path).stem, ext)
|
||
target_abs = os.path.join(self._assets_model_dir, safe_name)
|
||
shutil.copy2(source_path, target_abs)
|
||
|
||
rel_uri = os.path.relpath(target_abs, self._output_root).replace("\\", "/")
|
||
self._copied_source_to_uri[source_path] = rel_uri
|
||
self.report["copied_assets"].append(
|
||
{
|
||
"source": source_path,
|
||
"target": rel_uri,
|
||
"type": "model",
|
||
}
|
||
)
|
||
return rel_uri
|
||
|
||
def _collect_and_copy_texture_overrides(self, node, model_source: str) -> List[Dict[str, Any]]:
|
||
textures: List[Dict[str, Any]] = []
|
||
texture_pairs = self._extract_texture_stage_and_paths(node)
|
||
if not texture_pairs:
|
||
return textures
|
||
|
||
model_dir = os.path.dirname(model_source)
|
||
for stage_name, tex_path in texture_pairs:
|
||
abs_path = self._resolve_texture_path(tex_path, model_dir)
|
||
if not abs_path:
|
||
continue
|
||
rel_uri = self._copy_asset_to_textures(abs_path)
|
||
if rel_uri:
|
||
textures.append({"stage": stage_name, "uri": rel_uri})
|
||
|
||
return textures
|
||
|
||
    def _extract_texture_stage_and_paths(self, node) -> List[Tuple[str, str]]:
        """Collect unique ``(stage_name, texture_path)`` pairs from a node tree.

        Scans the node itself plus every GeomNode beneath it. All scene-graph
        API calls are defensively wrapped; any node/stage/texture that cannot
        be read is silently skipped.
        """
        pairs: List[Tuple[str, str]] = []
        seen: set = set()

        # Textures may live on the root or on any GeomNode below it.
        nodes_to_scan = [node]
        try:
            geom_paths = node.findAllMatches("**/+GeomNode")
            for i in range(geom_paths.getNumPaths()):
                nodes_to_scan.append(geom_paths.getPath(i))
        except Exception:
            pass

        for np in nodes_to_scan:
            try:
                stages = np.findAllTextureStages()
            except Exception:
                continue

            try:
                stage_count = stages.getNumTextureStages()
            except Exception:
                stage_count = 0

            for idx in range(stage_count):
                try:
                    stage = stages.getTextureStage(idx)
                    texture = np.getTexture(stage)
                except Exception:
                    continue
                if not texture:
                    continue

                # Only textures with a resolvable on-disk path are exportable.
                tex_path = ""
                try:
                    if texture.hasFullpath():
                        tex_path = texture.getFullpath().toOsSpecific()
                except Exception:
                    tex_path = ""

                if not tex_path:
                    continue

                stage_name = stage.getName() if stage else f"stage_{idx}"
                # Deduplicate on the (stage, path) pair across all scanned nodes.
                key = (stage_name, tex_path)
                if key in seen:
                    continue
                seen.add(key)
                pairs.append(key)

        return pairs
|
||
|
||
def _resolve_texture_path(self, path_hint: str, model_dir: str) -> Optional[str]:
|
||
path_hint = (path_hint or "").strip()
|
||
if not path_hint:
|
||
return None
|
||
|
||
if os.path.isabs(path_hint) and os.path.exists(path_hint):
|
||
return os.path.normpath(path_hint)
|
||
|
||
search_roots = [
|
||
model_dir,
|
||
self._project_path,
|
||
os.path.join(self._project_path, "scenes", "resources"),
|
||
os.getcwd(),
|
||
]
|
||
for root in search_roots:
|
||
full = os.path.normpath(os.path.join(root, path_hint))
|
||
if os.path.exists(full):
|
||
return full
|
||
|
||
return None
|
||
|
||
def _copy_asset_to_textures(self, source_path: str) -> Optional[str]:
|
||
source_path = os.path.normpath(source_path)
|
||
cache_key = f"texture::{source_path}"
|
||
if cache_key in self._copied_source_to_uri:
|
||
return self._copied_source_to_uri[cache_key]
|
||
|
||
ext = os.path.splitext(source_path)[1].lower() or ".png"
|
||
safe_name = self._unique_filename(Path(source_path).stem, ext)
|
||
target_abs = os.path.join(self._assets_texture_dir, safe_name)
|
||
try:
|
||
shutil.copy2(source_path, target_abs)
|
||
except Exception:
|
||
return None
|
||
|
||
rel_uri = os.path.relpath(target_abs, self._output_root).replace("\\", "/")
|
||
self._copied_source_to_uri[cache_key] = rel_uri
|
||
self.report["copied_assets"].append(
|
||
{
|
||
"source": source_path,
|
||
"target": rel_uri,
|
||
"type": "texture",
|
||
}
|
||
)
|
||
return rel_uri
|
||
|
||
    def _extract_material_override(self, node) -> Dict[str, Any]:
        """Extract PBR material parameters from a node for the manifest.

        Reads base color / roughness / metallic from the node's material when
        one is attached, otherwise falls back to the node's flat color. Every
        getter is defensively wrapped, so unreadable fields keep their
        defaults. Always returns a complete override dict.
        """
        # Defaults when nothing readable is attached.
        base_color = [1.0, 1.0, 1.0, 1.0]
        roughness = 0.5
        metallic = 0.0

        material = None
        try:
            material = node.getMaterial()
        except Exception:
            material = None

        if material:
            try:
                if material.hasBaseColor():
                    c = material.getBaseColor()
                    base_color = [float(c[0]), float(c[1]), float(c[2]), float(c[3])]
            except Exception:
                pass
            try:
                roughness = float(material.getRoughness())
            except Exception:
                pass
            try:
                metallic = float(material.getMetallic())
            except Exception:
                pass
        else:
            # No material: the node's flat color carries the tint, if any.
            try:
                c = node.getColor()
                base_color = [float(c[0]), float(c[1]), float(c[2]), float(c[3])]
            except Exception:
                pass

        # Alpha channel drives opacity; snap near-extremes to exact 0/1 so the
        # WebGL side can pick the opaque/transparent render path cleanly.
        opacity = max(0.0, min(1.0, float(base_color[3])))
        if opacity >= 0.999:
            opacity = 1.0
        elif opacity <= 0.001:
            opacity = 0.0

        return {
            "base_color": base_color,
            "roughness": roughness,
            "metallic": metallic,
            "opacity": opacity,
        }
|
||
|
||
    def _get_parent_and_matrix(self, node) -> Tuple[Optional[str], Any]:
        """Return ``(parent_id, local_matrix)`` for a node.

        Tries, in order: the matrix relative to a registered parent, the
        matrix relative to the scene root, the node's own matrix, and finally
        the transform's matrix. ``parent_id`` is None unless the parent was
        registered for this export.
        """
        render = getattr(self.world, "render", None)
        parent_id = None

        try:
            parent = node.getParent()
        except Exception:
            parent = None

        # NOTE(review): keying on id(parent) assumes getParent() returns the
        # same Python object that was registered in _node_id_by_pointer;
        # Panda3D may hand back a fresh NodePath wrapper — verify this lookup
        # actually hits for parented nodes.
        if parent and not parent.isEmpty() and id(parent) in self._node_id_by_pointer:
            parent_id = self._node_id_by_pointer[id(parent)]
            try:
                return parent_id, node.getMat(parent)
            except Exception:
                pass

        # Fallback: express the transform relative to the scene root.
        if render:
            try:
                return None, node.getMat(render)
            except Exception:
                pass

        # Last resorts: the node's own local matrix, then its transform state.
        try:
            return None, node.getMat()
        except Exception:
            return None, node.getTransform().getMat()
|
||
|
||
@staticmethod
|
||
def _mat4_to_row_major_list(mat4_obj) -> List[float]:
|
||
try:
|
||
values = []
|
||
for r in range(4):
|
||
for c in range(4):
|
||
values.append(float(mat4_obj.getCell(r, c)))
|
||
return values
|
||
except Exception:
|
||
return [
|
||
1.0,
|
||
0.0,
|
||
0.0,
|
||
0.0,
|
||
0.0,
|
||
1.0,
|
||
0.0,
|
||
0.0,
|
||
0.0,
|
||
0.0,
|
||
1.0,
|
||
0.0,
|
||
0.0,
|
||
0.0,
|
||
0.0,
|
||
1.0,
|
||
]
|
||
|
||
@staticmethod
|
||
def _safe_float(value: Any, default: float) -> float:
|
||
try:
|
||
return float(value)
|
||
except Exception:
|
||
return float(default)
|
||
|
||
@staticmethod
|
||
def _safe_get_python_tag(node, tag_name: str) -> Any:
|
||
try:
|
||
if hasattr(node, "hasPythonTag") and node.hasPythonTag(tag_name):
|
||
return node.getPythonTag(tag_name)
|
||
except Exception:
|
||
pass
|
||
try:
|
||
value = node.getPythonTag(tag_name)
|
||
return value
|
||
except Exception:
|
||
return None
|
||
|
||
@staticmethod
|
||
def _safe_get_tag_value(node, tag_name: str) -> str:
|
||
try:
|
||
if node.hasTag(tag_name):
|
||
return (node.getTag(tag_name) or "").strip()
|
||
except Exception:
|
||
return ""
|
||
return ""
|
||
|
||
def _unique_filename(self, stem: str, suffix: str) -> str:
|
||
safe_stem = self._sanitize_filename(stem) or "asset"
|
||
key = f"{safe_stem}{suffix.lower()}"
|
||
index = self._name_counter.get(key, 0)
|
||
self._name_counter[key] = index + 1
|
||
if index == 0:
|
||
return f"{safe_stem}{suffix}"
|
||
return f"{safe_stem}_{index:03d}{suffix}"
|
||
|
||
@staticmethod
|
||
def _sanitize_filename(name: str) -> str:
|
||
normalized = re.sub(r"[^A-Za-z0-9._-]+", "_", str(name or "")).strip("._")
|
||
return normalized or "asset"
|
||
|
||
def _write_preview_scripts(self) -> None:
|
||
sh_path = os.path.join(self._output_root, "run_preview.sh")
|
||
bat_path = os.path.join(self._output_root, "run_preview.bat")
|
||
readme_path = os.path.join(self._output_root, "PREVIEW.txt")
|
||
|
||
sh_content = (
|
||
"#!/usr/bin/env bash\n"
|
||
"set -e\n"
|
||
"cd \"$(dirname \"$0\")\"\n"
|
||
"echo \"Serving WebGL preview at http://127.0.0.1:8000\"\n"
|
||
"python3 -m http.server 8000\n"
|
||
)
|
||
bat_content = (
|
||
"@echo off\n"
|
||
"cd /d \"%~dp0\"\n"
|
||
"echo Serving WebGL preview at http://127.0.0.1:8000\n"
|
||
"python -m http.server 8000\n"
|
||
)
|
||
readme_content = (
|
||
"EG WebGL Preview\n"
|
||
"================\n\n"
|
||
"Do not open index.html directly with file:// . Modern browsers will block ES modules and fetch requests.\n\n"
|
||
"Use one of these methods instead:\n"
|
||
"1. Run run_preview.sh (Linux/macOS) or run_preview.bat (Windows)\n"
|
||
"2. Or open a terminal in this folder and run:\n"
|
||
" python3 -m http.server 8000\n\n"
|
||
"Then open:\n"
|
||
"http://127.0.0.1:8000/index.html\n"
|
||
"http://127.0.0.1:8000/frontend_demo.html\n"
|
||
)
|
||
|
||
with open(sh_path, "w", encoding="utf-8") as f:
|
||
f.write(sh_content)
|
||
with open(bat_path, "w", encoding="utf-8") as f:
|
||
f.write(bat_content)
|
||
with open(readme_path, "w", encoding="utf-8") as f:
|
||
f.write(readme_content)
|
||
|
||
current_mode = os.stat(sh_path).st_mode
|
||
os.chmod(sh_path, current_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
|
||
|
||
@staticmethod
|
||
def _write_json(path: str, payload: Dict[str, Any]) -> None:
|
||
os.makedirs(os.path.dirname(path), exist_ok=True)
|
||
with open(path, "w", encoding="utf-8") as f:
|
||
json.dump(payload, f, ensure_ascii=False, indent=2)
|
||
|
||
    def _fail(self, message: str) -> None:
        """Mark the export report as failed and record *message* as a warning."""
        self.report["status"] = "failed"
        self.report["warnings"].append(message)
|