Jelajahi Sumber

Sujith :) ->
1. Somewhat working low-level implementation

Sujith:) 5 jam lalu
induk
melakukan
b8cd09f2ef

+ 34 - 2
Assets/LLM/Editor/DataExtractorMenu.cs

@@ -39,8 +39,40 @@ namespace LLM.Editor
                 return;
             }
 
+            var indentChoice = EditorUtility.DisplayDialogComplex(
+                $"{level} Export Options",
+                "Choose the JSON output format.",
+                "Compact (No Indent)", // button 0
+                "Indented (Readable)", // button 1
+                "Cancel"               // button 2
+            );
+
+            string indentArgument;
+            switch (indentChoice)
+            {
+                case 0: // Compact (button 0, "Compact (No Indent)")
+                    indentArgument = ""; // Default is compact
+                    break;
+                case 1: // Indented (button 1, "Indented (Readable)")
+                    indentArgument = " --indent 2";
+                    break;
+                default: // Cancel or closed dialog
+                    UnityEngine.Debug.Log($"{level} export cancelled by user.");
+                    return;
+            }
+
             var projectRoot = Path.GetFullPath(Path.Combine(Application.dataPath, ".."));
-            var pythonExecutable = Path.Combine(projectRoot, "venv", "bin", "python3");
+            
+            string pythonExecutable;
+            if (Application.platform == RuntimePlatform.WindowsEditor)
+            {
+                pythonExecutable = Path.Combine(projectRoot, "venv", "Scripts", "python.exe");
+            }
+            else
+            {
+                pythonExecutable = Path.Combine(projectRoot, "venv", "bin", "python");
+            }
+            
             var fullScriptPath = Path.Combine(projectRoot, scriptPath);
 
             if (!File.Exists(pythonExecutable))
@@ -57,7 +89,7 @@ namespace LLM.Editor
                 return;
             }
 
-            var arguments = $"\"{fullScriptPath}\" --input \"{projectRoot}\" --output \"{outputPath}\"";
+            var arguments = $"\"{fullScriptPath}\" --input \"{projectRoot}\" --output \"{outputPath}\"{indentArgument}";
         
             UnityEngine.Debug.Log($"Running command: \"{pythonExecutable}\" {arguments}");
 

TEMPAT SAMPAH
Assets/LLM/source/__pycache__/scene_processor.cpython-313.pyc


+ 12 - 13
Assets/LLM/source/extract_high_level.py

@@ -13,6 +13,7 @@ sys.path.append(str(utils_path))
 
 from yaml_utils import load_unity_yaml, convert_to_plain_python_types
 from file_utils import find_files_by_extension, create_guid_to_path_map
+from json_utils import write_json
 
 def parse_physics_settings(input_dir, project_mode):
     """
@@ -46,7 +47,7 @@ def parse_physics_settings(input_dir, project_mode):
     
     return physics_data
 
-def parse_project_settings(input_dir, output_dir):
+def parse_project_settings(input_dir, output_dir, indent=None):
     """
     Parses various project settings files to create a comprehensive manifest.
     """
@@ -193,14 +194,13 @@ def parse_project_settings(input_dir, output_dir):
     # --- Write manifest.json ---
     manifest_output_path = output_dir / "manifest.json"
     try:
-        with open(manifest_output_path, 'w', encoding='utf-8') as f:
-            json.dump(manifest_data, f, separators=(',', ':'))
+        write_json(manifest_data, manifest_output_path, indent=indent)
         print(f"Successfully created manifest.json at {manifest_output_path}")
-    except IOError as e:
+    except Exception as e:
         print(f"Error writing to {manifest_output_path}. {e}", file=sys.stderr)
 
 
-def parse_package_manifests(input_dir, output_dir):
+def parse_package_manifests(input_dir, output_dir, indent=None):
     """
     Parses the primary package manifest and creates a clean packages.json file.
     """
@@ -214,8 +214,7 @@ def parse_package_manifests(input_dir, output_dir):
                 packages_data = json.load(f)
             
             packages_output_path = output_dir / "packages.json"
-            with open(packages_output_path, 'w', encoding='utf-8') as f:
-                json.dump(packages_data, f, separators=(',', ':')) # Compact output
+            write_json(packages_data, packages_output_path, indent=indent)
             print(f"Successfully created packages.json at {packages_output_path}")
 
         except (IOError, json.JSONDecodeError) as e:
@@ -223,7 +222,7 @@ def parse_package_manifests(input_dir, output_dir):
     else:
         print(f"Warning: {manifest_path} not found.")
 
-def generate_guid_mappers(input_dir, output_dir):
+def generate_guid_mappers(input_dir, output_dir, indent=None):
     """
     Finds all .meta files and generates JSON files mapping GUIDs to asset paths.
     """
@@ -276,8 +275,7 @@ def generate_guid_mappers(input_dir, output_dir):
         for asset_type, guid_map in guid_maps.items():
             if guid_map:
                 output_path = mappers_dir / f"{asset_type}.json"
-                with open(output_path, 'w', encoding='utf-8') as f:
-                    json.dump(guid_map, f, separators=(',', ':')) # Compact output
+                write_json(guid_map, output_path, indent=indent)
         print(f"Successfully created GUID mappers in {mappers_dir}")
     except OSError as e:
         print(f"Error: Could not create GUID mapper directory or files. {e}", file=sys.stderr)
@@ -291,6 +289,7 @@ def main():
     )
     parser.add_argument("--input", type=str, required=True, help="The root directory of the target Unity project.")
     parser.add_argument("--output", type=str, required=True, help="The directory where the generated output folder will be saved.")
+    parser.add_argument("--indent", type=int, default=None, help="Indentation level for JSON output. Defaults to None (compact).")
     args = parser.parse_args()
 
     input_dir = Path(args.input)
@@ -308,9 +307,9 @@ def main():
         print(f"Error: Could not create output directory '{high_level_output_dir}'. {e}", file=sys.stderr)
         sys.exit(1)
 
-    parse_project_settings(input_dir, high_level_output_dir)
-    parse_package_manifests(input_dir, high_level_output_dir)
-    generate_guid_mappers(input_dir, high_level_output_dir)
+    parse_project_settings(input_dir, high_level_output_dir, indent=args.indent)
+    parse_package_manifests(input_dir, high_level_output_dir, indent=args.indent)
+    generate_guid_mappers(input_dir, high_level_output_dir, indent=args.indent)
 
     print("\nHigh-level extraction complete.")
 

+ 47 - 13
Assets/LLM/source/extract_low_level.py

@@ -9,6 +9,9 @@ sys.path.append(str(utils_path))
 
 from file_utils import find_files_by_extension
 from deep_parser import parse_scene_or_prefab
+from json_utils import write_json
+from yaml_utils import load_unity_yaml, convert_to_plain_python_types
+from hierarchy_utils import HierarchyParser
 
 def main():
     """
@@ -29,6 +32,12 @@ def main():
         required=True,
         help="The directory where the generated output folder will be saved."
     )
+    parser.add_argument(
+        "--indent",
+        type=int,
+        default=None,
+        help="Indentation level for JSON output. Defaults to None (compact)."
+    )
     args = parser.parse_args()
 
     input_dir = Path(args.input).resolve()
@@ -63,30 +72,55 @@ def main():
         file_path = Path(file_path_str)
         print(f"\nProcessing: {file_path.name}")
 
+        # --- Deep Parsing for Individual GameObjects ---
         gameobject_list = parse_scene_or_prefab(str(file_path))
 
-        if gameobject_list:
-            # Create the output subdirectory for this asset
-            relative_path = file_path.relative_to(input_dir)
-            asset_output_dir = low_level_output_dir / relative_path
-            try:
-                asset_output_dir.mkdir(parents=True, exist_ok=True)
-            except OSError as e:
-                print(f"Error creating directory {asset_output_dir}: {e}", file=sys.stderr)
-                continue
+        # Create the output subdirectory for this asset
+        relative_path = file_path.relative_to(input_dir)
+        asset_output_dir = low_level_output_dir / relative_path
+        try:
+            asset_output_dir.mkdir(parents=True, exist_ok=True)
+        except OSError as e:
+            print(f"Error creating directory {asset_output_dir}: {e}", file=sys.stderr)
+            continue
 
+        if gameobject_list:
             print(f"Saving {len(gameobject_list)} GameObjects to {asset_output_dir}")
             for go_data in gameobject_list:
                 file_id = go_data.get('fileID')
                 if file_id:
                     output_json_path = asset_output_dir / f"{file_id}.json"
                     try:
-                        with open(output_json_path, 'w', encoding='utf-8') as f:
-                            json.dump(go_data, f, separators=(',', ':'))
-                    except IOError as e:
+                        write_json(go_data, output_json_path, indent=args.indent)
+                    except Exception as e:
                         print(f"Error writing to {output_json_path}: {e}", file=sys.stderr)
         else:
-            print(f"Skipped {file_path.name} as deep parsing failed.")
+            print(f"Skipped deep parsing for {file_path.name}.")
+
+        # --- Hierarchy Parsing for Root Object Identification ---
+        try:
+            documents = load_unity_yaml(file_path)
+            if not documents:
+                print(f"Could not load YAML from {file_path.name} for hierarchy parsing.")
+                continue
+
+            raw_object_map = {int(doc.anchor.value): doc for doc in documents if hasattr(doc, 'anchor') and doc.anchor is not None}
+            object_map = {file_id: convert_to_plain_python_types(obj) for file_id, obj in raw_object_map.items()}
+
+            parser = HierarchyParser(object_map)
+            root_object_ids = parser.get_root_object_ids()
+            
+            # Extract just the fileIDs for the root_objects.json file
+            root_ids_list = [file_id for file_id, _ in root_object_ids]
+
+            if root_ids_list:
+                roots_output_path = asset_output_dir / "root_objects.json"
+                write_json(root_ids_list, roots_output_path, indent=args.indent)
+                print(f"Successfully saved root object list to {roots_output_path}")
+
+        except Exception as e:
+            print(f"Error during hierarchy parsing for {file_path.name}: {e}", file=sys.stderr)
+
 
     print("\nLow-level extraction complete.")
 

+ 28 - 25
Assets/LLM/source/extract_mid_level.py

@@ -2,7 +2,6 @@ import argparse
 import sys
 import json
 import shutil
-import subprocess
 from pathlib import Path
 
 # Add the utils directory to the Python path
@@ -10,6 +9,8 @@ utils_path = Path(__file__).parent / 'utils'
 sys.path.append(str(utils_path))
 
 from file_utils import replicate_directory_structure, find_files_by_extension, create_guid_to_path_map
+from json_utils import write_json
+from scene_processor import UnitySceneProcessor
 
 def copy_scripts(assets_dir, output_assets_dir):
     """
@@ -53,6 +54,12 @@ def main():
         required=True,
         help="The directory where the generated output folder will be saved."
     )
+    parser.add_argument(
+        "--indent",
+        type=int,
+        default=None,
+        help="Indentation level for JSON output. Defaults to None (compact)."
+    )
     args = parser.parse_args()
 
     input_dir = Path(args.input).resolve()
@@ -90,10 +97,10 @@ def main():
     guid_map = create_guid_to_path_map(str(input_dir))
     guid_map_path = mid_level_output_dir / "guid_map.json"
     try:
-        with open(guid_map_path, 'w', encoding='utf-8') as f:
-            json.dump(guid_map, f)
+        # Use the new centralized utility
+        write_json(guid_map, guid_map_path, indent=args.indent)
         print(f"Successfully created GUID map: {guid_map_path}")
-    except IOError as e:
+    except Exception as e:
         print(f"Error writing GUID map: {e}", file=sys.stderr)
         sys.exit(1)
 
@@ -106,7 +113,8 @@ def main():
     
     print(f"Found {len(files_to_process)} scene/prefab files to process.")
 
-    script_path = Path(__file__).parent / "scene_processor.py"
+    # Create a single processor instance to use for all files
+    processor = UnitySceneProcessor(guid_map)
 
     for file_path_str in files_to_process:
         file_path = Path(file_path_str)
@@ -115,27 +123,22 @@ def main():
         output_path = output_assets_dir / relative_path
         output_path = output_path.with_suffix('.json')
         
-        command = [
-            sys.executable,
-            str(script_path),
-            "--input",
-            str(file_path),
-            "--guid-map",
-            str(guid_map_path),
-            "--output",
-            str(output_path)
-        ]
-        
         try:
-            print(f"\n--- Calling processor for: {file_path.name} ---")
-            subprocess.run(command, check=True, text=True)
-        except subprocess.CalledProcessError as e:
-            print(f"Error processing {file_path.name}. Subprocess failed with exit code {e.returncode}", file=sys.stderr)
-            print(f"Stderr: {e.stderr}", file=sys.stderr)
-            print(f"Stdout: {e.stdout}", file=sys.stderr)
-        except FileNotFoundError:
-            print(f"Error: Could not find the 'scene_processor.py' script at '{script_path}'", file=sys.stderr)
-            sys.exit(1)
+            print(f"\n--- Processing: {file_path.name} ---")
+            # Process the file and get the result
+            result = processor.process_file(file_path)
+            
+            # Ensure the output directory exists
+            output_path.parent.mkdir(parents=True, exist_ok=True)
+
+            # Write the result using the centralized utility
+            write_json(result, output_path, indent=args.indent)
+            print(f"Successfully processed {file_path.name} -> {output_path}")
+
+        except Exception as e:
+            print(f"Error processing {file_path.name}: {e}", file=sys.stderr)
+            # Potentially continue to the next file
+            # sys.exit(1)
 
     print("\nMid-level extraction complete.")
 

+ 16 - 151
Assets/LLM/source/scene_processor.py

@@ -8,6 +8,7 @@ utils_path = Path(__file__).parent / 'utils'
 sys.path.append(str(utils_path))
 
 from yaml_utils import load_unity_yaml, convert_to_plain_python_types
+from hierarchy_utils import HierarchyParser
 
 class UnitySceneProcessor:
     def __init__(self, guid_map):
@@ -411,105 +412,6 @@ class UnitySceneProcessor:
         for go_id in nodes_to_delete:
             del self.nodes[go_id]
 
-    def get_root_objects(self):
-        """Get all root-level objects, sorted by m_RootOrder, and handle orphans"""
-        root_objects = []
-        all_children_ids = set()
-        
-        # Collect all child IDs to identify orphans
-        for go_id, node in self.nodes.items():
-            self._collect_child_ids(node, all_children_ids)
-        for prefab_id, prefab_node in self.prefab_nodes.items():
-            self._collect_child_ids(prefab_node, all_children_ids)
-        
-        # Find GameObjects that have no parent and collect their m_RootOrder
-        gameobject_roots = []
-        for go_id, node in self.nodes.items():
-            transform_id = self.gameobject_to_transform.get(go_id)
-            if not transform_id:
-                # No transform - likely orphan
-                if go_id not in all_children_ids:
-                    node['is_orphan'] = True
-                    gameobject_roots.append((node, 999999))  # Orphans go to end
-                continue
-            
-            # Check if this transform has a parent
-            has_parent = False
-            for parent_id, children in self.transform_children.items():
-                if transform_id in children:
-                    has_parent = True
-                    break
-            
-            # Also check if it's a child of any prefab
-            is_prefab_child = False
-            for parent_transform_id in self.stripped_transforms:
-                if transform_id in self.transform_children.get(parent_transform_id, []):
-                    is_prefab_child = True
-                    break
-            
-            if not has_parent and not is_prefab_child:
-                if go_id not in all_children_ids:
-                    # Get m_RootOrder from transform
-                    root_order = self._get_root_order(transform_id)
-                    gameobject_roots.append((node, root_order))
-                else:
-                    # This is an orphan that was somehow referenced but not properly parented
-                    node['is_orphan'] = True
-                    gameobject_roots.append((node, 999999))  # Orphans go to end
-        
-        # Find root prefab instances and collect their m_RootOrder
-        prefab_roots = []
-        for prefab_id, prefab_info in self.prefab_instances.items():
-            parent_transform_id = prefab_info['m_TransformParent']
-            if not parent_transform_id or parent_transform_id == 0:
-                if prefab_id in self.prefab_nodes:
-                    if prefab_id not in all_children_ids:
-                        # Get m_RootOrder directly from the prefab node
-                        root_order = self.prefab_nodes[prefab_id].get('m_RootOrder', 999999)
-                        prefab_roots.append((self.prefab_nodes[prefab_id], root_order))
-                    else:
-                        # Orphan prefab
-                        self.prefab_nodes[prefab_id]['is_orphan'] = True
-                        prefab_roots.append((self.prefab_nodes[prefab_id], 999999))  # Orphans go to end
-        
-        # Check for completely disconnected GameObjects (orphans)
-        for go_id, node in self.nodes.items():
-            if go_id not in all_children_ids and not any(obj[0].get('fileID') == str(go_id) for obj in gameobject_roots):
-                node['is_orphan'] = True
-                gameobject_roots.append((node, 999999))  # Orphans go to end
-        
-        # Check for completely disconnected PrefabInstances (orphans)
-        for prefab_id, prefab_node in self.prefab_nodes.items():
-            if prefab_id not in all_children_ids and not any(obj[0].get('fileID') == str(prefab_id) for obj in prefab_roots):
-                prefab_node['is_orphan'] = True
-                prefab_roots.append((prefab_node, 999999))  # Orphans go to end
-        
-        # Combine and sort all root objects by m_RootOrder
-        all_roots = gameobject_roots + prefab_roots
-        all_roots.sort(key=lambda x: x[1])  # Sort by m_RootOrder (second element of tuple)
-        
-        return [obj[0] for obj in all_roots]  # Return only the objects, not the tuples
-
-    def _get_root_order(self, transform_id):
-        """Get m_RootOrder from a transform"""
-        if transform_id not in self.object_map:
-            return 999999  # Default for missing transforms
-        
-        transform_data = self.object_map[transform_id].get('Transform', {})
-        return transform_data.get('m_RootOrder', 999999)
-
-    def _collect_child_ids(self, node, child_ids_set):
-        """Recursively collect all child IDs from a node tree"""
-        for child in node.get('children', []):
-            child_id = child.get('fileID')
-            if child_id:
-                # Convert to int for comparison with our keys
-                try:
-                    child_ids_set.add(int(child_id))
-                except ValueError:
-                    pass
-            self._collect_child_ids(child, child_ids_set)
-
     def cleanup_pass(self, nodes):
         """
         Recursively cleans up temporary or internal properties from the final node structure.
@@ -543,61 +445,24 @@ class UnitySceneProcessor:
         self.process_third_pass()
         self.merge_prefab_data_pass()
 
-        # Get the final, sorted root objects
-        root_objects = self.get_root_objects()
+        # Use the centralized parser to get the final, sorted root objects
+        parser = HierarchyParser(self.object_map)
+        root_object_ids = parser.get_root_object_ids()
+
+        # Build the final list of nodes from the identified roots
+        root_nodes = []
+        all_nodes = {**self.nodes, **self.prefab_nodes}
+        for file_id, _ in root_object_ids:
+            if file_id in all_nodes:
+                root_nodes.append(all_nodes[file_id])
         
         # Run the final cleanup pass
-        self.cleanup_pass(root_objects)
-
-        return root_objects
-
+        self.cleanup_pass(root_nodes)
 
-def main():
-    parser = argparse.ArgumentParser(description="Process Unity scene/prefab files into JSON representation")
-    parser.add_argument("--input", required=True, help="Path to input .unity or .prefab file")
-    parser.add_argument("--guid-map", required=True, help="Path to guid_map.json file")
-    parser.add_argument("--output", required=True, help="Path to output .json file")
-    
-    args = parser.parse_args()
-    
-    input_path = Path(args.input)
-    guid_map_path = Path(args.guid_map)
-    output_path = Path(args.output)
-    
-    if not input_path.exists():
-        print(f"Error: Input file {input_path} does not exist", file=sys.stderr)
-        sys.exit(1)
-    
-    if not guid_map_path.exists():
-        print(f"Error: GUID map file {guid_map_path} does not exist", file=sys.stderr)
-        sys.exit(1)
-    
-    # Load GUID map
-    try:
-        with open(guid_map_path, 'r', encoding='utf-8') as f:
-            guid_map = json.load(f)
-    except Exception as e:
-        print(f"Error loading GUID map: {e}", file=sys.stderr)
-        sys.exit(1)
-    
-    # Process the file
-    processor = UnitySceneProcessor(guid_map)
-    try:
-        result = processor.process_file(input_path)
-        
-        # Ensure output directory exists
-        output_path.parent.mkdir(parents=True, exist_ok=True)
-        
-        # Write output
-        with open(output_path, 'w', encoding='utf-8') as f:
-            json.dump(result, f, indent=2, ensure_ascii=False)
-        
-        print(f"Successfully processed {input_path.name} -> {output_path}")
-        
-    except Exception as e:
-        print(f"Error processing file {input_path}: {e}", file=sys.stderr)
-        sys.exit(1)
+        return root_nodes
 
 
 if __name__ == "__main__":
-    main()
+    # This script is intended to be used as a module.
+    # For direct execution, see extract_mid_level.py.
+    pass

TEMPAT SAMPAH
Assets/LLM/source/utils/__pycache__/deep_parser.cpython-313.pyc


TEMPAT SAMPAH
Assets/LLM/source/utils/__pycache__/hierarchy_utils.cpython-313.pyc


+ 7 - 0
Assets/LLM/source/utils/__pycache__/hierarchy_utils.cpython-313.pyc.meta

@@ -0,0 +1,7 @@
+fileFormatVersion: 2
+guid: 4703265c15fc24b48a6d7b188d9401a9
+DefaultImporter:
+  externalObjects: {}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

TEMPAT SAMPAH
Assets/LLM/source/utils/__pycache__/json_utils.cpython-313.pyc


+ 7 - 0
Assets/LLM/source/utils/__pycache__/json_utils.cpython-313.pyc.meta

@@ -0,0 +1,7 @@
+fileFormatVersion: 2
+guid: 831dc0142dac242389d027d853978441
+DefaultImporter:
+  externalObjects: {}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 13 - 1
Assets/LLM/source/utils/deep_parser.py

@@ -76,7 +76,19 @@ def parse_scene_or_prefab(file_path, guid_map=None, assets_dir=None):
             for comp_ref in go_info.get('m_Component', []):
                 comp_id = comp_ref['component']['fileID']
                 if comp_id in object_map:
-                    components_data.append(object_map[comp_id])
+                    component_data = object_map[comp_id]
+                    # Create a new structure to avoid modifying the original object_map
+                    restructured_component = {}
+                    for component_type, component_values in component_data.items():
+                        if isinstance(component_values, dict):
+                            # Copy the inner dict and inject the fileID
+                            new_values = component_values.copy()
+                            new_values['fileID'] = comp_id
+                            restructured_component[component_type] = new_values
+                        else:
+                            # Fallback for non-dict components, though unlikely
+                            restructured_component[component_type] = component_values
+                    components_data.append(restructured_component)
             
             children_ids = []
             my_transform_id = None

+ 101 - 0
Assets/LLM/source/utils/hierarchy_utils.py

@@ -0,0 +1,101 @@
from pathlib import Path  # NOTE(review): unused in this module — candidate for removal


class HierarchyParser:
    """
    Identifies parent-child relationships in a Unity scene/prefab object map
    and determines which objects are roots of the hierarchy.

    Args:
        object_map: Mapping of fileID -> plain-dict form of a Unity YAML
            document, keyed by its class name (e.g. 'Transform',
            'GameObject', 'PrefabInstance').
    """

    # Sentinel sort key for objects whose m_RootOrder cannot be determined;
    # pushes them to the end of the sorted root list.
    _UNKNOWN_ROOT_ORDER = 999999

    def __init__(self, object_map):
        self.object_map = object_map
        # transform fileID <-> owning GameObject fileID lookups
        self.transform_to_gameobject = {}
        self.gameobject_to_transform = {}
        # parent transform fileID -> list of child transform fileIDs
        self.transform_children = {}
        # PrefabInstance fileID -> {'parent': parent transform fileID}
        # (only instances that actually have a parent are recorded)
        self.prefab_instances = {}
        # stripped transform fileID -> PrefabInstance fileID it stands in for
        self.stripped_transforms = {}
        # every fileID known to be the child of some other object
        self.all_child_ids = set()

        self._build_maps()
        self._find_all_children()

    def _build_maps(self):
        """First pass: build transform/GameObject/PrefabInstance lookups."""
        for file_id, obj_data in self.object_map.items():
            if 'Transform' in obj_data:
                transform_info = obj_data['Transform']
                gameobject_id = transform_info.get('m_GameObject', {}).get('fileID')
                if gameobject_id:
                    self.transform_to_gameobject[file_id] = gameobject_id
                    self.gameobject_to_transform[gameobject_id] = file_id

                parent_id = transform_info.get('m_Father', {}).get('fileID')
                if parent_id and parent_id != 0:
                    self.transform_children.setdefault(parent_id, []).append(file_id)

                prefab_instance_id = transform_info.get('m_PrefabInstance', {}).get('fileID')
                if prefab_instance_id:
                    self.stripped_transforms[file_id] = prefab_instance_id

            elif 'PrefabInstance' in obj_data:
                modifications = obj_data['PrefabInstance'].get('m_Modification', {})
                parent_id = modifications.get('m_TransformParent', {}).get('fileID')
                if parent_id and parent_id != 0:
                    self.prefab_instances[file_id] = {'parent': parent_id}

    def _find_all_children(self):
        """Populate all_child_ids with every fileID parented to another object."""
        # Children of regular GameObjects (via their Transform's m_Father)
        for children in self.transform_children.values():
            for child_transform_id in children:
                child_go_id = self.transform_to_gameobject.get(child_transform_id)
                if child_go_id:
                    self.all_child_ids.add(child_go_id)
                elif child_transform_id in self.stripped_transforms:
                    # Stripped transforms stand in for a nested prefab instance.
                    self.all_child_ids.add(self.stripped_transforms[child_transform_id])

        # Every PrefabInstance recorded in _build_maps has a parent transform,
        # so all of them are children by construction.
        self.all_child_ids.update(self.prefab_instances)

    @classmethod
    def _as_sort_key(cls, root_order):
        """
        Coerce an m_RootOrder value to int for sorting.

        Unity YAML modification values frequently arrive as strings
        (e.g. '2'); a mixed int/str key would make sort() raise TypeError.
        Unparseable values fall back to the unknown-order sentinel.
        """
        try:
            return int(root_order)
        except (TypeError, ValueError):
            return cls._UNKNOWN_ROOT_ORDER

    def get_root_object_ids(self):
        """
        Identify all root objects (GameObjects and PrefabInstances).

        Returns:
            List of (fileID, root_order) tuples sorted by root_order.
        """
        root_objects = []

        # Root GameObjects: those never recorded as anyone's child.
        for go_id, transform_id in self.gameobject_to_transform.items():
            if go_id not in self.all_child_ids:
                transform_info = self.object_map.get(transform_id, {}).get('Transform', {})
                root_order = self._as_sort_key(
                    transform_info.get('m_RootOrder', self._UNKNOWN_ROOT_ORDER))
                root_objects.append((go_id, root_order))

        # Root PrefabInstances: their m_RootOrder lives in the modification list.
        for prefab_id, obj_data in self.object_map.items():
            if 'PrefabInstance' not in obj_data:
                continue
            if prefab_id in self.all_child_ids:
                continue
            modifications = obj_data['PrefabInstance'].get('m_Modification', {})
            root_order = self._UNKNOWN_ROOT_ORDER
            for mod in modifications.get('m_Modifications', []):
                if mod.get('propertyPath') == 'm_RootOrder':
                    root_order = self._as_sort_key(mod.get('value', self._UNKNOWN_ROOT_ORDER))
                    break
            root_objects.append((prefab_id, root_order))

        root_objects.sort(key=lambda item: item[1])
        return root_objects

+ 7 - 0
Assets/LLM/source/utils/hierarchy_utils.py.meta

@@ -0,0 +1,7 @@
+fileFormatVersion: 2
+guid: cd55d90797cda4d389b27256fd1e5771
+DefaultImporter:
+  externalObjects: {}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 21 - 0
Assets/LLM/source/utils/json_utils.py

@@ -0,0 +1,21 @@
+import json
+
+def write_json(data, file_path, indent=None, ensure_ascii=False):
+    """
+    Centralized function to write Python objects to a JSON file.
+
+    Args:
+        data: The Python object (e.g., dict, list) to serialize.
+        file_path: The path to the output file.
+        indent: The indentation level for pretty-printing. Defaults to None (compact).
+        ensure_ascii: Whether to escape non-ASCII characters. Defaults to False.
+    """
+    try:
+        with open(file_path, 'w', encoding='utf-8') as f:
+            json.dump(data, f, indent=indent, ensure_ascii=ensure_ascii)
+    except IOError as e:
+        print(f"Error writing JSON to {file_path}: {e}", file=sys.stderr)
+        raise
+    except TypeError as e:
+        print(f"Error serializing data to JSON: {e}", file=sys.stderr)
+        raise

+ 7 - 0
Assets/LLM/source/utils/json_utils.py.meta

@@ -0,0 +1,7 @@
+fileFormatVersion: 2
+guid: dc97fd3fb70fe479c9e95c30b730b050
+DefaultImporter:
+  externalObjects: {}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: