123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230 |
- import argparse
- import os
- import sys
- import json
- from pathlib import Path
- # Add the utils directory to the Python path
- # This allows importing modules from the 'utils' subfolder
- utils_path = Path(__file__).parent / 'utils'
- sys.path.append(str(utils_path))
- # Now we can import our utility modules
- from yaml_utils import load_unity_yaml
- from file_utils import find_files_by_extension
def parse_project_settings(input_dir, output_dir):
    """
    Parse Unity project settings and write a manifest.json summary.

    Reads ProjectSettings/ProjectSettings.asset for product name, company
    name and bundle version, and ProjectSettings/EditorSettings.asset to
    decide whether a Scriptable Render Pipeline (URP/HDRP) or the Built-in
    pipeline appears to be in use. The collected fields are written to
    <output_dir>/manifest.json.

    Args:
        input_dir:  Path to the root of the Unity project.
        output_dir: Path to the directory that receives manifest.json.
    """
    print("\n--- Starting Task 2: Project Settings Parser ---")

    project_settings_path = input_dir / "ProjectSettings" / "ProjectSettings.asset"
    editor_settings_path = input_dir / "ProjectSettings" / "EditorSettings.asset"

    manifest_data = {}

    # Parse ProjectSettings.asset
    if project_settings_path.is_file():
        docs = load_unity_yaml(str(project_settings_path))
        if docs:
            # The main settings are usually in the first document.
            # Guard against an empty first document: YAML loaders return
            # None for it, which would otherwise raise AttributeError here.
            player_settings = (docs[0] or {}).get('PlayerSettings', {})
            manifest_data['productName'] = player_settings.get('productName')
            manifest_data['companyName'] = player_settings.get('companyName')
            manifest_data['bundleVersion'] = player_settings.get('bundleVersion')
        else:
            print(f"Warning: Could not parse {project_settings_path}")
    else:
        print(f"Warning: {project_settings_path} not found.")

    # Determine rendering mode from EditorSettings.asset
    if editor_settings_path.is_file():
        docs = load_unity_yaml(str(editor_settings_path))
        if docs:
            editor_settings = (docs[0] or {}).get('EditorSettings', {})
            render_pipeline = editor_settings.get('m_RenderPipelineAsset')
            # The value is expected to be a mapping like {fileID: ..., guid: ...};
            # check it is a dict so a scalar value cannot crash the parser.
            if isinstance(render_pipeline, dict) and render_pipeline.get('guid'):
                # A non-empty asset reference indicates URP or HDRP is likely
                # in use. A more robust check would map this guid to the
                # actual pipeline asset.
                manifest_data['renderPipeline'] = 'Scriptable'
            else:
                manifest_data['renderPipeline'] = 'Built-in'
        else:
            print(f"Warning: Could not parse {editor_settings_path}")
    else:
        print(f"Warning: {editor_settings_path} not found.")

    # Write the combined data to manifest.json
    manifest_output_path = output_dir / "manifest.json"
    try:
        with open(manifest_output_path, 'w', encoding='utf-8') as f:
            json.dump(manifest_data, f, indent=4)
        print(f"Successfully created manifest.json at {manifest_output_path}")
    except IOError as e:
        print(f"Error: Could not write to {manifest_output_path}. {e}", file=sys.stderr)
def parse_package_manifests(input_dir, output_dir):
    """
    Combine the project's package manifests into one packages.json file.

    Loads Packages/manifest.json and Packages/packages-lock.json from the
    project (each optional). If at least one of them was readable, writes
    them under the keys 'manifest' and 'lock' to <output_dir>/packages.json.

    Args:
        input_dir:  Path to the root of the Unity project.
        output_dir: Path to the directory that receives packages.json.
    """
    print("\n--- Starting Task 3: Package Manifest Extractor ---")

    packages_dir = input_dir / "Packages"
    # Output key -> source file, processed in this order.
    sources = {
        'manifest': packages_dir / "manifest.json",
        'lock': packages_dir / "packages-lock.json",
    }

    packages_data = {}
    for key, source_path in sources.items():
        if not source_path.is_file():
            print(f"Warning: {source_path} not found.")
            continue
        try:
            with open(source_path, 'r', encoding='utf-8') as f:
                packages_data[key] = json.load(f)
        except (IOError, json.JSONDecodeError) as e:
            print(f"Error reading {source_path}: {e}", file=sys.stderr)

    # Only emit an output file when at least one input was readable.
    if packages_data:
        packages_output_path = output_dir / "packages.json"
        try:
            with open(packages_output_path, 'w', encoding='utf-8') as f:
                json.dump(packages_data, f, indent=4)
            print(f"Successfully created packages.json at {packages_output_path}")
        except IOError as e:
            print(f"Error: Could not write to {packages_output_path}. {e}", file=sys.stderr)
def _read_guid_from_meta(meta_file_path):
    """
    Return the GUID recorded in a Unity .meta file, or None when the file
    contains no 'guid:' line. Raises on I/O or decoding errors.
    """
    with open(meta_file_path, 'r', encoding='utf-8') as f:
        for line in f:
            if line.strip().startswith('guid:'):
                # Split on the first ':' only, so the value can never be
                # truncated if it happens to contain another colon.
                return line.split(':', 1)[1].strip()
    return None


def generate_guid_mappers(input_dir, output_dir):
    """
    Find all .meta files under Assets/ and generate JSON files mapping
    GUIDs to asset paths, grouped by asset type.

    One JSON file per non-empty category (prefabs, scenes, materials, ...)
    is written to <output_dir>/GuidMappers, each mapping
    guid -> asset path relative to the project root (POSIX separators).

    Args:
        input_dir:  Path to the root of the Unity project.
        output_dir: Path to the directory that receives GuidMappers/.
    """
    print("\n--- Starting Task 4: GUID Mapper Generator ---")

    assets_dir = input_dir / "Assets"
    if not assets_dir.is_dir():
        print(f"Error: 'Assets' directory not found in '{input_dir}'", file=sys.stderr)
        return

    meta_files = find_files_by_extension(str(assets_dir), '.meta')
    print(f"Found {len(meta_files)} .meta files to process.")

    # Asset type mapping based on file extensions
    asset_type_map = {
        '.prefab': 'prefabs',
        '.unity': 'scenes',
        '.mat': 'materials',
        '.cs': 'scripts',
        '.png': 'textures',
        '.jpg': 'textures',
        '.jpeg': 'textures',
        '.asset': 'scriptable_objects',
    }

    guid_maps = {value: {} for value in asset_type_map.values()}
    guid_maps['others'] = {}

    for meta_file_path_str in meta_files:
        # The asset a .meta file describes is the same path without the
        # trailing '.meta' suffix.
        asset_file_path = Path(meta_file_path_str.rsplit('.meta', 1)[0])

        try:
            guid = _read_guid_from_meta(Path(meta_file_path_str))
        except Exception as e:
            print(f"Warning: Could not read or parse guid from {meta_file_path_str}. {e}", file=sys.stderr)
            continue

        if guid:
            asset_ext = asset_file_path.suffix.lower()
            asset_type = asset_type_map.get(asset_ext, 'others')

            # Make path relative to the input directory for consistency
            relative_path = asset_file_path.relative_to(input_dir).as_posix()
            guid_maps[asset_type][guid] = relative_path

    # Write the GUID maps to separate JSON files
    mappers_dir = output_dir / "GuidMappers"
    try:
        mappers_dir.mkdir(parents=True, exist_ok=True)
        for asset_type, guid_map in guid_maps.items():
            if guid_map:  # Only write files for types that have assets
                output_path = mappers_dir / f"{asset_type}.json"
                with open(output_path, 'w', encoding='utf-8') as f:
                    json.dump(guid_map, f, indent=4)
        print(f"Successfully created GUID mappers in {mappers_dir}")
    except OSError as e:
        print(f"Error: Could not create GUID mapper directory or files. {e}", file=sys.stderr)
def main():
    """
    Entry point for the high-level data extraction process.

    Parses the --input/--output CLI arguments, validates the input
    directory, creates the "HighLevel" output folder, and runs the three
    extraction tasks against the target Unity project.
    """
    arg_parser = argparse.ArgumentParser(
        description="Extracts high-level summary data from a Unity project."
    )
    # Both paths are mandatory string arguments.
    for flag, help_text in (
        ("--input", "The root directory of the target Unity project."),
        ("--output", "The directory where the generated output folder will be saved."),
    ):
        arg_parser.add_argument(flag, type=str, required=True, help=help_text)
    args = arg_parser.parse_args()

    # --- 1. Validate inputs and set up paths ---
    input_dir = Path(args.input)
    output_dir = Path(args.output)
    if not input_dir.is_dir():
        print(f"Error: Input path '{input_dir}' is not a valid directory.", file=sys.stderr)
        sys.exit(1)

    # Create the main output folder, named "HighLevel"
    high_level_output_dir = output_dir / "HighLevel"
    try:
        high_level_output_dir.mkdir(parents=True, exist_ok=True)
        print(f"Output will be saved to: {high_level_output_dir}")
    except OSError as e:
        print(f"Error: Could not create output directory '{high_level_output_dir}'. {e}", file=sys.stderr)
        sys.exit(1)

    # --- Run Extraction Tasks ---
    print("\nMilestone 2, Task 1 Complete: Argument parsing and folder creation successful.")

    # Each task writes its own output files into the HighLevel folder.
    parse_project_settings(input_dir, high_level_output_dir)
    parse_package_manifests(input_dir, high_level_output_dir)
    generate_guid_mappers(input_dir, high_level_output_dir)

    print("\nHigh-level extraction complete.")


if __name__ == "__main__":
    main()
|