244 changes: 244 additions & 0 deletions scripts/python/upi_asmdef_validator.py
@@ -0,0 +1,244 @@
#!/usr/bin/env python3
# Requirements: python3

"""
Assembly Definition Platform Validator

This module provides functionality to validate and fix Unity assembly definition (.asmdef) files
to ensure they only reference platforms that are supported by the target Unity version.

This solves the issue where asmdef files list "VisionOS" in includePlatforms but the target
Unity version does not support that platform (visionOS support was added in Unity 2022, so
Unity 2021 does not recognize it), which causes compilation errors when the plug-in is
installed as a read-only package via the Unity Package Manager.
"""

import json
import shutil
from pathlib import Path
from typing import Dict, Set

class AsmdefPlatformValidator:
"""Validates and fixes assembly definition files based on supported platforms."""

# Mapping from build platform IDs to Unity asmdef platform names
PLATFORM_MAPPING = {
"iOS": "iOS",
"iOS_Simulator": "iOS", # iOS Simulator uses same platform in asmdef
"tvOS": "tvOS",
"tvOS_Simulator": "tvOS", # tvOS Simulator uses same platform in asmdef
"macOS": "macOSStandalone",
"visionOS": "VisionOS",
"visionOS_Simulator": "VisionOS" # visionOS Simulator uses same platform in asmdef
}

def __init__(self, printer=None):
"""
Initialize the validator.

Args:
printer: Optional printer object for logging messages
"""
self.printer = printer
self.backup_files = [] # Track backed up files for restoration

def _log(self, message: str, indent: int = 0):
"""Log a message using the printer if available."""
if self.printer:
self.printer.Message(" " * (indent * 2) + message)
else:
print(" " * (indent * 2) + message)

def _log_status(self, message: str, indent: int = 0):
"""Log a status message using the printer if available."""
if self.printer:
self.printer.StatusMessage(" " * (indent * 2) + message)
else:
print(" " * (indent * 2) + message)

def _log_warning(self, message: str, indent: int = 0):
"""Log a warning message using the printer if available."""
if self.printer:
self.printer.WarningMessage(" " * (indent * 2) + message)
else:
print(" " * (indent * 2) + "WARNING: " + message)

def get_supported_asmdef_platforms(self, built_platforms: Dict[str, bool]) -> Set[str]:
"""
Convert build platform flags to Unity asmdef platform names.

Args:
built_platforms: Dictionary of platform IDs to boolean (True if built)

Returns:
Set of Unity asmdef platform names that were built
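
        Example (illustrative):
            {"iOS": True, "visionOS": False} -> {"Editor", "iOS"}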
"""
supported_platforms = set()

# Always include Editor platform
supported_platforms.add("Editor")

# Map build platforms to asmdef platforms
for platform_id, was_built in built_platforms.items():
if was_built and platform_id in self.PLATFORM_MAPPING:
supported_platforms.add(self.PLATFORM_MAPPING[platform_id])

return supported_platforms

def validate_and_fix_asmdef(self, asmdef_path: Path, supported_platforms: Set[str]) -> bool:
"""
Validate and fix an assembly definition file.

Args:
asmdef_path: Path to the .asmdef file
supported_platforms: Set of supported platform names

Returns:
True if the file was modified, False otherwise
"""
try:
# Read the asmdef file
with open(asmdef_path, 'r') as f:
asmdef_data = json.load(f)

# Check if includePlatforms exists and has entries
if 'includePlatforms' not in asmdef_data or not asmdef_data['includePlatforms']:
# No platform restrictions, so this is fine
return False

original_platforms = set(asmdef_data['includePlatforms'])

# Find unsupported platforms
unsupported_platforms = original_platforms - supported_platforms

if not unsupported_platforms:
# All platforms are supported
return False

self._log_warning(f"Found unsupported platform(s) in {asmdef_path.name}: {', '.join(sorted(unsupported_platforms))}", 1)

# Create backup before modifying
backup_path = asmdef_path.with_suffix('.asmdef.backup')
shutil.copy2(asmdef_path, backup_path)
self.backup_files.append((asmdef_path, backup_path))

# Remove unsupported platforms
fixed_platforms = sorted(original_platforms & supported_platforms)
asmdef_data['includePlatforms'] = fixed_platforms

# Write back to file with pretty formatting
with open(asmdef_path, 'w') as f:
json.dump(asmdef_data, f, indent=4)
f.write('\n') # Add trailing newline

self._log_status(f"Fixed {asmdef_path.name} - removed: {', '.join(sorted(unsupported_platforms))}", 1)

return True

except Exception as ex:
self._log_warning(f"Failed to process {asmdef_path}: {ex}")
return False

def restore_asmdef_files(self) -> None:
"""
Restore all backed up asmdef files to their original state.

This should be called after packaging is complete to avoid
leaving modified asmdef files in the source tree.
"""
if not self.backup_files:
return

self._log_status(f"Restoring {len(self.backup_files)} asmdef file(s) to original state")

for original_path, backup_path in self.backup_files:
try:
if backup_path.exists():
shutil.move(str(backup_path), str(original_path))
self._log_status(f"Restored {original_path.name}", 1)
except Exception as ex:
self._log_warning(f"Failed to restore {original_path}: {ex}", 1)

# Clear the backup list
self.backup_files.clear()

def process_plugin_asmdefs(self, plugin_path: Path, supported_platforms: Set[str]) -> int:
"""
Process all .asmdef files in a plugin directory.

Args:
plugin_path: Path to the plugin root directory
supported_platforms: Set of supported platform names

Returns:
Number of files that were modified
"""
asmdef_files = list(plugin_path.glob('**/*.asmdef'))

if not asmdef_files:
return 0

modified_count = 0
for asmdef_path in asmdef_files:
if self.validate_and_fix_asmdef(asmdef_path, supported_platforms):
modified_count += 1

return modified_count

def validate_plugin_before_packaging(self, plugin_path: Path, built_platforms: Dict[str, bool]) -> None:
"""
Validate and fix all asmdef files in a plugin before packaging.

This should be called before the tar packaging step to ensure that the
packaged plugin only references platforms that were actually built.

Args:
plugin_path: Path to the plugin Unity project
built_platforms: Dictionary of platform IDs to boolean (True if built)
"""
supported_platforms = self.get_supported_asmdef_platforms(built_platforms)

self._log(f"Validating assembly definitions for: {plugin_path.name}")
self._log(f"Supported platforms: {', '.join(sorted(supported_platforms))}", 1)

# Find the Unity package directory (where package.json lives)
package_json_files = list(plugin_path.glob('**/package.json'))

for package_json_path in package_json_files:
# Skip package cache entries
if 'PackageCache' in str(package_json_path):
continue

package_root = package_json_path.parent
modified_count = self.process_plugin_asmdefs(package_root, supported_platforms)

if modified_count > 0:
self._log_status(f"Fixed {modified_count} assembly definition file(s)")
else:
self._log_status("All assembly definitions are valid")

break


# Standalone test entry point
if __name__ == '__main__':
import sys

if len(sys.argv) < 2:
print("Usage: python upi_asmdef_validator.py <plugin_path>")
sys.exit(1)

plugin_path = Path(sys.argv[1])
if not plugin_path.exists():
print(f"Error: Path does not exist: {plugin_path}")
sys.exit(1)

# Test with all platforms for demonstration
test_platforms = {
"iOS": True,
"macOS": True,
"tvOS": True,
"visionOS": False # Simulate Unity 2021 (no visionOS)
}

validator = AsmdefPlatformValidator()
validator.validate_plugin_before_packaging(plugin_path, test_platforms)
117 changes: 69 additions & 48 deletions scripts/python/upi_unity_native_plugin_manager.py
@@ -5,6 +5,7 @@

import scripts.python.upi_utility as utility
import scripts.python.upi_toolchain as toolchain
import scripts.python.upi_asmdef_validator as asmdef_validator

from scripts.python.upi_cli_argument_options import ConfigID

@@ -394,57 +395,77 @@ def ValidateProjectVersions(self):

# Packs plug-ins with tar and moves the resulting package to the currently configured build output folder.
def GeneratePlugInPackages(self) -> None:
# Check if all platforms are being built
all_platforms_built = all(CTX.platforms.values())

# Only initialize validator if not building all platforms
validator = None
if not all_platforms_built:
validator = asmdef_validator.AsmdefPlatformValidator(CTX.printer)
CTX.printer.StatusMessage("Building for subset of platforms - will validate assembly definitions")
else:
CTX.printer.StatusMessage("Building for all platforms - skipping assembly definition validation")

        # Cache the current working directory so it can be restored later; tar should be invoked from the folder containing the associated package.json
working_dir = os.getcwd()
for plugin_id, native_plugin in self.native_unity_plugin_table.items():
CTX.printer.StatusMessageWithContext("Packing plug-in: ", f"{plugin_id}", "\n")

os.chdir(native_plugin.unity_project.path)

# Not all Unity projects keep their package.json file in the same location, so get all the paths to any package.json under the current folder hierarchy
# TODO: This will break if there's more than one package.json in the folder tree - with the exception of those in PackageCache, which are filtered.
package_json_file_paths = list(native_plugin.unity_project.path.glob('**/package.json'))
try:
for plugin_id, native_plugin in self.native_unity_plugin_table.items():
CTX.printer.StatusMessageWithContext("Packing plug-in: ", f"{plugin_id}", "\n")

# Ignore anything in the current project's package cache
target_package_json_path = Path()
for curr_package_json_path in package_json_file_paths:
if str(curr_package_json_path).find("PackageCache") != -1:
continue
else:
target_package_json_path = curr_package_json_path
break

# If /Demos exists in same folder, rename to Demos~ folder as needed
curr_demo_path = target_package_json_path.parent.joinpath("Demos")
curr_demo_meta_path = target_package_json_path.parent.joinpath("Demos.meta")
dest_demo_path = target_package_json_path.parent.joinpath("Demos~")
dest_demo_meta_path = target_package_json_path.parent.joinpath("../Demos.meta")

if curr_demo_path.exists():
utility.RunCommand(["mv", curr_demo_path, dest_demo_path])
utility.RunCommand(["mv", curr_demo_meta_path, dest_demo_meta_path])

# get the package name and version
package_json_file = open(target_package_json_path)
package_json_data = json.load(package_json_file)
tgz_filename = f"{package_json_data['name']}" "-" f"{package_json_data['version']}" ".tgz"
package_json_file.close()

# using tar:
pack_command = ["tar", "--auto-compress", "--create", "--file", f"{CTX.build_output_path.joinpath(tgz_filename)}", "--directory", f"{target_package_json_path.parent}", "-s", "/./package/", "." ]

CTX.printer.MessageWithContext("Project package.json path: ", f"{target_package_json_path}", CTX.printer.Indent(1))
CTX.printer.MessageWithContext("Pack command: ", f"{(' '.join(pack_command))}", CTX.printer.Indent(1))

pack_command_output = utility.RunCommand(pack_command)

if pack_command_output.returncode != 0:
CTX.printer.WarningMessage(f"Pack command completed with non-zero return code.\n\nSTDOUT:\n{pack_command_output.stdout}")
else:
CTX.printer.StatusMessage(f"Pack completed.")
# Validate and fix asmdef files before packaging (only if not building all platforms)
if validator:
validator.validate_plugin_before_packaging(native_plugin.unity_project.path, CTX.platforms)

if dest_demo_path.exists():
utility.RunCommand(["mv", dest_demo_path, curr_demo_path])
utility.RunCommand(["mv", dest_demo_meta_path, curr_demo_meta_path])
os.chdir(native_plugin.unity_project.path)

os.chdir(working_dir)
# Not all Unity projects keep their package.json file in the same location, so get all the paths to any package.json under the current folder hierarchy
# TODO: This will break if there's more than one package.json in the folder tree - with the exception of those in PackageCache, which are filtered.
package_json_file_paths = list(native_plugin.unity_project.path.glob('**/package.json'))

# Ignore anything in the current project's package cache
target_package_json_path = Path()
for curr_package_json_path in package_json_file_paths:
if str(curr_package_json_path).find("PackageCache") != -1:
continue
else:
target_package_json_path = curr_package_json_path
break

# If /Demos exists in same folder, rename to Demos~ folder as needed
curr_demo_path = target_package_json_path.parent.joinpath("Demos")
curr_demo_meta_path = target_package_json_path.parent.joinpath("Demos.meta")
dest_demo_path = target_package_json_path.parent.joinpath("Demos~")
dest_demo_meta_path = target_package_json_path.parent.joinpath("../Demos.meta")

if curr_demo_path.exists():
utility.RunCommand(["mv", curr_demo_path, dest_demo_path])
utility.RunCommand(["mv", curr_demo_meta_path, dest_demo_meta_path])

# get the package name and version
package_json_file = open(target_package_json_path)
package_json_data = json.load(package_json_file)
tgz_filename = f"{package_json_data['name']}" "-" f"{package_json_data['version']}" ".tgz"
package_json_file.close()

# using tar:
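                # Note: "-s /./package/" is a BSD tar substitution that rewrites the leading "." of each
                # archived path to "package", so the resulting .tgz unpacks into a top-level "package/"
                # folder, which is the layout the Unity Package Manager expects for tarball packages.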
pack_command = ["tar", "--auto-compress", "--create", "--file", f"{CTX.build_output_path.joinpath(tgz_filename)}", "--directory", f"{target_package_json_path.parent}", "-s", "/./package/", "." ]

CTX.printer.MessageWithContext("Project package.json path: ", f"{target_package_json_path}", CTX.printer.Indent(1))
CTX.printer.MessageWithContext("Pack command: ", f"{(' '.join(pack_command))}", CTX.printer.Indent(1))

pack_command_output = utility.RunCommand(pack_command)

if pack_command_output.returncode != 0:
CTX.printer.WarningMessage(f"Pack command completed with non-zero return code.\n\nSTDOUT:\n{pack_command_output.stdout}")
else:
CTX.printer.StatusMessage(f"Pack completed.")

if dest_demo_path.exists():
utility.RunCommand(["mv", dest_demo_path, curr_demo_path])
utility.RunCommand(["mv", dest_demo_meta_path, curr_demo_meta_path])
finally:
# Always restore asmdef files to their original state, even if packaging fails
if validator:
validator.restore_asmdef_files()
os.chdir(working_dir)