"""Python generator implementation using Python bindings."""
5- from dataclasses import dataclass
65from pathlib import Path
6+ import os
7+ import pydantic
8+ from importlib .resources import files
9+
710from typing import Dict , Optional , List , Any
811from pixi_build_backend .types .generated_recipe import (
912 GenerateRecipeProtocol ,
1922
2023from .build_script import BuildScriptContext , BuildPlatform
2124from .distro import Distro
22- from .utils import get_build_input_globs , package_xml_to_conda_requirements , convert_package_xml_to_catkin_package , \
23- get_package_xml_content
25+ from .utils import (
26+ get_build_input_globs ,
27+ package_xml_to_conda_requirements ,
28+ convert_package_xml_to_catkin_package ,
29+ get_package_xml_content ,
30+ load_package_map_data ,
31+ )
32+
33+
34+ def _parse_str_as_abs_path (value : str | Path , manifest_root : Path ) -> Path :
35+ """Parse a string as a Path."""
36+ # Ensure the debug directory is a Path object
37+ if isinstance (value , str ):
38+ value = Path (value )
39+ # Ensure it's an absolute path
40+ if not value .is_absolute ():
41+ # Convert to absolute path relative to manifest root
42+ return (manifest_root / value ).resolve ()
43+ return value
2444
2545
class ROSBackendConfig(pydantic.BaseModel, extra="forbid"):
    """ROS backend configuration.

    Parsed from the ``[tool.pixi.build.config]`` table; kebab-case keys are
    mapped onto snake_case fields via aliases. Pass
    ``context={"manifest_root": ...}`` to ``model_validate`` so relative
    paths resolve against the manifest instead of the working directory.
    """

    # Build a noarch package instead of a platform-specific one
    # (treated as True when unset — see is_noarch()).
    noarch: Optional[bool] = None
    # Environment variables to set during the build
    env: Optional[Dict[str, str]] = None
    # Directory for debug files of this script
    debug_dir: Optional[Path] = pydantic.Field(default=None, alias="debug-dir")
    # Extra input globs to include in the build hash
    extra_input_globs: Optional[List[str]] = pydantic.Field(default=None, alias="extra-input-globs")
    # ROS distribution to use, e.g., "foxy", "galactic", "humble"
    # TODO: This should be figured out in some other way, not from the config.
    distro: Optional[str] = None

    # Extra package mappings to use in the build
    extra_package_mappings: List[Path] = pydantic.Field(default_factory=list, alias="extra-package-mappings")

    def is_noarch(self) -> bool:
        """Whether to build a noarch package or a platform-specific package."""
        return self.noarch is None or self.noarch

    @staticmethod
    def _base_path(info: pydantic.ValidationInfo) -> Path:
        """Base directory for resolving relative config paths.

        Uses ``manifest_root`` from the validation context when provided,
        falling back to the current working directory.
        """
        if info.context and "manifest_root" in info.context:
            return Path(info.context["manifest_root"])
        return Path(os.getcwd())

    @pydantic.field_validator("debug_dir", mode="before")
    @classmethod
    def _parse_debug_dir(cls, value, info: pydantic.ValidationInfo) -> Optional[Path]:
        """Coerce ``debug-dir`` to an absolute path, if set."""
        if value is None:
            return None
        return _parse_str_as_abs_path(value, cls._base_path(info))

    @pydantic.field_validator("extra_package_mappings", mode="before")
    @classmethod
    def _parse_package_mappings(cls, input_value, info: pydantic.ValidationInfo) -> List[Path]:
        """Coerce each ``extra-package-mappings`` entry to an absolute path."""
        if input_value is None:
            return []
        base_path = cls._base_path(info)
        return [_parse_str_as_abs_path(entry, base_path) for entry in input_value]
92+
5693
5794class ROSGenerator (GenerateRecipeProtocol ):
5895 """ROS recipe generator using Python bindings."""
@@ -65,14 +102,15 @@ def generate_recipe(
65102 host_platform : Platform ,
66103 _python_params : Optional [PythonParams ] = None ,
67104 ) -> GeneratedRecipe :
68- """Generate a recipe for a Python package."""
69- backend_config : ROSBackendConfig = ROSBackendConfig (** config )
70-
105+ """Generate a recipe for a Python package."""
71106 manifest_root = Path (manifest_path )
107+ backend_config : ROSBackendConfig = ROSBackendConfig .model_validate (
108+ config , context = {"manifest_root" : manifest_root }
109+ )
72110
73111 # Setup ROS distro first
74112 distro = Distro (backend_config .distro )
75-
113+
76114 # Create metadata provider for package.xml
77115 package_xml_path = manifest_root / "package.xml"
78116 metadata_provider = ROSPackageXmlMetadataProvider (str (package_xml_path ), distro )
@@ -84,11 +122,30 @@ def generate_recipe(
84122 package_xml_str = get_package_xml_content (manifest_root )
85123 package_xml = convert_package_xml_to_catkin_package (package_xml_str )
86124
125+ # load package map
126+
127+ # TODO: Currently hardcoded and not able to override, this should be configurable
128+ package_files = files ("pixi_build_ros" )
129+ robostack_file = package_files / "robostack.yaml"
130+ # workaround for from source install
131+ if not robostack_file .is_file ():
132+ robostack_file = Path (__file__ ).parent .parent .parent / "robostack.yaml"
133+
134+ package_map_data = load_package_map_data ([robostack_file ] + backend_config .extra_package_mappings )
135+
87136 # Get requirements from package.xml
88- package_requirements = package_xml_to_conda_requirements (package_xml , distro , host_platform )
137+ package_requirements = package_xml_to_conda_requirements (package_xml , distro , host_platform , package_map_data )
89138
90139 # Add standard dependencies
91- build_deps = ["ninja" , "python" , "setuptools" , "git" , "git-lfs" , "cmake" , "cpython" ]
140+ build_deps = [
141+ "ninja" ,
142+ "python" ,
143+ "setuptools" ,
144+ "git" ,
145+ "git-lfs" ,
146+ "cmake" ,
147+ "cpython" ,
148+ ]
92149 if host_platform .is_unix :
93150 build_deps .extend (["patch" , "make" , "coreutils" ])
94151 if host_platform .is_windows :
@@ -112,26 +169,24 @@ def generate_recipe(
112169 requirements = merge_requirements (generated_recipe .recipe .requirements , package_requirements )
113170 generated_recipe .recipe .requirements = requirements
114171
115-
116172 # Determine build platform
117173 build_platform = BuildPlatform .current ()
118174
119175 # Generate build script
120176 build_script_context = BuildScriptContext .load_from_template (package_xml , build_platform , manifest_root , distro )
121177 build_script_lines = build_script_context .render ()
122178
123- generated_recipe .recipe .build .script = Script (
179+ generated_recipe .recipe .build .script = Script (
124180 content = build_script_lines ,
125181 env = backend_config .env ,
126182 )
127183
128- debug_dir = backend_config .get_debug_dir ()
129- if debug_dir :
184+ if backend_config .debug_dir :
130185 recipe = generated_recipe .recipe .to_yaml ()
131186 package = generated_recipe .recipe .package
132- debug_file_path = debug_dir / f"{ package .name .get_concrete ()} -{ package .version } -recipe.yaml"
187+ debug_file_path = backend_config . debug_dir / f"{ package .name .get_concrete ()} -{ package .version } -recipe.yaml"
133188 debug_file_path .parent .mkdir (parents = True , exist_ok = True )
134- with open (debug_file_path , 'w' ) as debug_file :
189+ with open (debug_file_path , "w" ) as debug_file :
135190 debug_file .write (recipe )
136191
137192 # Test the build script before running to early out.
@@ -144,22 +199,26 @@ def extract_input_globs_from_build(self, config: ROSBackendConfig, workdir: Path
144199 """Extract input globs for the build."""
145200 return get_build_input_globs (config , editable )
146201
147- def default_variants (self , host_platform : Platform ) -> Dict [str , Any ]:
202+ def default_variants (self , host_platform : Platform ) -> Dict [str , Any ]:
148203 """Get the default variants for the generator."""
149204 variants = {}
150205 if host_platform .is_windows :
151206 variants ["cxx_compiler" ] = ["vs2019" ]
152207 return variants
153208
154- def merge_requirements (model_requirements : ConditionalRequirements , package_requirements : ConditionalRequirements ) -> ConditionalRequirements :
209+
210+ def merge_requirements (
211+ model_requirements : ConditionalRequirements ,
212+ package_requirements : ConditionalRequirements ,
213+ ) -> ConditionalRequirements :
155214 """Merge two sets of requirements."""
156215 merged = ConditionalRequirements ()
157216
158217 # The model requirements are the base, coming from the pixi manifest
159218 # We need to only add the names for non-existing dependencies
160219 def merge_unique_items (
161- model : List [ItemPackageDependency ],
162- package : List [ItemPackageDependency ],
220+ model : List [ItemPackageDependency ],
221+ package : List [ItemPackageDependency ],
163222 ) -> List [ItemPackageDependency ]:
164223 """Merge unique items from source into target."""
165224 result = model
@@ -179,5 +238,3 @@ def merge_unique_items(
179238
180239 # If the dependency is of type Source in one of the requirements, we need to set them to Source for all variants
181240 return merged
182-
183-
0 commit comments