@@ -81,14 +81,13 @@ def merge_lockfiles(base_lockfile, override_lockfile):
         if not isinstance(override_entry, dict):
             continue
         arch = override_entry.get('arch', None)
-        override_packages = override_entry.get('packages', [])
+        # Only merge packages if the architecture is already present in the
+        # base lockfile. This prevents override files from injecting arches
+        # that are not part of the current build.
         if arch in base_arches:
-            # Merge packages
+            override_packages = override_entry.get('packages', [])
             base_packages = base_arches[arch].get('packages', [])
             base_packages += override_packages
-        else:
-            # Add the arch from the override file
-            base_arches[arch] = override_packages

     # Reconstruct the arches list
     base_lockfile['arches'] = list(base_arches.values())
@@ -119,21 +118,24 @@ def query_packages_location(locks, repoquery_args):
         if name not in processed_urls:
             processed_urls[name] = url
     pkg_urls = list(processed_urls.values())
-    # sanity check all the packages got resolved
-    if len(pkg_urls) < len(locked_nevras):
+    # sanity check all the locked packages got resolved
+    if len(pkg_urls) != len(locked_nevras):
         print("Some packages from the lockfile could not be resolved. The rpm-ostree lockfile is probably out of date.")
-        for name in locks.keys():
-            if name not in processed_urls:
-                print(f"could not resolve package {name}")
         sys.exit(1)

+    print(f"Done. Solved {len(pkg_urls)} packages.")
     return pkg_urls


-def generate_lockfile(contextdir, manifest, output_path, arches):
+def generate_main(args):
     """
     Generates the cachi2/hermeto RPM lock file.
     """
+    contextdir = args.context
+    manifest = os.path.abspath(args.manifest)
+    output_path = args.output
+    arches = args.arch
+
     if not arches:
         arches_to_resolve = [get_basearch()]
     elif 'all' in arches:
@@ -151,7 +153,7 @@ def generate_lockfile(contextdir, manifest, output_path, arches):
     repos = manifest_data.get('repos', [])
     repos += manifest_data.get('lockfile-repos', [])

-    repoquery_args = ["--queryformat", "%{name} %{location}\n", "--disablerepo=*", "--refresh"]
+    repoquery_args = ["--queryformat", "%{name} %{location}\n", "--disablerepo=*", "--refresh", "--quiet"]
     # Tell dnf to load repos files from $contextdir
     repoquery_args.extend([f"--setopt=reposdir={contextdir}"])

@@ -165,64 +167,123 @@ def generate_lockfile(contextdir, manifest, output_path, arches):
             print(f"This tool derives the konflux lockfile from rpm-ostree lockfiles. No manifest-lock exists for {arch} in {contextdir}")
             sys.exit(1)
         print(f"Resolving packages for {arch}...")
-        # append noarch as well because otherwise tose packages get excluded from results
-        # We use --forcearch here because otherwise dnf still respect the system basearch
-        # we have to specify both --arch and --forcearch to get both result for $arch and $noarch
-        args_arch = ['--forcearch', arch, '--arch', arch, '--arch', 'noarch']
-        pkg_urls = query_packages_location(locks, repoquery_args + args_arch)
+        arch_args = []
+        if arch != get_basearch():
+            # append noarch as well because otherwise those packages get excluded from results
+            # We use --forcearch here because otherwise dnf still respects the system basearch
+            # we have to specify both --arch and --forcearch to get results for both $arch and noarch
+            arch_args = ['--forcearch', arch, '--arch', arch, '--arch', 'noarch']
+        pkg_urls = query_packages_location(locks, repoquery_args + arch_args)
         packages.append({'arch': arch, 'packages': pkg_urls})

     lockfile = write_hermeto_lockfile(packages, repos)

-    override_path = os.path.join(contextdir, 'konflux-lockfile-override.yaml')
-    if os.path.exists(override_path):
+    try:
+        with open(output_path, 'w', encoding='utf-8') as f:
+            yaml.safe_dump(lockfile, f, default_flow_style=False)
+    except IOError as e:
+        print(f"❌ Error: Could not write to output file '{output_path}'. Reason: {e}")
+        sys.exit(1)
+
+
+def merge_main(args):
+    """
+    Merges multiple lockfiles into one, optionally applying an override file.
+    """
+    if not args.input:
+        print("Error: at least one input file is required for merging.", file=sys.stderr)
+        sys.exit(1)
+
+    try:
+        with open(args.input[0], 'r', encoding='utf-8') as f:
+            base_lockfile = yaml.safe_load(f)
+    except (IOError, yaml.YAMLError) as e:
+        print(f"Error reading base lockfile {args.input[0]}: {e}", file=sys.stderr)
+        sys.exit(1)
+
+    for subsequent_file in args.input[1:]:
+        try:
+            with open(subsequent_file, 'r', encoding='utf-8') as f:
+                next_lockfile = yaml.safe_load(f)
+            base_lockfile = merge_lockfiles(base_lockfile, next_lockfile)
+        except (IOError, yaml.YAMLError) as e:
+            print(f"Error reading or merging {subsequent_file}: {e}", file=sys.stderr)
+            sys.exit(1)
+
+    if os.path.exists(args.override):
         try:
-            with open(override_path, 'r', encoding="utf8") as f:
+            with open(args.override, 'r', encoding="utf8") as f:
                 override_data = yaml.safe_load(f)
-                print(f"Merging override from {override_path}")
-                lockfile = merge_lockfiles(lockfile, override_data)
+                print(f"Merging override from {args.override}")
+                base_lockfile = merge_lockfiles(base_lockfile, override_data)
         except (IOError, yaml.YAMLError) as e:
-            print(f"❌ Error: Could not read or parse override file '{override_path}'. Reason: {e}")
+            print(f"Error reading or parsing override file '{args.override}': {e}", file=sys.stderr)
             sys.exit(1)

     try:
-        with open(output_path, 'w', encoding='utf-8') as f:
-            yaml.safe_dump(lockfile, f, default_flow_style=False)
+        with open(args.output, 'w', encoding='utf-8') as f:
+            yaml.safe_dump(base_lockfile, f, default_flow_style=False)
+        print(f"Successfully merged lockfiles to {args.output}")
     except IOError as e:
-        print(f"❌ Error: Could not write to output file '{output_path}'. Reason: {e}")
+        print(f"Error writing to output file '{args.output}': {e}", file=sys.stderr)
         sys.exit(1)


 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
-        description="Generate hermeto lock files."
+        description="Generate and merge hermeto lock files."
     )
+    subparsers = parser.add_subparsers(dest='command', required=True)

-    parser.add_argument(
+    # GENERATE command
+    parser_generate = subparsers.add_parser(
+        'generate',
+        help='Resolve RPMs and generate a lockfile for one or more architectures.'
+    )
+    parser_generate.add_argument(
         'manifest',
         help='Path to the flattened rpm-ostree manifest (e.g., tmp/manifest.json)'
     )
-
-    parser.add_argument(
+    parser_generate.add_argument(
         '--context',
         default='.',
         help="Path to the directory containing repofiles and lockfiles. (default: '.')"
     )
-
-    parser.add_argument(
+    parser_generate.add_argument(
         '--output',
         default='./rpms.lock.yaml',
         help="Path for the hermeto lockfile. (default: './rpms.lock.yaml')"
     )
-
-    parser.add_argument(
+    parser_generate.add_argument(
         '--arch',
         action='append',
         choices=['x86_64', 'aarch64', 's390x', 'ppc64le', 'all'],
         help="The architecture to resolve. Can be specified multiple times. 'all' resolves all architectures."
     )
+    parser_generate.set_defaults(func=generate_main)

-    args = parser.parse_args()
+    # MERGE command
+    parser_merge = subparsers.add_parser(
+        'merge',
+        help='Merge multiple architecture-specific lockfiles into a single file.'
+    )
+    parser_merge.add_argument(
+        '--input',
+        nargs='+',
+        required=True,
+        help='One or more input lockfiles to merge.'
+    )
+    parser_merge.add_argument(
+        '--output',
+        default='./rpms.lock.yaml',
+        help="Path for the merged lockfile. (default: './rpms.lock.yaml')"
+    )
+    parser_merge.add_argument(
+        '--override',
+        default='konflux-lockfile-override.yaml',
+        help="Path to an override file. (default: 'konflux-lockfile-override.yaml')"
+    )
+    parser_merge.set_defaults(func=merge_main)

-    manifest_abs_path = os.path.abspath(args.manifest)
-    generate_lockfile(args.context, manifest_abs_path, args.output, args.arch)
+    args = parser.parse_args()
+    args.func(args)
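
Note: the following is a minimal, self-contained sketch of the merge semantics introduced in the first hunk; it is not the PR's code, and the helper name merge_arches plus the sample data are hypothetical. Packages from an override entry are appended only when the arch already exists in the base lockfile, and entries for unknown arches are dropped.

def merge_arches(base_lockfile, override_lockfile):
    # Index the base entries by arch so override entries can be matched up.
    base_arches = {e['arch']: e for e in base_lockfile.get('arches', [])}
    for override_entry in override_lockfile.get('arches', []):
        if not isinstance(override_entry, dict):
            continue
        arch = override_entry.get('arch', None)
        # Arches that are not in the base lockfile are skipped, so an override
        # file cannot inject arches that are not part of the current build.
        if arch in base_arches:
            base_arches[arch].setdefault('packages', []).extend(
                override_entry.get('packages', []))
    base_lockfile['arches'] = list(base_arches.values())
    return base_lockfile

base = {'arches': [{'arch': 'x86_64', 'packages': [{'url': 'https://example.test/a.rpm'}]}]}
override = {'arches': [
    {'arch': 'x86_64', 'packages': [{'url': 'https://example.test/b.rpm'}]},
    {'arch': 's390x', 'packages': [{'url': 'https://example.test/c.rpm'}]},
]}
print(merge_arches(base, override))
# x86_64 now lists a.rpm and b.rpm; the s390x override entry is ignored.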
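For reference, the generate/merge dispatch in the __main__ block uses the standard argparse subcommand pattern: each subparser registers its handler with set_defaults(func=...) and the selected handler is invoked through args.func(args). Below is a stripped-down, runnable sketch with placeholder handlers (the handler bodies are illustrative, not the PR's).

import argparse

def generate_main(args):
    print(f"would generate a lockfile at {args.output}")

def merge_main(args):
    print(f"would merge {args.input} into {args.output}")

parser = argparse.ArgumentParser(description="Generate and merge hermeto lock files.")
subparsers = parser.add_subparsers(dest='command', required=True)

parser_generate = subparsers.add_parser('generate', help='Resolve RPMs and generate a lockfile.')
parser_generate.add_argument('--output', default='./rpms.lock.yaml')
parser_generate.set_defaults(func=generate_main)

parser_merge = subparsers.add_parser('merge', help='Merge per-arch lockfiles into one file.')
parser_merge.add_argument('--input', nargs='+', required=True)
parser_merge.add_argument('--output', default='./rpms.lock.yaml')
parser_merge.set_defaults(func=merge_main)

# e.g. "merge --input x86_64.yaml aarch64.yaml" routes to merge_main
args = parser.parse_args()
args.func(args)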