@@ -77,18 +77,17 @@ def merge_lockfiles(base_lockfile, override_lockfile):
         return base_lockfile
 
     for override_entry in override:
-        # override_entry is a dict like {'arch': x86_64','packages': [...]}
+        # override_entry is a dict like {'arch': 'x86_64', 'packages': [...]}
         if not isinstance(override_entry, dict):
             continue
         arch = override_entry.get('arch', None)
-        override_packages = override_entry.get('packages', [])
+        # Only merge packages if the architecture is already present in the
+        # base lockfile. This prevents override files from injecting arches
+        # that are not part of the current build.
         if arch in base_arches:
-            # Merge packages
+            override_packages = override_entry.get('packages', [])
             base_packages = base_arches[arch].get('packages', [])
             base_packages += override_packages
-        else:
-            # Add the arch from the override file
-            base_arches[arch] = override_packages
 
     # Reconstruct the arches list
     base_lockfile['arches'] = list(base_arches.values())
@@ -119,21 +118,24 @@ def query_packages_location(locks, repoquery_args):
         if name not in processed_urls:
             processed_urls[name] = url
     pkg_urls = list(processed_urls.values())
-    # sanity check all the packages got resolved
-    if len(pkg_urls) < len(locked_nevras):
+    # sanity check all the locked packages got resolved
+    if len(pkg_urls) != len(locked_nevras):
         print("Some packages from the lockfile could not be resolved. The rpm-ostree lockfile is probably out of date.")
-        for name in locks.keys():
-            if name not in processed_urls:
-                print(f"could not resolve package {name}")
         sys.exit(1)
 
+    print(f"Done. Solved {len(pkg_urls)} packages.")
     return pkg_urls
 
 
-def generate_lockfile(contextdir, manifest, output_path, arches):
+def generate_main(args):
     """
     Generates the cachi2/hermeto RPM lock file.
     """
+    contextdir = args.context
+    manifest = os.path.abspath(args.manifest)
+    output_path = args.output
+    arches = args.arch
+
     if not arches:
         arches_to_resolve = [get_basearch()]
     elif 'all' in arches:
@@ -151,7 +153,7 @@ def generate_lockfile(contextdir, manifest, output_path, arches):
     repos = manifest_data.get('repos', [])
     repos += manifest_data.get('lockfile-repos', [])
 
-    repoquery_args = ["--queryformat", "%{name} %{location}\n", "--disablerepo=*", "--refresh"]
+    repoquery_args = ["--queryformat", "%{name} %{location}\n", "--disablerepo=*", "--refresh", "--quiet"]
     # Tell dnf to load repos files from $contextdir
     repoquery_args.extend([f"--setopt=reposdir={contextdir}"])
 
@@ -165,64 +167,122 @@ def generate_lockfile(contextdir, manifest, output_path, arches):
             print(f"This tool derives the konflux lockfile from rpm-ostree lockfiles. No manifest-lock exists for {arch} in {contextdir}")
             sys.exit(1)
         print(f"Resolving packages for {arch}...")
-        # append noarch as well because otherwise tose packages get excluded from results
-        # We use --forcearch here because otherwise dnf still respect the system basearch
-        # we have to specify both --arch and --forcearch to get both result for $arch and $noarch
-        args_arch = ['--forcearch', arch, '--arch', arch, '--arch', 'noarch']
-        pkg_urls = query_packages_location(locks, repoquery_args + args_arch)
+        arch_args = []
+        if arch != get_basearch():
+            # append noarch as well because otherwise those packages get excluded from results
+            # We use --forcearch here because otherwise dnf still respects the system basearch
+            # we have to specify both --arch and --forcearch to get results for both $arch and noarch
+            arch_args = ['--forcearch', arch, '--arch', arch, '--arch', 'noarch']
+        pkg_urls = query_packages_location(locks, repoquery_args + arch_args)
         packages.append({'arch': arch, 'packages': pkg_urls})
 
     lockfile = write_hermeto_lockfile(packages, repos)
 
-    override_path = os.path.join(contextdir, 'konflux-lockfile-override.yaml')
-    if os.path.exists(override_path):
+    try:
+        with open(output_path, 'w', encoding='utf-8') as f:
+            yaml.safe_dump(lockfile, f, default_flow_style=False)
+    except IOError as e:
+        print(f"❌ Error: Could not write to output file '{output_path}'. Reason: {e}")
+        sys.exit(1)
+
+def merge_main(args):
+    """
+    Merges multiple lockfiles into one, optionally applying an override file.
+    """
+    if not args.input:
+        print("Error: at least one input file is required for merging.", file=sys.stderr)
+        sys.exit(1)
+
+    try:
+        with open(args.input[0], 'r', encoding='utf-8') as f:
+            base_lockfile = yaml.safe_load(f)
+    except (IOError, yaml.YAMLError) as e:
+        print(f"Error reading base lockfile {args.input[0]}: {e}", file=sys.stderr)
+        sys.exit(1)
+
+    for subsequent_file in args.input[1:]:
         try:
-            with open(override_path, 'r', encoding="utf8") as f:
+            with open(subsequent_file, 'r', encoding='utf-8') as f:
+                next_lockfile = yaml.safe_load(f)
+                base_lockfile = merge_lockfiles(base_lockfile, next_lockfile)
+        except (IOError, yaml.YAMLError) as e:
+            print(f"Error reading or merging {subsequent_file}: {e}", file=sys.stderr)
+            sys.exit(1)
+
+    if os.path.exists(args.override):
+        try:
+            with open(args.override, 'r', encoding="utf8") as f:
                 override_data = yaml.safe_load(f)
-                print(f"Merging override from {override_path}")
-                lockfile = merge_lockfiles(lockfile, override_data)
+                print(f"Merging override from {args.override}")
+                base_lockfile = merge_lockfiles(base_lockfile, override_data)
         except (IOError, yaml.YAMLError) as e:
-            print(f"❌ Error: Could not read or parse override file '{override_path}'. Reason: {e}")
+            print(f"Error reading or parsing override file '{args.override}': {e}", file=sys.stderr)
             sys.exit(1)
 
     try:
-        with open(output_path, 'w', encoding='utf-8') as f:
-            yaml.safe_dump(lockfile, f, default_flow_style=False)
+        with open(args.output, 'w', encoding='utf-8') as f:
+            yaml.safe_dump(base_lockfile, f, default_flow_style=False)
+        print(f"Successfully merged lockfiles to {args.output}")
     except IOError as e:
-        print(f"❌ Error: Could not write to output file '{output_path}'. Reason: {e}")
+        print(f"Error writing to output file '{args.output}': {e}", file=sys.stderr)
         sys.exit(1)
 
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
-        description="Generate hermeto lock files."
+        description="Generate and merge hermeto lock files."
     )
+    subparsers = parser.add_subparsers(dest='command', required=True)
 
-    parser.add_argument(
+    # GENERATE command
+    parser_generate = subparsers.add_parser(
+        'generate',
+        help='Resolve RPMs and generate a lockfile for one or more architectures.'
+    )
+    parser_generate.add_argument(
         'manifest',
         help='Path to the flattened rpm-ostree manifest (e.g., tmp/manifest.json)'
     )
-
-    parser.add_argument(
+    parser_generate.add_argument(
         '--context',
         default='.',
         help="Path to the directory containing repofiles and lockfiles. (default: '.')"
     )
-
-    parser.add_argument(
+    parser_generate.add_argument(
         '--output',
         default='./rpms.lock.yaml',
         help="Path for the hermeto lockfile. (default: './rpms.lock.yaml')"
     )
-
-    parser.add_argument(
+    parser_generate.add_argument(
         '--arch',
         action='append',
         choices=['x86_64', 'aarch64', 's390x', 'ppc64le', 'all'],
         help="The architecture to resolve. Can be specified multiple times. 'all' resolves all architectures."
     )
+    parser_generate.set_defaults(func=generate_main)
 
-    args = parser.parse_args()
+    # MERGE command
+    parser_merge = subparsers.add_parser(
+        'merge',
+        help='Merge multiple architecture-specific lockfiles into a single file.'
+    )
+    parser_merge.add_argument(
+        '--input',
+        nargs='+',
+        required=True,
+        help='One or more input lockfiles to merge.'
+    )
+    parser_merge.add_argument(
+        '--output',
+        default='./rpms.lock.yaml',
+        help="Path for the merged lockfile. (default: './rpms.lock.yaml')"
+    )
+    parser_merge.add_argument(
+        '--override',
+        default='konflux-lockfile-override.yaml',
+        help="Path to an override file. (default: 'konflux-lockfile-override.yaml')"
+    )
+    parser_merge.set_defaults(func=merge_main)
 
-    manifest_abs_path = os.path.abspath(args.manifest)
-    generate_lockfile(args.context, manifest_abs_path, args.output, args.arch)
+    args = parser.parse_args()
+    args.func(args)
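
For reference, a small self-contained sketch (not part of the diff; the toy data and URLs are hypothetical) illustrating the merge semantics that merge_lockfiles enforces after this change: override packages are appended only for architectures already present in the base lockfile, and unknown architectures are ignored.

base = {'arches': [{'arch': 'x86_64', 'packages': ['https://example.com/a.rpm']}]}
override = {'arches': [
    {'arch': 'x86_64', 'packages': ['https://example.com/b.rpm']},   # merged: arch exists in base
    {'arch': 'aarch64', 'packages': ['https://example.com/c.rpm']},  # ignored: arch not in base
]}

base_arches = {entry['arch']: entry for entry in base['arches']}
for entry in override['arches']:
    if not isinstance(entry, dict):
        continue
    if entry.get('arch') in base_arches:
        base_arches[entry['arch']].setdefault('packages', []).extend(entry.get('packages', []))

base['arches'] = list(base_arches.values())
print(base)
# {'arches': [{'arch': 'x86_64', 'packages': ['https://example.com/a.rpm', 'https://example.com/b.rpm']}]}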