@@ -32,10 +32,8 @@ def format_packages_with_repoid(pkgs, repos):
     # This is needed because rpm-ostree uses the full list of repos to
     # resolve packages and errors out if a repository is missing.
     repo_numbers = len(local_repos)
-    i = 0
-    for pkg in pkgs:
+    for i, pkg in enumerate(pkgs):
         packages.append({"url": pkg, "repoid": local_repos[i % repo_numbers]})
-        i += 1
     return packages


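The counter removed above only existed to assign each package a local repoid in round-robin order via `i % repo_numbers`; `enumerate` now provides it directly. A minimal sketch of the same assignment pattern (package URLs and repo ids invented for illustration):

    # Illustrative only: URLs and repo ids are made up.
    pkgs = ["https://example.invalid/a.rpm", "https://example.invalid/b.rpm", "https://example.invalid/c.rpm"]
    local_repos = ["local-repo-0", "local-repo-1"]
    packages = [
        {"url": pkg, "repoid": local_repos[i % len(local_repos)]}
        for i, pkg in enumerate(pkgs)
    ]
    # a.rpm -> local-repo-0, b.rpm -> local-repo-1, c.rpm wraps around to local-repo-0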
@@ -62,33 +60,38 @@ def write_hermeto_lockfile(arch_packages, repos):
     return lockfile


-def merge_lockfiles(base_lockfile, override_lockfile):
+def merge_lockfiles(base_lockfile, next_lockfile, override=False):
     """
-    Merges an override lockfile into a base lockfile.
+    Merges a lockfile into a base lockfile.
+
+    If override is True, it will only add packages to existing
+    architectures. Otherwise, it will also add new architectures.
     """
-    if not override_lockfile:
+    if not next_lockfile:
         return base_lockfile

     # Create a dictionary for base arches for easy lookup
     base_arches = {arch['arch']: arch for arch in base_lockfile.get('arches', [])}

-    override = override_lockfile.get('arches', [])
-    if not override:
+    next_arches_list = next_lockfile.get('arches', [])
+    if not next_arches_list:
         return base_lockfile

-    for override_entry in override:
-        # override_entry is a dict like {'arch': 'x86_64', 'packages': [...]}
-        if not isinstance(override_entry, dict):
+    for next_arch_entry in next_arches_list:
+        if not isinstance(next_arch_entry, dict):
+            continue
+        arch = next_arch_entry.get('arch', None)
+        if not arch:
             continue
-        arch = override_entry.get('arch', None)
-        override_packages = override_entry.get('packages', [])
+
+        next_packages = next_arch_entry.get('packages', [])

         if arch in base_arches:
-            # Merge packages
+            # Arch exists, merge packages
             base_packages = base_arches[arch].get('packages', [])
-            base_packages += override_packages
-        else:
-            # Add the arch from the override file
-            base_arches[arch] = override_packages
+            base_packages += next_packages
+        elif not override:
+            # Arch is new and this is not an override, so add it
+            base_arches[arch] = next_arch_entry

     # Reconstruct the arches list
     base_lockfile['arches'] = list(base_arches.values())
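A minimal sketch of the merge semantics introduced above (lockfile contents invented, package entries abbreviated to bare strings for brevity): a regular merge adds new architectures, while an override merge with override=True only appends packages to architectures that already exist.

    base = {'arches': [{'arch': 'x86_64', 'packages': ['a']}]}
    other = {'arches': [{'arch': 'aarch64', 'packages': ['b']}]}
    override = {'arches': [{'arch': 'x86_64', 'packages': ['x']},
                           {'arch': 's390x', 'packages': ['y']}]}

    merged = merge_lockfiles(base, other)                       # adds the aarch64 entry
    merged = merge_lockfiles(merged, override, override=True)   # x86_64 packages become ['a', 'x']
    # the s390x entry is dropped: it is a new arch and override=True
    # only touches existing architectures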
@@ -119,21 +122,24 @@ def query_packages_location(locks, repoquery_args):
         if name not in processed_urls:
             processed_urls[name] = url
     pkg_urls = list(processed_urls.values())
-    # sanity check all the packages got resolved
-    if len(pkg_urls) < len(locked_nevras):
+    # sanity check all the locked packages got resolved
+    if len(pkg_urls) != len(locked_nevras):
         print("Some packages from the lockfile could not be resolved. The rpm-ostree lockfile is probably out of date.")
-        for name in locks.keys():
-            if name not in processed_urls:
-                print(f"could not resolve package {name}")
         sys.exit(1)

+    print(f"Done. Resolved location for {len(pkg_urls)} packages.")
     return pkg_urls


-def generate_lockfile(contextdir, manifest, output_path, arches):
+def generate_main(args):
     """
     Generates the cachi2/hermeto RPM lock file.
     """
+    contextdir = args.context
+    manifest = os.path.abspath(args.manifest)
+    output_path = args.output
+    arches = args.arch
+
     if not arches:
         arches_to_resolve = [get_basearch()]
     elif 'all' in arches:
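The stricter `!=` comparison above catches any mismatch between the locked NEVRAs and the resolved URLs, and since `processed_urls` is keyed by package name, duplicate query results collapse to a single entry before the count is checked. A toy illustration of that dedup-then-count logic (names and URLs invented):

    locked_nevras = ["bash-5.2.26-1.x86_64", "coreutils-9.4-3.x86_64"]
    resolved = [("bash", "https://example.invalid/bash.rpm"),
                ("bash", "https://example.invalid/bash.rpm")]   # coreutils missing, bash duplicated

    processed_urls = {}
    for name, url in resolved:
        if name not in processed_urls:
            processed_urls[name] = url

    pkg_urls = list(processed_urls.values())
    assert len(pkg_urls) != len(locked_nevras)   # 1 != 2 -> treated as an out-of-date lockfile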
@@ -151,7 +157,7 @@ def generate_lockfile(contextdir, manifest, output_path, arches):
     repos = manifest_data.get('repos', [])
     repos += manifest_data.get('lockfile-repos', [])

-    repoquery_args = ["--queryformat", "%{name} %{location}\n", "--disablerepo=*", "--refresh"]
+    repoquery_args = ["--queryformat", "%{name} %{location}\n", "--disablerepo=*", "--refresh", "--quiet"]
     # Tell dnf to load repos files from $contextdir
     repoquery_args.extend([f"--setopt=reposdir={contextdir}"])

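With the "%{name} %{location}\n" queryformat, every result line pairs a package name with its download location, and --quiet is presumably added so dnf's own status output does not mix into those lines. Roughly how such a line would be split into the name/url pair consumed by query_packages_location (an assumption for illustration; the parsing code is not part of this hunk, and the URL is invented):

    # Hypothetical repoquery output line in the "%{name} %{location}\n" format.
    line = "bash https://cdn.example.invalid/repo/Packages/b/bash-5.2.26-1.fc40.x86_64.rpm"
    name, url = line.split(maxsplit=1)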
@@ -165,64 +171,123 @@ def generate_lockfile(contextdir, manifest, output_path, arches):
             print(f"This tool derive the konflux lockfile from rpm-ostree lockfiles. No manifest-lock exist for {arch} in {contextdir}")
             sys.exit(1)
         print(f"Resolving packages for {arch}...")
-        # append noarch as well because otherwise tose packages get excluded from results
-        # We use --forcearch here because otherwise dnf still respect the system basearch
-        # we have to specify both --arch and --forcearch to get both result for $arch and $noarch
-        args_arch = ['--forcearch', arch, '--arch', arch, '--arch', 'noarch']
-        pkg_urls = query_packages_location(locks, repoquery_args + args_arch)
+        arch_args = []
+        if arch != get_basearch():
+            # append noarch as well because otherwise those packages get excluded from results
+            # We use --forcearch here because otherwise dnf still respects the system basearch
+            # we have to specify both --arch and --forcearch to get results for both $arch and $noarch
+            arch_args = ['--forcearch', arch, '--arch', arch, '--arch', 'noarch']
+        pkg_urls = query_packages_location(locks, repoquery_args + arch_args)
         packages.append({'arch': arch, 'packages': pkg_urls})

     lockfile = write_hermeto_lockfile(packages, repos)

-    override_path = os.path.join(contextdir, 'konflux-lockfile-override.yaml')
-    if os.path.exists(override_path):
+    try:
+        with open(output_path, 'w', encoding='utf-8') as f:
+            yaml.safe_dump(lockfile, f, default_flow_style=False)
+    except IOError as e:
+        print(f"❌ Error: Could not write to output file '{output_path}'. Reason: {e}")
+        sys.exit(1)
+
+
+def merge_main(args):
+    """
+    Merges multiple lockfiles into one, optionally applying an override file.
+    """
+    if not args.input:
+        print("Error: at least one input file is required for merging.", file=sys.stderr)
+        sys.exit(1)
+
+    try:
+        with open(args.input[0], 'r', encoding='utf-8') as f:
+            base_lockfile = yaml.safe_load(f)
+    except (IOError, yaml.YAMLError) as e:
+        print(f"Error reading base lockfile {args.input[0]}: {e}", file=sys.stderr)
+        sys.exit(1)
+
+    for subsequent_file in args.input[1:]:
         try:
-            with open(override_path, 'r', encoding="utf8") as f:
+            with open(subsequent_file, 'r', encoding='utf-8') as f:
+                next_lockfile = yaml.safe_load(f)
+                base_lockfile = merge_lockfiles(base_lockfile, next_lockfile)
+        except (IOError, yaml.YAMLError) as e:
+            print(f"Error reading or merging {subsequent_file}: {e}", file=sys.stderr)
+            sys.exit(1)
+
+    if os.path.exists(args.override):
+        try:
+            with open(args.override, 'r', encoding="utf8") as f:
                 override_data = yaml.safe_load(f)
-                print(f"Merging override from {override_path}")
-                lockfile = merge_lockfiles(lockfile, override_data)
+                print(f"Merging override from {args.override}")
+                base_lockfile = merge_lockfiles(base_lockfile, override_data, override=True)
         except (IOError, yaml.YAMLError) as e:
-            print(f"❌ Error: Could not read or parse override file '{override_path}'. Reason: {e}")
+            print(f"Error reading or parsing override file '{args.override}': {e}", file=sys.stderr)
             sys.exit(1)

     try:
-        with open(output_path, 'w', encoding='utf-8') as f:
-            yaml.safe_dump(lockfile, f, default_flow_style=False)
+        with open(args.output, 'w', encoding='utf-8') as f:
+            yaml.safe_dump(base_lockfile, f, default_flow_style=False)
+        print(f"Successfully merged lockfiles to {args.output}")
     except IOError as e:
-        print(f"❌ Error: Could not write to output file '{output_path}'. Reason: {e}")
+        print(f"Error writing to output file '{args.output}': {e}", file=sys.stderr)
         sys.exit(1)


 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
-        description="Generate hermeto lock files."
+        description="Generate and merge hermeto lock files."
     )
+    subparsers = parser.add_subparsers(dest='command', required=True)

-    parser.add_argument(
+    # GENERATE command
+    parser_generate = subparsers.add_parser(
+        'generate',
+        help='Resolve RPMs and generate a lockfile for one or more architectures.'
+    )
+    parser_generate.add_argument(
         'manifest',
         help='Path to the flattened rpm-ostree manifest (e.g., tmp/manifest.json)'
     )
-
-    parser.add_argument(
+    parser_generate.add_argument(
         '--context',
         default='.',
         help="Path to the directory containing repofiles and lockfiles. (default: '.')"
     )
-
-    parser.add_argument(
+    parser_generate.add_argument(
         '--output',
         default='./rpms.lock.yaml',
         help="Path for the hermeto lockfile. (default: './rpms.lock.yaml')"
     )
-
-    parser.add_argument(
+    parser_generate.add_argument(
         '--arch',
         action='append',
         choices=['x86_64', 'aarch64', 's390x', 'ppc64le', 'all'],
         help="The architecture to resolve. Can be specified multiple times. 'all' resolves all architectures."
     )
+    parser_generate.set_defaults(func=generate_main)

-    args = parser.parse_args()
+    # MERGE command
+    parser_merge = subparsers.add_parser(
+        'merge',
+        help='Merge multiple architecture-specific lockfiles into a single file.'
+    )
+    parser_merge.add_argument(
+        '--input',
+        nargs='+',
+        required=True,
+        help='One or more input lockfiles to merge.'
+    )
+    parser_merge.add_argument(
+        '--output',
+        default='./rpms.lock.yaml',
+        help="Path for the merged lockfile. (default: './rpms.lock.yaml')"
+    )
+    parser_merge.add_argument(
+        '--override',
+        default='konflux-lockfile-override.yaml',
+        help="Path to an override file. (default: 'konflux-lockfile-override.yaml')"
+    )
+    parser_merge.set_defaults(func=merge_main)

-    manifest_abs_path = os.path.abspath(args.manifest)
-    generate_lockfile(args.context, manifest_abs_path, args.output, args.arch)
+    args = parser.parse_args()
+    args.func(args)
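Each subparser registers its handler through set_defaults(func=...), so the single args.func(args) call dispatches to generate_main or merge_main. Assuming the script is invoked as, say, lockfile.py (the actual filename is not visible in this diff), a typical multi-arch flow would generate one lockfile per architecture and then merge them:

    # Hypothetical invocations; the script name and output file names are illustrative.
    ./lockfile.py generate tmp/manifest.json --context . --arch x86_64 --output rpms.x86_64.yaml
    ./lockfile.py generate tmp/manifest.json --context . --arch aarch64 --output rpms.aarch64.yaml
    ./lockfile.py merge --input rpms.x86_64.yaml rpms.aarch64.yaml --output rpms.lock.yaml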