@@ -8,8 +8,7 @@ import subprocess
8
8
import yaml
9
9
10
10
sys .path .insert (0 , os .path .dirname (os .path .abspath (__file__ )))
11
- from cosalib .cmdlib import get_locked_nevras , get_basearch
12
-
11
+ from cosalib .cmdlib import get_basearch
13
12
14
13
def format_packages_with_repoid (pkgs , repos ):
15
14
"""
@@ -32,10 +31,8 @@ def format_packages_with_repoid(pkgs, repos):
32
31
# This is needed because rpm-ostree uses the full list of repos to
33
32
# resolve packages and errors out if a repository is missing.
34
33
repo_numbers = len (local_repos )
35
- i = 0
36
- for pkg in pkgs :
34
+ for i , pkg in enumerate (pkgs ):
37
35
packages .append ({"url" : pkg , "repoid" : local_repos [i % repo_numbers ]})
38
- i += 1
39
36
return packages
40
37
41
38
@@ -62,33 +59,38 @@ def write_hermeto_lockfile(arch_packages, repos):
62
59
return lockfile
63
60
64
61
65
def merge_lockfiles(base_lockfile, next_lockfile, override=False):
    """
    Merge a lockfile into a base lockfile.

    Args:
        base_lockfile: dict with an 'arches' list of
            {'arch': ..., 'packages': [...]} entries.
        next_lockfile: dict of the same shape, folded into base_lockfile.
        override: when True, only add packages to architectures that already
            exist in base_lockfile; when False, new architectures from
            next_lockfile are added as well.

    Returns:
        The merged base_lockfile (which is also modified in place).
    """
    if not next_lockfile:
        return base_lockfile

    # Index the base arches by name for O(1) lookup.
    base_arches = {arch['arch']: arch for arch in base_lockfile.get('arches', [])}

    next_arches_list = next_lockfile.get('arches', [])
    if not next_arches_list:
        return base_lockfile

    for next_arch_entry in next_arches_list:
        if not isinstance(next_arch_entry, dict):
            continue
        arch = next_arch_entry.get('arch', None)
        if not arch:
            continue

        next_packages = next_arch_entry.get('packages', [])
        if arch in base_arches:
            # Arch exists, merge packages. setdefault attaches the list to
            # the entry even when it had no 'packages' key yet — a bare
            # `.get('packages', []) += ...` would mutate a throwaway list
            # and silently drop the merged packages.
            base_arches[arch].setdefault('packages', []).extend(next_packages)
        elif not override:
            # Arch is new and this is not an override, so add it
            base_arches[arch] = next_arch_entry

    # Reconstruct the arches list
    base_lockfile['arches'] = list(base_arches.values())
    return base_lockfile
@@ -103,7 +105,7 @@ def query_packages_location(locks, repoquery_args):
103
105
if not locks :
104
106
return pkg_urls
105
107
106
- locked_nevras = [f'{ k } -{ v } ' for (k , v ) in locks .items ()]
108
+ locked_nevras = [f'{ k } -{ v . get ( 'evra' , '' ) } ' for (k , v ) in locks .items ()]
107
109
cmd = ['dnf' , 'repoquery' ] + locked_nevras + repoquery_args
108
110
result = subprocess .check_output (cmd , text = True )
109
111
@@ -119,21 +121,37 @@ def query_packages_location(locks, repoquery_args):
119
121
if name not in processed_urls :
120
122
processed_urls [name ] = url
121
123
pkg_urls = list (processed_urls .values ())
122
- # sanity check all the packages got resolved
123
- if len (pkg_urls ) < len (locked_nevras ):
124
+ # sanity check all the locked packages got resolved
125
+ if len (pkg_urls ) != len (locked_nevras ):
124
126
print ("Some packages from the lockfile could not be resolved. The rpm-ostree lockfile is probably out of date." )
125
- for name in locks .keys ():
126
- if name not in processed_urls :
127
- print (f"could not resolve package { name } " )
128
127
sys .exit (1 )
129
128
129
+ print (f"Done. Resolved location for { len (pkg_urls )} packages." )
130
130
return pkg_urls
131
131
132
def get_locked_nevras(srcdir, arch):
    """
    Return the locked packages for *arch* from the rpm-ostree lockfile.

    Looks for ``manifest-lock.<arch>.json`` under *srcdir* and returns its
    'packages' entry. Returns an empty list — after printing a notice —
    when the lockfile is absent, and also when it lists no packages.
    """
    lock_path = os.path.join(srcdir, f"manifest-lock.{arch}.json")

    if not os.path.exists(lock_path):
        print(f"rpm-ostree lockfile not found at {lock_path}")
        return []

    with open(lock_path, encoding='utf-8') as lock_file:
        lock_data = json.load(lock_file)
    return lock_data.get('packages', [])
144
+
145
+
146
+ def generate_main (args ):
134
147
"""
135
148
Generates the cachi2/hermeto RPM lock file.
136
149
"""
150
+ contextdir = args .context
151
+ manifest = os .path .abspath (args .manifest )
152
+ output_path = args .output
153
+ arches = args .arch
154
+
137
155
if not arches :
138
156
arches_to_resolve = [get_basearch ()]
139
157
elif 'all' in arches :
@@ -151,7 +169,7 @@ def generate_lockfile(contextdir, manifest, output_path, arches):
151
169
repos = manifest_data .get ('repos' , [])
152
170
repos += manifest_data .get ('lockfile-repos' , [])
153
171
154
- repoquery_args = ["--queryformat" , "%{name} %{location}\n " , "--disablerepo=*" , "--refresh" ]
172
+ repoquery_args = ["--queryformat" , "%{name} %{location}\n " , "--disablerepo=*" , "--refresh" , "--quiet" ]
155
173
# Tell dnf to load repos files from $contextdir
156
174
repoquery_args .extend ([f"--setopt=reposdir={ contextdir } " ])
157
175
@@ -162,67 +180,126 @@ def generate_lockfile(contextdir, manifest, output_path, arches):
162
180
for arch in arches_to_resolve :
163
181
locks = get_locked_nevras (contextdir , arch )
164
182
if not locks :
165
- print (f"This tool derive the konflux lockfile from rpm-ostree lockfiles. No manifest-lock exist for { arch } in { contextdir } " )
183
+ print (f"This tool derive the konflux lockfile from rpm-ostree lockfiles. Empty manifest-lock for { arch } in { contextdir } " )
166
184
sys .exit (1 )
167
185
print (f"Resolving packages for { arch } ..." )
168
- # append noarch as well because otherwise tose packages get excluded from results
169
- # We use --forcearch here because otherwise dnf still respect the system basearch
170
- # we have to specify both --arch and --forcearch to get both result for $arch and $noarch
171
- args_arch = ['--forcearch' , arch , '--arch' , arch , '--arch' , 'noarch' ]
172
- pkg_urls = query_packages_location (locks , repoquery_args + args_arch )
186
+ arch_args = []
187
+ if arch is not get_basearch ():
188
+ # append noarch as well because otherwise those packages get excluded from results
189
+ # We use --forcearch here because otherwise dnf still respect the system basearch
190
+ # we have to specify both --arch and --forcearch to get both result for $arch and $noarch
191
+ arch_args = ['--forcearch' , arch , '--arch' , arch , '--arch' , 'noarch' ]
192
+ pkg_urls = query_packages_location (locks , repoquery_args + arch_args )
173
193
packages .append ({'arch' : arch , 'packages' : pkg_urls })
174
194
175
195
lockfile = write_hermeto_lockfile (packages , repos )
176
196
177
- override_path = os .path .join (contextdir , 'konflux-lockfile-override.yaml' )
178
- if os .path .exists (override_path ):
197
+ try :
198
+ with open (output_path , 'w' , encoding = 'utf-8' ) as f :
199
+ yaml .safe_dump (lockfile , f , default_flow_style = False )
200
+ except IOError as e :
201
+ print (f"\u274c Error: Could not write to output file '{ output_path } '. Reason: { e } " )
202
+ sys .exit (1 )
203
+
204
+
205
def merge_main(args):
    """
    Merge multiple lockfiles into one, optionally applying an override file.

    Expects on *args*:
        input: list of lockfile paths; the first is the base, the rest are
            merged into it in order.
        override: path to an override lockfile; intentionally skipped when
            the file does not exist (the CLI flag carries a default path).
        output: path the merged lockfile is written to.

    Exits with status 1 on any read, parse, or write error.
    """
    if not args.input:
        print("Error: at least one input file is required for merging.", file=sys.stderr)
        sys.exit(1)

    try:
        with open(args.input[0], 'r', encoding='utf-8') as f:
            base_lockfile = yaml.safe_load(f)
    except (IOError, yaml.YAMLError) as e:
        print(f"Error reading base lockfile {args.input[0]}: {e}", file=sys.stderr)
        sys.exit(1)

    # Fold each subsequent lockfile into the base, in the order given.
    for subsequent_file in args.input[1:]:
        try:
            with open(subsequent_file, 'r', encoding='utf-8') as f:
                next_lockfile = yaml.safe_load(f)
            base_lockfile = merge_lockfiles(base_lockfile, next_lockfile)
        except (IOError, yaml.YAMLError) as e:
            print(f"Error reading or merging {subsequent_file}: {e}", file=sys.stderr)
            sys.exit(1)

    # The override file is optional; apply it last so its entries win.
    if os.path.exists(args.override):
        try:
            # normalized encoding spelling to 'utf-8' (was "utf8") for
            # consistency with every other open() in this file
            with open(args.override, 'r', encoding='utf-8') as f:
                override_data = yaml.safe_load(f)
            print(f"Merging override from {args.override}")
            base_lockfile = merge_lockfiles(base_lockfile, override_data, override=True)
        except (IOError, yaml.YAMLError) as e:
            print(f"Error reading or parsing override file '{args.override}': {e}", file=sys.stderr)
            sys.exit(1)

    try:
        with open(args.output, 'w', encoding='utf-8') as f:
            yaml.safe_dump(base_lockfile, f, default_flow_style=False)
        print(f"Successfully merged lockfiles to {args.output}")
    except IOError as e:
        print(f"Error writing to output file '{args.output}': {e}", file=sys.stderr)
        sys.exit(1)
194
246
195
247
196
248
if __name__ == "__main__" :
197
249
parser = argparse .ArgumentParser (
198
- description = "Generate hermeto lock files."
250
+ description = "Generate and merge hermeto lock files."
199
251
)
252
+ subparsers = parser .add_subparsers (dest = 'command' , required = True )
200
253
201
- parser .add_argument (
254
+ # GENERATE command
255
+ parser_generate = subparsers .add_parser (
256
+ 'generate' ,
257
+ help = 'Resolve RPMs and generate a lockfile for one or more architectures.'
258
+ )
259
+ parser_generate .add_argument (
202
260
'manifest' ,
203
261
help = 'Path to the flattened rpm-ostree manifest (e.g., tmp/manifest.json)'
204
262
)
205
-
206
- parser .add_argument (
263
+ parser_generate .add_argument (
207
264
'--context' ,
208
265
default = '.' ,
209
266
help = "Path to the directory containing repofiles and lockfiles. (default: '.')"
210
267
)
211
-
212
- parser .add_argument (
268
+ parser_generate .add_argument (
213
269
'--output' ,
214
270
default = './rpms.lock.yaml' ,
215
271
help = "Path for the hermeto lockfile. (default: './rpms.lock.yaml')"
216
272
)
217
-
218
- parser .add_argument (
273
+ parser_generate .add_argument (
219
274
'--arch' ,
220
275
action = 'append' ,
221
276
choices = ['x86_64' , 'aarch64' , 's390x' , 'ppc64le' , 'all' ],
222
277
help = "The architecture to resolve. Can be specified multiple times. 'all' resolves all architectures."
223
278
)
279
+ parser_generate .set_defaults (func = generate_main )
224
280
225
- args = parser .parse_args ()
281
+ # MERGE command
282
+ parser_merge = subparsers .add_parser (
283
+ 'merge' ,
284
+ help = 'Merge multiple architecture-specific lockfiles into a single file.'
285
+ )
286
+ parser_merge .add_argument (
287
+ '--input' ,
288
+ nargs = '+' ,
289
+ required = True ,
290
+ help = 'One or more input lockfiles to merge.'
291
+ )
292
+ parser_merge .add_argument (
293
+ '--output' ,
294
+ default = './rpms.lock.yaml' ,
295
+ help = "Path for the merged lockfile. (default: './rpms.lock.yaml')"
296
+ )
297
+ parser_merge .add_argument (
298
+ '--override' ,
299
+ default = 'konflux-lockfile-override.yaml' ,
300
+ help = "Path to an override file. (default: 'konflux-lockfile-override.yaml')"
301
+ )
302
+ parser_merge .set_defaults (func = merge_main )
226
303
227
- manifest_abs_path = os . path . abspath ( args . manifest )
228
- generate_lockfile ( args .context , manifest_abs_path , args . output , args . arch )
304
+ args = parser . parse_args ( )
305
+ args .func ( args )
0 commit comments