@@ -8,8 +8,7 @@ import subprocess
88import yaml
99
1010sys .path .insert (0 , os .path .dirname (os .path .abspath (__file__ )))
11- from cosalib .cmdlib import get_locked_nevras , get_basearch
12-
11+ from cosalib .cmdlib import get_basearch
1312
1413def format_packages_with_repoid (pkgs , repos ):
1514 """
@@ -32,10 +31,8 @@ def format_packages_with_repoid(pkgs, repos):
3231 # This is needed because rpm-ostree uses the full list of repos to
3332 # resolve packages and errors out if a repository is missing.
3433 repo_numbers = len (local_repos )
35- i = 0
36- for pkg in pkgs :
34+ for i , pkg in enumerate (pkgs ):
3735 packages .append ({"url" : pkg , "repoid" : local_repos [i % repo_numbers ]})
38- i += 1
3936 return packages
4037
4138
@@ -62,33 +59,38 @@ def write_hermeto_lockfile(arch_packages, repos):
6259 return lockfile
6360
6461
65- def merge_lockfiles (base_lockfile , override_lockfile ):
62+ def merge_lockfiles (base_lockfile , next_lockfile , override = False ):
6663 """
67- Merges an override lockfile into a base lockfile.
64+ Merges a lockfile into a base lockfile.
65+
66+ If override is True, it will only add packages to existing
67+ architectures. Otherwise, it will add new architectures.
6868 """
69- if not override_lockfile :
69+ if not next_lockfile :
7070 return base_lockfile
7171
7272 # Create a dictionary for base arches for easy lookup
7373 base_arches = {arch ['arch' ]: arch for arch in base_lockfile .get ('arches' , [])}
7474
75- override = override_lockfile .get ('arches' , [])
76- if not override :
75+ next_arches_list = next_lockfile .get ('arches' , [])
76+ if not next_arches_list :
7777 return base_lockfile
7878
79- for override_entry in override :
80- # override_entry is a dict like {'arch': x86_64','packages': [...]}
81- if not isinstance (override_entry , dict ):
79+ for next_arch_entry in next_arches_list :
80+ if not isinstance (next_arch_entry , dict ):
81+ continue
82+ arch = next_arch_entry .get ('arch' , None )
83+ if not arch :
8284 continue
83- arch = override_entry . get ( 'arch' , None )
84- override_packages = override_entry .get ('packages' , [])
85+
86+ next_packages = next_arch_entry .get ('packages' , [])
8587 if arch in base_arches :
86- # Merge packages
88+ # Arch exists, merge packages
8789 base_packages = base_arches [arch ].get ('packages' , [])
88- base_packages += override_packages
89- else :
90- # Add the arch from the override file
91- base_arches [arch ] = override_packages
90+ base_packages += next_packages
91+ elif not override :
92+ # Arch is new and this is not an override, so add it
93+ base_arches [arch ] = next_arch_entry
9294
9395 # Reconstruct the arches list
9496 base_lockfile ['arches' ] = list (base_arches .values ())
@@ -103,7 +105,7 @@ def query_packages_location(locks, repoquery_args):
103105 if not locks :
104106 return pkg_urls
105107
106- locked_nevras = [f'{ k } -{ v } ' for (k , v ) in locks .items ()]
108+ locked_nevras = [f"{ k } -{ v .get ('evra' , '' ) } " for (k , v ) in locks .items ()]
107109 cmd = ['dnf' , 'repoquery' ] + locked_nevras + repoquery_args
108110 result = subprocess .check_output (cmd , text = True )
109111
@@ -119,21 +121,37 @@ def query_packages_location(locks, repoquery_args):
119121 if name not in processed_urls :
120122 processed_urls [name ] = url
121123 pkg_urls = list (processed_urls .values ())
122- # sanity check all the packages got resolved
123- if len (pkg_urls ) < len (locked_nevras ):
124+ # sanity check all the locked packages got resolved
125+ if len (pkg_urls ) != len (locked_nevras ):
124126 print ("Some packages from the lockfile could not be resolved. The rpm-ostree lockfile is probably out of date." )
125- for name in locks .keys ():
126- if name not in processed_urls :
127- print (f"could not resolve package { name } " )
128127 sys .exit (1 )
129128
129+ print (f"Done. Resolved location for { len (pkg_urls )} packages." )
130130 return pkg_urls
131131
132+ def get_locked_nevras (srcdir , arch ):
133+
134+ path = os .path .join (srcdir , f"manifest-lock.{ arch } .json" )
132135
133- def generate_lockfile (contextdir , manifest , output_path , arches ):
136+ data = {}
137+ if os .path .exists (path ):
138+ with open (path , encoding = 'utf-8' ) as f :
139+ data = json .load (f )
140+ else :
141+ print (f"rpm-ostree lockfile not found at { path } " )
142+
143+ return data .get ('packages' , [])
144+
145+
146+ def generate_main (args ):
134147 """
135148 Generates the cachi2/hermeto RPM lock file.
136149 """
150+ contextdir = args .context
151+ manifest = os .path .abspath (args .manifest )
152+ output_path = args .output
153+ arches = args .arch
154+
137155 if not arches :
138156 arches_to_resolve = [get_basearch ()]
139157 elif 'all' in arches :
@@ -151,7 +169,7 @@ def generate_lockfile(contextdir, manifest, output_path, arches):
151169 repos = manifest_data .get ('repos' , [])
152170 repos += manifest_data .get ('lockfile-repos' , [])
153171
154- repoquery_args = ["--queryformat" , "%{name} %{location}\n " , "--disablerepo=*" , "--refresh" ]
172+ repoquery_args = ["--queryformat" , "%{name} %{location}\n " , "--disablerepo=*" , "--refresh" , "--quiet" ]
155173 # Tell dnf to load repos files from $contextdir
156174 repoquery_args .extend ([f"--setopt=reposdir={ contextdir } " ])
157175
@@ -162,67 +180,126 @@ def generate_lockfile(contextdir, manifest, output_path, arches):
162180 for arch in arches_to_resolve :
163181 locks = get_locked_nevras (contextdir , arch )
164182 if not locks :
165- print (f"This tool derive the konflux lockfile from rpm-ostree lockfiles. No manifest-lock exist for { arch } in { contextdir } " )
183+ print (f"This tool derives the konflux lockfile from rpm-ostree lockfiles. Empty manifest-lock for { arch } in { contextdir } " )
166184 sys .exit (1 )
167185 print (f"Resolving packages for { arch } ..." )
168- # append noarch as well because otherwise tose packages get excluded from results
169- # We use --forcearch here because otherwise dnf still respect the system basearch
170- # we have to specify both --arch and --forcearch to get both result for $arch and $noarch
171- args_arch = ['--forcearch' , arch , '--arch' , arch , '--arch' , 'noarch' ]
172- pkg_urls = query_packages_location (locks , repoquery_args + args_arch )
186+ arch_args = []
187+ if arch != get_basearch ():
188+ # append noarch as well because otherwise those packages get excluded from results
189+ # We use --forcearch here because otherwise dnf still respect the system basearch
190+ # we have to specify both --arch and --forcearch to get both result for $arch and $noarch
191+ arch_args = ['--forcearch' , arch , '--arch' , arch , '--arch' , 'noarch' ]
192+ pkg_urls = query_packages_location (locks , repoquery_args + arch_args )
173193 packages .append ({'arch' : arch , 'packages' : pkg_urls })
174194
175195 lockfile = write_hermeto_lockfile (packages , repos )
176196
177- override_path = os .path .join (contextdir , 'konflux-lockfile-override.yaml' )
178- if os .path .exists (override_path ):
197+ try :
198+ with open (output_path , 'w' , encoding = 'utf-8' ) as f :
199+ yaml .safe_dump (lockfile , f , default_flow_style = False )
200+ except IOError as e :
201+ print (f"\u274c Error: Could not write to output file '{ output_path } '. Reason: { e } " )
202+ sys .exit (1 )
203+
204+
205+ def merge_main (args ):
206+ """
207+ Merges multiple lockfiles into one, optionally applying an override file.
208+ """
209+ if not args .input :
210+ print ("Error: at least one input file is required for merging." , file = sys .stderr )
211+ sys .exit (1 )
212+
213+ try :
214+ with open (args .input [0 ], 'r' , encoding = 'utf-8' ) as f :
215+ base_lockfile = yaml .safe_load (f )
216+ except (IOError , yaml .YAMLError ) as e :
217+ print (f"Error reading base lockfile { args .input [0 ]} : { e } " , file = sys .stderr )
218+ sys .exit (1 )
219+
220+ for subsequent_file in args .input [1 :]:
179221 try :
180- with open (override_path , 'r' , encoding = "utf8" ) as f :
222+ with open (subsequent_file , 'r' , encoding = 'utf-8' ) as f :
223+ next_lockfile = yaml .safe_load (f )
224+ base_lockfile = merge_lockfiles (base_lockfile , next_lockfile )
225+ except (IOError , yaml .YAMLError ) as e :
226+ print (f"Error reading or merging { subsequent_file } : { e } " , file = sys .stderr )
227+ sys .exit (1 )
228+
229+ if os .path .exists (args .override ):
230+ try :
231+ with open (args .override , 'r' , encoding = "utf8" ) as f :
181232 override_data = yaml .safe_load (f )
182- print (f"Merging override from { override_path } " )
183- lockfile = merge_lockfiles (lockfile , override_data )
233+ print (f"Merging override from { args . override } " )
234+ base_lockfile = merge_lockfiles (base_lockfile , override_data , override = True )
184235 except (IOError , yaml .YAMLError ) as e :
185- print (f"\u274c Error: Could not read or parse override file '{ override_path } '. Reason : { e } " )
236+ print (f"Error reading or parsing override file '{ args . override } ' : { e } " , file = sys . stderr )
186237 sys .exit (1 )
187238
188239 try :
189- with open (output_path , 'w' , encoding = 'utf-8' ) as f :
190- yaml .safe_dump (lockfile , f , default_flow_style = False )
240+ with open (args .output , 'w' , encoding = 'utf-8' ) as f :
241+ yaml .safe_dump (base_lockfile , f , default_flow_style = False )
242+ print (f"Successfully merged lockfiles to { args .output } " )
191243 except IOError as e :
192- print (f"\u274c Error: Could not write to output file '{ output_path } '. Reason : { e } " )
244+ print (f"Error writing to output file '{ args . output } ' : { e } " , file = sys . stderr )
193245 sys .exit (1 )
194246
195247
196248if __name__ == "__main__" :
197249 parser = argparse .ArgumentParser (
198- description = "Generate hermeto lock files."
250+ description = "Generate and merge hermeto lock files."
199251 )
252+ subparsers = parser .add_subparsers (dest = 'command' , required = True )
200253
201- parser .add_argument (
254+ # GENERATE command
255+ parser_generate = subparsers .add_parser (
256+ 'generate' ,
257+ help = 'Resolve RPMs and generate a lockfile for one or more architectures.'
258+ )
259+ parser_generate .add_argument (
202260 'manifest' ,
203261 help = 'Path to the flattened rpm-ostree manifest (e.g., tmp/manifest.json)'
204262 )
205-
206- parser .add_argument (
263+ parser_generate .add_argument (
207264 '--context' ,
208265 default = '.' ,
209266 help = "Path to the directory containing repofiles and lockfiles. (default: '.')"
210267 )
211-
212- parser .add_argument (
268+ parser_generate .add_argument (
213269 '--output' ,
214270 default = './rpms.lock.yaml' ,
215271 help = "Path for the hermeto lockfile. (default: './rpms.lock.yaml')"
216272 )
217-
218- parser .add_argument (
273+ parser_generate .add_argument (
219274 '--arch' ,
220275 action = 'append' ,
221276 choices = ['x86_64' , 'aarch64' , 's390x' , 'ppc64le' , 'all' ],
222277 help = "The architecture to resolve. Can be specified multiple times. 'all' resolves all architectures."
223278 )
279+ parser_generate .set_defaults (func = generate_main )
224280
225- args = parser .parse_args ()
281+ # MERGE command
282+ parser_merge = subparsers .add_parser (
283+ 'merge' ,
284+ help = 'Merge multiple architecture-specific lockfiles into a single file.'
285+ )
286+ parser_merge .add_argument (
287+ '--input' ,
288+ nargs = '+' ,
289+ required = True ,
290+ help = 'One or more input lockfiles to merge.'
291+ )
292+ parser_merge .add_argument (
293+ '--output' ,
294+ default = './rpms.lock.yaml' ,
295+ help = "Path for the merged lockfile. (default: './rpms.lock.yaml')"
296+ )
297+ parser_merge .add_argument (
298+ '--override' ,
299+ default = 'konflux-lockfile-override.yaml' ,
300+ help = "Path to an override file. (default: 'konflux-lockfile-override.yaml')"
301+ )
302+ parser_merge .set_defaults (func = merge_main )
226303
227- manifest_abs_path = os . path . abspath ( args . manifest )
228- generate_lockfile ( args .context , manifest_abs_path , args . output , args . arch )
304+ args = parser . parse_args ( )
305+ args .func ( args )
0 commit comments