]> git.ipfire.org Git - thirdparty/openembedded/openembedded-core-contrib.git/commitdiff
sstatesig/populate_sdk_ext: Improve unihash cache handling
authorRichard Purdie <richard.purdie@linuxfoundation.org>
Thu, 23 May 2024 16:17:30 +0000 (17:17 +0100)
committerRichard Purdie <richard.purdie@linuxfoundation.org>
Tue, 4 Jun 2024 11:04:36 +0000 (12:04 +0100)
Copying in the bb_unihashes cache file was at best a hack and creates a number of
challenges. One is staying in sync with bitbake since it may not have saved the
most recent version of the file. A second is a determinism problem since there may
be more entries in the file than the SDK should have had access to.

To improve the situation, add code to write the data into the locked-sigs.inc file
such that even when locked-sigs aren't used, the right hash mappings are injected
by the get_cached_unihash call.

The functions in copy_buildsystem need to be updated to preserve data they're not
editing.

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
meta/classes-recipe/populate_sdk_ext.bbclass
meta/lib/oe/copy_buildsystem.py
meta/lib/oe/sstatesig.py

index 09d5e2aeb663f3d584bf9b99d277876ac1ac9f13..019330e3ef72821d83a1411b297ca986a235ba5b 100644 (file)
@@ -413,10 +413,6 @@ def write_local_conf(d, baseoutpath, derivative, core_meta_subdir, uninative_che
             if os.path.exists(builddir + dest_stub):
                 shutil.copyfile(builddir + dest_stub, baseoutpath + dest_stub)
 
-    cachedir = os.path.join(baseoutpath, 'cache')
-    bb.utils.mkdirhier(cachedir)
-    bb.parse.siggen.copy_unitaskhashes(cachedir)
-
     # If PR Service is in use, we need to export this as well
     bb.note('Do we have a pr database?')
     if d.getVar("PRSERV_HOST"):
@@ -507,10 +503,6 @@ def prepare_locked_cache(d, baseoutpath, derivative, conf_initpath):
     else:
         tasklistfn = None
 
-    cachedir = os.path.join(baseoutpath, 'cache')
-    bb.utils.mkdirhier(cachedir)
-    bb.parse.siggen.copy_unitaskhashes(cachedir)
-
     # Add packagedata if enabled
     if d.getVar('SDK_INCLUDE_PKGDATA') == '1':
         lockedsigs_base = d.getVar('WORKDIR') + '/locked-sigs-base.inc'
index 81abfbf9e232257d0033328a0eabc649ab708a29..ced751b83563cbff8e2369f41fc45f658dab9c6d 100644 (file)
@@ -193,13 +193,17 @@ def prune_lockedsigs(excluded_tasks, excluded_targets, lockedsigs, onlynative, p
                     else:
                         f.write(line)
                         invalue = False
-                elif line.startswith('SIGGEN_LOCKEDSIGS'):
+                elif line.startswith('SIGGEN_LOCKEDSIGS_t'):
                     invalue = True
                     f.write(line)
+                else:
+                    invalue = False
+                    f.write(line)
 
 def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_output, copy_output=None):
     merged = {}
     arch_order = []
+    otherdata = []
     with open(lockedsigs_main, 'r') as f:
         invalue = None
         for line in f:
@@ -212,6 +216,9 @@ def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_outpu
                 invalue = line[18:].split('=', 1)[0].rstrip()
                 merged[invalue] = []
                 arch_order.append(invalue)
+            else:
+                invalue = None
+                otherdata.append(line)
 
     with open(lockedsigs_extra, 'r') as f:
         invalue = None
@@ -246,6 +253,7 @@ def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_outpu
                     f.write('    "\n')
                     fulltypes.append(typename)
             f.write('SIGGEN_LOCKEDSIGS_TYPES = "%s"\n' % ' '.join(fulltypes))
+            f.write('\n' + ''.join(otherdata))
 
     if copy_output:
         write_sigs_file(copy_output, list(tocopy.keys()), tocopy)
index db3c409216f5ae12de472c2fc484264ab0c289ec..b6f8ab92cb60df837b5445cfbfcebe9ec1cfea90 100644 (file)
@@ -93,6 +93,14 @@ def sstate_lockedsigs(d):
             sigs[pn][task] = [h, siggen_lockedsigs_var]
     return sigs
 
+def lockedsigs_unihashmap(d):
+    unihashmap = {}
+    data = (d.getVar("SIGGEN_UNIHASHMAP") or "").split()
+    for entry in data:
+        pn, task, taskhash, unihash = entry.split(":")
+        unihashmap[(pn, task)] = (taskhash, unihash)
+    return unihashmap
+
 class SignatureGeneratorOEBasicHashMixIn(object):
     supports_multiconfig_datacaches = True
 
@@ -100,6 +108,7 @@ class SignatureGeneratorOEBasicHashMixIn(object):
         self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
         self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
         self.lockedsigs = sstate_lockedsigs(data)
+        self.unihashmap = lockedsigs_unihashmap(data)
         self.lockedhashes = {}
         self.lockedpnmap = {}
         self.lockedhashfn = {}
@@ -209,6 +218,15 @@ class SignatureGeneratorOEBasicHashMixIn(object):
     def get_cached_unihash(self, tid):
         if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal:
             return self.lockedhashes[tid]
+
+        (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
+        recipename = self.lockedpnmap[fn]
+
+        if (recipename, task) in self.unihashmap:
+            taskhash, unihash = self.unihashmap[(recipename, task)]
+            if taskhash == self.taskhash[tid]:
+                return unihash
+
         return super().get_cached_unihash(tid)
 
     def dump_sigtask(self, fn, task, stampbase, runtime):
@@ -219,6 +237,7 @@ class SignatureGeneratorOEBasicHashMixIn(object):
 
     def dump_lockedsigs(self, sigfile, taskfilter=None):
         types = {}
+        unihashmap = {}
         for tid in self.runtaskdeps:
             # Bitbake changed this to a tuple in newer versions
             if isinstance(tid, tuple):
@@ -226,13 +245,18 @@ class SignatureGeneratorOEBasicHashMixIn(object):
             if taskfilter:
                 if not tid in taskfilter:
                     continue
-            fn = bb.runqueue.fn_from_tid(tid)
+            (_, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
             t = self.lockedhashfn[fn].split(" ")[1].split(":")[5]
             t = 't-' + t.replace('_', '-')
             if t not in types:
                 types[t] = []
             types[t].append(tid)
 
+            taskhash = self.taskhash[tid]
+            unihash = self.get_unihash(tid)
+            if taskhash != unihash:
+                unihashmap[tid] = "    " + self.lockedpnmap[fn] + ":" + task + ":" + taskhash + ":" + unihash
+
         with open(sigfile, "w") as f:
             l = sorted(types)
             for t in l:
@@ -245,7 +269,12 @@ class SignatureGeneratorOEBasicHashMixIn(object):
                         continue
                     f.write("    " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n")
                 f.write('    "\n')
-            f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"' % (self.machine, " ".join(l)))
+            f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"\n' % (self.machine, " ".join(l)))
+            f.write('SIGGEN_UNIHASHMAP += "\\\n')
+            sortedtid = sorted(unihashmap, key=lambda tid: self.lockedpnmap[bb.runqueue.fn_from_tid(tid)])
+            for tid in sortedtid:
+                f.write(unihashmap[tid] + " \\\n")
+            f.write('    "\n')
 
     def dump_siglist(self, sigfile, path_prefix_strip=None):
         def strip_fn(fn):