if os.path.exists(builddir + dest_stub):
shutil.copyfile(builddir + dest_stub, baseoutpath + dest_stub)
- cachedir = os.path.join(baseoutpath, 'cache')
- bb.utils.mkdirhier(cachedir)
- bb.parse.siggen.copy_unitaskhashes(cachedir)
-
# If PR Service is in use, we need to export this as well
bb.note('Do we have a pr database?')
if d.getVar("PRSERV_HOST"):
else:
tasklistfn = None
- cachedir = os.path.join(baseoutpath, 'cache')
- bb.utils.mkdirhier(cachedir)
- bb.parse.siggen.copy_unitaskhashes(cachedir)
-
# Add packagedata if enabled
if d.getVar('SDK_INCLUDE_PKGDATA') == '1':
lockedsigs_base = d.getVar('WORKDIR') + '/locked-sigs-base.inc'
else:
f.write(line)
invalue = False
- elif line.startswith('SIGGEN_LOCKEDSIGS'):
+ elif line.startswith('SIGGEN_LOCKEDSIGS_t'):
invalue = True
f.write(line)
+ else:
+ invalue = False
+ f.write(line)
def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_output, copy_output=None):
merged = {}
arch_order = []
+ otherdata = []
with open(lockedsigs_main, 'r') as f:
invalue = None
for line in f:
invalue = line[18:].split('=', 1)[0].rstrip()
merged[invalue] = []
arch_order.append(invalue)
+ else:
+ invalue = None
+ otherdata.append(line)
with open(lockedsigs_extra, 'r') as f:
invalue = None
f.write(' "\n')
fulltypes.append(typename)
f.write('SIGGEN_LOCKEDSIGS_TYPES = "%s"\n' % ' '.join(fulltypes))
+ f.write('\n' + ''.join(otherdata))
if copy_output:
write_sigs_file(copy_output, list(tocopy.keys()), tocopy)
sigs[pn][task] = [h, siggen_lockedsigs_var]
return sigs
+def lockedsigs_unihashmap(d):
+ """Return {(pn, task): (taskhash, unihash)} parsed from SIGGEN_UNIHASHMAP."""
+ # Each whitespace-separated entry is expected to be "pn:task:taskhash:unihash"
+ # (the format written by dump_lockedsigs below); a malformed entry with the
+ # wrong number of colon-separated fields would raise ValueError here.
+ unihashmap = {}
+ data = (d.getVar("SIGGEN_UNIHASHMAP") or "").split()
+ for entry in data:
+ pn, task, taskhash, unihash = entry.split(":")
+ unihashmap[(pn, task)] = (taskhash, unihash)
+ return unihashmap
+
class SignatureGeneratorOEBasicHashMixIn(object):
supports_multiconfig_datacaches = True
self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
self.lockedsigs = sstate_lockedsigs(data)
+ self.unihashmap = lockedsigs_unihashmap(data)
self.lockedhashes = {}
self.lockedpnmap = {}
self.lockedhashfn = {}
def get_cached_unihash(self, tid):
if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal:
return self.lockedhashes[tid]
+
+ (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
+ # NOTE(review): assumes fn was already recorded in self.lockedpnmap by the
+ # locked-signature bookkeeping; a tid unseen there would raise KeyError --
+ # confirm against callers.
+ recipename = self.lockedpnmap[fn]
+
+ if (recipename, task) in self.unihashmap:
+ taskhash, unihash = self.unihashmap[(recipename, task)]
+ # Only trust the recorded unihash when the stored taskhash still matches
+ # the current taskhash; otherwise fall through to the normal lookup.
+ if taskhash == self.taskhash[tid]:
+ return unihash
+
return super().get_cached_unihash(tid)
def dump_sigtask(self, fn, task, stampbase, runtime):
def dump_lockedsigs(self, sigfile, taskfilter=None):
types = {}
+ unihashmap = {}
for tid in self.runtaskdeps:
# Bitbake changed this to a tuple in newer versions
if isinstance(tid, tuple):
if taskfilter:
if not tid in taskfilter:
continue
- fn = bb.runqueue.fn_from_tid(tid)
+ (_, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
t = self.lockedhashfn[fn].split(" ")[1].split(":")[5]
t = 't-' + t.replace('_', '-')
if t not in types:
types[t] = []
types[t].append(tid)
+ taskhash = self.taskhash[tid]
+ unihash = self.get_unihash(tid)
+ if taskhash != unihash:
+ unihashmap[tid] = " " + self.lockedpnmap[fn] + ":" + task + ":" + taskhash + ":" + unihash
+
with open(sigfile, "w") as f:
l = sorted(types)
for t in l:
continue
f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n")
f.write(' "\n')
- f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"' % (self.machine, " ".join(l)))
+ f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"\n' % (self.machine, " ".join(l)))
+ f.write('SIGGEN_UNIHASHMAP += "\\\n')
+ sortedtid = sorted(unihashmap, key=lambda tid: self.lockedpnmap[bb.runqueue.fn_from_tid(tid)])
+ for tid in sortedtid:
+ f.write(unihashmap[tid] + " \\\n")
+ f.write(' "\n')
def dump_siglist(self, sigfile, path_prefix_strip=None):
def strip_fn(fn):