]> git.ipfire.org Git - thirdparty/openembedded/openembedded-core.git/commitdiff
resulttool: Clean up reproducible build logs
authorRichard Purdie <richard.purdie@linuxfoundation.org>
Fri, 22 Nov 2024 16:15:46 +0000 (16:15 +0000)
committerRichard Purdie <richard.purdie@linuxfoundation.org>
Sat, 23 Nov 2024 14:28:29 +0000 (14:28 +0000)
We've improved the data stored for reproducible builds. Teach resulttool how
to apply those cleanups when reprocessing data so we can reduce results file
sizes and make the data easier to process.

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
scripts/lib/resulttool/resultutils.py

index b9b93afaa6a5520552c8fa69670de20806fd7c7c..9cba8639a3905262eb9613b168a2ff666bf34ac6 100644 (file)
@@ -131,6 +131,27 @@ def strip_logs(results):
                     del newresults[res]['result']['ptestresult.sections'][i]['log']
     return newresults
 
def handle_cleanups(results):
    """Clean up reprocessed results data in place.

    Old-format reproducibility results stored each differing file as a dict
    whose "reference" value carried a full host path; newer results keep just
    the path relative to the build directory (the part after "/./").  Convert
    the old form to the new one and drop the duplicate rawlogs data so that
    results files shrink and are easier to process.

    results -- mapping of result-id -> {'result': {...}}; mutated in place.
    Returns None.
    """
    for res2 in results:
        # Remove pointless path duplication from old format reproducibility results
        try:
            section = results[res2]['result']['reproducible']['files']
            for pkgtype in section:
                # Iterate a copy since we reassign values while walking the dict
                for filelist in section[pkgtype].copy():
                    entries = section[pkgtype][filelist]
                    # Old format: list of dicts with full reference paths;
                    # new format: plain list of relative path strings
                    if entries and isinstance(entries[0], dict):
                        section[pkgtype][filelist] = [entry["reference"].split("/./")[1] for entry in entries]
        except KeyError:
            pass
        # Remove pointless duplicate rawlogs data.  This must happen inside
        # the loop: doing it afterwards via the leaked loop variable would
        # only clean the last entry and would raise NameError (which the
        # KeyError handler does not catch) when results is empty.
        try:
            del results[res2]['result']['reproducible.rawlogs']
        except KeyError:
            pass
 def decode_log(logdata):
     if isinstance(logdata, str):
         return logdata
@@ -173,6 +194,7 @@ def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, p
         resultsout = results[res]
         if not ptestjson:
             resultsout = strip_logs(results[res])
+        handle_cleanups(resultsout)
         with open(dst, 'w') as f:
             f.write(json.dumps(resultsout, sort_keys=True, indent=1))
         for res2 in results[res]: