From 4aeb10aa38efc6768928fbb74985e36e972b8e46 Mon Sep 17 00:00:00 2001
From: =?utf8?q?Alexis=20Lothor=C3=A9?=
Date: Fri, 5 Jul 2024 16:46:38 +0200
Subject: [PATCH] oeqa/utils/postactions: transfer whole archive over ssh
 instead of doing individual copies
MIME-Version: 1.0
Content-Type: text/plain; charset=utf8
Content-Transfer-Encoding: 8bit

Fixes [YOCTO #15536]

The postactions retrieval code currently runs scp individually on each
file or directory expanded from TESTIMAGE_FAILED_QA_ARTIFACTS.
Unfortunately, symlinks are not preserved by this mechanism, which leads
to significant storage space consumption. Things get even worse if those
symlinks form circular chains.

The mechanism therefore needs to be updated to preserve symlinks instead
of following them during the copy. There are multiple ways to do it:
- create a local archive on the target and run scp on that single file
- use rsync instead of scp for all files
- create an archive and pipe it through ssh instead of storing it on the
  target

The first solution puts pressure on the target's storage space, while the
second assumes that rsync is installed on the target, which may not be
true. The third one is a good compromise: tar is very likely present, at
least through busybox, and no disk space is consumed on the target.

Replace the current per-file scp calls with a single tar command run on
the target. Retrieve the generated compressed archive directly from the
ssh output and feed it to another tar process on the host, which
uncompresses and extracts it to the same location as before.

Signed-off-by: Alexis Lothoré
Signed-off-by: Richard Purdie
---
 meta/lib/oeqa/utils/postactions.py | 21 +++++++++++----------
 1 file changed, 11 insertions(+), 10 deletions(-)

diff --git a/meta/lib/oeqa/utils/postactions.py b/meta/lib/oeqa/utils/postactions.py
index ecdddd2d40e..2a08129d6c0 100644
--- a/meta/lib/oeqa/utils/postactions.py
+++ b/meta/lib/oeqa/utils/postactions.py
@@ -62,17 +62,18 @@ def get_artifacts_list(target, raw_list):
     return result
 
 def retrieve_test_artifacts(target, artifacts_list, target_dir):
+    import subprocess
     local_artifacts_dir = os.path.join(target_dir, "artifacts")
-    for artifact_path in artifacts_list:
-        if not os.path.isabs(artifact_path):
-            bb.warn(f"{artifact_path} is not an absolute path")
-            continue
-        try:
-            dest_dir = os.path.join(local_artifacts_dir, os.path.dirname(artifact_path[1:]))
-            os.makedirs(dest_dir, exist_ok=True)
-            target.copyFrom(artifact_path, dest_dir)
-        except Exception as e:
-            bb.warn(f"Can not retrieve {artifact_path} from test target: {e}")
+    os.makedirs(local_artifacts_dir, exist_ok=True)
+    try:
+        # Stream a compressed archive of the artifacts from the target over ssh
+        cmd = "tar zcf - " + " ".join(artifacts_list)
+        (status, output) = target.run(cmd, raw=True)
+        if status != 0 or not output:
+            raise Exception("Error while fetching compressed artifacts")
+        subprocess.run(["tar", "zxf", "-", "-C", local_artifacts_dir], input=output, check=True)
+    except Exception as e:
+        bb.warn(f"Cannot retrieve artifacts from test target: {e}")
 
 def list_and_fetch_failed_tests_artifacts(d, tc):
     artifacts_list = get_artifacts_list(tc.target, d.getVar("TESTIMAGE_FAILED_QA_ARTIFACTS"))
-- 
2.47.2
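
Note: for readers who want to try the transfer mechanism outside of the OEQA
framework, the sketch below shows the same idea with a plain ssh invocation
instead of the OEQA target object. It is only an illustration under stated
assumptions: the remote host "root@192.168.7.2", the artifact paths and the
destination directory are placeholders, not values taken from the patch.

#!/usr/bin/env python3
# Minimal sketch of the technique used in the patch: compress on the remote
# machine, stream the archive over ssh, and unpack it locally so symlinks
# are preserved and nothing is written to the remote disk.
# The remote host and paths below are placeholders for illustration only.
import os
import subprocess

def fetch_artifacts(remote, remote_paths, local_dir):
    os.makedirs(local_dir, exist_ok=True)
    # Run tar on the remote side; "-" makes it write the gzip'd archive
    # to stdout, which ssh forwards back to us unchanged.
    remote_cmd = "tar zcf - " + " ".join(remote_paths)
    fetched = subprocess.run(["ssh", remote, remote_cmd],
                             stdout=subprocess.PIPE, check=True)
    # Feed the captured bytes to a local tar process to extract in place.
    subprocess.run(["tar", "zxf", "-", "-C", local_dir],
                   input=fetched.stdout, check=True)

if __name__ == "__main__":
    fetch_artifacts("root@192.168.7.2", ["/var/log", "/etc"], "./artifacts")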