# .azure-pipelines.yml — Azure Pipelines CI configuration for U-Boot
# Global pipeline variables: VM images, the CI container image, and the
# in-container working directory shared by all docker-based jobs below.
variables:
  windows_vm: windows-2019
  ubuntu_vm: ubuntu-22.04
  macos_vm: macOS-12
  ci_runner_image: trini/u-boot-gitlab-ci-runner:jammy-20221130-11Jan2023
  # Add '-u 0' options for Azure pipelines, otherwise we get "permission
  # denied" error when it tries to "useradd -m -u 1001 vsts_azpcontainer",
  # since our $(ci_runner_image) user is not root.
  container_option: -u 0
  work_dir: /u

stages:
- stage: testsuites
  jobs:
  - job: tools_only_windows
    displayName: 'Ensure host tools build for Windows'
    pool:
      vmImage: $(windows_vm)
    steps:
    # Fetch the MSYS2 self-extracting installer, then run it and update
    # the base system twice (pacman may need a second pass after a core
    # upgrade) before installing the build toolchain.
    - powershell: |
        (New-Object Net.WebClient).DownloadFile("https://github.com/msys2/msys2-installer/releases/download/2021-06-04/msys2-base-x86_64-20210604.sfx.exe", "sfx.exe")
      displayName: 'Install MSYS2'
    - script: |
        sfx.exe -y -o%CD:~0,2%\
        %CD:~0,2%\msys64\usr\bin\bash -lc " "
        %CD:~0,2%\msys64\usr\bin\bash -lc "pacman --noconfirm -Syuu"
        %CD:~0,2%\msys64\usr\bin\bash -lc "pacman --noconfirm -Syuu"
      displayName: 'Update MSYS2'
    - script: |
        %CD:~0,2%\msys64\usr\bin\bash -lc "pacman --noconfirm --needed -Sy make gcc bison flex diffutils openssl-devel libgnutls-devel libutil-linux-devel"
      displayName: 'Install Toolchain'
    - script: |
        echo make tools-only_defconfig tools-only > build-tools.sh
        %CD:~0,2%\msys64\usr\bin\bash -lc "bash build-tools.sh"
      displayName: 'Build Host Tools'
      env:
        # Tell MSYS2 we need a POSIX emulation layer
        MSYSTEM: MSYS
        # Tell MSYS2 not to 'cd' our startup directory to HOME.
        # Quoted: a bare "yes" is a YAML 1.1 boolean and may be
        # re-serialized as "True" rather than the literal string.
        CHERE_INVOKING: 'yes'

42 - job: tools_only_macOS
43 displayName: 'Ensure host tools build for macOS X'
44 pool:
45 vmImage: $(macos_vm)
46 steps:
47 - script: brew install make ossp-uuid
48 displayName: Brew install dependencies
49 - script: |
50 gmake tools-only_config tools-only \
51 HOSTCFLAGS="-I/usr/local/opt/openssl@1.1/include" \
52 HOSTLDFLAGS="-L/usr/local/opt/openssl@1.1/lib" \
53 -j$(sysctl -n hw.logicalcpu)
54 displayName: 'Perform tools-only build'
55
56 - job: check_for_new_CONFIG_symbols_outside_Kconfig
57 displayName: 'Check for new CONFIG symbols outside Kconfig'
58 pool:
59 vmImage: $(ubuntu_vm)
60 container:
61 image: $(ci_runner_image)
62 options: $(container_option)
63 steps:
64 # If grep succeeds and finds a match the test fails as we should
65 # have no matches.
66 - script: git grep -E '^#[[:blank:]]*(define|undef)[[:blank:]]*CONFIG_'
67 include/configs `find arch -name config.h` && exit 1 || exit 0
68
69 - job: cppcheck
70 displayName: 'Static code analysis with cppcheck'
71 pool:
72 vmImage: $(ubuntu_vm)
73 container:
74 image: $(ci_runner_image)
75 options: $(container_option)
76 steps:
77 - script: cppcheck -j$(nproc) --force --quiet --inline-suppr .
78
79 - job: htmldocs
80 displayName: 'Build HTML documentation'
81 pool:
82 vmImage: $(ubuntu_vm)
83 container:
84 image: $(ci_runner_image)
85 options: $(container_option)
86 steps:
87 - script: |
88 virtualenv -p /usr/bin/python3 /tmp/venvhtml
89 . /tmp/venvhtml/bin/activate
90 pip install -r doc/sphinx/requirements.txt
91 make htmldocs
92
93 - job: todo
94 displayName: 'Search for TODO within source tree'
95 pool:
96 vmImage: $(ubuntu_vm)
97 container:
98 image: $(ci_runner_image)
99 options: $(container_option)
100 steps:
101 - script: grep -r TODO .
102 - script: grep -r FIXME .
103 - script: grep -r HACK . | grep -v HACKKIT
104
105 - job: sloccount
106 displayName: 'Some statistics about the code base'
107 pool:
108 vmImage: $(ubuntu_vm)
109 container:
110 image: $(ci_runner_image)
111 options: $(container_option)
112 steps:
113 - script: sloccount .
114
115 - job: maintainers
116 displayName: 'Ensure all configs have MAINTAINERS entries'
117 pool:
118 vmImage: $(ubuntu_vm)
119 container:
120 image: $(ci_runner_image)
121 options: $(container_option)
122 steps:
123 - script: |
124 ./tools/buildman/buildman -R
125
126 - job: tools_only
127 displayName: 'Ensure host tools build'
128 pool:
129 vmImage: $(ubuntu_vm)
130 container:
131 image: $(ci_runner_image)
132 options: $(container_option)
133 steps:
134 - script: |
135 make tools-only_config tools-only -j$(nproc)
136
137 - job: envtools
138 displayName: 'Ensure env tools build'
139 pool:
140 vmImage: $(ubuntu_vm)
141 container:
142 image: $(ci_runner_image)
143 options: $(container_option)
144 steps:
145 - script: |
146 make tools-only_config envtools -j$(nproc)
147
148 - job: utils
149 displayName: 'Run binman, buildman, dtoc, Kconfig and patman testsuites'
150 pool:
151 vmImage: $(ubuntu_vm)
152 steps:
153 - script: |
154 cat << "EOF" > build.sh
155 cd $(work_dir)
156 git config --global user.name "Azure Pipelines"
157 git config --global user.email bmeng.cn@gmail.com
158 git config --global --add safe.directory $(work_dir)
159 export USER=azure
160 virtualenv -p /usr/bin/python3 /tmp/venv
161 . /tmp/venv/bin/activate
162 pip install -r test/py/requirements.txt
163 export UBOOT_TRAVIS_BUILD_DIR=/tmp/sandbox_spl
164 export PYTHONPATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt
165 export PATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc:${PATH}
166 ./tools/buildman/buildman -T0 -o ${UBOOT_TRAVIS_BUILD_DIR} -w --board sandbox_spl
167 set -ex
168 ./tools/binman/binman --toolpath ${UBOOT_TRAVIS_BUILD_DIR}/tools test
169 ./tools/buildman/buildman -t
170 ./tools/dtoc/dtoc -t
171 ./tools/patman/patman test
172 make O=${UBOOT_TRAVIS_BUILD_DIR} testconfig
173 EOF
174 cat build.sh
175 # We cannot use "container" like other jobs above, as buildman
176 # seems to hang forever with pre-configured "container" environment
177 docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh
178
179 - job: nokia_rx51_test
180 displayName: 'Run tests for Nokia RX-51 (aka N900)'
181 pool:
182 vmImage: $(ubuntu_vm)
183 container:
184 image: $(ci_runner_image)
185 options: $(container_option)
186 steps:
187 - script: |
188 export PATH=/opt/gcc-12.2.0-nolibc/arm-linux-gnueabi/bin:$PATH
189 test/nokia_rx51_test.sh
190
191 - job: pylint
192 displayName: Check for any pylint regressions
193 pool:
194 vmImage: $(ubuntu_vm)
195 container:
196 image: $(ci_runner_image)
197 options: $(container_option)
198 steps:
199 - script: |
200 git config --global --add safe.directory $(work_dir)
201 export USER=azure
202 pip install -r test/py/requirements.txt
203 pip install asteval pylint==2.12.2 pyopenssl
204 export PATH=${PATH}:~/.local/bin
205 echo "[MASTER]" >> .pylintrc
206 echo "load-plugins=pylint.extensions.docparams" >> .pylintrc
207 export UBOOT_TRAVIS_BUILD_DIR=/tmp/sandbox_spl
208 ./tools/buildman/buildman -T0 -o ${UBOOT_TRAVIS_BUILD_DIR} -w --board sandbox_spl
209 set -ex
210 pylint --version
211 export PYTHONPATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt
212 make pylint_err
213
- stage: test_py
  jobs:
  - job: test_py
    displayName: 'test.py'
    pool:
      vmImage: $(ubuntu_vm)
    # One matrix leg per board/emulator combination; each leg sets the
    # board (TEST_PY_BD), an optional buildman identity suffix
    # (TEST_PY_ID), an optional pytest -k expression (TEST_PY_TEST_SPEC)
    # and optional buildman/build environment overrides.
    strategy:
      matrix:
        sandbox:
          TEST_PY_BD: "sandbox"
        sandbox_clang:
          TEST_PY_BD: "sandbox"
          OVERRIDE: "-O clang-14"
        sandbox_nolto:
          TEST_PY_BD: "sandbox"
          BUILD_ENV: "NO_LTO=1"
        sandbox_spl:
          TEST_PY_BD: "sandbox_spl"
          TEST_PY_TEST_SPEC: "test_ofplatdata or test_handoff or test_spl"
        sandbox_vpl:
          TEST_PY_BD: "sandbox_vpl"
          TEST_PY_TEST_SPEC: "test_vpl_help or test_spl"
        sandbox_noinst:
          TEST_PY_BD: "sandbox_noinst"
          TEST_PY_TEST_SPEC: "test_ofplatdata or test_handoff or test_spl"
        sandbox_flattree:
          TEST_PY_BD: "sandbox_flattree"
        coreboot:
          TEST_PY_BD: "coreboot"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        evb_ast2500:
          TEST_PY_BD: "evb-ast2500"
          TEST_PY_ID: "--id qemu"
        evb_ast2600:
          TEST_PY_BD: "evb-ast2600"
          TEST_PY_ID: "--id qemu"
        vexpress_ca9x4:
          TEST_PY_BD: "vexpress_ca9x4"
          TEST_PY_ID: "--id qemu"
        integratorcp_cm926ejs:
          TEST_PY_BD: "integratorcp_cm926ejs"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_arm:
          TEST_PY_BD: "qemu_arm"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_arm64:
          TEST_PY_BD: "qemu_arm64"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_malta:
          TEST_PY_BD: "malta"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_maltael:
          TEST_PY_BD: "maltael"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_malta64:
          TEST_PY_BD: "malta64"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_malta64el:
          TEST_PY_BD: "malta64el"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_ppce500:
          TEST_PY_BD: "qemu-ppce500"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv32:
          TEST_PY_BD: "qemu-riscv32"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv64:
          TEST_PY_BD: "qemu-riscv64"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv32_spl:
          TEST_PY_BD: "qemu-riscv32_spl"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv64_spl:
          TEST_PY_BD: "qemu-riscv64_spl"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_x86:
          TEST_PY_BD: "qemu-x86"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_x86_64:
          TEST_PY_BD: "qemu-x86_64"
          TEST_PY_TEST_SPEC: "not sleep"
        r2dplus_i82557c:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id i82557c_qemu"
        r2dplus_pcnet:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id pcnet_qemu"
        r2dplus_rtl8139:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id rtl8139_qemu"
        r2dplus_tulip:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id tulip_qemu"
        sifive_unleashed_sdcard:
          TEST_PY_BD: "sifive_unleashed"
          TEST_PY_ID: "--id sdcard_qemu"
        sifive_unleashed_spi-nor:
          TEST_PY_BD: "sifive_unleashed"
          TEST_PY_ID: "--id spi-nor_qemu"
        xilinx_zynq_virt:
          TEST_PY_BD: "xilinx_zynq_virt"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        xilinx_versal_virt:
          TEST_PY_BD: "xilinx_versal_virt"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        xtfpga:
          TEST_PY_BD: "xtfpga"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
    steps:
    # test.sh is assembled in two parts: the first (unquoted) heredoc
    # captures the matrix variables at pipeline-expansion time; the
    # second (quoted) heredoc is the literal script run in the container.
    - script: |
        cat << EOF > test.sh
        set -ex
        # make environment variables available as tests are running inside a container
        export WORK_DIR="${WORK_DIR}"
        export TEST_PY_BD="${TEST_PY_BD}"
        export TEST_PY_ID="${TEST_PY_ID}"
        export TEST_PY_TEST_SPEC="${TEST_PY_TEST_SPEC}"
        export OVERRIDE="${OVERRIDE}"
        export BUILD_ENV="${BUILD_ENV}"
        EOF
        cat << "EOF" >> test.sh
        # the below corresponds to .gitlab-ci.yml "before_script"
        cd ${WORK_DIR}
        git config --global --add safe.directory ${WORK_DIR}
        git clone --depth=1 https://source.denx.de/u-boot/u-boot-test-hooks /tmp/uboot-test-hooks
        ln -s travis-ci /tmp/uboot-test-hooks/bin/`hostname`
        ln -s travis-ci /tmp/uboot-test-hooks/py/`hostname`
        grub-mkimage --prefix=\"\" -o ~/grub_x86.efi -O i386-efi normal echo lsefimmap lsefi lsefisystab efinet tftp minicmd
        grub-mkimage --prefix=\"\" -o ~/grub_x64.efi -O x86_64-efi normal echo lsefimmap lsefi lsefisystab efinet tftp minicmd
        if [[ "${TEST_PY_BD}" == "qemu-riscv32_spl" ]]; then
            wget -O - https://github.com/riscv/opensbi/releases/download/v0.9/opensbi-0.9-rv-bin.tar.xz | tar -C /tmp -xJ;
            export OPENSBI=/tmp/opensbi-0.9-rv-bin/share/opensbi/ilp32/generic/firmware/fw_dynamic.bin;
        fi
        if [[ "${TEST_PY_BD}" == "qemu-riscv64_spl" ]] || [[ "${TEST_PY_BD}" == "sifive_unleashed" ]]; then
            wget -O - https://github.com/riscv/opensbi/releases/download/v0.9/opensbi-0.9-rv-bin.tar.xz | tar -C /tmp -xJ;
            export OPENSBI=/tmp/opensbi-0.9-rv-bin/share/opensbi/lp64/generic/firmware/fw_dynamic.bin;
        fi
        # the below corresponds to .gitlab-ci.yml "script"
        cd ${WORK_DIR}
        export UBOOT_TRAVIS_BUILD_DIR=/tmp/${TEST_PY_BD};
        tools/buildman/buildman -o ${UBOOT_TRAVIS_BUILD_DIR} -w -E -W -e --board ${TEST_PY_BD} ${OVERRIDE}
        cp ~/grub_x86.efi ${UBOOT_TRAVIS_BUILD_DIR}/
        cp ~/grub_x64.efi ${UBOOT_TRAVIS_BUILD_DIR}/
        cp /opt/grub/grubriscv64.efi ${UBOOT_TRAVIS_BUILD_DIR}/grub_riscv64.efi
        cp /opt/grub/grubaa64.efi ${UBOOT_TRAVIS_BUILD_DIR}/grub_arm64.efi
        cp /opt/grub/grubarm.efi ${UBOOT_TRAVIS_BUILD_DIR}/grub_arm.efi
        # create sdcard / spi-nor images for sifive unleashed using genimage
        if [[ "${TEST_PY_BD}" == "sifive_unleashed" ]]; then
            mkdir -p root;
            cp ${UBOOT_TRAVIS_BUILD_DIR}/spl/u-boot-spl.bin .;
            cp ${UBOOT_TRAVIS_BUILD_DIR}/u-boot.itb .;
            rm -rf tmp;
            genimage --inputpath . --config board/sifive/unleashed/genimage_sdcard.cfg;
            cp images/sdcard.img ${UBOOT_TRAVIS_BUILD_DIR}/;
            rm -rf tmp;
            genimage --inputpath . --config board/sifive/unleashed/genimage_spi-nor.cfg;
            cp images/spi-nor.img ${UBOOT_TRAVIS_BUILD_DIR}/;
        fi
        if [[ "${TEST_PY_BD}" == "coreboot" ]]; then
            wget -O - "https://drive.google.com/uc?id=1x6nrtWIyIRPLS2cQBwYTnT2TbOI8UjmM&export=download" |xz -dc >${UBOOT_TRAVIS_BUILD_DIR}/coreboot.rom;
            wget -O - "https://drive.google.com/uc?id=149Cz-5SZXHNKpi9xg6R_5XITWohu348y&export=download" >cbfstool;
            chmod a+x cbfstool;
            ./cbfstool ${UBOOT_TRAVIS_BUILD_DIR}/coreboot.rom add-flat-binary -f ${UBOOT_TRAVIS_BUILD_DIR}/u-boot.bin -n fallback/payload -c LZMA -l 0x1110000 -e 0x1110000;
        fi
        virtualenv -p /usr/bin/python3 /tmp/venv
        . /tmp/venv/bin/activate
        pip install -r test/py/requirements.txt
        export PATH=/opt/qemu/bin:/tmp/uboot-test-hooks/bin:${PATH};
        export PYTHONPATH=/tmp/uboot-test-hooks/py/travis-ci;
        # "${var:+"-k $var"}" expands to "" if $var is empty, "-k $var" if not
        ./test/py/test.py -ra --bd ${TEST_PY_BD} ${TEST_PY_ID} ${TEST_PY_TEST_SPEC:+"-k ${TEST_PY_TEST_SPEC}"} --build-dir "$UBOOT_TRAVIS_BUILD_DIR";
        # the below corresponds to .gitlab-ci.yml "after_script"
        rm -rf /tmp/uboot-test-hooks /tmp/venv
        EOF
        cat test.sh
        # make current directory writeable to uboot user inside the container
        # as sandbox testing need create files like spi flash images, etc.
        # (TODO: clean up this in the future)
        chmod 777 .
        # Filesystem tests need extra docker args to run
        set --
        if [[ "${TEST_PY_BD}" == "sandbox" ]]; then
            # mount -o loop needs the loop devices
            if modprobe loop; then
                for d in $(find /dev -maxdepth 1 -name 'loop*'); do
                    set -- "$@" --device $d:$d
                done
            fi
            # Needed for mount syscall (for guestmount as well)
            set -- "$@" --cap-add SYS_ADMIN
            # Default apparmor profile denies mounts
            set -- "$@" --security-opt apparmor=unconfined
        fi
        # Some tests using libguestfs-tools need the fuse device to run
        docker run "$@" --device /dev/fuse:/dev/fuse -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/test.sh

- stage: world_build
  jobs:
  - job: build_the_world
    displayName: 'Build the World'
    pool:
      vmImage: $(ubuntu_vm)
    strategy:
      # Use almost the same target division in .travis.yml, only merged
      # 3 small build jobs (arc/microblaze/xtensa) into one.
      matrix:
        arc_microblaze_xtensa:
          BUILDMAN: "arc microblaze xtensa"
        amlogic:
          BUILDMAN: "amlogic"
        arm11_arm7_arm920t_arm946es:
          BUILDMAN: "arm11 arm7 arm920t arm946es"
        arm926ejs:
          BUILDMAN: "arm926ejs -x freescale,siemens,at91,kirkwood,omap"
        at91_non_armv7:
          BUILDMAN: "at91 -x armv7"
        at91_non_arm926ejs:
          BUILDMAN: "at91 -x arm926ejs"
        boundary_engicam_toradex:
          BUILDMAN: "boundary engicam toradex"
        arm_bcm:
          BUILDMAN: "bcm -x mips"
        nxp_arm32:
          BUILDMAN: "freescale -x powerpc,m68k,aarch64,ls101,ls102,ls104,ls108,ls20,lx216"
        nxp_ls101x:
          BUILDMAN: "freescale&ls101"
        nxp_ls102x:
          BUILDMAN: "freescale&ls102"
        nxp_ls104x:
          BUILDMAN: "freescale&ls104"
        nxp_ls108x:
          BUILDMAN: "freescale&ls108"
        nxp_ls20xx:
          BUILDMAN: "freescale&ls20"
        nxp_lx216x:
          BUILDMAN: "freescale&lx216"
        imx6:
          BUILDMAN: "mx6 -x boundary,engicam,freescale,technexion,toradex"
        imx:
          BUILDMAN: "mx -x mx6,imx8,freescale,technexion,toradex"
        imx8_imx9:
          BUILDMAN: "imx8 imx9"
        keystone2_keystone3:
          BUILDMAN: "k2 k3"
        sandbox_asan:
          BUILDMAN: "sandbox"
          OVERRIDE: "-a ASAN"
        sandbox_clang_asan:
          BUILDMAN: "sandbox"
          OVERRIDE: "-O clang-14 -a ASAN"
        samsung_socfpga:
          BUILDMAN: "samsung socfpga"
        sun4i:
          BUILDMAN: "sun4i"
        sun5i:
          BUILDMAN: "sun5i"
        sun6i:
          BUILDMAN: "sun6i"
        sun7i:
          BUILDMAN: "sun7i"
        sun8i_32bit:
          BUILDMAN: "sun8i&armv7"
        sun8i_64bit:
          BUILDMAN: "sun8i&aarch64"
        sun9i:
          BUILDMAN: "sun9i"
        sun50i:
          BUILDMAN: "sun50i"
        arm_catch_all:
          BUILDMAN: "arm -x arm11,arm7,arm9,aarch64,at91,bcm,freescale,kirkwood,mvebu,renesas,siemens,tegra,uniphier,mx,samsung,sunxi,am33xx,omap,rk,toradex,socfpga,k2,k3,zynq"
        sandbox_x86:
          BUILDMAN: "sandbox x86"
        technexion:
          BUILDMAN: "technexion"
        kirkwood:
          BUILDMAN: "kirkwood"
        mvebu:
          BUILDMAN: "mvebu"
        m68k:
          BUILDMAN: "m68k"
        mips:
          BUILDMAN: "mips"
        powerpc:
          BUILDMAN: "powerpc"
        siemens:
          BUILDMAN: "siemens"
        tegra:
          BUILDMAN: "tegra -x toradex"
        am33xx_no_siemens:
          BUILDMAN: "am33xx -x siemens"
        omap:
          BUILDMAN: "omap"
        uniphier:
          BUILDMAN: "uniphier"
        aarch64_catch_all:
          BUILDMAN: "aarch64 -x amlogic,bcm,imx8,imx9,k3,tegra,ls1,ls2,lx216,mvebu,uniphier,renesas,sunxi,samsung,socfpga,rk,versal,zynq"
        rockchip_32bit:
          BUILDMAN: "rk -x aarch64"
        rockchip_64bit:
          BUILDMAN: "rk&aarch64"
        renesas:
          BUILDMAN: "renesas"
        zynq:
          BUILDMAN: "zynq&armv7"
        zynqmp_versal:
          BUILDMAN: "versal|zynqmp&aarch64"
        riscv:
          BUILDMAN: "riscv"
    steps:
    # Build each matrix slice with buildman; on failure, re-run with
    # -seP to print a summary of the errors before propagating the
    # original exit code.
    - script: |
        cat << EOF > build.sh
        set -ex
        cd ${WORK_DIR}
        # make environment variables available as tests are running inside a container
        export BUILDMAN="${BUILDMAN}"
        git config --global --add safe.directory ${WORK_DIR}
        EOF
        cat << "EOF" >> build.sh
        if [[ "${BUILDMAN}" != "" ]]; then
            ret=0;
            tools/buildman/buildman -o /tmp -PEWM ${BUILDMAN} ${OVERRIDE} || ret=$?;
            if [[ $ret -ne 0 ]]; then
                tools/buildman/buildman -o /tmp -seP ${BUILDMAN};
                exit $ret;
            fi;
        fi
        EOF
        cat build.sh
        docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh