From 191f653258dd519ef114701baf83006cca7aed3f Mon Sep 17 00:00:00 2001 From: driazati Date: Tue, 4 Oct 2022 11:52:32 -0700 Subject: [PATCH] [ci] Template build steps This uses templating for the build steps and fixes a few instances of missing skip markers and timeout wrappers. This also adds a retry to JUnit S3 uploads which was missing before. --- Jenkinsfile | 254 +++++++++++++++++----------- ci/jenkins/Build.groovy.j2 | 328 +++++++++++++++++-------------------- ci/jenkins/macros.j2 | 47 ++++-- 3 files changed, 345 insertions(+), 284 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index e964ac79a3ce..78addc9b2c93 100755 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -45,7 +45,7 @@ // 'python3 jenkins/generate.py' // Note: This timestamp is here to ensure that updates to the Jenkinsfile are // always rebased on main before merging: -// Generated at 2022-09-26T10:48:49.577077 +// Generated at 2022-10-04T13:17:33.929159 import org.jenkinsci.plugins.pipeline.modeldefinition.Utils // NOTE: these lines are scanned by docker/dev_common.sh. Please update the regex as needed. --> @@ -731,13 +731,15 @@ stage('Build') { SKIP_SLOW_TESTS = "${skip_slow_tests}" } parallel( - 'BUILD: GPU': { + + 'BUILD: GPU': { if (!skip_ci) { node('CPU-SMALL') { ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/build-gpu") { init_git() docker_init(ci_gpu) - sh "${docker_run} --no-gpu ${ci_gpu} ./tests/scripts/task_config_build_gpu.sh build" + timeout(time: max_time, unit: 'MINUTES') { + sh "${docker_run} --no-gpu ${ci_gpu} ./tests/scripts/task_config_build_gpu.sh build" make("${ci_gpu} --no-gpu", 'build', '-j2') sh( script: """ @@ -775,18 +777,22 @@ stage('Build') { """, label: 'Upload artifacts to S3', ) - + } } } + } else { + Utils.markStageSkippedForConditional('BUILD: GPU') } }, + 'BUILD: CPU': { if (!skip_ci && is_docs_only_build != 1) { node('CPU-SMALL') { ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/build-cpu") { init_git() docker_init(ci_cpu) - sh ( + timeout(time: max_time, unit: 'MINUTES') { + sh ( script: "${docker_run} ${ci_cpu} ./tests/scripts/task_config_build_cpu.sh build", label: 'Create CPU cmake config', ) @@ -809,11 +815,10 @@ stage('Build') { label: 'Upload artifacts to S3', ) - timeout(time: max_time, unit: 'MINUTES') { - ci_setup(ci_cpu) - // sh "${docker_run} ${ci_cpu} ./tests/scripts/task_golang.sh" - // TODO(@jroesch): need to resolve CI issue will turn back on in follow up patch - sh (script: "${docker_run} ${ci_cpu} ./tests/scripts/task_rust.sh", label: 'Rust build and test') + ci_setup(ci_cpu) + // sh "${docker_run} ${ci_cpu} ./tests/scripts/task_golang.sh" + // TODO(@jroesch): need to resolve CI issue will turn back on in follow up patch + sh (script: "${docker_run} ${ci_cpu} ./tests/scripts/task_rust.sh", label: 'Rust build and test') } } } @@ -821,13 +826,15 @@ stage('Build') { Utils.markStageSkippedForConditional('BUILD: CPU') } }, + 'BUILD: CPU MINIMAL': { if (!skip_ci && is_docs_only_build != 1) { node('CPU-SMALL') { ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/build-cpu-minimal") { init_git() docker_init(ci_minimal) - sh ( + timeout(time: max_time, unit: 'MINUTES') { + sh ( script: "${docker_run} ${ci_minimal} ./tests/scripts/task_config_build_minimal.sh build", label: 'Create CPU minimal cmake config', ) @@ -845,31 +852,32 @@ stage('Build') { """, label: 'Upload artifacts to S3', ) - + } } } } else { Utils.markStageSkippedForConditional('BUILD: CPU MINIMAL') } }, + 'BUILD: WASM': { if (!skip_ci && is_docs_only_build != 1) { node('CPU-SMALL') { 
ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/build-wasm") { init_git() docker_init(ci_wasm) - sh ( + timeout(time: max_time, unit: 'MINUTES') { + sh ( script: "${docker_run} ${ci_wasm} ./tests/scripts/task_config_build_wasm.sh build", label: 'Create WASM cmake config', ) make(ci_wasm, 'build', '-j2') cpp_unittest(ci_wasm) - timeout(time: max_time, unit: 'MINUTES') { - ci_setup(ci_wasm) - sh ( - script: "${docker_run} ${ci_wasm} ./tests/scripts/task_web_wasm.sh", - label: 'Run WASM lint and tests', - ) + ci_setup(ci_wasm) + sh ( + script: "${docker_run} ${ci_wasm} ./tests/scripts/task_web_wasm.sh", + label: 'Run WASM lint and tests', + ) } } } @@ -877,13 +885,15 @@ stage('Build') { Utils.markStageSkippedForConditional('BUILD: WASM') } }, + 'BUILD: i386': { if (!skip_ci && is_docs_only_build != 1) { node('CPU-SMALL') { ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/build-i386") { init_git() docker_init(ci_i386) - sh ( + timeout(time: max_time, unit: 'MINUTES') { + sh ( script: "${docker_run} ${ci_i386} ./tests/scripts/task_config_build_i386.sh build", label: 'Create i386 cmake config', ) @@ -905,20 +915,22 @@ stage('Build') { """, label: 'Upload artifacts to S3', ) - + } } } } else { Utils.markStageSkippedForConditional('BUILD: i386') } }, + 'BUILD: arm': { if (!skip_ci && is_docs_only_build != 1) { node('ARM-SMALL') { ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/build-arm") { init_git() docker_init(ci_arm) - sh ( + timeout(time: max_time, unit: 'MINUTES') { + sh ( script: "${docker_run} ${ci_arm} ./tests/scripts/task_config_build_arm.sh build", label: 'Create ARM cmake config', ) @@ -938,20 +950,22 @@ stage('Build') { """, label: 'Upload artifacts to S3', ) - + } } } - } else { + } else { Utils.markStageSkippedForConditional('BUILD: arm') } }, + 'BUILD: Cortex-M': { if (!skip_ci && is_docs_only_build != 1) { node('CPU-SMALL') { ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/build-cortexm") { init_git() docker_init(ci_cortexm) - sh ( + timeout(time: max_time, unit: 'MINUTES') { + sh ( script: "${docker_run} ${ci_cortexm} ./tests/scripts/task_config_build_cortexm.sh build", label: 'Create Cortex-M cmake config', ) @@ -970,20 +984,22 @@ stage('Build') { """, label: 'Upload artifacts to S3', ) - + } } } - } else { + } else { Utils.markStageSkippedForConditional('BUILD: Cortex-M') } }, + 'BUILD: Hexagon': { if (!skip_ci && is_docs_only_build != 1) { node('CPU-SMALL') { ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/build-hexagon") { init_git() docker_init(ci_hexagon) - sh ( + timeout(time: max_time, unit: 'MINUTES') { + sh ( script: "${docker_run} ${ci_hexagon} ./tests/scripts/task_config_build_hexagon.sh build", label: 'Create Hexagon cmake config', ) @@ -1006,20 +1022,22 @@ stage('Build') { """, label: 'Upload artifacts to S3', ) - + } } } - } else { + } else { Utils.markStageSkippedForConditional('BUILD: Hexagon') } }, + 'BUILD: RISC-V': { if (!skip_ci && is_docs_only_build != 1) { node('CPU-SMALL') { ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/build-riscv") { init_git() docker_init(ci_riscv) - sh ( + timeout(time: max_time, unit: 'MINUTES') { + sh ( script: "${docker_run} ${ci_riscv} ./tests/scripts/task_config_build_riscv.sh build", label: 'Create RISC-V cmake config', ) @@ -1038,13 +1056,14 @@ stage('Build') { """, label: 'Upload artifacts to S3', ) - + } } } - } else { + } else { Utils.markStageSkippedForConditional('BUILD: RISC-V') } }, + ) } } @@ -1118,7 +1137,8 @@ def shard_run_unittest_GPU_1_of_3() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results 
s3://${s3_prefix}/pytest-results/unittest_GPU --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/unittest_GPU --recursive """, label: 'Upload JUnits to S3', ) @@ -1181,7 +1201,8 @@ def shard_run_unittest_GPU_2_of_3() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/unittest_GPU --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/unittest_GPU --recursive """, label: 'Upload JUnits to S3', ) @@ -1240,7 +1261,8 @@ def shard_run_unittest_GPU_3_of_3() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/unittest_GPU --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/unittest_GPU --recursive """, label: 'Upload JUnits to S3', ) @@ -1298,7 +1320,8 @@ def shard_run_integration_CPU_1_of_4() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/integration_CPU --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/integration_CPU --recursive """, label: 'Upload JUnits to S3', ) @@ -1355,7 +1378,8 @@ def shard_run_integration_CPU_2_of_4() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/integration_CPU --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/integration_CPU --recursive """, label: 'Upload JUnits to S3', ) @@ -1412,7 +1436,8 @@ def shard_run_integration_CPU_3_of_4() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/integration_CPU --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/integration_CPU --recursive """, label: 'Upload JUnits to S3', ) @@ -1469,7 +1494,8 @@ def shard_run_integration_CPU_4_of_4() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/integration_CPU --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/integration_CPU --recursive """, label: 'Upload JUnits to S3', ) @@ -1527,7 +1553,8 @@ def shard_run_python_i386_1_of_3() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/python_i386 --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/python_i386 --recursive """, label: 'Upload JUnits to S3', ) @@ -1584,7 +1611,8 @@ def shard_run_python_i386_2_of_3() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/python_i386 --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/python_i386 --recursive """, label: 'Upload JUnits to S3', ) @@ -1640,7 +1668,8 @@ def shard_run_python_i386_3_of_3() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/python_i386 --recursive + . 
ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/python_i386 --recursive """, label: 'Upload JUnits to S3', ) @@ -1697,7 +1726,8 @@ def shard_run_test_Hexagon_1_of_8() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Hexagon --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Hexagon --recursive """, label: 'Upload JUnits to S3', ) @@ -1752,7 +1782,8 @@ def shard_run_test_Hexagon_2_of_8() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Hexagon --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Hexagon --recursive """, label: 'Upload JUnits to S3', ) @@ -1807,7 +1838,8 @@ def shard_run_test_Hexagon_3_of_8() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Hexagon --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Hexagon --recursive """, label: 'Upload JUnits to S3', ) @@ -1862,7 +1894,8 @@ def shard_run_test_Hexagon_4_of_8() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Hexagon --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Hexagon --recursive """, label: 'Upload JUnits to S3', ) @@ -1917,7 +1950,8 @@ def shard_run_test_Hexagon_5_of_8() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Hexagon --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Hexagon --recursive """, label: 'Upload JUnits to S3', ) @@ -1972,7 +2006,8 @@ def shard_run_test_Hexagon_6_of_8() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Hexagon --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Hexagon --recursive """, label: 'Upload JUnits to S3', ) @@ -2027,7 +2062,8 @@ def shard_run_test_Hexagon_7_of_8() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Hexagon --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Hexagon --recursive """, label: 'Upload JUnits to S3', ) @@ -2082,7 +2118,8 @@ def shard_run_test_Hexagon_8_of_8() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Hexagon --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Hexagon --recursive """, label: 'Upload JUnits to S3', ) @@ -2139,7 +2176,8 @@ def shard_run_integration_aarch64_1_of_4() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/integration_aarch64 --recursive + . 
ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/integration_aarch64 --recursive """, label: 'Upload JUnits to S3', ) @@ -2195,7 +2233,8 @@ def shard_run_integration_aarch64_2_of_4() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/integration_aarch64 --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/integration_aarch64 --recursive """, label: 'Upload JUnits to S3', ) @@ -2251,7 +2290,8 @@ def shard_run_integration_aarch64_3_of_4() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/integration_aarch64 --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/integration_aarch64 --recursive """, label: 'Upload JUnits to S3', ) @@ -2307,7 +2347,8 @@ def shard_run_integration_aarch64_4_of_4() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/integration_aarch64 --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/integration_aarch64 --recursive """, label: 'Upload JUnits to S3', ) @@ -2363,7 +2404,8 @@ def shard_run_topi_GPU_1_of_3() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/topi_GPU --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/topi_GPU --recursive """, label: 'Upload JUnits to S3', ) @@ -2418,7 +2460,8 @@ def shard_run_topi_GPU_2_of_3() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/topi_GPU --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/topi_GPU --recursive """, label: 'Upload JUnits to S3', ) @@ -2473,7 +2516,8 @@ def shard_run_topi_GPU_3_of_3() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/topi_GPU --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/topi_GPU --recursive """, label: 'Upload JUnits to S3', ) @@ -2529,7 +2573,8 @@ def shard_run_frontend_GPU_1_of_6() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/frontend_GPU --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/frontend_GPU --recursive """, label: 'Upload JUnits to S3', ) @@ -2584,7 +2629,8 @@ def shard_run_frontend_GPU_2_of_6() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/frontend_GPU --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/frontend_GPU --recursive """, label: 'Upload JUnits to S3', ) @@ -2639,7 +2685,8 @@ def shard_run_frontend_GPU_3_of_6() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/frontend_GPU --recursive + . 
ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/frontend_GPU --recursive """, label: 'Upload JUnits to S3', ) @@ -2694,7 +2741,8 @@ def shard_run_frontend_GPU_4_of_6() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/frontend_GPU --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/frontend_GPU --recursive """, label: 'Upload JUnits to S3', ) @@ -2749,7 +2797,8 @@ def shard_run_frontend_GPU_5_of_6() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/frontend_GPU --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/frontend_GPU --recursive """, label: 'Upload JUnits to S3', ) @@ -2804,7 +2853,8 @@ def shard_run_frontend_GPU_6_of_6() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/frontend_GPU --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/frontend_GPU --recursive """, label: 'Upload JUnits to S3', ) @@ -2865,7 +2915,8 @@ def shard_run_topi_aarch64_1_of_2() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/topi_aarch64 --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/topi_aarch64 --recursive """, label: 'Upload JUnits to S3', ) @@ -2924,7 +2975,8 @@ def shard_run_topi_aarch64_2_of_2() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/topi_aarch64 --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/topi_aarch64 --recursive """, label: 'Upload JUnits to S3', ) @@ -2980,7 +3032,8 @@ def shard_run_frontend_aarch64_1_of_2() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/frontend_aarch64 --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/frontend_aarch64 --recursive """, label: 'Upload JUnits to S3', ) @@ -3035,7 +3088,8 @@ def shard_run_frontend_aarch64_2_of_2() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/frontend_aarch64 --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/frontend_aarch64 --recursive """, label: 'Upload JUnits to S3', ) @@ -3096,7 +3150,8 @@ def shard_run_test_Cortex_M_1_of_12() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive """, label: 'Upload JUnits to S3', ) @@ -3151,7 +3206,8 @@ def shard_run_test_Cortex_M_2_of_12() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive + . 
ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive """, label: 'Upload JUnits to S3', ) @@ -3206,7 +3262,8 @@ def shard_run_test_Cortex_M_3_of_12() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive """, label: 'Upload JUnits to S3', ) @@ -3261,7 +3318,8 @@ def shard_run_test_Cortex_M_4_of_12() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive """, label: 'Upload JUnits to S3', ) @@ -3316,7 +3374,8 @@ def shard_run_test_Cortex_M_5_of_12() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive """, label: 'Upload JUnits to S3', ) @@ -3371,7 +3430,8 @@ def shard_run_test_Cortex_M_6_of_12() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive """, label: 'Upload JUnits to S3', ) @@ -3426,7 +3486,8 @@ def shard_run_test_Cortex_M_7_of_12() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive """, label: 'Upload JUnits to S3', ) @@ -3481,7 +3542,8 @@ def shard_run_test_Cortex_M_8_of_12() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive """, label: 'Upload JUnits to S3', ) @@ -3536,7 +3598,8 @@ def shard_run_test_Cortex_M_9_of_12() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive """, label: 'Upload JUnits to S3', ) @@ -3591,7 +3654,8 @@ def shard_run_test_Cortex_M_10_of_12() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive """, label: 'Upload JUnits to S3', ) @@ -3646,7 +3710,8 @@ def shard_run_test_Cortex_M_11_of_12() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive + . 
ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive """, label: 'Upload JUnits to S3', ) @@ -3701,7 +3766,8 @@ def shard_run_test_Cortex_M_12_of_12() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_Cortex_M --recursive """, label: 'Upload JUnits to S3', ) @@ -3758,7 +3824,8 @@ def shard_run_test_RISC_V_1_of_1() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_RISC_V --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/test_RISC_V --recursive """, label: 'Upload JUnits to S3', ) @@ -3803,7 +3870,8 @@ def run_unittest_minimal() { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/unittest_CPU_MINIMAL --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/unittest_CPU_MINIMAL --recursive """, label: 'Upload JUnits to S3', ) @@ -4013,7 +4081,8 @@ stage('Test') { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/unittest_CPU --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/unittest_CPU --recursive """, label: 'Upload JUnits to S3', ) @@ -4064,7 +4133,8 @@ stage('Test') { sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/frontend_CPU --recursive + . ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/frontend_CPU --recursive """, label: 'Upload JUnits to S3', ) @@ -4110,14 +4180,14 @@ stage('Test') { ) } sh( - script: """ - set -eux - . ci/scripts/retry.sh - md5sum docs.tgz - retry 3 aws s3 cp --no-progress docs.tgz s3://${s3_prefix}/docs/docs.tgz - """, - label: 'Upload artifacts to S3', - ) + script: """ + set -eux + . 
ci/scripts/retry.sh + md5sum docs.tgz + retry 3 aws s3 cp --no-progress docs.tgz s3://${s3_prefix}/docs/docs.tgz + """, + label: 'Upload artifacts to S3', + ) sh( script: "aws s3 cp --no-progress _docs s3://${s3_prefix}/docs --recursive", diff --git a/ci/jenkins/Build.groovy.j2 b/ci/jenkins/Build.groovy.j2 index a083fe88ad80..49cffacdc16e 100644 --- a/ci/jenkins/Build.groovy.j2 +++ b/ci/jenkins/Build.groovy.j2 @@ -80,185 +80,157 @@ stage('Build') { SKIP_SLOW_TESTS = "${skip_slow_tests}" } parallel( - 'BUILD: GPU': { - if (!skip_ci) { - node('CPU-SMALL') { - ws({{ m.per_exec_ws('tvm/build-gpu') }}) { - init_git() - docker_init(ci_gpu) - sh "${docker_run} --no-gpu ${ci_gpu} ./tests/scripts/task_config_build_gpu.sh build" - make("${ci_gpu} --no-gpu", 'build', '-j2') - {{ m.upload_artifacts(tag='gpu', filenames=tvm_multilib, folders=microtvm_template_projects) }} - // compiler test - sh "${docker_run} --no-gpu ${ci_gpu} ./tests/scripts/task_config_build_gpu_other.sh build2" - make("${ci_gpu} --no-gpu", 'build2', '-j2') - {{ m.upload_artifacts(tag='gpu2', filenames=tvm_multilib) }} - } - } - } - }, - 'BUILD: CPU': { - if (!skip_ci && is_docs_only_build != 1) { - node('CPU-SMALL') { - ws({{ m.per_exec_ws('tvm/build-cpu') }}) { - init_git() - docker_init(ci_cpu) - sh ( - script: "${docker_run} ${ci_cpu} ./tests/scripts/task_config_build_cpu.sh build", - label: 'Create CPU cmake config', - ) - make(ci_cpu, 'build', '-j2') - {{ m.upload_artifacts(tag='cpu', filenames=tvm_multilib_tsim) }} - timeout(time: max_time, unit: 'MINUTES') { - ci_setup(ci_cpu) - // sh "${docker_run} ${ci_cpu} ./tests/scripts/task_golang.sh" - // TODO(@jroesch): need to resolve CI issue will turn back on in follow up patch - sh (script: "${docker_run} ${ci_cpu} ./tests/scripts/task_rust.sh", label: 'Rust build and test') - } - } - } - } else { - Utils.markStageSkippedForConditional('BUILD: CPU') - } - }, - 'BUILD: CPU MINIMAL': { - if (!skip_ci && is_docs_only_build != 1) { - node('CPU-SMALL') { - ws({{ m.per_exec_ws('tvm/build-cpu-minimal') }}) { - init_git() - docker_init(ci_minimal) - sh ( - script: "${docker_run} ${ci_minimal} ./tests/scripts/task_config_build_minimal.sh build", - label: 'Create CPU minimal cmake config', - ) - make(ci_minimal, 'build', '-j2') - {{ m.upload_artifacts(tag='cpu-minimal', filenames=tvm_lib) }} - } - } - } else { - Utils.markStageSkippedForConditional('BUILD: CPU MINIMAL') - } - }, - 'BUILD: WASM': { - if (!skip_ci && is_docs_only_build != 1) { - node('CPU-SMALL') { - ws({{ m.per_exec_ws('tvm/build-wasm') }}) { - init_git() - docker_init(ci_wasm) - sh ( - script: "${docker_run} ${ci_wasm} ./tests/scripts/task_config_build_wasm.sh build", - label: 'Create WASM cmake config', - ) - make(ci_wasm, 'build', '-j2') - cpp_unittest(ci_wasm) - timeout(time: max_time, unit: 'MINUTES') { - ci_setup(ci_wasm) - sh ( - script: "${docker_run} ${ci_wasm} ./tests/scripts/task_web_wasm.sh", - label: 'Run WASM lint and tests', - ) - } - } - } - } else { - Utils.markStageSkippedForConditional('BUILD: WASM') - } - }, - 'BUILD: i386': { - if (!skip_ci && is_docs_only_build != 1) { - node('CPU-SMALL') { - ws({{ m.per_exec_ws('tvm/build-i386') }}) { - init_git() - docker_init(ci_i386) - sh ( - script: "${docker_run} ${ci_i386} ./tests/scripts/task_config_build_i386.sh build", - label: 'Create i386 cmake config', - ) - make(ci_i386, 'build', '-j2') - {{ m.upload_artifacts(tag='i386', filenames=tvm_multilib_tsim) }} - } - } - } else { - Utils.markStageSkippedForConditional('BUILD: i386') - } - }, - 'BUILD: arm': { - 
if (!skip_ci && is_docs_only_build != 1) { - node('ARM-SMALL') { - ws({{ m.per_exec_ws('tvm/build-arm') }}) { - init_git() - docker_init(ci_arm) - sh ( - script: "${docker_run} ${ci_arm} ./tests/scripts/task_config_build_arm.sh build", - label: 'Create ARM cmake config', - ) - make(ci_arm, 'build', '-j4') - {{ m.upload_artifacts(tag='arm', filenames=tvm_multilib) }} - } - } - } else { - Utils.markStageSkippedForConditional('BUILD: arm') - } - }, - 'BUILD: Cortex-M': { - if (!skip_ci && is_docs_only_build != 1) { - node('CPU-SMALL') { - ws({{ m.per_exec_ws('tvm/build-cortexm') }}) { - init_git() - docker_init(ci_cortexm) - sh ( - script: "${docker_run} ${ci_cortexm} ./tests/scripts/task_config_build_cortexm.sh build", - label: 'Create Cortex-M cmake config', - ) - make(ci_cortexm, 'build', '-j2') - {{ m.upload_artifacts(tag='cortexm', filenames=tvm_lib, folders=microtvm_template_projects) }} - } - } - } else { - Utils.markStageSkippedForConditional('BUILD: Cortex-M') - } - }, - 'BUILD: Hexagon': { - if (!skip_ci && is_docs_only_build != 1) { - node('CPU-SMALL') { - ws({{ m.per_exec_ws('tvm/build-hexagon') }}) { - init_git() - docker_init(ci_hexagon) - sh ( - script: "${docker_run} ${ci_hexagon} ./tests/scripts/task_config_build_hexagon.sh build", - label: 'Create Hexagon cmake config', - ) - make(ci_hexagon, 'build', '-j2') - sh ( - script: "${docker_run} ${ci_hexagon} ./tests/scripts/task_build_hexagon_api.sh", - label: 'Build Hexagon API', - ) - {{ m.upload_artifacts(tag='hexagon', filenames=tvm_lib, folders=hexagon_api) }} - } - } - } else { - Utils.markStageSkippedForConditional('BUILD: Hexagon') - } - }, - 'BUILD: RISC-V': { - if (!skip_ci && is_docs_only_build != 1) { - node('CPU-SMALL') { - ws({{ m.per_exec_ws('tvm/build-riscv') }}) { - init_git() - docker_init(ci_riscv) - sh ( - script: "${docker_run} ${ci_riscv} ./tests/scripts/task_config_build_riscv.sh build", - label: 'Create RISC-V cmake config', - ) - make(ci_riscv, 'build', '-j2') - {{ m.upload_artifacts(tag='riscv', filenames=tvm_lib, folders=microtvm_template_projects) }} - } - } - } else { - Utils.markStageSkippedForConditional('BUILD: RISC-V') - } - }, + {% call m.build_step( + name='BUILD: GPU', + node='CPU-SMALL', + condition='!skip_ci', + ws='tvm/build-gpu', + docker_image='ci_gpu', + ) %} + sh "${docker_run} --no-gpu ${ci_gpu} ./tests/scripts/task_config_build_gpu.sh build" + make("${ci_gpu} --no-gpu", 'build', '-j2') + {{ m.upload_artifacts(tag='gpu', filenames=tvm_multilib, folders=microtvm_template_projects) }} + + // compiler test + sh "${docker_run} --no-gpu ${ci_gpu} ./tests/scripts/task_config_build_gpu_other.sh build2" + make("${ci_gpu} --no-gpu", 'build2', '-j2') + {{ m.upload_artifacts(tag='gpu2', filenames=tvm_multilib) }} + {% endcall %} + + {% call m.build_step( + name='BUILD: CPU', + node='CPU-SMALL', + condition='!skip_ci && is_docs_only_build != 1', + ws='tvm/build-cpu', + docker_image='ci_cpu', + ) %} + sh ( + script: "${docker_run} ${ci_cpu} ./tests/scripts/task_config_build_cpu.sh build", + label: 'Create CPU cmake config', + ) + make(ci_cpu, 'build', '-j2') + {{ m.upload_artifacts(tag='cpu', filenames=tvm_multilib_tsim) }} + ci_setup(ci_cpu) + // sh "${docker_run} ${ci_cpu} ./tests/scripts/task_golang.sh" + // TODO(@jroesch): need to resolve CI issue will turn back on in follow up patch + sh (script: "${docker_run} ${ci_cpu} ./tests/scripts/task_rust.sh", label: 'Rust build and test') + {% endcall %} + + {% call m.build_step( + name='BUILD: CPU MINIMAL', + node='CPU-SMALL', + condition='!skip_ci && 
is_docs_only_build != 1', + ws='tvm/build-cpu-minimal', + docker_image='ci_minimal', + ) %} + sh ( + script: "${docker_run} ${ci_minimal} ./tests/scripts/task_config_build_minimal.sh build", + label: 'Create CPU minimal cmake config', + ) + make(ci_minimal, 'build', '-j2') + {{ m.upload_artifacts(tag='cpu-minimal', filenames=tvm_lib) }} + {% endcall %} + + {% call m.build_step( + name='BUILD: WASM', + node='CPU-SMALL', + condition='!skip_ci && is_docs_only_build != 1', + ws='tvm/build-wasm', + docker_image='ci_wasm', + ) %} + sh ( + script: "${docker_run} ${ci_wasm} ./tests/scripts/task_config_build_wasm.sh build", + label: 'Create WASM cmake config', + ) + make(ci_wasm, 'build', '-j2') + cpp_unittest(ci_wasm) + ci_setup(ci_wasm) + sh ( + script: "${docker_run} ${ci_wasm} ./tests/scripts/task_web_wasm.sh", + label: 'Run WASM lint and tests', + ) + {% endcall %} + + {% call m.build_step( + name='BUILD: i386', + node='CPU-SMALL', + condition='!skip_ci && is_docs_only_build != 1', + ws='tvm/build-i386', + docker_image='ci_i386', + ) %} + sh ( + script: "${docker_run} ${ci_i386} ./tests/scripts/task_config_build_i386.sh build", + label: 'Create i386 cmake config', + ) + make(ci_i386, 'build', '-j2') + {{ m.upload_artifacts(tag='i386', filenames=tvm_multilib_tsim) }} + {% endcall %} + + {% call m.build_step( + name='BUILD: arm', + node='ARM-SMALL', + condition='!skip_ci && is_docs_only_build != 1', + ws='tvm/build-arm', + docker_image='ci_arm', + ) %} + sh ( + script: "${docker_run} ${ci_arm} ./tests/scripts/task_config_build_arm.sh build", + label: 'Create ARM cmake config', + ) + make(ci_arm, 'build', '-j4') + {{ m.upload_artifacts(tag='arm', filenames=tvm_multilib) }} + {% endcall %} + + {% call m.build_step( + name='BUILD: Cortex-M', + node='CPU-SMALL', + condition='!skip_ci && is_docs_only_build != 1', + ws='tvm/build-cortexm', + docker_image='ci_cortexm', + ) %} + sh ( + script: "${docker_run} ${ci_cortexm} ./tests/scripts/task_config_build_cortexm.sh build", + label: 'Create Cortex-M cmake config', + ) + make(ci_cortexm, 'build', '-j2') + {{ m.upload_artifacts(tag='cortexm', filenames=tvm_lib, folders=microtvm_template_projects) }} + {% endcall %} + + {% call m.build_step( + name='BUILD: Hexagon', + node='CPU-SMALL', + condition='!skip_ci && is_docs_only_build != 1', + ws='tvm/build-hexagon', + docker_image='ci_hexagon', + ) %} + sh ( + script: "${docker_run} ${ci_hexagon} ./tests/scripts/task_config_build_hexagon.sh build", + label: 'Create Hexagon cmake config', + ) + make(ci_hexagon, 'build', '-j2') + sh ( + script: "${docker_run} ${ci_hexagon} ./tests/scripts/task_build_hexagon_api.sh", + label: 'Build Hexagon API', + ) + {{ m.upload_artifacts(tag='hexagon', filenames=tvm_lib, folders=hexagon_api) }} + {% endcall %} + + {% call m.build_step( + name='BUILD: RISC-V', + node='CPU-SMALL', + condition='!skip_ci && is_docs_only_build != 1', + ws='tvm/build-riscv', + docker_image='ci_riscv', + ) %} + sh ( + script: "${docker_run} ${ci_riscv} ./tests/scripts/task_config_build_riscv.sh build", + label: 'Create RISC-V cmake config', + ) + make(ci_riscv, 'build', '-j2') + {{ m.upload_artifacts(tag='riscv', filenames=tvm_lib, folders=microtvm_template_projects) }} + {% endcall %} + ) } } diff --git a/ci/jenkins/macros.j2 b/ci/jenkins/macros.j2 index e6e69097b076..618b1d9d6b09 100644 --- a/ci/jenkins/macros.j2 +++ b/ci/jenkins/macros.j2 @@ -23,7 +23,8 @@ sh( script: """ set -eux - aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/{{ test_dir_name }} --recursive + . 
ci/scripts/retry.sh + retry 3 aws s3 cp --no-progress build/pytest-results s3://${s3_prefix}/pytest-results/{{ test_dir_name }} --recursive """, label: 'Upload JUnits to S3', ) @@ -88,6 +89,24 @@ def {{ method_name }}() { {% endfor %} {% endmacro %} +{% macro build_step(name, condition, node, docker_image, ws) %} + '{{ name }}': { + if ({{ condition }}) { + node('{{ node }}') { + ws({{ per_exec_ws(ws) }}) { + init_git() + docker_init({{ docker_image }}) + timeout(time: max_time, unit: 'MINUTES') { + {{ caller() | trim | indent(width=6) }} + } + } + } + } else { + Utils.markStageSkippedForConditional('{{ name }}') + } + }, +{% endmacro %} + {% macro test_step_body(name, node, ws, docker_image, platform) %} {% set test_dir_name = name.replace(":", "").replace(" ", "-").replace("-", "_")|string %} if (!skip_ci && is_docs_only_build != 1) { @@ -158,19 +177,19 @@ def {{ method_name }}() { {% macro upload_artifacts(tag, filenames, folders=None) %} sh( - script: """ - set -eux - . ci/scripts/retry.sh - {% for filename in filenames %} - md5sum {{ filename }} - retry 3 aws s3 cp --no-progress {{ filename }} s3://${s3_prefix}/{{ tag }}/{{ filename }} - {% endfor %} - {% for folder in (folders or []) %} - retry 3 aws s3 cp --no-progress {{ folder }} s3://${s3_prefix}/{{ tag }}/{{ folder }} --recursive - {% endfor %} - """, - label: 'Upload artifacts to S3', - ) + script: """ + set -eux + . ci/scripts/retry.sh + {% for filename in filenames %} + md5sum {{ filename }} + retry 3 aws s3 cp --no-progress {{ filename }} s3://${s3_prefix}/{{ tag }}/{{ filename }} + {% endfor %} + {% for folder in (folders or []) %} + retry 3 aws s3 cp --no-progress {{ folder }} s3://${s3_prefix}/{{ tag }}/{{ folder }} --recursive + {% endfor %} + """, + label: 'Upload artifacts to S3', + ) {% endmacro %} {% macro download_artifacts(tag, filenames, folders=None) %}
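
Note on the retry helper: every upload step touched by this patch sources ci/scripts/retry.sh and wraps the aws s3 cp call in "retry 3 ...", but the helper itself is not part of the diff. For reviewers unfamiliar with it, a minimal sketch of such a helper follows; the function name "retry" and the ". ci/scripts/retry.sh" sourcing are taken from the patch, while the body shown (attempt counter, fixed sleep between attempts) is an assumption for illustration only, not the actual script contents.

    # Sketch of a retry helper in the spirit of ci/scripts/retry.sh (assumed, not the real file).
    # Usage: retry <max_attempts> <command> [args...]
    retry() {
        local max_attempts="$1"
        shift
        local attempt=1
        # Re-run the command until it succeeds or the attempt budget is exhausted.
        until "$@"; do
            if [ "$attempt" -ge "$max_attempts" ]; then
                echo "retry: command failed after ${attempt} attempts: $*" >&2
                return 1
            fi
            echo "retry: attempt ${attempt} failed, retrying..." >&2
            attempt=$((attempt + 1))
            sleep 5
        done
    }

Because the Jenkinsfile is generated from the ci/jenkins/*.j2 templates (see the 'python3 jenkins/generate.py' note in the Jenkinsfile header), changes such as the new build_step macro only take effect after regenerating the Jenkinsfile, which is why both the templates and the generated file appear in this patch.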