[ci] Split out C++ unittests (apache#13335)
* [ci] Split out C++ unittests

This makes C++ unittests follow the normal flow of build -> upload
artifacts -> download and run tests. To simplify the changes, there is a
new utility for interacting with S3.

* Comments

Co-authored-by: driazati <[email protected]>
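The new S3 utility itself lives in one of the changed files that GitHub does not render below, so the following is only a hedged sketch of the flow the commit message describes, written against the plain AWS CLI instead. The bucket and prefix mirror the s3_bucket/s3_prefix values set in ci/jenkins/Jenkinsfile.j2 further down; the 'cpu' tag path, the object layout, and the use of task_cpp_unittest.sh as the test runner are assumptions.

# Hedged sketch, not the new utility: on the build node, compile the C++
# test binaries, then push the build tree to S3 under a per-tag prefix.
python3 ./tests/scripts/task_build.py --cmake-target cpptest --build-dir build
aws s3 cp build/ "s3://tvm-jenkins-artifacts-prod/tvm/${BRANCH_NAME}/${BUILD_NUMBER}/cpu/build/" --recursive

# On the test node, pull the same artifacts back down and run the C++ unittests.
aws s3 cp "s3://tvm-jenkins-artifacts-prod/tvm/${BRANCH_NAME}/${BUILD_NUMBER}/cpu/build/" build/ --recursive
./tests/scripts/task_cpp_unittest.sh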
driazati authored Nov 18, 2022
1 parent 37a8855 · commit 490e0e3
Showing 11 changed files with 636 additions and 1,351 deletions.
1,498 changes: 312 additions & 1,186 deletions Jenkinsfile

Large diffs are not rendered by default.

131 changes: 65 additions & 66 deletions ci/jenkins/Build.groovy.j2
@@ -1,7 +1,7 @@
 def ci_setup(image) {
   sh (
-    script: "${docker_run} ${image} ./tests/scripts/task_ci_setup.sh",
-    label: 'Set up CI environment',
+    script: "${docker_run} ${image} ./tests/scripts/task_clear_pytest.sh",
+    label: 'Clean up old workspace',
   )
 }

@@ -19,61 +19,43 @@ def fsim_test(image) {
   )
 }

-def cmake_build(image, path, make_flag) {
+def make_standalone_crt(image, build_dir) {
   sh (
-    script: "${docker_run} --env CI_NUM_EXECUTORS ${image} ./tests/scripts/task_build.py --sccache-bucket tvm-sccache-prod",
-    label: 'Run cmake build',
+    script: """
+      set -eux
+      ${docker_run} ${image} python3 ./tests/scripts/task_build.py \
+        --sccache-bucket tvm-sccache-prod \
+        --cmake-target standalone_crt \
+        --build-dir build
+      ${docker_run} ${image} python3 ./tests/scripts/task_build.py \
+        --sccache-bucket tvm-sccache-prod \
+        --cmake-target crttest \
+        --build-dir build
+    """,
+    label: 'Make standalone CRT',
   )
 }

-def cpp_unittest(image) {
+def make_cpp_tests(image, build_dir) {
   sh (
-    script: "${docker_run} --env CI_NUM_EXECUTORS ${image} ./tests/scripts/task_cpp_unittest.sh",
-    label: 'Build and run C++ tests',
+    script: """
+      set -eux
+      ${docker_run} ${image} python3 ./tests/scripts/task_build.py \
+        --sccache-bucket tvm-sccache-prod \
+        --cmake-target cpptest \
+        --build-dir ${build_dir}
+    """,
+    label: 'Make C++ tests',
   )
 }

-def add_microtvm_permissions() {
-{% for folder in microtvm_template_projects %}
-  sh(
-    script: 'find {{ folder }} -type f | grep qemu-hack | xargs chmod +x',
-    label: 'Add execute permissions for microTVM files',
-  )
-{% endfor %}
-}
-
-def add_hexagon_permissions() {
-{% for folder in hexagon_api %}
-  sh(
-    script: 'find {{ folder }} -type f | xargs chmod +x',
-    label: 'Add execute permissions for hexagon files',
+def cmake_build(image, path, make_flag) {
+  sh (
+    script: "${docker_run} --env CI_NUM_EXECUTORS ${image} ./tests/scripts/task_build.py --sccache-bucket tvm-sccache-prod",
+    label: 'Run cmake build',
   )
-{% endfor %}
 }

-// Run make. First try to do an incremental make from a previous workspace in hope to
-// accelerate the compilation. If something is wrong, clean the workspace and then
-// build from scratch.
-def make(docker_type, path, make_flag) {
-  timeout(time: max_time, unit: 'MINUTES') {
-    try {
-      cmake_build(docker_type, path, make_flag)
-    } catch (hudson.AbortException ae) {
-      // script exited due to user abort, directly throw instead of retry
-      if (ae.getMessage().contains('script returned exit code 143')) {
-        throw ae
-      }
-      echo 'Incremental compilation failed. Fall back to build from scratch'
-      sh (
-        script: "${docker_run} ${docker_type} ./tests/scripts/task_clean.sh ${path}",
-        label: 'Clear old cmake workspace',
-      )
-      cmake_build(docker_type, path, make_flag)
-    }
-  }
-}
-
 def build() {
 stage('Build') {
   environment {
@@ -89,13 +71,16 @@ stage('Build') {
     docker_image='ci_gpu',
   ) %}
   sh "${docker_run} --no-gpu ${ci_gpu} ./tests/scripts/task_config_build_gpu.sh build"
-  make("${ci_gpu} --no-gpu", 'build', '-j2')
-  {{ m.upload_artifacts(tag='gpu', filenames=tvm_multilib, folders=microtvm_template_projects) }}
+  cmake_build("${ci_gpu} --no-gpu", 'build', '-j2')
+  make_standalone_crt("${ci_gpu} --no-gpu", 'build')
+  {{ m.upload_artifacts(tag='gpu', filenames=tvm_multilib + tvm_allvisible + microtvm_template_projects + crttest + standalone_crt) }}

   // compiler test
-  sh "${docker_run} --no-gpu ${ci_gpu} ./tests/scripts/task_config_build_gpu_other.sh build2"
-  make("${ci_gpu} --no-gpu", 'build2', '-j2')
-  {{ m.upload_artifacts(tag='gpu2', filenames=tvm_multilib) }}
+  sh "rm -rf build"
+  sh "${docker_run} --no-gpu ${ci_gpu} ./tests/scripts/task_config_build_gpu_other.sh build"
+  cmake_build("${ci_gpu} --no-gpu", 'build', '-j2')
+  make_standalone_crt("${ci_gpu} --no-gpu", 'build')
+  {{ m.upload_artifacts(tag='gpu2', filenames=tvm_lib + crttest + standalone_crt) }}
 {% endcall %}

 {% call m.build_step(
@@ -109,8 +94,10 @@ stage('Build') {
     script: "${docker_run} ${ci_cpu} ./tests/scripts/task_config_build_cpu.sh build",
     label: 'Create CPU cmake config',
   )
-  make(ci_cpu, 'build', '-j2')
-  {{ m.upload_artifacts(tag='cpu', filenames=tvm_multilib_tsim) }}
+  cmake_build(ci_cpu, 'build', '-j2')
+  make_standalone_crt(ci_cpu, 'build')
+  make_cpp_tests(ci_cpu, 'build')
+  {{ m.upload_artifacts(tag='cpu', filenames=tvm_multilib_tsim + tvm_allvisible + crttest + cpptest + standalone_crt) }}
   ci_setup(ci_cpu)
   // sh "${docker_run} ${ci_cpu} ./tests/scripts/task_golang.sh"
   // TODO(@jroesch): need to resolve CI issue will turn back on in follow up patch
@@ -128,8 +115,9 @@ stage('Build') {
     script: "${docker_run} ${ci_minimal} ./tests/scripts/task_config_build_minimal.sh build",
     label: 'Create CPU minimal cmake config',
   )
-  make(ci_minimal, 'build', '-j2')
-  {{ m.upload_artifacts(tag='cpu-minimal', filenames=tvm_lib) }}
+  cmake_build(ci_minimal, 'build', '-j2')
+  make_cpp_tests(ci_minimal, 'build')
+  {{ m.upload_artifacts(tag='cpu-minimal', filenames=tvm_lib + tvm_allvisible + cpptest) }}
 {% endcall %}

 {% call m.build_step(
@@ -143,7 +131,9 @@ stage('Build') {
     script: "${docker_run} ${ci_wasm} ./tests/scripts/task_config_build_wasm.sh build",
     label: 'Create WASM cmake config',
   )
-  make(ci_wasm, 'build', '-j2')
+  cmake_build(ci_wasm, 'build', '-j2')
+  make_standalone_crt(ci_wasm, 'build')
+  make_cpp_tests(ci_wasm, 'build')
   cpp_unittest(ci_wasm)
   ci_setup(ci_wasm)
   sh (
@@ -163,8 +153,10 @@ stage('Build') {
     script: "${docker_run} ${ci_i386} ./tests/scripts/task_config_build_i386.sh build",
     label: 'Create i386 cmake config',
   )
-  make(ci_i386, 'build', '-j2')
-  {{ m.upload_artifacts(tag='i386', filenames=tvm_multilib_tsim) }}
+  cmake_build(ci_i386, 'build', '-j2')
+  make_standalone_crt(ci_i386, 'build')
+  make_cpp_tests(ci_i386, 'build')
+  {{ m.upload_artifacts(tag='i386', filenames=tvm_multilib_tsim + standalone_crt + crttest + cpptest) }}
 {% endcall %}

 {% call m.build_step(
@@ -178,8 +170,10 @@ stage('Build') {
     script: "${docker_run} ${ci_arm} ./tests/scripts/task_config_build_arm.sh build",
     label: 'Create ARM cmake config',
   )
-  make(ci_arm, 'build', '-j4')
-  {{ m.upload_artifacts(tag='arm', filenames=tvm_multilib) }}
+  cmake_build(ci_arm, 'build', '-j4')
+  make_standalone_crt(ci_arm, 'build')
+  make_cpp_tests(ci_arm, 'build')
+  {{ m.upload_artifacts(tag='arm', filenames=tvm_multilib + cpptest + crttest + standalone_crt) }}
 {% endcall %}

 {% call m.build_step(
@@ -193,8 +187,10 @@ stage('Build') {
     script: "${docker_run} ${ci_cortexm} ./tests/scripts/task_config_build_cortexm.sh build",
     label: 'Create Cortex-M cmake config',
   )
-  make(ci_cortexm, 'build', '-j2')
-  {{ m.upload_artifacts(tag='cortexm', filenames=tvm_lib, folders=microtvm_template_projects) }}
+  cmake_build(ci_cortexm, 'build', '-j2')
+  make_standalone_crt(ci_cortexm, 'build')
+  make_cpp_tests(ci_cortexm, 'build')
+  {{ m.upload_artifacts(tag='cortexm', filenames=tvm_lib + tvm_allvisible + crttest + standalone_crt + cpptest + microtvm_template_projects) }}
 {% endcall %}

 {% call m.build_step(
@@ -208,12 +204,13 @@ stage('Build') {
     script: "${docker_run} ${ci_hexagon} ./tests/scripts/task_config_build_hexagon.sh build",
     label: 'Create Hexagon cmake config',
   )
-  make(ci_hexagon, 'build', '-j2')
+  cmake_build(ci_hexagon, 'build', '-j2')
+  make_cpp_tests(ci_hexagon, 'build')
   sh (
     script: "${docker_run} ${ci_hexagon} ./tests/scripts/task_build_hexagon_api.sh",
     label: 'Build Hexagon API',
   )
-  {{ m.upload_artifacts(tag='hexagon', filenames=tvm_lib, folders=hexagon_api) }}
+  {{ m.upload_artifacts(tag='hexagon', filenames=tvm_lib + cpptest + hexagon_api) }}
 {% endcall %}

 {% call m.build_step(
@@ -227,8 +224,10 @@ stage('Build') {
     script: "${docker_run} ${ci_riscv} ./tests/scripts/task_config_build_riscv.sh build",
     label: 'Create RISC-V cmake config',
   )
-  make(ci_riscv, 'build', '-j2')
-  {{ m.upload_artifacts(tag='riscv', filenames=tvm_lib, folders=microtvm_template_projects) }}
+  cmake_build(ci_riscv, 'build', '-j2')
+  make_standalone_crt(ci_riscv, 'build')
+  make_cpp_tests(ci_riscv, 'build')
+  {{ m.upload_artifacts(tag='riscv', filenames=tvm_lib + tvm_allvisible + standalone_crt + crttest + cpptest + microtvm_template_projects) }}
 {% endcall %}

 )
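A note on the Build.groovy.j2 change above: the monolithic make() wrapper (with its incremental-build retry) is gone, and each image now runs cmake_build plus explicit make_standalone_crt / make_cpp_tests steps, so the test binaries exist as uploadable artifacts instead of being built and run in one shot. A rough local approximation, assuming TVM's docker/bash.sh wrapper and the ci_cpu image, with the CI-only sccache bucket omitted:

# Sketch: configure a CPU build, then build only the C++ test target,
# mirroring the cmake_build + make_cpp_tests steps above (flags may differ).
docker/bash.sh ci_cpu ./tests/scripts/task_config_build_cpu.sh build
docker/bash.sh ci_cpu python3 ./tests/scripts/task_build.py --cmake-target cpptest --build-dir build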
2 changes: 1 addition & 1 deletion ci/jenkins/Deploy.groovy.j2
@@ -91,7 +91,7 @@ def deploy() {
     ws="tvm/deploy-docs",
   ) %}
   init_git()
-  {{ m.download_artifacts(tag='docs', filenames=["docs.tgz"]) }}
+  {{ m.download_artifacts(tag='docs') }}
   deploy_docs()
 {% endcall %}
 {% call m.deploy_step(
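Dropping the filenames argument means the docs deploy now pulls everything stored under the docs tag rather than a single docs.tgz. In aws-CLI terms the step is roughly the following, with an illustrative branch and build number and an assumed per-tag layout:

# Sketch: fetch the whole 'docs' artifact prefix instead of one named tarball.
aws s3 cp "s3://tvm-jenkins-artifacts-prod/tvm/main/4242/docs/" . --recursive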
7 changes: 6 additions & 1 deletion ci/jenkins/Jenkinsfile.j2
@@ -93,12 +93,17 @@ rebuild_docker_images = false

 // Filenames for stashing between build and test steps
 {% set tvm_runtime = ['build/libtvm_runtime.so', 'build/config.cmake'] %}
+{% set crttest = ['build/crttest'] %}
+{% set tvm_allvisible = ['build/libtvm_allvisible.so'] %}
+{% set cpptest = ['build/cpptest', 'build/build.ninja', 'build/CMakeFiles/rules.ninja'] %}
 {% set tvm_lib = ['build/libtvm.so'] + tvm_runtime %}
 {% set tvm_multilib = ['build/libtvm.so', 'build/libvta_fsim.so'] + tvm_runtime %}
 {% set tvm_multilib_tsim = ['build/libvta_tsim.so'] + tvm_multilib %}
 {% set microtvm_template_projects = ['build/microtvm_template_projects',] %}
 {% set hexagon_api = ['build/hexagon_api_output',] %}
-s3_prefix = "tvm-jenkins-artifacts-prod/tvm/${env.BRANCH_NAME}/${env.BUILD_NUMBER}"
+{% set standalone_crt = ['build/standalone_crt', 'build/build.ninja'] %}
+s3_bucket = 'tvm-jenkins-artifacts-prod'
+s3_prefix = "tvm/${env.BRANCH_NAME}/${env.BUILD_NUMBER}"

 // Jenkins script root directory
 jenkins_scripts_root = "ci/scripts/jenkins"
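To make the new artifact lists concrete: s3_bucket and s3_prefix combine with a per-step tag, and each Jinja set above names the files stashed for that tag. Roughly how the cpu-minimal set (tvm_lib + tvm_allvisible + cpptest) would expand for an illustrative build of branch main, build 4242 — the per-tag layout and the aws-CLI loop are assumptions, since the real pipeline goes through the new S3 utility:

# Illustrative expansion only; the actual upload_artifacts macro is not shown in this diff.
for f in build/libtvm.so build/libtvm_runtime.so build/config.cmake \
         build/libtvm_allvisible.so build/cpptest build/build.ninja \
         build/CMakeFiles/rules.ninja; do
  aws s3 cp "$f" "s3://tvm-jenkins-artifacts-prod/tvm/main/4242/cpu-minimal/$f"
done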
