diff --git a/CMakeLists.txt b/CMakeLists.txt index d0e568c898..c43a347ed9 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -152,10 +152,11 @@ print_sys_info("OS_NAME;OS_RELEASE;OS_VERSION;OS_PLATFORM") # In practice people only use Debug or Release. # # CMake options, specify with -DNOISEPAGE_{option}=On. -# NOISEPAGE_BUILD_BENCHMARKS : Enable building benchmarks as part of the ALL target. Default ON. -# NOISEPAGE_BUILD_TESTS : Enable building tests as part of the ALL (but the Self-Driving test) target. Default ON. -# NOISEPAGE_BUILD_SELF_DRIVING_E2E_TESTS : Enable building self-driving end-to-end tests. Default OFF +# NOISEPAGE_BUILD_BENCHMARKS : Enable building benchmarks as part of the ALL target. Default OFF. +# NOISEPAGE_BUILD_TESTS : Enable building (non-self-driving-e2e) tests as part of the ALL target. Default OFF. +# NOISEPAGE_BUILD_SELF_DRIVING_E2E_TESTS : Enable building self-driving end-to-end tests as part of the ALL target. Default OFF. # NOISEPAGE_GENERATE_COVERAGE : Enable C++ code coverage. Default OFF. +# NOISEPAGE_TEST_PARALLELISM : The number of tests that should run in parallel. Default 1. # NOISEPAGE_UNITTEST_OUTPUT_ON_FAILURE : Enable verbose unittest failures. Default OFF. Can be very verbose. # NOISEPAGE_UNITY_BUILD : Enable unity (aka jumbo) builds. Default OFF. # NOISEPAGE_USE_ASAN : Enable ASAN, a fast memory error detector. Default OFF. @@ -171,7 +172,6 @@ print_sys_info("OS_NAME;OS_RELEASE;OS_VERSION;OS_PLATFORM") # NOISEPAGE_INCLUDE_DIRECTORIES : Include directories to be used for NoisePage. # NOISEPAGE_LINK_LIBRARIES : Link libraries to be added to NoisePage. # NOISEPAGE_LINK_OPTIONS : Link options to be added to NoisePage. -# NOISEPAGE_TEST_PARALLELISM : The number of tests that should run in parallel. ####################################################################################################################### # Default to DEBUG builds if -DCMAKE_BUILD_TYPE was not specified. @@ -181,11 +181,11 @@ endif (NOT CMAKE_BUILD_TYPE) option(NOISEPAGE_BUILD_BENCHMARKS "Enable building benchmarks as part of the ALL target." - ON) + OFF) option(NOISEPAGE_BUILD_TESTS - "Enable building tests as part of the ALL target." - ON) + "Enable building (non-self-driving-e2e) tests as part of the ALL target." + OFF) option(NOISEPAGE_BUILD_SELF_DRIVING_E2E_TESTS "Enable building self-driving end-to-end tests as part of the ALL target." @@ -849,7 +849,7 @@ add_custom_target(self_driving_e2e_test # For now, this target is specif --resource-spec-file ${BUILD_SUPPORT_DATA_DIR}/ctest_resource_specs.json # For controlling conflicting tests. ${UNITTEST_OUTPUT_ON_FAILURE} # Whether to print output when a test fails. -j ${NOISEPAGE_TEST_PARALLELISM} # Maximum number of parallel jobs. - -L self_driving_e2e_test # Run all tests that have a label like this. See footgun warning above. + -L self_driving_e2e_test # Run all tests that have a label like this. See footgun warning above. --no-compress-output # Output verbosely so that it can be logged. -T Test # Run tests and log it to Testing/*/Test.xml. --timeout 3000 # 3000 second timeout per test. diff --git a/Jenkinsfile b/Jenkinsfile index 317478bf28..351f8899f6 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1,5 +1,10 @@ -def utils // common build functions are loaded from Jenkinsfile-utils into this object -String utilsFileName = 'Jenkinsfile-utils' +// Common build functions will be loaded into the "utils" object in every stage. 
+// The build functions are loaded with Groovy's elvis operator ?:, where +// a ?: b means if (a) { return a; } else { return b; } +// and in this case, utils = utils ?: load(utilsFileName) means "if utils is truthy, return utils, else, load utils". +// This has to be done in every stage to support the Jenkins "restart from stage" feature. +def utils +String utilsFileName = 'Jenkinsfile-utils.groovy' pipeline { agent none @@ -9,337 +14,95 @@ pipeline { } stages { stage('Ready For CI') { - agent { - docker { - image 'noisepage:focal' - args '-v /jenkins/ccache:/home/jenkins/.ccache' - } - } - when { - not { - branch 'master' - } - } - steps { - script { - ready_for_build = sh script: 'python3 ./build-support/check_github_labels.py', returnStatus: true - if(ready_for_build != 0) { - currentBuild.result = 'ABORTED' - error('Not ready for CI. Please add ready-for-ci tag in Github when you are ready to build your PR.') - } - } - } - post { - cleanup { - deleteDir() - } - } + agent { docker { image 'noisepage:focal' } } + when { not { branch 'master' } } + steps { script { utils = utils ?: load(utilsFileName) ; utils.stageGithub() } } + post { cleanup { deleteDir() } } } stage('Check') { parallel { stage('ubuntu-20.04/gcc-9.3 (Debug/format/lint/censored)') { - agent { - docker { - image 'noisepage:focal' - } - } - steps { - sh 'echo $NODE_NAME' - sh script: './build-support/print_docker_info.sh', label: 'Print image information.' - sh script: 'echo y | sudo ./script/installation/packages.sh build', label: 'Installing packages' - sh 'cd apidoc && doxygen -u Doxyfile.in && doxygen Doxyfile.in 2>warnings.txt && if [ -s warnings.txt ]; then cat warnings.txt; false; fi' - sh 'mkdir build' - sh 'cd build && cmake -GNinja ..' - sh 'cd build && timeout 20m ninja check-format' - sh 'cd build && timeout 20m ninja check-lint' - sh 'cd build && timeout 20m ninja check-censored' - sh 'cd build && ninja check-clang-tidy' - } - post { - cleanup { - deleteDir() - } - } + agent { docker { image 'noisepage:focal' } } + steps { script { utils = utils ?: load(utilsFileName) ; utils.stageCheck() } } + post { cleanup { deleteDir() } } } - stage('ubuntu-20.04/clang-8.0 (Debug/format/lint/censored)') { - agent { - docker { - image 'noisepage:focal' - } - } - environment { - CC="/usr/bin/clang-8" - CXX="/usr/bin/clang++-8" - } - steps { - sh 'echo $NODE_NAME' - sh script: './build-support/print_docker_info.sh', label: 'Print image information.' - sh script: 'echo y | sudo ./script/installation/packages.sh build', label: 'Installing packages' - sh 'cd apidoc && doxygen -u Doxyfile.in && doxygen Doxyfile.in 2>warnings.txt && if [ -s warnings.txt ]; then cat warnings.txt; false; fi' - sh 'mkdir build' - sh 'cd build && cmake -GNinja ..' - sh 'cd build && timeout 20m ninja check-format' - sh 'cd build && timeout 20m ninja check-lint' - sh 'cd build && timeout 20m ninja check-censored' - sh 'cd build && ninja check-clang-tidy' - } - post { - cleanup { - deleteDir() - } - } + agent { docker { image 'noisepage:focal' } } + environment { CC="/usr/bin/clang-8" ; CXX="/usr/bin/clang++-8" } + steps { script { utils = utils ?: load(utilsFileName) ; utils.stageCheck() } } + post { cleanup { deleteDir() } } } } } - stage('Microbenchmark (Build only)') { - agent { - docker { - image 'noisepage:focal' - args '-v /jenkins/ccache:/home/jenkins/.ccache' - } - } - steps { - sh 'echo $NODE_NAME' - sh script: './build-support/print_docker_info.sh', label: 'Print image information.' 
-
-            script{
-                utils = utils ?: load(utilsFileName)
-                utils.noisePageBuild(isBuildTests:false, isBuildBenchmarks:true)
-            }
-        }
-        post {
-            cleanup {
-                deleteDir()
+        stage('Build-only checks') {
+            parallel {
+                stage('Benchmarks (debug build only)') {
+                    agent { docker { image 'noisepage:focal' ; args '--cap-add sys_ptrace -v /jenkins/ccache:/home/jenkins/.ccache' } }
+                    steps { script { utils = utils ?: load(utilsFileName) ; utils.stageBuildDefault([
+                        buildCommand: 'ninja',
+                        cmake: '-DCMAKE_BUILD_TYPE=Debug -DNOISEPAGE_BUILD_BENCHMARKS=ON',
+                    ] ) } }
+                    post { cleanup { deleteDir() } }
+                }
+                stage('Logging disabled (release build only)') {
+                    agent { docker { image 'noisepage:focal' ; args '--cap-add sys_ptrace -v /jenkins/ccache:/home/jenkins/.ccache' } }
+                    steps { script { utils = utils ?: load(utilsFileName) ; utils.stageBuildDefault([
+                        buildCommand: 'ninja',
+                        cmake: '-DCMAKE_BUILD_TYPE=Release -DNOISEPAGE_BUILD_BENCHMARKS=ON -DNOISEPAGE_BUILD_TESTS=ON -DNOISEPAGE_BUILD_SELF_DRIVING_E2E_TESTS=ON -DNOISEPAGE_USE_LOGGING=OFF'
+                    ] ) } }
+                    post { cleanup { deleteDir() } }
                }
            }
        }
        stage('Test') {
            parallel {
-                stage('ubuntu-20.04/gcc-9.3 (Debug/ASAN/jumbotests)') {
-                    agent {
-                        docker {
-                            image 'noisepage:focal'
-                            args '--cap-add sys_ptrace -v /jenkins/ccache:/home/jenkins/.ccache'
-                        }
-                    }
-                    steps {
-                        sh 'echo $NODE_NAME'
-                        sh script: './build-support/print_docker_info.sh', label: 'Print image information.'
+                // The first argument to utils.stageTest() indicates whether pipeline metrics should be gathered.
+                // Pipeline metrics take a while to run and don't need to be run in every stage. The pipeline metrics are only run in one arbitrary stage to check for breakage.
-                        script{
-                            utils = utils ?: load(utilsFileName)
-                            utils.noisePageBuild(useASAN:true, isJumboTest:true)
-                        }
-
-                        sh script: 'cd build && timeout 10s sudo python3 -B ../script/testing/kill_server.py 15721', label: 'Kill PID(15721)'
-                        sh script: 'cd build && timeout 10s sudo python3 -B ../script/testing/kill_server.py 15722', label: 'Kill PID(15722)'
-                        sh script: 'cd build && timeout 10s sudo python3 -B ../script/testing/kill_server.py 15723', label: 'Kill PID(15723)'
-                        sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.'
-                        sh script: 'cd build/bin && PYTHONPATH=../.. timeout 20m python3 -m script.testing.replication.tests_simple --build-type=debug', label: 'Replication (Simple)'
-                        sh script: 'cd build && PYTHONPATH=.. timeout 20m python3 -m script.testing.junit --build-type=debug --query-mode=simple', label: 'UnitTest (Simple)'
-                        sh script: 'cd build && PYTHONPATH=.. timeout 60m python3 -m script.testing.junit --build-type=debug --query-mode=simple -a "compiled_query_execution=True" -a "bytecode_handlers_path=./bytecode_handlers_ir.bc"', label: 'UnitTest (Simple, Compiled Execution)'
-                        sh script: 'cd build && PYTHONPATH=.. timeout 20m python3 -m script.testing.junit --build-type=debug --query-mode=extended', label: 'UnitTest (Extended)'
-                        sh script: 'cd build && PYTHONPATH=.. timeout 60m python3 -m script.testing.junit --build-type=debug --query-mode=extended -a "compiled_query_execution=True" -a "bytecode_handlers_path=./bytecode_handlers_ir.bc"', label: 'UnitTest (Extended, Compiled Execution)'
-                        sh script: 'cd build && PYTHONPATH=.. 
timeout 20m python3 -m script.testing.junit --build-type=debug --query-mode=extended -a "pipeline_metrics_enable=True" -a "pipeline_metrics_sample_rate=100" -a "counters_enable=True" -a "query_trace_metrics_enable=True"', label: 'UnitTest (Extended with pipeline metrics, counters, and query trace metrics)' - sh script: 'cd build && PYTHONPATH=.. timeout 60m python3 -m script.testing.junit --build-type=debug --query-mode=extended -a "pipeline_metrics_enable=True" -a "pipeline_metrics_sample_rate=100" -a "counters_enable=True" -a "query_trace_metrics_enable=True" -a "compiled_query_execution=True" -a "bytecode_handlers_path=./bytecode_handlers_ir.bc"', label: 'UnitTest (Extended, Compiled Execution with pipeline metrics, counters, and query trace metrics)' - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - sh 'cd build && timeout 1h ninja check-tpl' - sh 'cd build && export BUILD_ABS_PATH=`pwd` && timeout 1h ninja jumbotests' - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - } - post { - always { - archiveArtifacts(artifacts: 'build/Testing/**/*.xml', fingerprint: true) - xunit reduceLog: false, tools: [CTest(deleteOutputFiles: false, failIfNotNew: false, pattern: 'build/Testing/**/*.xml', skipNoTestFiles: false, stopProcessingIfError: false)] - } - cleanup { - deleteDir() - } - } + stage('ubuntu-20.04/gcc-9.3 (Debug/ASAN/jumbotests)') { + agent { docker { image 'noisepage:focal' ; args '--cap-add sys_ptrace -v /jenkins/ccache:/home/jenkins/.ccache' } } + steps { script { utils = utils ?: load(utilsFileName) ; utils.stageTest(true, [cmake: + '-DCMAKE_BUILD_TYPE=Debug -DNOISEPAGE_BUILD_TESTS=ON -DNOISEPAGE_UNITY_BUILD=ON -DNOISEPAGE_USE_ASAN=ON -DNOISEPAGE_USE_JUMBOTESTS=ON' + ] ) } } + post { always { script { utils = utils ?: load(utilsFileName) ; utils.stageArchive() } } ; cleanup { deleteDir() } } } stage('ubuntu-20.04/gcc-9.3 (Debug/Coverage/unittest)') { - agent { - docker { - image 'noisepage:focal' - label 'dgb' - args '--cap-add sys_ptrace -v /jenkins/ccache:/home/jenkins/.ccache' - } - } - environment { - CODECOV_TOKEN=credentials('codecov-token') - } - steps { - sh 'echo $NODE_NAME' - sh script: './build-support/print_docker_info.sh', label: 'Print image information.' - - script{ - utils = utils ?: load(utilsFileName) - utils.noisePageBuild(isCodeCoverage:true) - } - - sh script: 'cd build && timeout 10s sudo python3 -B ../script/testing/kill_server.py 15721', label: 'Kill PID(15721)' - sh script: 'cd build && timeout 10s sudo python3 -B ../script/testing/kill_server.py 15722', label: 'Kill PID(15722)' - sh script: 'cd build && timeout 10s sudo python3 -B ../script/testing/kill_server.py 15723', label: 'Kill PID(15723)' - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - sh script: 'cd build && PYTHONPATH=.. timeout 20m python3 -m script.testing.junit --build-type=debug --query-mode=simple', label: 'UnitTest (Simple)' - sh script: 'cd build && PYTHONPATH=.. timeout 60m python3 -m script.testing.junit --build-type=debug --query-mode=simple -a "compiled_query_execution=True" -a "bytecode_handlers_path=./bytecode_handlers_ir.bc"', label: 'UnitTest (Simple, Compiled Execution)' - sh script: 'cd build && PYTHONPATH=.. timeout 20m python3 -m script.testing.junit --build-type=debug --query-mode=extended', label: 'UnitTest (Extended)' - sh script: 'cd build && PYTHONPATH=.. 
timeout 60m python3 -m script.testing.junit --build-type=debug --query-mode=extended -a "compiled_query_execution=True" -a "bytecode_handlers_path=./bytecode_handlers_ir.bc"', label: 'UnitTest (Extended, Compiled Execution)' - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - sh 'cd build && timeout 1h ninja check-tpl' - sh 'cd build && export BUILD_ABS_PATH=`pwd` && timeout 1h ninja unittest' - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - - script{ - utils = utils ?: load(utilsFileName) - utils.cppCoverage() - } - - } - post { - always { - archiveArtifacts(artifacts: 'build/Testing/**/*.xml', fingerprint: true) - xunit reduceLog: false, tools: [CTest(deleteOutputFiles: false, failIfNotNew: false, pattern: 'build/Testing/**/*.xml', skipNoTestFiles: false, stopProcessingIfError: false)] - } - cleanup { - deleteDir() - } - } + agent { docker { image 'noisepage:focal' ; label 'dgb' ; args '--cap-add sys_ptrace -v /jenkins/ccache:/home/jenkins/.ccache' } } + environment { CODECOV_TOKEN=credentials('codecov-token') } + steps { script { utils = utils ?: load(utilsFileName) ; utils.stageTest(false, [cmake: + // Note that unity builds mess with coverage. + '-DCMAKE_BUILD_TYPE=Debug -DNOISEPAGE_BUILD_TESTS=ON -DNOISEPAGE_GENERATE_COVERAGE=ON' + ] ) } } + post { always { script { utils = utils ?: load(utilsFileName) ; utils.stageArchive() } } ; cleanup { deleteDir() } } } stage('ubuntu-20.04/clang-8.0 (Debug/ASAN/jumbotests)') { - agent { - docker { - image 'noisepage:focal' - args '--cap-add sys_ptrace -v /jenkins/ccache:/home/jenkins/.ccache' - } - } - environment { - CC="/usr/bin/clang-8" - CXX="/usr/bin/clang++-8" - } - steps { - sh 'echo $NODE_NAME' - sh script: './build-support/print_docker_info.sh', label: 'Print image information.' - - script{ - utils = utils ?: load(utilsFileName) - utils.noisePageBuild(useASAN:true, isJumboTest:true) - } - - sh script: 'cd build && timeout 10s sudo python3 -B ../script/testing/kill_server.py 15721', label: 'Kill PID(15721)' - sh script: 'cd build && timeout 10s sudo python3 -B ../script/testing/kill_server.py 15722', label: 'Kill PID(15722)' - sh script: 'cd build && timeout 10s sudo python3 -B ../script/testing/kill_server.py 15723', label: 'Kill PID(15723)' - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - sh script: 'cd build/bin && PYTHONPATH=../.. timeout 20m python3 -m script.testing.replication.tests_simple --build-type=debug', label: 'Replication (Simple)' - sh script: 'cd build && PYTHONPATH=.. timeout 20m python3 -m script.testing.junit --build-type=debug --query-mode=simple', label: 'UnitTest (Simple)' - sh script: 'cd build && PYTHONPATH=.. timeout 60m python3 -m script.testing.junit --build-type=debug --query-mode=simple -a "compiled_query_execution=True" -a "bytecode_handlers_path=./bytecode_handlers_ir.bc"', label: 'UnitTest (Simple, Compiled Execution)' - sh script: 'cd build && PYTHONPATH=.. timeout 20m python3 -m script.testing.junit --build-type=debug --query-mode=extended', label: 'UnitTest (Extended)' - sh script: 'cd build && PYTHONPATH=.. timeout 60m python3 -m script.testing.junit --build-type=debug --query-mode=extended -a "compiled_query_execution=True" -a "bytecode_handlers_path=./bytecode_handlers_ir.bc"', label: 'UnitTest (Extended, Compiled Execution)' - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' 
- sh 'cd build && timeout 1h ninja check-tpl' - sh 'cd build && export BUILD_ABS_PATH=`pwd` && timeout 1h ninja jumbotests' - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - } - post { - always { - archiveArtifacts(artifacts: 'build/Testing/**/*.xml', fingerprint: true) - xunit reduceLog: false, tools: [CTest(deleteOutputFiles: false, failIfNotNew: false, pattern: 'build/Testing/**/*.xml', skipNoTestFiles: false, stopProcessingIfError: false)] - } - cleanup { - deleteDir() - } - } + agent { docker { image 'noisepage:focal' ; args '--cap-add sys_ptrace -v /jenkins/ccache:/home/jenkins/.ccache' } } + environment { CC="/usr/bin/clang-8" ; CXX="/usr/bin/clang++-8" } + steps { script { utils = utils ?: load(utilsFileName) ; utils.stageTest(false, [cmake: + '-DCMAKE_BUILD_TYPE=Debug -DNOISEPAGE_BUILD_TESTS=ON -DNOISEPAGE_UNITY_BUILD=ON -DNOISEPAGE_USE_ASAN=ON -DNOISEPAGE_USE_JUMBOTESTS=ON' + ] ) } } + post { always { script { utils = utils ?: load(utilsFileName) ; utils.stageArchive() } } ; cleanup { deleteDir() } } } stage('ubuntu-20.04/gcc-9.3 (Release/jumbotests)') { - agent { - docker { - image 'noisepage:focal' - args '--cap-add sys_ptrace -v /jenkins/ccache:/home/jenkins/.ccache' - } - } - steps { - sh 'echo $NODE_NAME' - sh script: './build-support/print_docker_info.sh', label: 'Print image information.' - - script{ - utils = utils ?: load(utilsFileName) - utils.noisePageBuild(buildType:utils.RELEASE_BUILD, isJumboTest:true) - } - - sh script: 'cd build && timeout 10s sudo python3 -B ../script/testing/kill_server.py 15721', label: 'Kill PID(15721)' - sh script: 'cd build && timeout 10s sudo python3 -B ../script/testing/kill_server.py 15722', label: 'Kill PID(15722)' - sh script: 'cd build && timeout 10s sudo python3 -B ../script/testing/kill_server.py 15723', label: 'Kill PID(15723)' - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - sh script: 'cd build/bin && PYTHONPATH=../.. timeout 20m python3 -m script.testing.replication.tests_simple --build-type=release', label: 'Replication (Simple)' - sh script: 'cd build && PYTHONPATH=.. timeout 20m python3 -m script.testing.junit --build-type=release --query-mode=simple', label: 'UnitTest (Simple)' - sh script: 'cd build && PYTHONPATH=.. timeout 60m python3 -m script.testing.junit --build-type=release --query-mode=simple -a "compiled_query_execution=True" -a "bytecode_handlers_path=./bytecode_handlers_ir.bc"', label: 'UnitTest (Simple, Compiled Execution)' - sh script: 'cd build && PYTHONPATH=.. timeout 20m python3 -m script.testing.junit --build-type=release --query-mode=extended', label: 'UnitTest (Extended)' - sh script: 'cd build && PYTHONPATH=.. timeout 60m python3 -m script.testing.junit --build-type=release --query-mode=extended -a "compiled_query_execution=True" -a "bytecode_handlers_path=./bytecode_handlers_ir.bc"', label: 'UnitTest (Extended, Compiled Execution)' - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - sh 'cd build && timeout 1h ninja check-tpl' - sh 'cd build && export BUILD_ABS_PATH=`pwd` && timeout 1h ninja jumbotests' - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' 
- } - post { - always { - archiveArtifacts(artifacts: 'build/Testing/**/*.xml', fingerprint: true) - xunit reduceLog: false, tools: [CTest(deleteOutputFiles: false, failIfNotNew: false, pattern: 'build/Testing/**/*.xml', skipNoTestFiles: false, stopProcessingIfError: false)] - } - cleanup { - deleteDir() - } - } + agent { docker { image 'noisepage:focal' ; args '--cap-add sys_ptrace -v /jenkins/ccache:/home/jenkins/.ccache' } } + steps { script { utils = utils ?: load(utilsFileName) ; utils.stageTest(false, [cmake: + '-DCMAKE_BUILD_TYPE=Release -DNOISEPAGE_BUILD_TESTS=ON -DNOISEPAGE_UNITY_BUILD=ON -DNOISEPAGE_USE_JUMBOTESTS=ON' + ] ) } } + post { always { script { utils = utils ?: load(utilsFileName) ; utils.stageArchive() } } ; cleanup { deleteDir() } } } stage('ubuntu-20.04/clang-8.0 (Release/jumbotests)') { - agent { - docker { - image 'noisepage:focal' - args '--cap-add sys_ptrace -v /jenkins/ccache:/home/jenkins/.ccache' - } - } - environment { - CC="/usr/bin/clang-8" - CXX="/usr/bin/clang++-8" - } - steps { - sh 'echo $NODE_NAME' - sh script: './build-support/print_docker_info.sh', label: 'Print image information.' - - script{ - utils = utils ?: load(utilsFileName) - utils.noisePageBuild(buildType:utils.RELEASE_BUILD, isJumboTest:true) - } - - sh script: 'cd build && timeout 10s sudo python3 -B ../script/testing/kill_server.py 15721', label: 'Kill PID(15721)' - sh script: 'cd build && timeout 10s sudo python3 -B ../script/testing/kill_server.py 15722', label: 'Kill PID(15722)' - sh script: 'cd build && timeout 10s sudo python3 -B ../script/testing/kill_server.py 15723', label: 'Kill PID(15723)' - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - sh script: 'cd build/bin && PYTHONPATH=../.. timeout 20m python3 -m script.testing.replication.tests_simple --build-type=release', label: 'Replication (Simple)' - sh script: 'cd build && PYTHONPATH=.. timeout 20m python3 -m script.testing.junit --build-type=release --query-mode=simple', label: 'UnitTest (Simple)' - sh script: 'cd build && PYTHONPATH=.. timeout 60m python3 -m script.testing.junit --build-type=release --query-mode=simple -a "compiled_query_execution=True" -a "bytecode_handlers_path=./bytecode_handlers_ir.bc"', label: 'UnitTest (Simple, Compiled Execution)' - sh script: 'cd build && PYTHONPATH=.. timeout 20m python3 -m script.testing.junit --build-type=release --query-mode=extended', label: 'UnitTest (Extended)' - sh script: 'cd build && PYTHONPATH=.. timeout 60m python3 -m script.testing.junit --build-type=release --query-mode=extended -a "compiled_query_execution=True" -a "bytecode_handlers_path=./bytecode_handlers_ir.bc"', label: 'UnitTest (Extended, Compiled Execution)' - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - sh 'cd build && timeout 1h ninja check-tpl' - sh 'cd build && export BUILD_ABS_PATH=`pwd` && timeout 1h ninja jumbotests' - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' 
- } - post { - always { - archiveArtifacts(artifacts: 'build/Testing/**/*.xml', fingerprint: true) - xunit reduceLog: false, tools: [CTest(deleteOutputFiles: false, failIfNotNew: false, pattern: 'build/Testing/**/*.xml', skipNoTestFiles: false, stopProcessingIfError: false)] - } - cleanup { - deleteDir() - } - } + agent { docker { image 'noisepage:focal' ; args '--cap-add sys_ptrace -v /jenkins/ccache:/home/jenkins/.ccache' } } + environment { CC="/usr/bin/clang-8" ; CXX="/usr/bin/clang++-8" } + steps { script { utils = utils ?: load(utilsFileName) ; utils.stageTest(false, [cmake: + '-DCMAKE_BUILD_TYPE=Release -DNOISEPAGE_BUILD_TESTS=ON -DNOISEPAGE_UNITY_BUILD=ON -DNOISEPAGE_USE_JUMBOTESTS=ON' + ] ) } } + post { always { script { utils = utils ?: load(utilsFileName) ; utils.stageArchive() } } ; cleanup { deleteDir() } } } } } @@ -347,120 +110,15 @@ pipeline { stage('End-to-End') { parallel { stage('Debug') { - agent { - docker { - image 'noisepage:focal' - args '--cap-add sys_ptrace -v /jenkins/ccache:/home/jenkins/.ccache' - } - } - steps { - sh 'echo $NODE_NAME' - sh script: './build-support/print_docker_info.sh', label: 'Print image information.' - - script{ - utils = utils ?: load(utilsFileName) - utils.noisePageBuild(useASAN:true, isBuildTests:false) - } - - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - - sh script: ''' - cd build - PYTHONPATH=.. timeout 10m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_debug/tatp.json --build-type=debug - ''', label:'OLTPBench (TATP)' - - sh script: ''' - cd build - PYTHONPATH=.. timeout 10m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_debug/tatp_wal_disabled.json --build-type=debug - ''', label: 'OLTPBench (No WAL)' - - sh script: ''' - cd build - PYTHONPATH=.. timeout 10m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_debug/smallbank.json --build-type=debug - ''', label:'OLTPBench (Smallbank)' - - sh script: ''' - cd build - PYTHONPATH=.. timeout 10m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_debug/ycsb.json --build-type=debug - ''', label: 'OLTPBench (YCSB)' - - sh script: ''' - cd build - PYTHONPATH=.. timeout 5m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_debug/noop.json --build-type=debug - ''', label: 'OLTPBench (NOOP)' - - // TODO: Need to fix OLTP-Bench's TPC-C to support scalefactor correctly - sh script: ''' - cd build - PYTHONPATH=.. timeout 30m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_debug/tpcc.json --build-type=debug - ''', label: 'OLTPBench (TPCC)' - - sh script: ''' - cd build - PYTHONPATH=.. timeout 30m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_debug/tpcc_parallel_disabled.json --build-type=debug - ''', label: 'OLTPBench (No Parallel)' - - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - } - post { - cleanup { - deleteDir() - } - } + agent { docker { image 'noisepage:focal' ; args '--cap-add sys_ptrace -v /jenkins/ccache:/home/jenkins/.ccache' } } + steps { script { utils = utils ?: load(utilsFileName) ; utils.stageOltpbenchDebug() } } + post { cleanup { deleteDir() } } } stage('Performance') { - agent { label 'benchmark' } - environment { - //Do not change. 
- //Performance Storage Service(Django) authentication information. The credentials can only be changed on Jenkins webpage - PSS_CREATOR = credentials('pss-creator') - } - steps { - sh 'echo $NODE_NAME' - sh script: './build-support/print_docker_info.sh', label: 'Print image information.' - - script{ - utils = utils ?: load(utilsFileName) - utils.noisePageBuild(buildType:utils.RELEASE_BUILD, isBuildTests:false) - } - - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - - sh script:''' - cd build - PYTHONPATH=.. timeout 10m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_performance/tatp.json --build-type=release --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW} - ''', label: 'OLTPBench (TATP)' - - sh script:''' - cd build - PYTHONPATH=.. timeout 10m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_performance/tatp_wal_disabled.json --build-type=release --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW} - ''', label: 'OLTPBench (TATP No WAL)' - - sh script:''' - cd build - PYTHONPATH=.. timeout 10m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_performance/tatp_wal_ramdisk.json --build-type=release --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW} - ''', label: 'OLTPBench (TATP RamDisk WAL)' - - sh script:''' - cd build - PYTHONPATH=.. timeout 30m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_performance/tpcc.json --build-type=release --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW} - ''', label: 'OLTPBench (TPCC HDD WAL)' - - sh script:''' - cd build - PYTHONPATH=.. timeout 30m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_performance/tpcc_wal_disabled.json --build-type=release --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW} - ''', label: 'OLTPBench (TPCC No WAL)' - - sh script:''' - cd build - PYTHONPATH=.. timeout 30m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_performance/tpcc_wal_ramdisk.json --build-type=release --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW} - ''', label: 'OLTPBench (TPCC RamDisk WAL)' - } - post { - cleanup { - deleteDir() - } - } + agent { label 'benchmark' } + environment { PSS_CREATOR = credentials('pss-creator') /* Performance Storage Service (Django) auth credentials. Can only be changed from Jenkins webpage. */ } + steps { script { utils = utils ?: load(utilsFileName) ; utils.stageOltpbenchRelease() } } + post { cleanup { deleteDir() } } } } } @@ -468,141 +126,15 @@ pipeline { stage('Self-Driving') { parallel { stage('Workload Forecasting'){ - agent { - docker { - image 'noisepage:focal' - args '--cap-add sys_ptrace -v /jenkins/ccache:/home/jenkins/.ccache' - } - } - steps { - sh 'echo $NODE_NAME' - sh script: './build-support/print_docker_info.sh', label: 'Print image information.' - - script{ - utils = utils ?: load(utilsFileName) - utils.noisePageBuild(buildType:utils.RELEASE_BUILD, isBuildTests:false) - } - - // This scripts runs TPCC benchmark with query trace enabled. It also uses SET command to turn - // on query trace. 
- // --pattern_iter determines how many times a sequence of TPCC phases is run. Set to 3 so that - // enough trace could be generated for training and testing. - sh script :''' - cd build - PYTHONPATH=.. python3 -m script.self_driving.forecasting.forecaster_standalone --generate_data --pattern_iter=3 - ''', label: 'Generate trace and perform training' - - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - - sh script :''' - cd build - PYTHONPATH=.. python3 -m script.self_driving.forecasting.forecaster_standalone --model_save_path=model.pickle --models=LSTM - ''', label: 'Generate trace and perform training' - - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - - sh script: ''' - cd build - PYTHONPATH=.. python3 -m script.self_driving.forecasting.forecaster_standalone --test_file=query_trace.csv --model_load_path=model.pickle --test_model=LSTM - ''', label: 'Perform inference on the trained model' - - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - } - post { - cleanup { - deleteDir() - } - } + agent { docker { image 'noisepage:focal' ; args '--cap-add sys_ptrace -v /jenkins/ccache:/home/jenkins/.ccache' } } + steps { script { utils = utils ?: load(utilsFileName) ; utils.stageForecasting() } } + post { cleanup { deleteDir() } } } stage('Modeling'){ - agent { - docker { - image 'noisepage:focal' - label 'dgb' - args '--cap-add sys_ptrace -v /jenkins/ccache:/home/jenkins/.ccache' - } - } - environment { - CODECOV_TOKEN=credentials('codecov-token') - } - steps { - sh 'echo $NODE_NAME' - sh script: './build-support/print_docker_info.sh', label: 'Print image information.' - - script{ - utils = utils ?: load(utilsFileName) - utils.noisePageBuild(buildType:utils.RELEASE_BUILD, isBuildTests:false, isBuildSelfDrivingE2ETests: true) - } - - // This scripts runs TPCC benchmark with query trace enabled. It also uses SET command to turn - // on query trace. - // --pattern_iter determines how many times a sequence of TPCC phases is run. Set to 3 so that - // enough trace could be generated for training and testing. - sh script :''' - cd build - PYTHONPATH=.. python3 -m script.self_driving.forecasting.forecaster_standalone --generate_data --pattern_iter=3 - ''', label: 'Forecasting model training data generation' - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - - // This scripts runs TPCC benchmark with pipeline metrics enabled. - sh script :''' - cd build - PYTHONPATH=.. python3 -m script.self_driving.forecasting.forecaster_standalone --generate_data --record_pipeline_metrics --pattern_iter=1 - mkdir concurrent_runner_input - mv pipeline.csv concurrent_runner_input - ''', label: 'Interference model training data generation' - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - - // The parameters to the execution_runners target are (arbitrarily picked to complete tests within a reasonable time / picked to exercise all OUs). - // Specifically, the parameters chosen are: - // - execution_runner_rows_limit=100, which sets the maximal number of rows/tuples processed to be 100 (small table) - // - rerun=0, which skips rerun since we are not testing benchmark performance here - // - warm_num=1, which also tests the warm up phase for the execution_runners. 
- // With the current set of parameters, the input generation process will finish under 10min - sh script :''' - cd build/bin - ../benchmark/execution_runners --execution_runner_rows_limit=100 --rerun=0 --warm_num=1 - ''', label: 'OU model training data generation' - - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - - // Recompile the c++ binaries in Debug mode to generate code coverage. We had to compile in - // Release mode first to efficiently generate the data required by the tests - script{ - utils = utils ?: load(utilsFileName) - utils.noisePageBuild(isCodeCoverage:true, isBuildTests:false, isBuildSelfDrivingE2ETests: true) - } - - sh script: ''' - cd build - export BUILD_ABS_PATH=`pwd` - timeout 10m ninja self_driving_e2e_test - ''', label: 'Running self-driving end-to-end test' - - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - - // We need `coverage combine` because coverage files are generated separately for each test and - // then moved into the build root by `run-test.sh` - sh script :''' - cd build - coverage combine - ''', label: 'Combine Python code coverage' - - script{ - utils = utils ?: load(utilsFileName) - utils.cppCoverage() - } - - } - post { - always { - archiveArtifacts(artifacts: 'build/Testing/**/*.xml', fingerprint: true) - xunit reduceLog: false, tools: [CTest(deleteOutputFiles: false, failIfNotNew: false, pattern: 'build/Testing/**/*.xml', skipNoTestFiles: false, stopProcessingIfError: false)] - } - cleanup { - deleteDir() - } - } + agent { docker { image 'noisepage:focal' ; label 'dgb' ; args '--cap-add sys_ptrace -v /jenkins/ccache:/home/jenkins/.ccache' } } + environment { CODECOV_TOKEN=credentials('codecov-token') } + steps { script { utils = utils ?: load(utilsFileName) ; utils.stageModeling() } } + post { always { script { utils = utils ?: load(utilsFileName) ; utils.stageArchive() } } ; cleanup { deleteDir() } } } } } diff --git a/Jenkinsfile-nightly b/Jenkinsfile-nightly index 32734b004c..f9c3928686 100644 --- a/Jenkinsfile-nightly +++ b/Jenkinsfile-nightly @@ -1,123 +1,39 @@ -def utils // common build functions are loaded from Jenkinsfile-utils into this object -String utilsFileName = 'Jenkinsfile-utils' - +// Common build functions will be loaded into the "utils" object in every stage. +// This has to be done in every stage to support the Jenkins "restart from stage" feature. +def utils +String utilsFileName = 'Jenkinsfile-utils.groovy' pipeline { agent none - environment { - // Do not change. - // Performance Storage Service(Django) authentication information. - // The credentials can only be changed on the Jenkins webpage. - PSS_CREATOR = credentials('pss-creator') - } + environment { PSS_CREATOR = credentials('pss-creator') /* Performance Storage Service (Django) auth credentials. Can only be changed from Jenkins webpage. */ } options { buildDiscarder(logRotator(daysToKeepStr: '30')) parallelsAlwaysFailFast() } - - triggers { - cron('H H(2-3) * * *') - } - + triggers { cron('H H(2-3) * * *') } stages { stage('Artifact Stats') { - agent { - docker { - image 'noisepage:focal' - } - } - steps { - sh 'echo $NODE_NAME' - - // The following command compiles and builds the binary without caching and times the whole operation. - // The time gets output to a file which an artifact stats collector reads, in order to report the metrics. 
- script { - utils = utils ?: load(utilsFileName) - utils.noisePageBuild(useCache:false, buildType:utils.RELEASE_BUILD, isBuildTests:false, isRecordTime:true) - } - - sh script: ''' - cd build - PYTHONPATH=.. python3 -m script.testing.artifact_stats --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW} - ''', label: 'Artifact Stats' - } - post { - cleanup { - deleteDir() - } + agent { docker { image 'noisepage:focal' } } + steps { script { utils = utils ?: load(utilsFileName) ; utils.stageNightlyArtifact() } } + post { + unsuccessful { slackSend(color: "danger", message: "Nightly artifact stats failed!", channel: "#general") } + cleanup { deleteDir() } } } stage('Performance') { - agent { label 'benchmark' } - steps { - sh 'echo $NODE_NAME' - - script{ - utils = utils ?: load(utilsFileName) - utils.noisePageBuild(buildType:utils.RELEASE_BUILD, isBuildTests:false) - } - - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - - catchError(stageResult: 'Failure'){ - sh script:''' - cd build - PYTHONPATH=.. timeout 3h python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/nightly/nightly.json --build-type=release --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW} - ''', label: 'OLTPBench (HDD WAL)' - } - catchError(stageResult: 'Failure'){ - sh script:''' - cd build - PYTHONPATH=.. timeout 3h python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/nightly/nightly_ramdisk.json --build-type=release --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW} - ''', label: 'OLTPBench (RamDisk WAL)' - } - catchError(stageResult: 'Failure'){ - sh script:''' - cd build - PYTHONPATH=.. timeout 3h python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/nightly/nightly_wal_disabled.json --build-type=release --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW} - ''', label: 'OLTPBench (No WAL)' - } - - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - - archiveArtifacts(artifacts: 'build/oltp_result/**/*.*', excludes: 'build/oltp_result/**/*.csv', fingerprint: true) - } - post { - cleanup { - deleteDir() - } + agent { label 'benchmark' } + steps { script { utils = utils ?: load(utilsFileName) ; utils.stageNightlyPerformance() } } + post { + unsuccessful { slackSend(color: "danger", message: "Nightly performance failed!", channel: "#general") } + cleanup { deleteDir() } } } - stage('Microbenchmark') { - agent { label 'benchmark' } - steps { - sh 'echo $NODE_NAME' - - script{ - utils = utils ?: load(utilsFileName) - utils.noisePageBuild(buildType:utils.RELEASE_BUILD, isBuildTests:false, isBuildBenchmarks:true) - } - - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - - // The micro_bench configuration has to be consistent because we currently check against previous runs with the same config - // # of Threads: 4 - // WAL Path: Ramdisk - sh script:''' - cd script/testing - PYTHONPATH=../.. 
python3 -m script.testing.microbench --num-threads=4 --benchmark-path $(pwd)/../../build/benchmark --logfile-path=/mnt/ramdisk/benchmark.log --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW} - ''', label:'Microbenchmark' - - archiveArtifacts 'script/testing/*.json' - junit 'script/testing/*.xml' - - sh script: 'sudo lsof -i -P -n | grep LISTEN || true', label: 'Check ports.' - } - post { - cleanup { - deleteDir() - } + agent { label 'benchmark' } + steps { script { utils = utils ?: load(utilsFileName) ; utils.stageNightlyMicrobenchmark() } } + post { + unsuccessful { slackSend(color: "danger", message: "Nightly microbenchmark failed!", channel: "#general") } + cleanup { deleteDir() } } } } diff --git a/Jenkinsfile-utils b/Jenkinsfile-utils deleted file mode 100644 index e53ba4f744..0000000000 --- a/Jenkinsfile-utils +++ /dev/null @@ -1,160 +0,0 @@ -#!/usr/bin/env groovy -ENABLED = 'ON' -DISABLED = 'OFF' -UBUNTU = 'ubuntu' -DEBUG_BUILD = 'Debug' -RELEASE_BUILD = 'Release' -/** - * noisePageBuild will create a build directory and compile and build the - * noisepage binary in that directory. The options passed into the method - * determine the compilation and build options. Refer to the defaultArgs - * for the different options that can be passed in. - */ -void noisePageBuild(Map args = [:]) { - Map defaultArgs = [ - useCache: true, - buildType: DEBUG_BUILD, - os: UBUNTU, - isBuildTests: true, - useASAN: false, - isBuildBenchmarks: false, - isCodeCoverage: false, - isJumboTest: false, - isRecordTime: false, - isBuildSelfDrivingE2ETests: false, - ] - Map config = defaultArgs << args - String compileCmd = generateCompileCmd(config) - String buildCmd = generateBuildCmd(config) - String buildScript = generateBuildScript(compileCmd, buildCmd, config.isRecordTime) - - sh script:'echo y | sudo ./script/installation/packages.sh all', label: 'Installing packages' - - sh script:buildScript, label: 'Build' -} - -/** - * generateCompileCmd creates the cmake command string. It is based on the - * config passed into the function. The config arguments are the same as the - * defaultArgs in noisePageBuild. - */ -String generateCompileCmd(Map config = [:]) { - Map cmakeArgs = [ - '-DCMAKE_BUILD_TYPE': config.buildType, - '-DNOISEPAGE_UNITY_BUILD': ENABLED, - '-DNOISEPAGE_TEST_PARALLELISM': 1, - '-DNOISEPAGE_USE_ASAN': DISABLED, - '-DNOISEPAGE_USE_JEMALLOC': DISABLED, - '-DNOISEPAGE_BUILD_TESTS': ENABLED, - '-DNOISEPAGE_GENERATE_COVERAGE': DISABLED, - '-DNOISEPAGE_BUILD_BENCHMARKS': DISABLED, - '-DNOISEPAGE_USE_JUMBOTESTS': DISABLED, - '-DNOISEPAGE_BUILD_SELF_DRIVING_E2E_TESTS': DISABLED, - ] - - if (config.useCache) { - // currently ccache is only configured for ubuntu images - // For more info: https://github.com/cmu-db/noisepage/pull/830 - cmakeArgs['-DCMAKE_CXX_COMPILER_LAUNCHER'] = 'ccache' - } - - if (config.useASAN) { - cmakeArgs['-DNOISEPAGE_USE_ASAN'] = ENABLED - } - - if (config.buildType == RELEASE_BUILD && !config.isBuildTests && !config.useASAN) { - cmakeArgs['-DNOISEPAGE_USE_JEMALLOC'] = ENABLED - } - - if (config.isCodeCoverage) { - cmakeArgs['-DNOISEPAGE_GENERATE_COVERAGE'] = ENABLED - cmakeArgs['-DNOISEPAGE_UNITY_BUILD'] = DISABLED - // unity builds can throw off the accuracy of code coverage - } - - if (config.isBuildTests) { - // Different OS have different commands to get number of cpus - if (config.os == UBUNTU) { - cmakeArgs['-DNOISEPAGE_TEST_PARALLELISM'] = config.os == UBUNTU ? 
"\$(nproc)" : 1 - } - - if (config.isJumboTest) { - cmakeArgs['-DNOISEPAGE_USE_JUMBOTESTS'] = ENABLED - } - } else { - cmakeArgs['-DNOISEPAGE_BUILD_TESTS'] = DISABLED - } - - if (config.isBuildBenchmarks || config.isBuildSelfDrivingE2ETests) { - cmakeArgs['-DNOISEPAGE_BUILD_BENCHMARKS'] = ENABLED - } - - if (config.isBuildSelfDrivingE2ETests) { - cmakeArgs['-DNOISEPAGE_BUILD_SELF_DRIVING_E2E_TESTS'] = ENABLED - } - - String compileCmd = 'cmake -GNinja' - cmakeArgs.each { arg, value -> compileCmd += " $arg=$value" } - compileCmd += ' ..' - return compileCmd -} - -/* -generateBuildCmd creates the build command string based on the config passed -in. The config arguments are the same as the defaultArgs in noisePageBuild. -*/ -String generateBuildCmd(Map config = [:]) { - String buildCmd = 'ninja' - if (config.isBuildSelfDrivingE2ETests) { - buildCmd += ' execution_runners noisepage' - } - else if (!config.isBuildBenchmarks && !config.isBuildTests) { - buildCmd += ' noisepage' - } - return buildCmd -} - -/** - * generateBuildScript creates the full script string, including the commands to - * create the directory. It even allows us to wrap the compile command in a timer - * if we want to time how long the build takes. This time will be output to a - * file. - */ -String generateBuildScript(String compileCmd, String buildCmd, Boolean isRecordTime) { - String script = ''' - mkdir -p build - cd build - ''' - if (isRecordTime) { - script += """ - /usr/bin/time -o /tmp/noisepage-compiletime.txt -f %e sh -c \"$compileCmd - $buildCmd\"""" - } else { - script += """ - $compileCmd - $buildCmd""" - } - return script -} - -/** - * cppCoverage will collect the c++ code coverage information in the build directory, remove unrelated files from the - * coverage data, and report the coverage to codecov to show on GitHub. - */ -void cppCoverage() { - sh script :''' - cd build - lcov --directory . --capture --output-file coverage.info - lcov --remove coverage.info \'/usr/*\' --output-file coverage.info - lcov --remove coverage.info \'*/build/*\' --output-file coverage.info - lcov --remove coverage.info \'*/third_party/*\' --output-file coverage.info - lcov --remove coverage.info \'*/benchmark/*\' --output-file coverage.info - lcov --remove coverage.info \'*/test/*\' --output-file coverage.info - lcov --remove coverage.info \'*/src/main/*\' --output-file coverage.info - lcov --remove coverage.info \'*/src/include/common/error/*\' --output-file coverage.info - lcov --list coverage.info - curl -s https://codecov.io/bash | bash -s -- -X gcov - ''', label: 'Clean up c++ code coverage and report' -} - -return this diff --git a/Jenkinsfile-utils.groovy b/Jenkinsfile-utils.groovy new file mode 100644 index 0000000000..7212f8032b --- /dev/null +++ b/Jenkinsfile-utils.groovy @@ -0,0 +1,422 @@ +#!/usr/bin/env groovy + +// General structure of this file: +// +// SECTION: Stage functions. +// - stagePre() : Function that should be invoked at the start of every stageFoo() function. +// - stagePost() : Function that should be invoked at the end of every stageFoo() function. +// - stageFoo() : A Jenkins stage. +// +// SECTION: Utility functions. +// Random helper functions. +// +// You should probably know about Groovy's elvis operator ?:, +// where a ?: b means +// if (a) { return a; } else { return b; } + +// SECTION: Stage functions. + +/** This should be invoked before every stage. */ +void stagePre() { + sh script: 'echo $NODE_NAME', label: 'Print node name.' 
+ sh script: './build-support/print_docker_info.sh', label: 'Print image information.' +} + +/** This should be invoked after every stage. */ +void stagePost() { + // No-op. +} + +/** Test if the GitHub "ready-for-ci" label is present. Otherwise, abort the build. */ +void stageGithub() { + stagePre() + ready_for_build = sh script: 'python3 ./build-support/check_github_labels.py', returnStatus: true, label: 'Test Github labels.' + if (0 != ready_for_build) { + currentBuild.result = 'ABORTED' + error('Not ready for CI. Please add ready-for-ci tag in Github when you are ready to build your PR.') + } + stagePost() +} + +/** Test if the codebase passes basic checks: format, documentation, lint, clang-tidy. */ +void stageCheck() { + stagePre() + installPackages('build') + sh 'cd apidoc && doxygen -u Doxyfile.in && doxygen Doxyfile.in 2>warnings.txt && if [ -s warnings.txt ]; then cat warnings.txt; false; fi' + sh 'mkdir build' + sh 'cd build && cmake -GNinja ..' + sh 'cd build && timeout 20m ninja check-format' + sh 'cd build && timeout 20m ninja check-lint' + sh 'cd build && timeout 20m ninja check-censored' + sh 'cd build && ninja check-clang-tidy' + stagePost() +} + +/** Build and run the default "ninja" target. */ +void stageBuildDefault(Map args = [:]) { + stagePre() + installPackages() + buildNoisePage(args) + stagePost() +} + +/** Run the C++ unit tests, TPL tests, optionally generate coverage. */ +void stageTest(Boolean runPipelineMetrics, Map args = [:]) { + stagePre() + installPackages() + buildNoisePage(args) + + sh script: 'cd build && timeout 10s sudo python3 -B ../script/testing/kill_server.py 15721', label: 'Kill port (15721)' + sh script: 'cd build && timeout 10s sudo python3 -B ../script/testing/kill_server.py 15722', label: 'Kill port (15722)' + sh script: 'cd build && timeout 10s sudo python3 -B ../script/testing/kill_server.py 15723', label: 'Kill port (15723)' + + buildType = (args.cmake.toUpperCase().contains("CMAKE_BUILD_TYPE=RELEASE")) ? "release" : "debug" + + sh script: "cd build && PYTHONPATH=.. timeout 20m python3 -m script.testing.junit --build-type=$buildType --query-mode=simple", label: 'UnitTest (Simple)' + sh script: "cd build && PYTHONPATH=.. timeout 60m python3 -m script.testing.junit --build-type=$buildType --query-mode=simple -a 'compiled_query_execution=True' -a 'bytecode_handlers_path=./bytecode_handlers_ir.bc'", label: 'UnitTest (Simple, Compiled Execution)' + sh script: "cd build && PYTHONPATH=.. timeout 20m python3 -m script.testing.junit --build-type=$buildType --query-mode=extended", label: 'UnitTest (Extended)' + sh script: "cd build && PYTHONPATH=.. timeout 60m python3 -m script.testing.junit --build-type=$buildType --query-mode=extended -a 'compiled_query_execution=True' -a 'bytecode_handlers_path=./bytecode_handlers_ir.bc'", label: 'UnitTest (Extended, Compiled Execution)' + + if (runPipelineMetrics) { + sh script: "cd build && PYTHONPATH=.. timeout 20m python3 -m script.testing.junit --build-type=$buildType --query-mode=extended -a 'pipeline_metrics_enable=True' -a 'pipeline_metrics_sample_rate=100' -a 'counters_enable=True' -a 'query_trace_metrics_enable=True'", label: 'UnitTest (Extended with pipeline metrics, counters, and query trace metrics)' + sh script: "cd build && PYTHONPATH=.. 
timeout 60m python3 -m script.testing.junit --build-type=$buildType --query-mode=extended -a 'pipeline_metrics_enable=True' -a 'pipeline_metrics_sample_rate=100' -a 'counters_enable=True' -a 'query_trace_metrics_enable=True' -a 'compiled_query_execution=True' -a 'bytecode_handlers_path=./bytecode_handlers_ir.bc'", label: 'UnitTest (Extended, Compiled Execution with pipeline metrics, counters, and query trace metrics)' + } + + sh 'cd build && timeout 1h ninja check-tpl' + + if (args.cmake.toUpperCase().contains("NOISEPAGE_USE_JUMBOTESTS=ON")) { + sh 'cd build && export BUILD_ABS_PATH=`pwd` && timeout 1h ninja jumbotests' + } else { + sh 'cd build && export BUILD_ABS_PATH=`pwd` && timeout 1h ninja unittest' + } + + if (args.cmake.toUpperCase().contains("NOISEPAGE_GENERATE_COVERAGE=ON")) { + uploadCoverage() + } + + stagePost() +} + +/** Run OLTPBench tests in debug mode. */ +void stageOltpbenchDebug() { + stagePre() + installPackages() + buildNoisePage([buildCommand:'ninja noisepage', cmake: + '-DCMAKE_BUILD_TYPE=Debug -DNOISEPAGE_UNITY_BUILD=ON -DNOISEPAGE_USE_ASAN=ON' + ]) + + sh script: ''' + cd build + PYTHONPATH=.. timeout 10m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_debug/tatp.json --build-type=debug + ''', label:'OLTPBench (TATP)' + + sh script: ''' + cd build + PYTHONPATH=.. timeout 10m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_debug/tatp_wal_disabled.json --build-type=debug + ''', label: 'OLTPBench (No WAL)' + + sh script: ''' + cd build + PYTHONPATH=.. timeout 10m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_debug/smallbank.json --build-type=debug + ''', label:'OLTPBench (Smallbank)' + + sh script: ''' + cd build + PYTHONPATH=.. timeout 10m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_debug/ycsb.json --build-type=debug + ''', label: 'OLTPBench (YCSB)' + + sh script: ''' + cd build + PYTHONPATH=.. timeout 5m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_debug/noop.json --build-type=debug + ''', label: 'OLTPBench (NOOP)' + + sh script: ''' + cd build + PYTHONPATH=.. timeout 30m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_debug/tpcc.json --build-type=debug + ''', label: 'OLTPBench (TPCC)' + + sh script: ''' + cd build + PYTHONPATH=.. timeout 30m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_debug/tpcc_parallel_disabled.json --build-type=debug + ''', label: 'OLTPBench (No Parallel)' + + stagePost() +} + +/** Run OLTPBench tests in release mode, additionally publishing results. */ +void stageOltpbenchRelease() { + stagePre() + installPackages() + buildNoisePage([buildCommand:'ninja noisepage', cmake: + '-DCMAKE_BUILD_TYPE=Release -DNOISEPAGE_UNITY_BUILD=ON -DNOISEPAGE_USE_JEMALLOC=ON' + ]) + + sh script:''' + cd build + PYTHONPATH=.. timeout 10m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_performance/tatp.json --build-type=release --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW} + ''', label: 'OLTPBench (TATP)' + + sh script:''' + cd build + PYTHONPATH=.. 
timeout 10m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_performance/tatp_wal_disabled.json --build-type=release --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW} + ''', label: 'OLTPBench (TATP No WAL)' + + sh script:''' + cd build + PYTHONPATH=.. timeout 10m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_performance/tatp_wal_ramdisk.json --build-type=release --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW} + ''', label: 'OLTPBench (TATP RamDisk WAL)' + + sh script:''' + cd build + PYTHONPATH=.. timeout 30m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_performance/tpcc.json --build-type=release --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW} + ''', label: 'OLTPBench (TPCC HDD WAL)' + + sh script:''' + cd build + PYTHONPATH=.. timeout 30m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_performance/tpcc_wal_disabled.json --build-type=release --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW} + ''', label: 'OLTPBench (TPCC No WAL)' + + sh script:''' + cd build + PYTHONPATH=.. timeout 30m python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/end_to_end_performance/tpcc_wal_ramdisk.json --build-type=release --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW} + ''', label: 'OLTPBench (TPCC RamDisk WAL)' + + stagePost() +} + +void stageForecasting() { + stagePre() + installPackages() + buildNoisePage([buildCommand:'ninja noisepage', cmake: + '-DCMAKE_BUILD_TYPE=Release -DNOISEPAGE_UNITY_BUILD=ON -DNOISEPAGE_USE_JEMALLOC=ON' + ]) + + // The forecaster_standalone script runs TPC-C with query trace enabled. + // The forecaster_standalone script uses SET to enable query trace. + // --pattern_iter determines how many times to run a sequence of TPC-C phases. + // --pattern_iter is set to 3 (magic number) to generate enough data for training and testing. + sh script :''' + cd build + PYTHONPATH=.. python3 -m script.self_driving.forecasting.forecaster_standalone --generate_data --pattern_iter=3 + ''', label: 'Generate training data for forecasting model.' + + sh script :''' + cd build + PYTHONPATH=.. python3 -m script.self_driving.forecasting.forecaster_standalone --model_save_path=model.pickle --models=LSTM + ''', label: 'Train the model.' + + sh script: ''' + cd build + PYTHONPATH=.. python3 -m script.self_driving.forecasting.forecaster_standalone --test_file=query_trace.csv --model_load_path=model.pickle --test_model=LSTM + ''', label: 'Perform inference on the trained model.' + + stagePost() +} + +void stageModeling() { + stagePre() + installPackages() + + // Build the noisepage DBMS and the execution_runners binary in release mode for efficient data generation. + buildNoisePage([buildCommand:'ninja noisepage', cmake: + '-DCMAKE_BUILD_TYPE=Release -DNOISEPAGE_UNITY_BUILD=ON -DNOISEPAGE_USE_JEMALLOC=ON' + ]) + buildNoisePageTarget("execution_runners") + + // The forecaster_standalone script runs TPC-C with query trace enabled. + // The forecaster_standalone script uses SET to enable query trace. + // --pattern_iter determines how many times to run a sequence of TPC-C phases. 
+    // --pattern_iter is set to 3 (magic number) to generate enough data for training and testing.
+    sh script: '''
+    cd build
+    PYTHONPATH=.. python3 -m script.self_driving.forecasting.forecaster_standalone --generate_data --pattern_iter=3
+    ''', label: 'Generate training data for forecasting model.'
+
+    // This script runs TPC-C with pipeline metrics enabled, saving to build/concurrent_runner_input/pipeline.csv.
+    sh script: '''
+    cd build
+    PYTHONPATH=.. python3 -m script.self_driving.forecasting.forecaster_standalone --generate_data --record_pipeline_metrics --pattern_iter=1
+    mkdir concurrent_runner_input
+    mv pipeline.csv concurrent_runner_input
+    ''', label: 'Interference model training data generation'
+
+    // The parameters to the execution_runners target are arbitrarily picked to complete tests within 10 minutes while
+    // still exercising all OUs and generating a reasonable amount of training data.
+    //
+    // Specifically, the parameters chosen are:
+    // - execution_runner_rows_limit=100, which sets the max number of rows/tuples processed to be 100 (small table).
+    // - rerun=0, which skips rerun since we are not testing benchmark performance here.
+    // - warm_num=1, which also tests the warm up phase for the execution_runners.
+    sh script: '''
+    cd build/bin
+    ../benchmark/execution_runners --execution_runner_rows_limit=100 --rerun=0 --warm_num=1
+    ''', label: 'OU model training data generation'
+
+    // Recompile the noisepage DBMS in Debug mode with code coverage.
+    buildNoisePage([buildCommand: 'ninja noisepage', cmake:
+        '-DCMAKE_BUILD_TYPE=Debug -DNOISEPAGE_GENERATE_COVERAGE=ON'
+    ])
+
+    // Run the self_driving_e2e_test.
+    sh script: '''
+    cd build
+    export BUILD_ABS_PATH=`pwd`
+    timeout 10m ninja self_driving_e2e_test
+    ''', label: 'Running self-driving end-to-end test'
+
+    // We need `coverage combine` because coverage files are generated separately for each test and then moved into
+    // the build root by `run-test.sh`.
+    sh script: '''
+    cd build
+    coverage combine
+    ''', label: 'Combine Python code coverage.'
+
+    uploadCoverage()
+
+    stagePost()
+}
+
+void stageArchive() {
+    archiveArtifacts(artifacts: 'build/Testing/**/*.xml', fingerprint: true)
+    xunit reduceLog: false, tools: [CTest(deleteOutputFiles: false, failIfNotNew: false, pattern: 'build/Testing/**/*.xml', skipNoTestFiles: false, stopProcessingIfError: false)]
+}
+
+void stageNightlyArtifact() {
+    stagePre()
+    installPackages()
+    buildNoisePage([useCache: false, shouldRecordTime: true, buildCommand: 'ninja noisepage', cmake:
+        '-DCMAKE_BUILD_TYPE=Release -DNOISEPAGE_UNITY_BUILD=ON -DNOISEPAGE_USE_JEMALLOC=ON'
+    ])
+
+    sh script: '''
+    cd build
+    PYTHONPATH=.. python3 -m script.testing.artifact_stats --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW}
+    ''', label: 'Artifact Stats'
+
+    stagePost()
+}
+
+void stageNightlyPerformance() {
+    stagePre()
+    installPackages()
+    buildNoisePage([buildCommand: 'ninja noisepage', cmake:
+        '-DCMAKE_BUILD_TYPE=Release -DNOISEPAGE_UNITY_BUILD=ON -DNOISEPAGE_USE_JEMALLOC=ON'
+    ])
+
+    // catchError: mark the stage as failed, but continue executing subsequent steps.
+    catchError(stageResult: 'Failure') {
+        sh script: '''
+        cd build
+        PYTHONPATH=.. timeout 3h python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/nightly/nightly.json --build-type=release --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW}
+        ''', label: 'OLTPBench (HDD WAL)'
+    }
+    catchError(stageResult: 'Failure') {
+        sh script: '''
+        cd build
+        PYTHONPATH=.. timeout 3h python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/nightly/nightly_ramdisk.json --build-type=release --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW}
+        ''', label: 'OLTPBench (RamDisk WAL)'
+    }
+    catchError(stageResult: 'Failure') {
+        sh script: '''
+        cd build
+        PYTHONPATH=.. timeout 3h python3 -m script.testing.oltpbench --config-file=../script/testing/oltpbench/configs/nightly/nightly_wal_disabled.json --build-type=release --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW}
+        ''', label: 'OLTPBench (No WAL)'
+    }
+
+    archiveArtifacts(artifacts: 'build/oltp_result/**/*.*', excludes: 'build/oltp_result/**/*.csv', fingerprint: true)
+    stagePost()
+}
+
+void stageNightlyMicrobenchmark() {
+    stagePre()
+    installPackages()
+    buildNoisePage([buildCommand: 'ninja noisepage', cmake:
+        '-DCMAKE_BUILD_TYPE=Release -DNOISEPAGE_BUILD_BENCHMARKS=ON -DNOISEPAGE_UNITY_BUILD=ON -DNOISEPAGE_USE_JEMALLOC=ON -DNOISEPAGE_USE_LOGGING=OFF'
+    ])
+
+    // The micro_bench configuration has to stay consistent because we currently check against previous runs with the same config:
+    //   # of Threads: 4
+    //   WAL Path: Ramdisk
+    sh script: '''
+    cd script/testing
+    PYTHONPATH=../.. python3 -m script.testing.microbench --num-threads=4 --benchmark-path $(pwd)/../../build/benchmark --logfile-path=/mnt/ramdisk/benchmark.log --publish-results=prod --publish-username=${PSS_CREATOR_USR} --publish-password=${PSS_CREATOR_PSW}
+    ''', label: 'Microbenchmark'
+
+    archiveArtifacts 'script/testing/*.json'
+    junit 'script/testing/*.xml'
+    stagePost()
+}
+
+// SECTION: Utility functions.
+
+/** Install the packages. installType = {build, all}. */
+void installPackages(String installType='all') {
+    sh script: "echo y | sudo ./script/installation/packages.sh $installType", label: 'Installing packages.'
+}
+
+/** Create a build folder, set up CMake flags, and build NoisePage. */
+void buildNoisePage(Map args = [:]) {
+    // Disable most options by default. Callers should be explicit.
+    Map config = [
+        useCache: true,
+        shouldRecordTime: false,
+        buildCommand: 'ninja',
+        cmake: '',
+    ]
+
+    config << args
+
+    String cmakeCmd = 'cmake -GNinja'
+    if (config.useCache) {
+        cmakeCmd += ' -DCMAKE_CXX_COMPILER_LAUNCHER=ccache'
+    }
+    cmakeCmd += ' '
+    cmakeCmd += config.cmake
+    cmakeCmd += ' ..'
+
+    String buildCmd = config.buildCommand
+
+    String buildScript = '''
+    mkdir -p build
+    cd build
+    '''
+    buildScript += "$cmakeCmd"
+    if (config.shouldRecordTime) {
+        buildScript += """
+        /usr/bin/time -o /tmp/noisepage-compiletime.txt -f %e sh -c \"$buildCmd\"
+        """
+    } else {
+        buildScript += """
+        $buildCmd
+        """
+    }
+
+    sh script: buildScript, label: 'Build NoisePage.'
+}
+
+/** Build the specified target. buildNoisePage() MUST have been called first! */
+void buildNoisePageTarget(String target) {
+    sh script: """
+    cd build
+    ninja $target
+    """
+}
+
+/** Collect and process coverage information from the build directory; upload coverage to Codecov. */
+void uploadCoverage() {
+    sh script: '''
+    cd build
+    lcov --directory . --capture --output-file coverage.info
+    lcov --remove coverage.info \'/usr/*\' --output-file coverage.info
+    lcov --remove coverage.info \'*/build/*\' --output-file coverage.info
+    lcov --remove coverage.info \'*/third_party/*\' --output-file coverage.info
+    lcov --remove coverage.info \'*/benchmark/*\' --output-file coverage.info
+    lcov --remove coverage.info \'*/test/*\' --output-file coverage.info
+    lcov --remove coverage.info \'*/src/main/*\' --output-file coverage.info
+    lcov --remove coverage.info \'*/src/include/common/error/*\' --output-file coverage.info
+    lcov --list coverage.info
+    curl -s https://codecov.io/bash | bash -s -- -X gcov
+    ''', label: 'Process code coverage and upload to Codecov.'
+}
+
+return this