# Mirror of https://gitlab.isc.org/isc-projects/bind9 (synced 2025-08-22 01:59:26 +00:00).
#
# Latest change: we switched to an updated dataset for the shotgun jobs. The new
# underlying traffic caused the more sensitive doh-get (and, to a lesser extent,
# dot) jobs to overload the resolver, making them unstable and unreliable due to
# an increased number of timeouts. The load parameters are readjusted slightly to
# keep timeouts below ~2 % in the cold-cache scenario and stabilize the job results.
variables:
  # Not normally needed, but may be needed if some script uses `apt-get install`.
  DEBIAN_FRONTEND: noninteractive
  # Locale settings do not affect the build, but might affect tests.
  LC_ALL: C

  CI_REGISTRY_IMAGE: registry.gitlab.isc.org/isc-projects/images/bind9
  CCACHE_DIR: "/ccache"

  GIT_DEPTH: 1
  GIT_CLEAN_FLAGS: -ffdxq

  # The following values may be overwritten in GitLab's CI/CD Variables Settings.
  BUILD_PARALLEL_JOBS: 6
  TEST_PARALLEL_JOBS: 4

  CLANG_VERSION: 20
  CLANG: "clang-${CLANG_VERSION}"
  SCAN_BUILD: "scan-build-${CLANG_VERSION}"
  LLVM_SYMBOLIZER: "/usr/lib/llvm-${CLANG_VERSION}/bin/llvm-symbolizer"
  CLANG_FORMAT: "clang-format-${CLANG_VERSION}"

  CFLAGS_COMMON: -fno-omit-frame-pointer -fno-optimize-sibling-calls

  UBASAN_CONFIGURE_FLAGS_COMMON: "-Db_sanitize=address,undefined -Didn=enabled -Djemalloc=disabled -Dtracing=disabled"
  TSAN_CONFIGURE_FLAGS_COMMON: "-Db_sanitize=thread -Doptimization=2 -Ddebug=true -Didn=enabled -Dlocktype=system -Djemalloc=disabled --pkg-config-path /opt/tsan/lib/pkgconfig"

  # Pass run-time flags to AddressSanitizer to get core dumps on error.
  ASAN_OPTIONS: abort_on_error=1:disable_coredump=0:unmap_shadow_on_exit=1:detect_odr_violation=0
  ASAN_SYMBOLIZER_PATH: "${LLVM_SYMBOLIZER}"

  TSAN_OPTIONS_COMMON: "disable_coredump=0 second_deadlock_stack=1 atexit_sleep_ms=1000 history_size=7 log_exe_name=true log_path=tsan"
  TSAN_SUPPRESSIONS: "suppressions=${CI_PROJECT_DIR}/.tsan-suppress"
  TSAN_OPTIONS_DEBIAN: "${TSAN_OPTIONS_COMMON} ${TSAN_SUPPRESSIONS} external_symbolizer_path=${LLVM_SYMBOLIZER}"
  TSAN_OPTIONS_FEDORA: "${TSAN_OPTIONS_COMMON} ${TSAN_SUPPRESSIONS} external_symbolizer_path=/usr/bin/llvm-symbolizer"

  UBSAN_OPTIONS: "halt_on_error=1:abort_on_error=1:disable_coredump=0"

  WITHOUT_LIBEDIT: "-Dline=disabled"
  WITH_LIBEDIT: "-Dline=enabled"

  STRESS_CONFIGURE_FLAGS: "-Doptimization=g -Dcmocka=disabled"

  INSTALL_PATH: "${CI_PROJECT_DIR}/.local"

  # In multithreaded unit tests, abort on the first failure.
  CMOCKA_TEST_ABORT: 1

  # Disable pytest's "cacheprovider" plugin to prevent it from creating
  # cross-testrun files, as there is no need to use that feature in CI.
  PYTEST_ADDOPTS: "-p no:cacheprovider"

  HYPOTHESIS_PROFILE: "ci"

  # Some jobs may clean up the build artifacts unless this is set to 0.
  CLEAN_BUILD_ARTIFACTS_ON_SUCCESS: 1

  # DNS Shotgun performance testing defaults
  SHOTGUN_ROUNDS: 1
  SHOTGUN_DURATION: 120
  # allow unlimited improvements against the baseline
  SHOTGUN_EVAL_THRESHOLD_CPU_MIN: '-inf'
  SHOTGUN_EVAL_THRESHOLD_MEMORY_MIN: '-inf'
  SHOTGUN_EVAL_THRESHOLD_RCODE_MAX: '+inf'
  SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_MIN: '-inf'
  SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_DRIFT_MIN: '-inf'

  # Even though there is only one job per runtime environment, the GitLab
  # "instance" executor insists on cloning the Git repository to a path that
  # contains a variable number from zero to the "maximum concurrent instances
  # count" allowed on the GitLab Runner. See the "0" directory in this example
  # path: /home/ec2-user/builds/t1_4FZzvz/0/isc-projects/bind9/.git/.
  #
  # This is not a problem for isolated jobs like "stress" tests that depend on
  # no other jobs. However, it is a problem for jobs that need other jobs'
  # artifacts. For example, a system test job that has its Git repo cloned to
  # the "/1/" sub-path will fail if it downloads build job artifacts that have
  # ./configure output files with "/0/" recorded in their sub-paths.
  GIT_CLONE_PATH_INSTANCE_EXECUTOR: "/home/ec2-user/builds/${CI_PROJECT_PATH}/"
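# A minimal sketch (not an existing job) of how a job running on the "instance"
# executor opts into the fixed clone path described above; the FIPS and stress
# jobs further down do exactly this:
#
#   example-fips-job:                # hypothetical job name
#     variables:
#       GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
#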
default:
  # Allow all running CI jobs to be automatically canceled when a new
  # version of a branch is pushed.
  #
  # See: https://docs.gitlab.com/ee/ci/pipelines/settings.html#auto-cancel-redundant-pipelines
  interruptible: true

  # AWS can interrupt a spot instance at any time, so retry the job when an
  # interruption event happens, to avoid a pipeline failure.
  retry:
    max: 2
    when:
      - runner_system_failure

stages:
  - precheck
  - build
  - unit
  - system
  - performance
  - docs
  - postcheck
  - postmerge
  - release

### Runner Tag Templates
|
|
|
|
# AlmaLinux autoscaling GitLab Runners on AWS EC2 (amd64)
|
|
|
|
.almalinux-8fips-amd64-image: &almalinux_8fips_amd64_image
|
|
tags:
|
|
- almalinux-8
|
|
- amd64
|
|
- autoscaler
|
|
- aws
|
|
- fips
|
|
- shell
|
|
|
|
.almalinux-9fips-amd64-image: &almalinux_9fips_amd64_image
|
|
tags:
|
|
- almalinux-9
|
|
- amd64
|
|
- autoscaler
|
|
- aws
|
|
- fips
|
|
- shell
|
|
|
|
.almalinux-10fips-amd64-image: &almalinux_10fips_amd64_image
|
|
tags:
|
|
- almalinux-10
|
|
- amd64
|
|
- autoscaler
|
|
- aws
|
|
- fips
|
|
- shell
|
|
|
|
# Autoscaling GitLab Runner on AWS EC2 (amd64)
|
|
|
|
.linux-amd64: &linux_amd64
|
|
tags:
|
|
- linux
|
|
- aws
|
|
- runner-manager
|
|
- amd64
|
|
|
|
# Autoscaling GitLab Runner on AWS EC2 (arm64)
|
|
|
|
.linux-arm64: &linux_arm64
|
|
tags:
|
|
- linux
|
|
- aws
|
|
- runner-manager
|
|
- aarch64
|
|
|
|
.freebsd-autoscaler-13-amd64-tags: &freebsd_autoscaler_13_amd64_tags
|
|
tags:
|
|
- amd64
|
|
- autoscaler
|
|
- aws
|
|
- bsd-stress-test-1
|
|
- shell
|
|
- stress-test
|
|
|
|
.freebsd-autoscaler-14-amd64-tags: &freebsd_autoscaler_14_amd64_tags
|
|
tags:
|
|
- amd64
|
|
- autoscaler
|
|
- aws
|
|
- bsd-stress-test-2
|
|
- shell
|
|
- stress-test
|
|
|
|
.freebsd-autoscaler-amd64: &freebsd_autoscaler_amd64
|
|
variables:
|
|
CC: clang
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
|
|
# Use MIT Kerberos5 for BIND 9 GSS-API support because of FreeBSD Heimdal
|
|
# incompatibility; see https://bugs.freebsd.org/275241.
|
|
EXTRA_CONFIGURE: "${WITH_LIBEDIT} -Doptimization=g --native-file ci/freebsd.ini"
|
|
|
|
# Autoscaling GitLab Runner on AWS EC2 (FreeBSD 13)
|
|
|
|
.freebsd-autoscaler-13-amd64: &freebsd_autoscaler_13_amd64
|
|
<<: *freebsd_autoscaler_amd64
|
|
<<: *freebsd_autoscaler_13_amd64_tags
|
|
|
|
# Autoscaling GitLab Runner on AWS EC2 (FreeBSD 14)
|
|
|
|
.freebsd-autoscaler-14-amd64: &freebsd_autoscaler_14_amd64
|
|
<<: *freebsd_autoscaler_amd64
|
|
<<: *freebsd_autoscaler_14_amd64_tags
|
|
|
|
### Docker Image Templates
|
|
|
|
# Alpine Linux
|
|
|
|
.alpine-3.22-amd64: &alpine_3_22_amd64_image
|
|
image: "$CI_REGISTRY_IMAGE:alpine-3.22-amd64"
|
|
<<: *linux_amd64
|
|
|
|
# AlmaLinux
|
|
|
|
.almalinux-8-amd64: &almalinux_8_amd64_image
|
|
image: "$CI_REGISTRY_IMAGE:almalinux-8-amd64"
|
|
<<: *linux_amd64
|
|
|
|
.almalinux-9-amd64: &almalinux_9_amd64_image
|
|
image: "$CI_REGISTRY_IMAGE:almalinux-9-amd64"
|
|
<<: *linux_amd64
|
|
|
|
.almalinux-10-amd64: &almalinux_10_amd64_image
|
|
image: "$CI_REGISTRY_IMAGE:almalinux-10-amd64"
|
|
<<: *linux_amd64
|
|
|
|
# Debian
|
|
|
|
.debian-bookworm-amd64: &debian_bookworm_amd64_image
|
|
image: "$CI_REGISTRY_IMAGE:debian-bookworm-amd64"
|
|
<<: *linux_amd64
|
|
|
|
.tsan-debian-bookworm-amd64: &tsan_debian_bookworm_amd64_image
|
|
image: "$CI_REGISTRY_IMAGE:tsan-debian-bookworm-amd64"
|
|
<<: *linux_amd64
|
|
|
|
.debian-bookworm-amd64cross32: &debian_bookworm_amd64cross32_image
|
|
image: "$CI_REGISTRY_IMAGE:debian-bookworm-amd64cross32"
|
|
<<: *linux_amd64
|
|
|
|
.debian-sid-amd64: &debian_sid_amd64_image
|
|
image: "$CI_REGISTRY_IMAGE:debian-sid-amd64"
|
|
<<: *linux_amd64
|
|
|
|
# openSUSE Tumbleweed
|
|
|
|
.tumbleweed-latest-amd64: &tumbleweed_latest_amd64_image
|
|
image: "$CI_REGISTRY_IMAGE:tumbleweed-latest-amd64"
|
|
<<: *linux_amd64
|
|
|
|
# Fedora
|
|
|
|
.tsan-fedora-42-amd64: &tsan_fedora_42_amd64_image
|
|
image: "$CI_REGISTRY_IMAGE:tsan-fedora-42-amd64"
|
|
<<: *linux_amd64
|
|
|
|
.fedora-42-amd64: &fedora_42_amd64_image
|
|
image: "$CI_REGISTRY_IMAGE:fedora-42-amd64"
|
|
<<: *linux_amd64
|
|
|
|
.fedora-42-arm64: &fedora_42_arm64_image
|
|
image: "$CI_REGISTRY_IMAGE:fedora-42-arm64"
|
|
<<: *linux_arm64
|
|
|
|
# Ubuntu
|
|
|
|
.ubuntu-jammy-amd64: &ubuntu_jammy_amd64_image
|
|
image: "$CI_REGISTRY_IMAGE:ubuntu-jammy-amd64"
|
|
<<: *linux_amd64
|
|
|
|
.ubuntu-noble-amd64: &ubuntu_noble_amd64_image
|
|
image: "$CI_REGISTRY_IMAGE:ubuntu-noble-amd64"
|
|
<<: *linux_amd64
|
|
|
|
# Base image
|
|
# This is a meta image that is used as a base for non-specific jobs
|
|
|
|
.base: &base_image
|
|
<<: *debian_bookworm_amd64_image
|
|
|
|
### Job Templates

.api-pipelines-schedules-tags-triggers-web-triggering-rules: &api_pipelines_schedules_tags_triggers_web_triggering_rules
  rules:
    - if: '$CI_PIPELINE_SOURCE =~ /^(api|pipeline|schedule|trigger|web)$/'
    - if: '$CI_COMMIT_TAG != null'

.default-triggering-rules_list: &default_triggering_rules_list
  - if: '$CI_PIPELINE_SOURCE =~ /^(api|merge_request_event|pipeline|schedule|trigger|web)$/'
  - if: '$CI_COMMIT_TAG != null'

.default-triggering-rules: &default_triggering_rules
  rules:
    - *default_triggering_rules_list

.precheck: &precheck_job
  <<: *default_triggering_rules
  <<: *base_image
  stage: precheck

.configure: &configure
  - meson setup
    --libdir=lib
    -Dcmocka=enabled
    -Ddeveloper=enabled
    -Dleak-detection=enabled
    -Doptimization=1
    $EXTRA_CONFIGURE
    build

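# For illustration only: with a job that sets EXTRA_CONFIGURE to "-Didn=enabled"
# (as gcc:almalinux8:amd64 below does), the anchor above expands to roughly:
#
#   meson setup --libdir=lib -Dcmocka=enabled -Ddeveloper=enabled \
#     -Dleak-detection=enabled -Doptimization=1 -Didn=enabled build
#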
# change directory to the workspace before including this
.find_python: &find_python
  - PYTHON="$(cat build/bin/tests/system/isctest/vars/.build_vars/PYTHON)"
  - test -x "$PYTHON"

.find_pytest: &find_pytest
  - PYTEST="$(cat build/bin/tests/system/isctest/vars/.build_vars/PYTEST)"
  - test -x "$PYTEST"

.parse_tsan: &parse_tsan
  - *find_python
  - find -name 'tsan.*' -exec "$PYTHON" util/parse_tsan.py {} \;

.check_readline_setup: &check_readline_setup
  - if [[ -n "${WITHOUT_LIBEDIT}" ]]; then
      ! grep "^#define HAVE_LIBEDIT" build/config.h;
    elif [[ -n "${WITH_LIBEDIT}" ]]; then
      grep -e "^#define HAVE_LIBEDIT" build/config.h;
    fi

.list_installed_package_versions: &list_installed_package_versions
  - echo -e "\e[0Ksection_start:`date +%s`:installed_packages_section[collapsed=true]\r\e[0KHeader of the installed packages collapsible section"
  - ( pip3 list || pip list || echo "no pip" ) 2>/dev/null
  - for cmd in "apk info --verbose" "dpkg-query --show --showformat='\${Package}-\${Version}\n'" "pkg info --quiet" "rpm -qa | sort"; do
      eval "$cmd" 2>/dev/null && break;
    done || true
  - echo -e "\e[0Ksection_end:`date +%s`:installed_packages_section\r\e[0K"

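# Note: the "\e[0Ksection_start:<unix-timestamp>:<name>[collapsed=true]\r\e[0K<text>"
# and matching "section_end" escape sequences used above are GitLab's syntax for
# collapsible job-log sections; everything printed between the two markers is
# folded under the given header in the job log.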
# Unpack release tarball and continue work in the extracted directory.
.unpack_release_tarball: &unpack_release_tarball
  - tar --extract --file build/meson-dist/bind-*.tar.xz
  - rm -f bind-*.tar.xz
  - cd bind-*

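# The anchor below checks build/host FIPS consistency: when the freshly built
# feature-test binary reports FIPS support, the host kernel must also report
# /proc/sys/crypto/fips_enabled=1, otherwise the job fails.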
.fips-feature-test: &fips_feature_test
  - if build/feature-test --have-fips-mode; then
      if [ "$(cat /proc/sys/crypto/fips_enabled)" = "1" ]; then
        echo "FIPS is enabled";
      else
        echo "FIPS is disabled";
        exit 1;
      fi
    fi

.check_for_junit_xml: &check_for_junit_xml
  # test if the junit.xml file exists and is longer than 40 bytes
  # (i.e., contains more than `<testsuites><testsuite /></testsuites>`)
  - if [ -f "$CI_PROJECT_DIR"/junit.xml ]; then
      if [ $(wc -c < "$CI_PROJECT_DIR"/junit.xml) -gt 40 ]; then
        echo "junit.xml file exists and is longer than 40 bytes.";
      else
        echo "junit.xml file exists but is too short.";
        exit 1;
      fi
    else
      echo "junit.xml file does not exist.";
      exit 1;
    fi

.build: &build_job
  <<: *default_triggering_rules
  stage: build
  before_script:
    - test -w "${CCACHE_DIR}" && export PATH="/usr/lib/ccache:${PATH}"
    - *list_installed_package_versions
  script:
    - *configure
    - *check_readline_setup
    - meson compile -C build
    - meson compile -C build system-test-dependencies
    - test -z "${NO_BUILD_TEST_PREREQ}" && ninja -C build meson-test-prereq
    - test -z "${RUN_MESON_INSTALL}" || meson install -C build --destdir=$INSTALL_PATH
    - test -z "${RUN_MESON_INSTALL}" || DESTDIR="${INSTALL_PATH}" sh build/util/check-make-install
    #- test -z "${CROSS_COMPILATION}" || grep -F -A 1 "checking whether we are cross compiling" config.log | grep -q "result.*yes"
    - test -z "${CROSS_COMPILATION}" || file build/lib/dns/gen | grep -F -q "ELF 64-bit LSB"
    #- test -z "${CROSS_COMPILATION}" || ( ! git ls-files -z --others --exclude lib/dns/gen | xargs -0 file | grep "ELF 64-bit LSB" )
    - build/named -V
    - *fips_feature_test
  needs: []
  artifacts:
    untracked: true
    when: always

.setup_interfaces: &setup_interfaces
  - if [ "$(id -u)" -eq "0" ]; then
      sh -x build/bin/tests/system/ifconfig.sh up;
    else
      sudo sh -x build/bin/tests/system/ifconfig.sh up;
    fi

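# The awk one-liners below print just the FAILURES and ERRORS sections of the
# saved pytest output (everything between the "=== FAILURES ===" / "=== ERRORS ==="
# headers and the next "=== ... ===" delimiter), so the relevant details show up
# in the job log without re-running the tests.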
.display_pytest_failures: &display_pytest_failures
  - awk '/^=+ FAILURES =+/{flag=1;next}/^=+.*=+$/{flag=0}flag' bin/tests/system/pytest.out.txt || true
  - awk '/^=+ ERRORS =+/{flag=1;next}/^=+.*=+$/{flag=0}flag' bin/tests/system/pytest.out.txt || true

.shotgun: &shotgun_job
  <<: *base_image
  stage: performance
  rules:
    - if: '$CI_MERGE_REQUEST_DIFF_BASE_SHA != null'
      variables:
        BASELINE: '$CI_MERGE_REQUEST_DIFF_BASE_SHA'
    - &shotgun_rule_tag
      if: '$CI_COMMIT_TAG != null'
      variables:
        SHOTGUN_ROUNDS: 3
    - &shotgun_rule_other
      if: '$CI_PIPELINE_SOURCE =~ /^(api|pipeline|schedule|trigger|web)$/'
  # When using data from a single run, the overall instability of the results
  # causes a rather high false-positive rate; rerun the test to attempt to reduce it.
  retry: 1
  script:
    - if [ -z "$BASELINE" ]; then export BASELINE=$BIND_BASELINE_VERSION; fi # this dotenv variable can't be set in the rules section, because rules are evaluated before any jobs run
    - PIPELINE_ID=$(curl -s -X POST --fail
      -F "token=$CI_JOB_TOKEN"
      -F ref=main
      -F "variables[SHOTGUN_TEST_VERSION]=['$CI_COMMIT_REF_NAME', '$BASELINE']"
      -F "variables[SHOTGUN_DURATION]=300"
      -F "variables[SHOTGUN_ROUNDS]=$SHOTGUN_ROUNDS"
      -F "variables[SHOTGUN_TRAFFIC_MULTIPLIER]=$SHOTGUN_TRAFFIC_MULTIPLIER"
      -F "variables[SHOTGUN_SCENARIO]=$SHOTGUN_SCENARIO"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_CPU_MIN]=$SHOTGUN_EVAL_THRESHOLD_CPU_MIN"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_CPU_MAX]=$SHOTGUN_EVAL_THRESHOLD_CPU_MAX"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_MEMORY_MIN]=$SHOTGUN_EVAL_THRESHOLD_MEMORY_MIN"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_MEMORY_MAX]=$SHOTGUN_EVAL_THRESHOLD_MEMORY_MAX"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_RCODE_MIN]=$SHOTGUN_EVAL_THRESHOLD_RCODE_MIN"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_RCODE_MAX]=$SHOTGUN_EVAL_THRESHOLD_RCODE_MAX"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_MIN]=$SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_MIN"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_MAX]=$SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_MAX"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_DRIFT_MIN]=$SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_DRIFT_MIN"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_DRIFT_MAX]=$SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_DRIFT_MAX"
      https://gitlab.isc.org/api/v4/projects/188/trigger/pipeline | jq .id)
    - util/ci-wait-shotgun.py $PIPELINE_ID
  needs:
    - job: ci-variables
      artifacts: true
  timeout: 2h

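# For reference, a minimal, hedged sketch of kicking off the same downstream
# shotgun pipeline by hand (project ID 188 and the variable names are taken from
# the job template above; TRIGGER_TOKEN stands in for whatever trigger token you
# have available and is not defined in this file):
#
#   curl -X POST --fail \
#     -F "token=$TRIGGER_TOKEN" \
#     -F ref=main \
#     -F "variables[SHOTGUN_SCENARIO]=udp" \
#     -F "variables[SHOTGUN_TRAFFIC_MULTIPLIER]=15" \
#     "https://gitlab.isc.org/api/v4/projects/188/trigger/pipeline"
#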
.system_test_common: &system_test_job
  <<: *default_triggering_rules
  stage: system
  before_script:
    - *setup_interfaces
  # This script needs to: 1) fail if the system tests fail, 2) fail if
  # the junit.xml file is broken, 3) produce the junit.xml file even if
  # the system tests fail. Therefore, $RET is used to "cache" the
  # result of running pytest, as interrupting the script immediately when
  # system tests fail would make checking the contents of the junit.xml
  # file impossible (GitLab Runner uses "set -o pipefail").
  script:
    - *fips_feature_test
    - *find_pytest
    - *find_python
    - ( if [ "${CI_DISPOSABLE_ENVIRONMENT}" = "true" ]; then sleep 3000; "$PYTHON" "${CI_PROJECT_DIR}/util/get-running-system-tests.py"; fi ) &
    - cd bin/tests/system
    - RET=0
    - >
      ("$PYTEST" --junit-xml="$CI_PROJECT_DIR"/junit.xml -n "$TEST_PARALLEL_JOBS" | tee pytest.out.txt) || RET=1
    - *check_for_junit_xml
    - (exit $RET)
    - '( ! grep -F "grep: warning:" pytest.out.txt )'
    - test "$CLEAN_BUILD_ARTIFACTS_ON_SUCCESS" -eq 0 || ( cd ../../.. && ninja -C build clean >/dev/null 2>&1 )
  after_script:
    - *display_pytest_failures
  artifacts:
    untracked: true
    exclude:
      - "**/__pycache__/**/*"
    when: always
    reports:
      junit: junit.xml

.system_test_tsan: &system_test_tsan_job
  <<: *system_test_job
  after_script:
    - *display_pytest_failures
    - *parse_tsan

.unit_test_common: &unit_test_job
  <<: *default_triggering_rules
  stage: unit
  # This script needs to: 1) fail if the unit tests fail, 2) fail if the
  # junit.xml file is broken, 3) produce the junit.xml file even if the
  # unit tests fail. Therefore, $RET is used to "cache" the result of
  # running "meson test", as interrupting the script immediately when
  # unit tests fail would make checking the contents of the junit.xml
  # file impossible (GitLab Runner uses "set -o pipefail").
  script:
    - *fips_feature_test
    - RET=0
    - meson test -C build --no-rebuild --no-suite flaky || RET=1
    - cp build/meson-logs/testlog.junit.xml $CI_PROJECT_DIR/junit.xml
    - meson test -C build --no-rebuild --suite flaky --logbase testlog-flaky || meson test -C build --no-rebuild --suite flaky --logbase testlog-flaky || RET=1
    - *check_for_junit_xml
    - (exit $RET)
    - test "$CLEAN_BUILD_ARTIFACTS_ON_SUCCESS" -eq 0 || ninja -C build clean >/dev/null 2>&1
  artifacts:
    untracked: true
    when: always
    reports:
      junit:
        - junit.xml
        - build/meson-logs/testlog-flaky.junit.xml

.unit_test_tsan: &unit_test_tsan_job
  <<: *unit_test_job
  after_script:
    - *parse_tsan

|
.docs: &docs_job
  stage: docs
  script:
    - *configure
    - meson compile -C build arm arm-epub man
    - find build/man/ -maxdepth 2 -name "*.[0-9]" -exec mandoc -T lint "{}" \; | ( ! grep -v -e "skipping paragraph macro. sp after" -e "unknown font, skipping request. ft C" -e "input text line longer than 80 bytes" )
    - test -z "${DOC_CHECK_MISC_CHANGE}" || ninja -C build doc-misc
    - test -z "${DOC_CHECK_MISC_CHANGE}" || cp build/doc/misc/options build/doc/misc/rndc.grammar build/doc/misc/*.zoneopt doc/misc/
    - test -z "${DOC_CHECK_MISC_CHANGE}" || git diff > doc-misc.patch
    - test -z "${DOC_CHECK_MISC_CHANGE}" || if test "$(git status --porcelain --untracked-files=no | wc -l)" -gt "0"; then git status --short; exit 1; fi

.respdiff: &respdiff_job
  stage: system
  before_script:
    - *configure
    - meson compile -C build
    - *setup_interfaces
    - git clone --depth 1 https://gitlab.isc.org/isc-projects/bind9-qa.git
    - cd bind9-qa/respdiff
    - sed -i -e 's|bin/named/|build/|g' -e 's|awk "/\\/bin\\/named\\/|awk "/build|g' respdiff.sh
  needs: []
  artifacts:
    paths:
      - bind9-qa/respdiff
    exclude:
      - bind9-qa/respdiff/rspworkdir/data.mdb # Exclude a 10 GB file.
    untracked: true
    when: always

### Job Definitions
|
|
|
|
# Jobs in the precheck stage
|
|
|
|
misc:
|
|
<<: *precheck_job
|
|
script:
|
|
- sh util/checklibs.sh > checklibs.out
|
|
- sh util/check-categories.sh
|
|
- sh util/check-gitignore.sh
|
|
- sh util/check-trailing-whitespace.sh
|
|
- bash util/unused-headers.sh
|
|
- bash util/xmllint-html.sh
|
|
# Check dangling symlinks in the repository
|
|
- if find . -xtype l | grep .; then exit 1; fi
|
|
needs: []
|
|
artifacts:
|
|
paths:
|
|
- checklibs.out
|
|
when: on_failure
|
|
|
|
black:
|
|
<<: *precheck_job
|
|
needs: []
|
|
script:
|
|
- black $(git ls-files '*.py')
|
|
- git diff > black.patch
|
|
- if test "$(git status --porcelain | grep -Ev '\?\?' | wc -l)" -gt "0"; then git status --short; exit 1; fi
|
|
artifacts:
|
|
paths:
|
|
- black.patch
|
|
expire_in: "1 week"
|
|
when: on_failure
|
|
|
|
vulture:
|
|
<<: *precheck_job
|
|
needs: []
|
|
script:
|
|
- vulture --exclude "*ans.py,conftest.py,isctest" --ignore-names "pytestmark,reconfigure_policy,setup_filters" bin/tests/system/
|
|
|
|
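# The ci-variables job below derives BIND_BASELINE_VERSION for later jobs: it asks
# meson for the project version, maps it to a baseline tag prefix via
# util/select-baseline-version.jq, resolves that prefix to the newest matching tag
# through the GitLab tags API, and exports the result as a dotenv artifact.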
ci-variables:
|
|
<<: *precheck_job
|
|
script:
|
|
# When testing a .0 release, compare it against the previous development
|
|
# release (e.g., 9.19.0 and 9.18.0 should both be compared against 9.17.22).
|
|
- export BIND_BASELINE_TAG=$(meson introspect meson.build --projectinfo | ./util/select-baseline-version.jq)
|
|
- BIND_BASELINE_VERSION="$(curl -s -G -d "version=^${BIND_BASELINE_TAG}" -d "order_by=version" "https://gitlab.isc.org/api/v4/projects/1/repository/tags" | jq -r '.[0].name')"
|
|
- echo "BIND_BASELINE_VERSION=$BIND_BASELINE_VERSION" >> ci_vars.env
|
|
artifacts:
|
|
reports:
|
|
dotenv: ci_vars.env
|
|
|
|
ci-orphaned-anchors:
|
|
<<: *precheck_job
|
|
script:
|
|
- git clone --depth 1 https://gitlab.isc.org/isc-projects/bind9-qa.git
|
|
- bind9-qa/ci-orphaned-anchors/check-orphaned-anchors-ci.py .gitlab-ci.yml
|
|
needs: []
|
|
rules:
|
|
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
|
|
changes:
|
|
- .gitlab-ci.yml
|
|
|
|
clang-format:
|
|
<<: *precheck_job
|
|
needs: []
|
|
script:
|
|
- if [ -r .clang-format ]; then "${CLANG_FORMAT}" -i -style=file $(git ls-files '*.c' '*.h'); fi
|
|
- git diff > clang-format.patch
|
|
- if test "$(git status --porcelain | grep -Ev '\?\?' | wc -l)" -gt "0"; then git status --short; exit 1; fi
|
|
artifacts:
|
|
paths:
|
|
- clang-format.patch
|
|
expire_in: "1 week"
|
|
when: on_failure
|
|
|
|
coccinelle:
|
|
######################################################################
|
|
# Revert to using the "precheck_job" anchor after the "base" image is
|
|
# upgraded to Debian trixie, which has Coccinelle 1.2.
|
|
<<: *default_triggering_rules
|
|
<<: *debian_sid_amd64_image
|
|
stage: precheck
|
|
######################################################################
|
|
needs: []
|
|
script:
|
|
- util/check-cocci
|
|
- if test "$(git status --porcelain | grep -Ev '\?\?' | wc -l)" -gt "0"; then git status --short; exit 1; fi
|
|
|
|
meson-format:
|
|
######################################################################
|
|
# Revert to using the "precheck_job" anchor after the "base" image is
|
|
# upgraded to Debian trixie, which has muon 0.4.0.
|
|
<<: *default_triggering_rules
|
|
<<: *debian_sid_amd64_image
|
|
stage: precheck
|
|
######################################################################
|
|
needs: []
|
|
script:
|
|
- git ls-files "*meson.build" | xargs muon-meson fmt -i
|
|
- git diff > meson-format.patch
|
|
- if test "$(git status --porcelain | grep -Ev '\?\?' | wc -l)" -gt "0"; then git status --short; exit 1; fi
|
|
artifacts:
|
|
paths:
|
|
- meson-format.patch
|
|
expire_in: "1 week"
|
|
when: on_failure
|
|
rules:
|
|
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
|
|
changes:
|
|
- "**/meson.build"
|
|
|
|
doctest:
|
|
<<: *precheck_job
|
|
needs: []
|
|
script:
|
|
- *configure
|
|
- meson compile -C build system-test-init
|
|
- *find_pytest
|
|
- cd bin/tests/system/isctest
|
|
- >
|
|
"$PYTEST" --noconftest --doctest-modules
|
|
|
|
pylint:
|
|
<<: *default_triggering_rules
|
|
<<: *debian_sid_amd64_image
|
|
stage: precheck
|
|
needs: []
|
|
variables:
|
|
PYTHONPATH: "${CI_PROJECT_DIR}/bin/tests/system"
|
|
script:
|
|
- pylint --rcfile $CI_PROJECT_DIR/.pylintrc $(git ls-files '*.py' | grep -vE '(ans\.py|dangerfile\.py|^bin/tests/system/|^contrib/)')
|
|
# Ignore Pylint wrong-import-position error in system test to enable use of pytest.importorskip
|
|
- pylint --rcfile $CI_PROJECT_DIR/.pylintrc --disable=wrong-import-position $(git ls-files 'bin/tests/system/*.py' | grep -vE '(ans\.py|vulture_ignore_list\.py)')
|
|
|
|
reuse:
|
|
<<: *precheck_job
|
|
needs: []
|
|
image:
|
|
name: docker.io/fsfe/reuse:latest
|
|
entrypoint: [""]
|
|
script:
|
|
- reuse lint
|
|
|
|
shfmt:
|
|
<<: *precheck_job
|
|
needs: []
|
|
script:
|
|
- shfmt -w -i 2 -ci -bn . $(find . -name "*.sh.in")
|
|
- git diff > shfmt.patch
|
|
- if test "$(git status --porcelain | grep -Ev '\?\?' | wc -l)" -gt "0"; then git status --short; exit 1; fi
|
|
artifacts:
|
|
paths:
|
|
- shfmt.patch
|
|
expire_in: "1 week"
|
|
when: on_failure
|
|
|
|
danger:
|
|
<<: *precheck_job
|
|
needs: []
|
|
script:
|
|
- pip install git+https://gitlab.isc.org/isc-projects/hazard.git
|
|
- hazard
|
|
rules:
|
|
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
|
|
|
|
checkbashisms:
|
|
<<: *precheck_job
|
|
needs: []
|
|
script:
|
|
- checkbashisms $(find . -path './.git' -prune -o -type f -exec sh -c 'head -n 1 "{}" | grep -qsF "#!/bin/sh"' \; -print)
|
|
|
|
mypy:
|
|
<<: *default_triggering_rules
|
|
<<: *debian_sid_amd64_image
|
|
stage: precheck
|
|
script:
|
|
- mypy "bin/tests/system/isctest/"
|
|
|
|
tarball-create:
|
|
stage: precheck
|
|
<<: *base_image
|
|
<<: *default_triggering_rules
|
|
script:
|
|
- *configure
|
|
- meson dist -C build --no-tests
|
|
- if test "$(git status --porcelain | grep -Ev '\?\?' | wc -l)" -gt "0"; then git status --short; git diff > diff.patch; exit 1; fi
|
|
artifacts:
|
|
paths:
|
|
- diff.patch
|
|
- build/meson-dist/*.tar.xz
|
|
needs: []
|
|
|
|
# Jobs for doc builds on Debian 12 "bookworm" (amd64)
|
|
|
|
changelog:
|
|
<<: *base_image
|
|
<<: *docs_job
|
|
rules:
|
|
- if: '$CI_MERGE_REQUEST_TITLE =~ /\s(dev|usr|pkg):/'
|
|
variables:
|
|
GIT_AUTHOR_NAME: $GITLAB_USER_NAME
|
|
GIT_AUTHOR_EMAIL: $GITLAB_USER_EMAIL
|
|
GIT_COMMITTER_NAME: $GITLAB_USER_NAME
|
|
GIT_COMMITTER_EMAIL: $GITLAB_USER_EMAIL
|
|
before_script:
|
|
- echo -e "$CI_MERGE_REQUEST_TITLE\n" > commitmsg
|
|
- sed -i 's/^Draft:\s*//' commitmsg
|
|
- echo -e "$CI_MERGE_REQUEST_DESCRIPTION" >> commitmsg
|
|
- git commit --allow-empty -F commitmsg
|
|
- export CHANGELOG=$(ls doc/changelog/changelog-9.* | sort --version-sort | tail -n 1)
|
|
- printf "\n" >> $CHANGELOG
|
|
- ./contrib/gitchangelog/gitchangelog.py HEAD^..HEAD >> $CHANGELOG
|
|
after_script:
|
|
- git diff
|
|
needs: []
|
|
artifacts:
|
|
untracked: true
|
|
|
|
linkcheck:
|
|
<<: *base_image
|
|
stage: docs
|
|
script:
|
|
# Some domains tested by linkchecker may think that we connect to them too
|
|
# often and will refuse the connection or reply with an error code, which
|
|
# makes this job fail. Let's check links only on Wednesdays.
|
|
- if [ "$(date +%w)" != "3" ]; then exit 0; fi
|
|
- pushd doc/arm/ > /dev/null && sphinx-build -b linkcheck . linkcheck_output/
|
|
artifacts:
|
|
paths:
|
|
- doc/arm/linkcheck_output/
|
|
rules:
|
|
- if: '$CI_PIPELINE_SOURCE == "schedule"'
|
|
needs: []
|
|
|
|
docs:
|
|
<<: *default_triggering_rules
|
|
<<: *base_image
|
|
<<: *docs_job
|
|
before_script:
|
|
- *list_installed_package_versions
|
|
variables:
|
|
DOC_CHECK_MISC_CHANGE: 1
|
|
needs: []
|
|
artifacts:
|
|
untracked: true
|
|
when: always
|
|
|
|
docs:tarball:
|
|
<<: *default_triggering_rules
|
|
<<: *base_image
|
|
<<: *docs_job
|
|
before_script:
|
|
- *unpack_release_tarball
|
|
needs:
|
|
- job: tarball-create
|
|
artifacts: true
|
|
|
|
# Job detecting named.conf breakage introduced since the previous point release
|
|
|
|
cross-version-config-tests:
|
|
stage: system
|
|
<<: *base_image
|
|
<<: *default_triggering_rules
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
script:
|
|
- *configure
|
|
- *setup_interfaces
|
|
- meson compile -C build system-test-init system-test-dependencies
|
|
- meson compile -C build
|
|
- *find_pytest
|
|
- git clone --branch "${BIND_BASELINE_VERSION}" --depth 1 https://gitlab.isc.org/isc-projects/bind9.git "bind-${BIND_BASELINE_VERSION}"
|
|
- cd "bind-${BIND_BASELINE_VERSION}"
|
|
# The cross-version-config-tests job would fail when a system test is
|
|
# removed from the upcoming release. To avoid this, remove the system test
|
|
# also from the $BIND_BASELINE_VERSION.
|
|
- find bin/tests/system/ -mindepth 1 -maxdepth 1 -type d -exec sh -c 'test -e ../"$0" || rm -rfv -- "$0"' {} \;
|
|
# @DYLIB@ is missing
|
|
- cp ../bin/tests/system/isctest/vars/basic.py ./bin/tests/system/isctest/vars/basic.py
|
|
- cp ../bin/tests/system/isctest/vars/.build_vars/TOP_BUILDDIR ./bin/tests/system/isctest/vars/.build_vars/TOP_BUILDDIR
|
|
- echo "${CI_PROJECT_DIR}/bind-${BIND_BASELINE_VERSION}" > ../build/bin/tests/system/isctest/vars/.build_vars/TOP_SRCDIR
|
|
- cd ./bin/tests/system
|
|
# System tests that employ binary drivers will fail on ABI change and
|
|
# should not be run.
|
|
- rm -r dlzexternal
|
|
- rm -r dyndb
|
|
- >
|
|
"$PYTEST" --setup-only --junit-xml="$CI_PROJECT_DIR"/junit.xml -n "${TEST_PARALLEL_JOBS:-1}"
|
|
needs:
|
|
- job: ci-variables
|
|
artifacts: true
|
|
artifacts:
|
|
reports:
|
|
junit: junit.xml
|
|
paths:
|
|
- bind-*
|
|
- junit.xml
|
|
untracked: true
|
|
expire_in: "1 day"
|
|
when: always
|
|
|
|
# Jobs for regular GCC builds on Alpine Linux 3.22 (amd64)
|
|
|
|
gcc:alpine3.22:amd64:
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
EXTRA_CONFIGURE: "${WITHOUT_LIBEDIT}"
|
|
<<: *alpine_3_22_amd64_image
|
|
<<: *build_job
|
|
|
|
system:gcc:alpine3.22:amd64:
|
|
<<: *alpine_3_22_amd64_image
|
|
<<: *system_test_job
|
|
needs:
|
|
- job: gcc:alpine3.22:amd64
|
|
artifacts: true
|
|
|
|
unit:gcc:alpine3.22:amd64:
|
|
<<: *alpine_3_22_amd64_image
|
|
<<: *unit_test_job
|
|
needs:
|
|
- job: gcc:alpine3.22:amd64
|
|
artifacts: true
|
|
|
|
# Jobs for regular GCC builds on Alma Linux 8 (amd64)
|
|
|
|
gcc:almalinux8:amd64:
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
EXTRA_CONFIGURE: "-Didn=enabled"
|
|
<<: *almalinux_8_amd64_image
|
|
<<: *build_job
|
|
|
|
system:gcc:almalinux8:amd64:
|
|
<<: *almalinux_8_amd64_image
|
|
<<: *system_test_job
|
|
<<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
|
|
needs:
|
|
- job: gcc:almalinux8:amd64
|
|
artifacts: true
|
|
|
|
unit:gcc:almalinux8:amd64:
|
|
<<: *almalinux_8_amd64_image
|
|
<<: *unit_test_job
|
|
<<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
|
|
needs:
|
|
- job: gcc:almalinux8:amd64
|
|
artifacts: true
|
|
allow_failure: true # remove once GL#5448 is fixed
|
|
|
|
# Jobs for regular GCC builds on Alma Linux 9 (amd64)
|
|
|
|
gcc:almalinux9:amd64:
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
EXTRA_CONFIGURE: "-Didn=enabled -Ddeveloper=disabled"
|
|
<<: *almalinux_9_amd64_image
|
|
<<: *build_job
|
|
|
|
system:gcc:almalinux9:amd64:
|
|
<<: *almalinux_9_amd64_image
|
|
<<: *system_test_job
|
|
needs:
|
|
- job: gcc:almalinux9:amd64
|
|
artifacts: true
|
|
|
|
unit:gcc:almalinux9:amd64:
|
|
<<: *almalinux_9_amd64_image
|
|
<<: *unit_test_job
|
|
needs:
|
|
- job: gcc:almalinux9:amd64
|
|
artifacts: true
|
|
|
|
# Jobs for regular GCC builds on Alma Linux 10 (amd64)
|
|
|
|
gcc:almalinux10:amd64:
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
EXTRA_CONFIGURE: "-Didn=enabled"
|
|
<<: *almalinux_10_amd64_image
|
|
<<: *build_job
|
|
|
|
system:gcc:almalinux10:amd64:
|
|
<<: *almalinux_10_amd64_image
|
|
<<: *system_test_job
|
|
needs:
|
|
- job: gcc:almalinux10:amd64
|
|
artifacts: true
|
|
|
|
unit:gcc:almalinux10:amd64:
|
|
<<: *almalinux_10_amd64_image
|
|
<<: *unit_test_job
|
|
needs:
|
|
- job: gcc:almalinux10:amd64
|
|
artifacts: true
|
|
|
|
# Jobs for scheduled GCC builds on AlmaLinux 8 & 9 FIPS-aware images with FIPS
|
|
# mode in BIND 9 enabled
|
|
|
|
gcc:8fips:amd64:
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
EXTRA_CONFIGURE: "-Didn=enabled -Dfips=enabled -Dtracing=disabled"
|
|
GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
|
|
<<: *almalinux_8fips_amd64_image
|
|
<<: *build_job
|
|
|
|
system:gcc:8fips:amd64:
|
|
<<: *almalinux_8fips_amd64_image
|
|
<<: *system_test_job
|
|
<<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
|
|
variables:
|
|
GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
|
|
needs:
|
|
- job: gcc:8fips:amd64
|
|
artifacts: true
|
|
|
|
unit:gcc:8fips:amd64:
|
|
<<: *almalinux_8fips_amd64_image
|
|
<<: *unit_test_job
|
|
<<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
|
|
variables:
|
|
GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
|
|
needs:
|
|
- job: gcc:8fips:amd64
|
|
artifacts: true
|
|
|
|
gcc:9fips:amd64:
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
EXTRA_CONFIGURE: "-Didn=enabled -Dfips=enabled -Dleak-detection=disabled"
|
|
GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
|
|
<<: *almalinux_9fips_amd64_image
|
|
<<: *build_job
|
|
|
|
system:gcc:9fips:amd64:
|
|
<<: *almalinux_9fips_amd64_image
|
|
<<: *system_test_job
|
|
<<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
|
|
variables:
|
|
GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
|
|
needs:
|
|
- job: gcc:9fips:amd64
|
|
artifacts: true
|
|
|
|
unit:gcc:9fips:amd64:
|
|
<<: *almalinux_9fips_amd64_image
|
|
<<: *unit_test_job
|
|
<<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
|
|
variables:
|
|
GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
|
|
needs:
|
|
- job: gcc:9fips:amd64
|
|
artifacts: true
|
|
|
|
gcc:10fips:amd64:
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
EXTRA_CONFIGURE: "-Didn=enabled -Dfips=enabled -Dleak-detection=disabled"
|
|
GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
|
|
<<: *almalinux_10fips_amd64_image
|
|
<<: *build_job
|
|
|
|
system:gcc:10fips:amd64:
|
|
<<: *almalinux_10fips_amd64_image
|
|
<<: *system_test_job
|
|
variables:
|
|
GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
|
|
needs:
|
|
- job: gcc:10fips:amd64
|
|
artifacts: true
|
|
|
|
unit:gcc:10fips:amd64:
|
|
<<: *almalinux_10fips_amd64_image
|
|
<<: *unit_test_job
|
|
variables:
|
|
GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
|
|
needs:
|
|
- job: gcc:10fips:amd64
|
|
artifacts: true
|
|
|
|
gcc:tarball:nosphinx:
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
EXTRA_CONFIGURE: "-Didn=enabled -Ddeveloper=disabled"
|
|
RUN_MESON_INSTALL: 1
|
|
<<: *almalinux_9_amd64_image
|
|
<<: *build_job
|
|
before_script:
|
|
- *list_installed_package_versions
|
|
- (! command -v sphinx-build >/dev/null)
|
|
- *unpack_release_tarball
|
|
needs:
|
|
- job: tarball-create
|
|
artifacts: true
|
|
|
|
# Jobs for regular GCC builds on Debian 12 "bookworm" (amd64)
|
|
|
|
gcc:bookworm:amd64:
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
# Tracing needs to be disabled, otherwise gcovr fails
|
|
EXTRA_CONFIGURE: "-Doptimization=0 -Db_coverage=true -Dtracing=disabled -Didn=enabled ${WITH_LIBEDIT}"
|
|
RUN_MESON_INSTALL: 1
|
|
<<: *debian_bookworm_amd64_image
|
|
<<: *build_job
|
|
|
|
system:gcc:bookworm:amd64:
|
|
<<: *debian_bookworm_amd64_image
|
|
<<: *system_test_job
|
|
variables:
|
|
CI_ENABLE_LONG_TESTS: 1
|
|
CLEAN_BUILD_ARTIFACTS_ON_SUCCESS: 0
|
|
TZ: Australia/Sydney
|
|
needs: # using artifacts from unit test job is required for gcov
|
|
- job: unit:gcc:bookworm:amd64
|
|
artifacts: true
|
|
rules:
|
|
- if: '$CI_PIPELINE_SOURCE == "schedule"'
|
|
variables:
|
|
CI_ENABLE_LIVE_INTERENT_TESTS: 1
|
|
- *default_triggering_rules_list
|
|
|
|
unit:gcc:bookworm:amd64:
|
|
<<: *debian_bookworm_amd64_image
|
|
<<: *unit_test_job
|
|
variables:
|
|
CI_ENABLE_LONG_TESTS: 1
|
|
CLEAN_BUILD_ARTIFACTS_ON_SUCCESS: 0
|
|
needs:
|
|
- job: gcc:bookworm:amd64
|
|
artifacts: true
|
|
|
|
# Build job for cross-compiled GCC builds on 64-bit Debian 12 "bookworm"
|
|
# (amd64) with 32-bit BIND 9.
|
|
|
|
gcc:bookworm:amd64cross32:
|
|
variables:
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
CROSS_COMPILATION: 1
|
|
EXTRA_CONFIGURE: "--cross-file ci/amd64cross32.ini -Didn=enabled -Dgssapi=disabled -Dtracing=disabled ${WITH_LIBEDIT}"
|
|
<<: *debian_bookworm_amd64cross32_image
|
|
<<: *build_job
|
|
|
|
# Jobs for scan-build builds on Debian 12 "bookworm" (amd64)
|
|
|
|
.scan_build: &scan_build
|
|
- ${SCAN_BUILD} --html-title="BIND 9 ($CI_COMMIT_SHORT_SHA)"
|
|
--keep-cc
|
|
--status-bugs
|
|
--keep-going
|
|
-o scan-build.reports ninja -C build
|
|
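# Note: scan-build's --status-bugs flag makes the wrapper exit non-zero when the
# analyzer reports any bugs, which is what turns analyzer findings into a failed
# scan-build job below.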
|
|
scan-build:
|
|
<<: *default_triggering_rules
|
|
<<: *base_image
|
|
stage: postcheck
|
|
variables:
|
|
CC: "${CLANG}"
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
EXTRA_CONFIGURE: "-Didn=enabled"
|
|
SCANBUILD: "${CI_PROJECT_DIR}/scan-build.sh"
|
|
before_script:
|
|
- *list_installed_package_versions
|
|
script:
|
|
- *configure
|
|
- *scan_build
|
|
artifacts:
|
|
paths:
|
|
- scan-build.reports/
|
|
when: on_failure
|
|
|
|
# Jobs for strict OpenSSL 3.x (no deprecated) GCC builds on Debian "sid" (amd64)
|
|
# Run with pkcs11-provider tests
|
|
|
|
gcc:ossl3:sid:amd64:
|
|
<<: *debian_sid_amd64_image
|
|
<<: *build_job
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON} -DOPENSSL_NO_DEPRECATED=1 -DOPENSSL_API_COMPAT=30000"
|
|
# See https://gitlab.isc.org/isc-projects/bind9/-/issues/3444
|
|
EXTRA_CONFIGURE: "-Doptimization=3 -Djemalloc=disabled -Dleak-detection=disabled"
|
|
RUN_MESON_INSTALL: 1
|
|
|
|
system:gcc:ossl3:sid:amd64:
|
|
# Set up environment variables to run pkcs11-provider based system tests
|
|
variables:
|
|
OPENSSL_CONF: "/var/tmp/etc/openssl-provider.cnf"
|
|
SOFTHSM2_CONF: "/var/tmp/softhsm2/softhsm2.conf"
|
|
<<: *debian_sid_amd64_image
|
|
<<: *system_test_job
|
|
needs:
|
|
- job: gcc:ossl3:sid:amd64
|
|
artifacts: true
|
|
|
|
unit:gcc:ossl3:amd64:
|
|
<<: *debian_sid_amd64_image
|
|
<<: *unit_test_job
|
|
needs:
|
|
- job: gcc:ossl3:sid:amd64
|
|
artifacts: true
|
|
|
|
# Jobs for regular GCC builds on Debian "sid" (amd64)
|
|
# Also tests the configuration option -Dlmdb=disabled
|
|
|
|
gcc:sid:amd64:
|
|
<<: *debian_sid_amd64_image
|
|
<<: *build_job
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
EXTRA_CONFIGURE: "-Doptimization=3 -Didn=enabled -Dlmdb=disabled ${WITH_LIBEDIT}"
|
|
RUN_MESON_INSTALL: 1
|
|
|
|
system:gcc:sid:amd64:
|
|
<<: *debian_sid_amd64_image
|
|
<<: *system_test_job
|
|
<<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
|
|
needs:
|
|
- job: gcc:sid:amd64
|
|
artifacts: true
|
|
|
|
unit:gcc:sid:amd64:
|
|
<<: *debian_sid_amd64_image
|
|
<<: *unit_test_job
|
|
<<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
|
|
needs:
|
|
- job: gcc:sid:amd64
|
|
artifacts: true
|
|
|
|
# Jobs for tarball GCC builds on Debian 12 "bookworm" (amd64)
|
|
|
|
gcc:tarball:
|
|
variables:
|
|
CC: gcc
|
|
EXTRA_CONFIGURE: "-Didn=enabled"
|
|
RUN_MESON_INSTALL: 1
|
|
<<: *base_image
|
|
<<: *build_job
|
|
before_script:
|
|
- *list_installed_package_versions
|
|
- *unpack_release_tarball
|
|
needs:
|
|
- job: tarball-create
|
|
artifacts: true
|
|
|
|
system:gcc:tarball:
|
|
<<: *base_image
|
|
<<: *system_test_job
|
|
before_script:
|
|
- cd bind-*
|
|
- *setup_interfaces
|
|
after_script:
|
|
- cd bind-*
|
|
- *display_pytest_failures
|
|
needs:
|
|
- job: gcc:tarball
|
|
artifacts: true
|
|
|
|
unit:gcc:tarball:
|
|
<<: *base_image
|
|
<<: *unit_test_job
|
|
before_script:
|
|
- cd bind-*
|
|
needs:
|
|
- job: gcc:tarball
|
|
artifacts: true
|
|
|
|
# Jobs for debug GCC builds on openSUSE Tumbleweed (amd64)
|
|
|
|
gcc:tumbleweed:amd64:
|
|
<<: *tumbleweed_latest_amd64_image
|
|
<<: *build_job
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON} -DDEBUG"
|
|
EXTRA_CONFIGURE: "-Didn=enabled -Dgssapi=disabled ${WITH_LIBEDIT}"
|
|
|
|
system:gcc:tumbleweed:amd64:
|
|
<<: *tumbleweed_latest_amd64_image
|
|
<<: *system_test_job
|
|
needs:
|
|
- job: gcc:tumbleweed:amd64
|
|
artifacts: true
|
|
|
|
unit:gcc:tumbleweed:amd64:
|
|
<<: *tumbleweed_latest_amd64_image
|
|
<<: *unit_test_job
|
|
needs:
|
|
- job: gcc:tumbleweed:amd64
|
|
artifacts: true
|
|
|
|
# Jobs for regular GCC builds on Ubuntu 22.04 Jammy Jellyfish (amd64)
|
|
|
|
gcc:jammy:amd64:
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
EXTRA_CONFIGURE: "-Dgeoip=disabled -Didn=enabled -Ddoh=disabled -Dcmocka=disabled -Ddnstap=disabled -Dgssapi=disabled"
|
|
NO_BUILD_TEST_PREREQ: 1 # remove this variable after removing this particular distro
|
|
<<: *ubuntu_jammy_amd64_image
|
|
<<: *build_job
|
|
|
|
system:gcc:jammy:amd64:
|
|
<<: *ubuntu_jammy_amd64_image
|
|
<<: *system_test_job
|
|
<<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
|
|
needs:
|
|
- job: gcc:jammy:amd64
|
|
artifacts: true
|
|
|
|
unit:gcc:jammy:amd64:
|
|
<<: *ubuntu_jammy_amd64_image
|
|
<<: *unit_test_job
|
|
<<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
|
|
needs:
|
|
- job: gcc:jammy:amd64
|
|
artifacts: true
|
|
|
|
# Jobs for regular GCC builds on Ubuntu 24.04 Noble Numbat (amd64)
|
|
|
|
gcc:noble:amd64:
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
EXTRA_CONFIGURE: "-Didn=enabled"
|
|
<<: *ubuntu_noble_amd64_image
|
|
<<: *build_job
|
|
|
|
system:gcc:noble:amd64:
|
|
<<: *ubuntu_noble_amd64_image
|
|
<<: *system_test_job
|
|
needs:
|
|
- job: gcc:noble:amd64
|
|
artifacts: true
|
|
|
|
unit:gcc:noble:amd64:
|
|
<<: *ubuntu_noble_amd64_image
|
|
<<: *unit_test_job
|
|
needs:
|
|
- job: gcc:noble:amd64
|
|
artifacts: true
|
|
|
|
# Jobs for ASAN builds on Fedora 42 (amd64)
|
|
|
|
gcc:asan:
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
EXTRA_CONFIGURE: "-Db_sanitize=address,undefined -Didn=enabled -Djemalloc=disabled -Dtracing=disabled"
|
|
<<: *fedora_42_amd64_image
|
|
<<: *build_job
|
|
|
|
system:gcc:asan:
|
|
variables:
|
|
LSAN_OPTIONS: "suppressions=$CI_PROJECT_DIR/suppr-lsan.txt"
|
|
<<: *fedora_42_amd64_image
|
|
<<: *system_test_job
|
|
needs:
|
|
- job: gcc:asan
|
|
artifacts: true
|
|
|
|
unit:gcc:asan:
|
|
<<: *fedora_42_amd64_image
|
|
<<: *unit_test_job
|
|
needs:
|
|
- job: gcc:asan
|
|
artifacts: true
|
|
|
|
clang:asan:
|
|
variables:
|
|
CC: ${CLANG}
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
EXTRA_CONFIGURE: "-Db_sanitize=address,undefined -Db_lundef=false -Didn=enabled -Djemalloc=disabled -Dtracing=disabled"
|
|
<<: *base_image
|
|
<<: *build_job
|
|
|
|
system:clang:asan:
|
|
variables:
|
|
LSAN_OPTIONS: "suppressions=$CI_PROJECT_DIR/suppr-lsan.txt"
|
|
<<: *base_image
|
|
<<: *system_test_job
|
|
<<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
|
|
needs:
|
|
- job: clang:asan
|
|
artifacts: true
|
|
|
|
unit:clang:asan:
|
|
<<: *base_image
|
|
<<: *unit_test_job
|
|
<<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
|
|
needs:
|
|
- job: clang:asan
|
|
artifacts: true
|
|
|
|
# Jobs for TSAN builds on Fedora 42 (amd64)
|
|
|
|
gcc:tsan:
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON} -Wno-stringop-overread"
|
|
LDFLAGS: "-Wl,--disable-new-dtags"
|
|
EXTRA_CONFIGURE: "${TSAN_CONFIGURE_FLAGS_COMMON}"
|
|
<<: *tsan_fedora_42_amd64_image
|
|
<<: *build_job
|
|
|
|
system:gcc:tsan:
|
|
variables:
|
|
TSAN_OPTIONS: "${TSAN_OPTIONS_FEDORA}"
|
|
<<: *tsan_fedora_42_amd64_image
|
|
<<: *system_test_tsan_job
|
|
<<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
|
|
needs:
|
|
- job: gcc:tsan
|
|
artifacts: true
|
|
|
|
unit:gcc:tsan:
|
|
variables:
|
|
TSAN_OPTIONS: "${TSAN_OPTIONS_FEDORA}"
|
|
<<: *tsan_fedora_42_amd64_image
|
|
<<: *unit_test_tsan_job
|
|
<<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
|
|
needs:
|
|
- job: gcc:tsan
|
|
artifacts: true
|
|
|
|
clang:tsan:
|
|
<<: *tsan_debian_bookworm_amd64_image
|
|
<<: *build_job
|
|
variables:
|
|
CC: "${CLANG}"
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
LDFLAGS: "-Wl,--disable-new-dtags"
|
|
EXTRA_CONFIGURE: "${TSAN_CONFIGURE_FLAGS_COMMON} -Db_lundef=false"
|
|
|
|
system:clang:tsan:
|
|
variables:
|
|
TSAN_OPTIONS: "${TSAN_OPTIONS_DEBIAN}"
|
|
<<: *tsan_debian_bookworm_amd64_image
|
|
<<: *system_test_tsan_job
|
|
needs:
|
|
- job: clang:tsan
|
|
artifacts: true
|
|
|
|
unit:clang:tsan:
|
|
variables:
|
|
TSAN_OPTIONS: "${TSAN_OPTIONS_DEBIAN}"
|
|
<<: *tsan_debian_bookworm_amd64_image
|
|
<<: *unit_test_tsan_job
|
|
needs:
|
|
- job: clang:tsan
|
|
artifacts: true
|
|
|
|
generate-tsan-stress-test-configs:
|
|
<<: *base_image
|
|
<<: *default_triggering_rules
|
|
stage: system
|
|
script:
|
|
- util/generate-tsan-stress-jobs.py > tsan-stress-test-configs.yml
|
|
artifacts:
|
|
paths:
|
|
- tsan-stress-test-configs.yml
|
|
needs: []
|
|
when: manual
|
|
|
|
tsan:stress:
|
|
<<: *default_triggering_rules
|
|
stage: postcheck
|
|
variables:
|
|
PARENT_PIPELINE_ID: $CI_PIPELINE_ID
|
|
trigger:
|
|
include:
|
|
- artifact: tsan-stress-test-configs.yml
|
|
job: generate-tsan-stress-test-configs
|
|
needs:
|
|
- job: generate-tsan-stress-test-configs
|
|
artifacts: true
|
|
- job: gcc:tsan
|
|
artifacts: true
|
|
- job: clang:tsan
|
|
artifacts: true
|
|
|
|
# Jobs for Clang builds on Debian 12 "bookworm" (amd64)
|
|
|
|
clang:bookworm:amd64:
|
|
variables:
|
|
CC: ${CLANG}
|
|
CFLAGS: "${CFLAGS_COMMON} -Wenum-conversion -DOPENSSL_API_COMPAT=10100"
|
|
# See https://gitlab.isc.org/isc-projects/bind9/-/issues/3444
|
|
EXTRA_CONFIGURE: "-Djemalloc=disabled -Dleak-detection=disabled"
|
|
RUN_MESON_INSTALL: 1
|
|
<<: *debian_bookworm_amd64_image
|
|
<<: *build_job
|
|
|
|
system:clang:bookworm:amd64:
|
|
<<: *debian_bookworm_amd64_image
|
|
<<: *system_test_job
|
|
<<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
|
|
needs:
|
|
- job: clang:bookworm:amd64
|
|
artifacts: true
|
|
|
|
unit:clang:bookworm:amd64:
|
|
<<: *debian_bookworm_amd64_image
|
|
<<: *unit_test_job
|
|
<<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
|
|
needs:
|
|
- job: clang:bookworm:amd64
|
|
artifacts: true
|
|
|
|
# Jobs for Clang builds on FreeBSD 13 (amd64)
|
|
|
|
clang:freebsd13:amd64:
|
|
<<: *build_job
|
|
<<: *freebsd_autoscaler_13_amd64
|
|
|
|
system:clang:freebsd13:amd64:
|
|
<<: *system_test_job
|
|
<<: *freebsd_autoscaler_13_amd64
|
|
<<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
|
|
needs:
|
|
- job: clang:freebsd13:amd64
|
|
artifacts: true
|
|
|
|
unit:clang:freebsd13:amd64:
|
|
<<: *unit_test_job
|
|
<<: *freebsd_autoscaler_13_amd64
|
|
<<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
|
|
needs:
|
|
- job: clang:freebsd13:amd64
|
|
artifacts: true
|
|
|
|
# Jobs for Clang builds on FreeBSD 14 (amd64)
|
|
|
|
clang:freebsd14:amd64:
|
|
<<: *build_job
|
|
<<: *freebsd_autoscaler_14_amd64
|
|
|
|
system:clang:freebsd14:amd64:
|
|
<<: *system_test_job
|
|
<<: *freebsd_autoscaler_14_amd64
|
|
needs:
|
|
- job: clang:freebsd14:amd64
|
|
artifacts: true
|
|
|
|
unit:clang:freebsd14:amd64:
|
|
<<: *unit_test_job
|
|
<<: *freebsd_autoscaler_14_amd64
|
|
needs:
|
|
- job: clang:freebsd14:amd64
|
|
artifacts: true
|
|
retry: 1 # GL #4924
|
|
|
|
# Job producing a release directory
|
|
|
|
release:
|
|
<<: *base_image
|
|
stage: release
|
|
script:
|
|
- export BIND_DIRECTORY="$(basename build/meson-dist/bind-*.tar.xz ".tar.xz")"
|
|
# Prepare release tarball contents (tarballs + documentation)
|
|
- mkdir -p "${BIND_DIRECTORY}-release/doc/arm"
|
|
- pushd "${BIND_DIRECTORY}-release"
|
|
- mv "../build/meson-dist/${BIND_DIRECTORY}.tar.xz" .
|
|
- tar --extract --file="${BIND_DIRECTORY}.tar.xz"
|
|
- mv "${BIND_DIRECTORY}"/{COPYRIGHT,LICENSE,README.md,srcid} .
|
|
- rm -rf "${BIND_DIRECTORY}"
|
|
- mv ../build/arm/ doc/arm/html/
|
|
- mv ../build/arm-epub/Bv9ARM.epub doc/arm/
|
|
- echo '<!DOCTYPE HTML><html lang="en"><meta http-equiv="refresh" content="0; url=doc/arm/html/notes.html"><title>Redirect</title></html>' > "RELEASE-NOTES-${BIND_DIRECTORY}.html"
|
|
- echo '<!DOCTYPE HTML><html lang="en"><meta http-equiv="refresh" content="0; url=doc/arm/html/changelog.html"><title>Redirect</title></html>' > "CHANGELOG-${BIND_DIRECTORY}.html"
|
|
- popd
|
|
needs:
|
|
- job: tarball-create
|
|
artifacts: true
|
|
- job: docs
|
|
artifacts: true
|
|
rules:
|
|
- if: '$CI_COMMIT_TAG != null'
|
|
artifacts:
|
|
paths:
|
|
- "*-release"
|
|
expire_in: "1 month"
|
|
|
|
# Job signing the source tarballs in the release directory
|
|
|
|
sign:
|
|
stage: release
|
|
tags:
|
|
- signer
|
|
script:
|
|
- export RELEASE_DIRECTORY="$(echo *-release)"
|
|
- pushd "${RELEASE_DIRECTORY}"
|
|
- |
|
|
echo
|
|
cat > /tmp/sign-bind9.sh <<EOF
|
|
#!/bin/sh
|
|
{
|
|
for FILE in \$(find "${PWD}" -name "*.tar.xz" | sort); do
|
|
echo ">>> Signing \${FILE}..."
|
|
gpg2 --local-user "\${SIGNING_KEY_FINGERPRINT}" --armor --digest-algo SHA512 --detach-sign --output "\${FILE}.asc" "\${FILE}"
|
|
done
|
|
} 2>&1 | tee "${CI_PROJECT_DIR}/signing.log"
|
|
EOF
|
|
chmod +x /tmp/sign-bind9.sh
|
|
echo -e "\e[31m*** Please sign the releases by following the instructions at:\e[0m"
|
|
echo -e "\e[31m*** \e[0m"
|
|
echo -e "\e[31m*** ${SIGNING_HELP_URL}\e[0m"
|
|
echo -e "\e[31m*** \e[0m"
|
|
echo -e "\e[31m*** Sleeping until files in ${PWD} are signed... ⌛\e[0m"
|
|
while [ "$(find . -name "*.asc" -size +0 | sed "s|\.asc$||" | sort)" != "$(find . -name "*.tar.xz" | sort)" ]; do sleep 10; done
|
|
- popd
|
|
- tar --create --file="${RELEASE_DIRECTORY}.tar.gz" --gzip "${RELEASE_DIRECTORY}"
|
|
artifacts:
|
|
paths:
|
|
- "*.tar.gz"
|
|
- signing.log
|
|
expire_in: never
|
|
needs:
|
|
- job: release
|
|
artifacts: true
|
|
rules:
|
|
- if: '$CI_COMMIT_TAG != null'
|
|
when: manual
|
|
allow_failure: false
|
|
|
|
.customer-git: &customer_git
|
|
<<: *base_image
|
|
stage: release
|
|
when: manual
|
|
variables: # ensure clean git environment and sufficient history
|
|
GIT_STRATEGY: clone
|
|
GIT_DEPTH: 1000
|
|
|
|
# To trigger this job and push a branch to a customer, you must set the
|
|
# CUSTOMER job variable by clicking on the manual job (not the play button) and
|
|
# setting it to the name of the target customer.
|
|
customer-git:branch:
|
|
<<: *customer_git
|
|
needs: []
|
|
rules:
|
|
- if: '$CI_PROJECT_NAMESPACE == "isc-private" && $CI_PIPELINE_SOURCE == "merge_request_event"'
|
|
variables:
|
|
BRANCH: '$CI_MERGE_REQUEST_SOURCE_BRANCH_NAME'
|
|
- if: '$CI_PROJECT_NAMESPACE == "isc-private" && $CI_PIPELINE_SOURCE =~ /^(api|pipeline|trigger|web)$/'
|
|
variables:
|
|
BRANCH: '$CI_COMMIT_BRANCH'
|
|
before_script:
|
|
- test -n "$CUSTOMER"
|
|
- git clone --depth 1 https://gitlab.isc.org/isc-projects/bind9-qa.git
|
|
script:
|
|
- git checkout -b "$BRANCH" # ensure refs/heads/$BRANCH exists; GitLab clones with detached HEAD
|
|
- bind9-qa/releng/push_to_customer_repository.py --branch "$BRANCH" --customer "$CUSTOMER" --force
|
|
|
|
customer-git:tag:
|
|
<<: *customer_git
|
|
needs:
|
|
- job: release
|
|
artifacts: false
|
|
rules:
|
|
- if: '$CI_PROJECT_NAMESPACE == "isc-private" && $CI_COMMIT_TAG != null'
|
|
before_script:
|
|
- git clone --depth 1 https://gitlab.isc.org/isc-projects/bind9-qa.git
|
|
- git clone --depth 1 "https://token:${ISC_CUSTOMERS_WRITE_TOKEN}@gitlab.isc.org/isc-customers/isc-customer-settings.git"
|
|
script:
|
|
- bind9-qa/releng/push_to_customer_repository.py --tag "$CI_COMMIT_TAG" --entitlements isc-customer-settings/entitlements.yaml --force
|
|
|
|
# Coverity Scan analysis upload
|
|
|
|
.coverity_prep: &coverity_prep
|
|
- curl --output /tmp/cov-analysis-linux64.md5 https://scan.coverity.com/download/linux64
|
|
--form project=$COVERITY_SCAN_PROJECT_NAME
|
|
--form token=$COVERITY_SCAN_TOKEN
|
|
--form md5=1
|
|
- curl --output /tmp/cov-analysis-linux64.tgz https://scan.coverity.com/download/linux64
|
|
--form project=$COVERITY_SCAN_PROJECT_NAME
|
|
--form token=$COVERITY_SCAN_TOKEN
|
|
- test "$(md5sum /tmp/cov-analysis-linux64.tgz | awk '{ print $1 }')" = "$(cat /tmp/cov-analysis-linux64.md5)"
|
|
- tar --extract --gzip --file=/tmp/cov-analysis-linux64.tgz --directory=/tmp
|
|
- test -d /tmp/cov-analysis-linux64-*
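# The steps above fetch Coverity's analysis bundle together with its published
# MD5 checksum and refuse to continue unless the downloaded tarball matches it.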
|
|
|
|
.coverity_build: &coverity_build
|
|
- /tmp/cov-analysis-linux64-*/bin/cov-build --dir /tmp/cov-int sh -c 'ninja -C ./build -v'
|
|
- tar --create --gzip --file=/tmp/cov-int.tar.gz --directory /tmp cov-int
|
|
- curl -v https://scan.coverity.com/builds?project=$COVERITY_SCAN_PROJECT_NAME
|
|
--form token=$COVERITY_SCAN_TOKEN
|
|
--form email=bind-changes@isc.org
|
|
--form file=@/tmp/cov-int.tar.gz
|
|
--form version="$(git rev-parse --short HEAD)"
|
|
--form description="$(git rev-parse --short HEAD) / $CI_COMMIT_TITLE / $CI_COMMIT_REF_NAME:$CI_PIPELINE_ID" 2>&1
|
|
| tee curl-response.txt
|
|
- grep -q 'Build successfully submitted' curl-response.txt
|
|
|
|
coverity:
|
|
<<: *base_image
|
|
stage: postcheck
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
EXTRA_CONFIGURE: "-Doptimization=g -Ddoc=disabled -Didn=enabled"
|
|
script:
|
|
- *coverity_prep
|
|
- *configure
|
|
- *coverity_build
|
|
after_script:
|
|
- mv -v /tmp/cov-int.tar.gz ${CI_PROJECT_DIR}/
|
|
needs: []
|
|
artifacts:
|
|
paths:
|
|
- curl-response.txt
|
|
- cov-int.tar.gz
|
|
expire_in: "1 week"
|
|
when: on_failure
|
|
rules:
|
|
- if: '$COVERITY_SCAN_PROJECT_NAME != null && $COVERITY_SCAN_TOKEN != null'
|
|
|
|
# Respdiff tests
|
|
|
|
respdiff:
|
|
<<: *respdiff_job
|
|
<<: *default_triggering_rules
|
|
<<: *debian_bookworm_amd64_image
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON} -DISC_TRACK_PTHREADS_OBJECTS"
|
|
EXTRA_CONFIGURE: "-Doptimization=g"
|
|
MAX_DISAGREEMENTS_PERCENTAGE: "0.3"
|
|
script:
|
|
- bash respdiff.sh -m /usr/lib/x86_64-linux-gnu/libjemalloc.so.2 -s named -q "${PWD}/100k_mixed.txt" -c 3 -w "${PWD}/rspworkdir" "${CI_PROJECT_DIR}" "/usr/local/respdiff-reference-bind/sbin/named"
|
|
- cd ../.. && ninja -C build clean >/dev/null 2>&1
|
|
|
|
respdiff:asan:
|
|
<<: *respdiff_job
|
|
<<: *default_triggering_rules
|
|
<<: *debian_bookworm_amd64_image
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
EXTRA_CONFIGURE: "-Doptimization=g -Db_sanitize=address,undefined -Djemalloc=disabled"
|
|
MAX_DISAGREEMENTS_PERCENTAGE: "0.3"
|
|
script:
|
|
- bash respdiff.sh -s named -q "${PWD}/100k_mixed.txt" -c 3 -w "${PWD}/rspworkdir" "${CI_PROJECT_DIR}" "/usr/local/respdiff-reference-bind/sbin/named"
|
|
- cd ../.. && ninja -C build clean >/dev/null 2>&1
|
|
|
|
respdiff:tsan:
|
|
<<: *respdiff_job
|
|
<<: *default_triggering_rules
|
|
<<: *tsan_debian_bookworm_amd64_image
|
|
variables:
|
|
CC: "${CLANG}"
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
LDFLAGS: "-Wl,--disable-new-dtags"
|
|
EXTRA_CONFIGURE: "${TSAN_CONFIGURE_FLAGS_COMMON} -Db_lundef=false"
|
|
MAX_DISAGREEMENTS_PERCENTAGE: "0.3"
|
|
TSAN_OPTIONS: "${TSAN_OPTIONS_DEBIAN}"
|
|
script:
|
|
- bash respdiff.sh -s named -q "${PWD}/100k_mixed.txt" -c 3 -w "${PWD}/rspworkdir" "${CI_PROJECT_DIR}" "/usr/local/respdiff-reference-bind/sbin/named"
|
|
- cd ../.. && ninja -C build clean >/dev/null 2>&1
|
|
after_script:
|
|
- *parse_tsan
|
|
|
|
respdiff-third-party:
|
|
<<: *respdiff_job
|
|
<<: *default_triggering_rules
|
|
<<: *debian_bookworm_amd64_image
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
EXTRA_CONFIGURE: "-Doptimization=g"
|
|
MAX_DISAGREEMENTS_PERCENTAGE: "0.3"
|
|
script:
|
|
- bash respdiff.sh -s third_party -q "${PWD}/100k_mixed.txt" -c 1 -w "${PWD}/rspworkdir" "${CI_PROJECT_DIR}"
|
|
- cd ../.. && ninja -C build clean >/dev/null 2>&1
|
|
|
|
.respdiff-recent-named: &respdiff_recent_named
|
|
<<: *respdiff_job
|
|
<<: *debian_bookworm_amd64_image
|
|
<<: *default_triggering_rules
|
|
needs:
|
|
- job: ci-variables
|
|
artifacts: true
|
|
script:
|
|
- cd ${CI_PROJECT_DIR}
|
|
- mkdir version-under-test
|
|
- mv build version-under-test/
|
|
- BASELINE=${CI_MERGE_REQUEST_TARGET_BRANCH_NAME:-$BIND_BASELINE_VERSION}
|
|
- git fetch --unshallow origin ${BASELINE}
|
|
- git checkout FETCH_HEAD
|
|
- *configure
|
|
- meson compile -C build
|
|
- cd bind9-qa/respdiff
|
|
- bash respdiff.sh -s named -q "${PWD}/100k_mixed.txt" -c 3 -w "${PWD}/rspworkdir" "${CI_PROJECT_DIR}/version-under-test" "${CI_PROJECT_DIR}/build/named"
|
|
|
|
respdiff:recent-named:
|
|
<<: *respdiff_recent_named
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON} -DISC_TRACK_PTHREADS_OBJECTS"
|
|
EXTRA_CONFIGURE: "-Doptimization=g"
|
|
MAX_DISAGREEMENTS_PERCENTAGE: "0.3"
|
|
|
|
# Performance tests
|
|
|
|
shotgun:udp:
|
|
<<: *shotgun_job
|
|
variables:
|
|
SHOTGUN_SCENARIO: udp
|
|
SHOTGUN_TRAFFIC_MULTIPLIER: 15
|
|
|
|
shotgun:tcp:
|
|
<<: *shotgun_job
|
|
variables:
|
|
SHOTGUN_SCENARIO: tcp
|
|
SHOTGUN_TRAFFIC_MULTIPLIER: 12
|
|
|
|
shotgun:dot:
|
|
<<: *shotgun_job
|
|
variables:
|
|
SHOTGUN_SCENARIO: dot
|
|
SHOTGUN_TRAFFIC_MULTIPLIER: 5
|
|
rules: &shotgun_rules_manual_mr
|
|
- if: '$CI_MERGE_REQUEST_DIFF_BASE_SHA != null'
|
|
variables:
|
|
BASELINE: '$CI_MERGE_REQUEST_DIFF_BASE_SHA'
|
|
when: manual # don't run on each MR unless requested
|
|
allow_failure: true
|
|
- *shotgun_rule_tag
|
|
- *shotgun_rule_other
|
|
|
|
shotgun:doh-get:
|
|
<<: *shotgun_job
|
|
variables:
|
|
SHOTGUN_SCENARIO: doh-get
|
|
SHOTGUN_TRAFFIC_MULTIPLIER: 2
|
|
SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_MAX: 0.4 # bump from the default due to increased tail-end jitter
|
|
rules: *shotgun_rules_manual_mr
|
|
|
|
generate-stress-test-configs:
|
|
<<: *base_image
|
|
<<: *default_triggering_rules
|
|
stage: precheck
|
|
script:
|
|
- git clone --depth 1 https://gitlab.isc.org/isc-projects/bind9-qa.git
|
|
- bind9-qa/stress/generate-stress-test-configs.py > stress-test-configs.yml
|
|
artifacts:
|
|
paths:
|
|
- stress-test-configs.yml
|
|
needs: []
|
|
|
|
stress-test-child-pipeline:
|
|
<<: *default_triggering_rules
|
|
stage: performance
|
|
trigger:
|
|
include:
|
|
- artifact: stress-test-configs.yml
|
|
job: generate-stress-test-configs
|
|
needs:
|
|
- job: generate-stress-test-configs
|
|
artifacts: true
|
|
|
|
# Simple reproducibility test; needs an image with meson >=1.6.0
|
|
reproducible-build:
|
|
<<: *default_triggering_rules
|
|
<<: *alpine_3_22_amd64_image
|
|
stage: postcheck
|
|
needs: []
|
|
variables:
|
|
CC: gcc
|
|
CFLAGS: "${CFLAGS_COMMON}"
|
|
before_script:
|
|
- *list_installed_package_versions
|
|
script:
|
|
- meson reprotest
|
|
--intermediaries
|
|
--
|
|
-Ddoc=disabled
|
|
-Doptimization=1
|
|
artifacts:
|
|
untracked: true
|
|
when: on_failure
|
|
|
|
# git fsck operates over the whole repository, so it is sufficient to schedule it
# only on one branch, preferably "main". GitLab's clone strategy prevents us
|
|
# from using the "bind9" repo clone; we need to clone it ourselves.
|
|
fsck:
|
|
<<: *base_image
|
|
stage: postcheck
|
|
variables:
|
|
GIT_STRATEGY: none
|
|
script:
|
|
- git clone https://gitlab.isc.org/isc-projects/bind9.git bind9-full-clone
|
|
- cd bind9-full-clone/
|
|
- git fsck
|
|
rules:
|
|
- if: '$CI_PIPELINE_SOURCE == "schedule"'
|
|
needs: []
|
|
|
|
gcov:
|
|
<<: *base_image
|
|
<<: *default_triggering_rules
|
|
stage: postcheck
|
|
needs:
|
|
- job: system:gcc:bookworm:amd64
|
|
artifacts: true
|
|
script:
|
|
# Ensure gcov files for unit tests are found via tests/ rather than
|
|
# lib/*/tests/ to prevent confusing gcovr.
|
|
# - find lib/ -name tests -type l -delete
|
|
- ninja -C build coverage
|
|
- tail -n 3 build/meson-logs/coverage.txt
|
|
coverage: /^TOTAL.*\s+(\d+\%)$/
|
|
artifacts:
|
|
paths:
|
|
- build/meson-logs/
|
|
reports:
|
|
coverage_report:
|
|
coverage_format: cobertura
|
|
path: build/meson-logs/coverage.xml
|
|
|
|
# Pairwise testing of build options
|
|
|
|
pairwise:
|
|
<<: *base_image
|
|
stage: build
|
|
needs: []
|
|
script:
|
|
- util/pairwise-testing.sh
|
|
artifacts:
|
|
paths:
|
|
- pairwise-commands.txt
|
|
- pairwise-model.txt
|
|
- pairwise-output.*.txt
|
|
when: on_failure
|
|
rules:
|
|
- if: '$PAIRWISE_TESTING != null'
|
|
|
|
.post_merge_template: &post_merge
|
|
<<: *base_image
|
|
stage: postmerge
|
|
needs: []
|
|
# post-merge processes should run even if another MR was merged while the job was running (or queued)
|
|
interruptible: false
|
|
variables:
|
|
# automated commits will inherit identification from the user who pressed the Merge button
|
|
GIT_COMMITTER_NAME: $GITLAB_USER_NAME
|
|
GIT_COMMITTER_EMAIL: $GITLAB_USER_EMAIL
|
|
# avoid leftover branches from previous jobs
|
|
GIT_STRATEGY: clone
|
|
# assumed max depth of a MR for backport or a rebased force-push
|
|
GIT_DEPTH: 1000
|
|
before_script:
|
|
# force-pushes should not trigger process automation (happens only in -sub branches)
|
|
- >
|
|
echo "previous branch tip: $CI_COMMIT_BEFORE_SHA"
|
|
- set +o pipefail; git log --format='%H' | grep --silent "$CI_COMMIT_BEFORE_SHA" && PREVIOUS_TIP_REACHABLE=1
|
|
- test "$PREVIOUS_TIP_REACHABLE" != "1" && echo "force-push detected, stop" && exit 1
|
|
# non-fast-forward merges are disabled, so there is always a merge commit on top
|
|
- MERGE_REQUEST_ID="$(git log -1 --format='%b' | sed --silent -e "s|^See merge request ${CI_PROJECT_PATH}\!||p")"
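# For example (hypothetical commit body): a merge commit ending in
# "See merge request isc-projects/bind9!9876" yields MERGE_REQUEST_ID=9876;
# anything else leaves MERGE_REQUEST_ID empty, and the test -n check further
# down then stops the job.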
|
|
- >
|
|
: stop if this is not a merge request in the current project\'s namespace
|
|
- test -n "$MERGE_REQUEST_ID"
|
|
- git clone --depth 1 https://gitlab.isc.org/isc-projects/bind9-qa.git
|
|
|
|
backports:
|
|
<<: *post_merge
|
|
rules:
|
|
# -sub branches are handled manually
|
|
- if: '$CI_PIPELINE_SOURCE == "push" && ($CI_COMMIT_REF_NAME =~ /^bind-9.[0-9]+$/ || $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH)'
|
|
script:
|
|
# CI job token is not sufficient for push operations
|
|
- git remote get-url origin | sed -e "s/gitlab-ci-token:$CI_JOB_TOKEN/oauth2:$BIND_TEAM_WRITE_TOKEN/" | xargs git remote set-url --push origin
|
|
- bind9-qa/releng/backport_mr.py $CI_PROJECT_ID "$MERGE_REQUEST_ID"
|
|
|
|
merged-metadata:
|
|
<<: *post_merge
|
|
rules:
|
|
- if: '$CI_PIPELINE_SOURCE == "push" && ($CI_COMMIT_REF_NAME =~ /^bind-9.[0-9]+(-sub)?$/ || $CI_COMMIT_REF_NAME =~ /^v9.[0-9]+.[0-9]+-release$/ || $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH)'
|
|
script:
|
|
- bind9-qa/releng/after_merge.py "$CI_PROJECT_ID" "$MERGE_REQUEST_ID"
|