# In the interest of reliability and performance, please avoid installing
# external dependencies here, e.g. via tools/*-setup.sh, apt, dnf, or yum.
# Do so in the appropriate Dockerfile at
# https://gitlab.com/wireshark/wireshark-containers/ instead.
# The resulting images can be found at
# https://gitlab.com/wireshark/wireshark-containers/container_registry

# Pipeline stages, in execution order. Fuzz stages run after the normal
# build/analysis/test stages.
stages:
  - build
  - analysis
  - test
  - fuzz-asan
  - fuzz-randpkt
  - fuzz-valgrind

# Global variables inherited by every job.
variables:
  # Ensure that checkouts are a) fast and b) have a reachable tag. In a
  # brighter, more glorious future we might be able to use --shallow-since:
  # https://gitlab.com/gitlab-org/gitlab-runner/-/issues/3460
  # In the mean time, fetching the last 5000 commits does the job.
  # Ensure that all variables are string
  GIT_DEPTH: "1"
  GIT_FETCH_EXTRA_FLAGS: "--depth=5000"
  CCACHE_DIR: "${CI_PROJECT_DIR}/ccache"
  # Preferred version of clang available on wireshark-ubuntu-dev
  CLANG_VERSION: "15"
  # Enable color output in CMake, Ninja, and other tools. https://bixense.com/clicolors/
  CLICOLOR_FORCE: "1"
  # Skip irrelevant SAST scanners:
  SAST_EXCLUDED_ANALYZERS: "brakeman,eslint,security-code-scan,semgrep,spotbugs"

# Scheduled builds additionally set SCHEDULE_TYPE, which can be one of:
# - 2x-daily: Twice daily at 07:00 and 19:00 UTC
# - daily: Daily at 10:00 UTC
# - weekly: Sunday at 14:00 UTC
# - coverity-visual-c++: Monday, Wednesday, & Friday at 12:00 UTC
# - coverity-gcc: Sunday, Tuesday, Thursday & Saturday at 12:00 UTC

# Common rule stanzas
# These must currently be including using "!reference tags". "extends:" and
# YAML anchors won't work:
# https://gitlab.com/gitlab-org/gitlab/-/issues/322992

# Commits that have been approved and merged. Run automatically in the main
# repo and allow manual runs in the web UI and in forks.
.if-merged:
  - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_URL =~ /.*gitlab.com\/wireshark\/wireshark/'
  - if: '$CI_PIPELINE_SOURCE == "web" && $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_URL =~ /.*gitlab.com\/wireshark\/wireshark/'
  - if: '$CI_PIPELINE_SOURCE == "push" && $CI_PROJECT_URL !~ /.*gitlab.com\/wireshark\/wireshark/'
    when: manual
# Merged commits for runners which are only available in
# wireshark/wireshark, e.g. wireshark-windows-*. Run automatically in
# the main repo and allow manual runs in the web UI.
.if-w-w-only-merged:
  - if: '$CI_PIPELINE_SOURCE == "push" && $CI_PROJECT_URL =~ /.*gitlab.com\/wireshark\/wireshark/'
  - if: '$CI_PIPELINE_SOURCE == "web" && $CI_PROJECT_URL =~ /.*gitlab.com\/wireshark\/wireshark/'
# Incoming merge requests.
.if-merge-request:
  - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
# Incoming non-detached merge requests. Must be used for runners which are only
# available in wireshark/wireshark, e.g. wireshark-windows-*
.if-w-w-only-merge-request:
  - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && $CI_PROJECT_URL =~ /.*gitlab.com\/wireshark\/wireshark/'
# Scheduled jobs. Care should be taken when changing this since the scheduler
# often doesn't report errors.
.if-weekly-schedule:
  - if: '$CI_PIPELINE_SOURCE == "schedule" && $SCHEDULE_TYPE == "weekly"'
.if-daily-schedule:
  - if: '$CI_PIPELINE_SOURCE == "schedule" && $SCHEDULE_TYPE == "daily"'
.if-2x-daily-schedule:
  - if: '$CI_PIPELINE_SOURCE == "schedule" && $SCHEDULE_TYPE == "2x-daily"'
# Fuzz jobs. Care should be taken when changing this since the scheduler
# often doesn't report errors.
.if-fuzz-schedule:
  - if: '$CI_PIPELINE_SOURCE == "schedule" && $SCHEDULE_TYPE == "fuzz"'

# Common settings for jobs that build on Linux container runners.
.build-linux:
  stage: build
  tags:
    - docker
  after_script:
    # Locate the build output directory; it differs between plain CMake
    # builds ("build"), RPM builds, and dpkg builds ("obj-*").
    - for builddir in build/packaging/rpm/BUILD/wireshark-*/build build/packaging/rpm/BUILD/wireshark-* build obj-*; do [ ! -d "$builddir/run" ] || break; done
    - if [[ "$CI_JOB_NAME" == "build:rpm-opensuse-"* ]]; then export LD_LIBRARY_PATH=$builddir/run; fi
    - if [ -f $builddir/run/tshark ]; then $builddir/run/tshark --version; fi
  needs: []

# Common settings for jobs that test built packages on Linux container
# runners. No checkout is needed; the packages come from job artifacts.
.test-linux:
  stage: test
  tags:
    - docker
  variables:
    GIT_STRATEGY: none

# Common settings for builds in the Ubuntu development container.
.build-ubuntu:
  extends: .build-linux
  image: registry.gitlab.com/wireshark/wireshark-containers/ubuntu-dev
  retry: 1
  # https://gould.cx/ted/blog/2017/06/10/ccache-for-Gitlab-CI/
  cache:
    # XXX Use ${CI_JOB_NAME}-${CI_MERGE_REQUEST_TARGET_BRANCH_NAME} instead?
    key: ${CI_JOB_NAME}-master
    paths:
      - ccache/
  before_script:
    - useradd user
    - export LANG=en_US.UTF-8
    - export PYTEST_ADDOPTS=--skip-missing-programs=dumpcap,rawshark
    - mkdir -p ccache
    - ccache --show-stats
    - export DEB_BUILD_OPTIONS="nocheck parallel=$(( $(getconf _NPROCESSORS_ONLN) + 2 ))"
    - export DH_QUIET=1
    - export MAKEFLAGS=--silent
    - NUM_COMMITS=$(curl $CI_API_V4_URL/projects/$CI_PROJECT_ID/merge_requests/$CI_MERGE_REQUEST_IID/commits | jq length)
    - echo "$NUM_COMMITS commit(s) in this MR"
    - mkdir build
    - cd build
  script:
    # setcap restricts our library paths
    - printf "\e[0Ksection_start:%s:cmake_section[collapsed=true]\r\e[0KRunning CMake" "$( date +%s)"
    - CFLAGS=-Wl,-rpath=$(pwd)/run CXXFLAGS=-Wl,-rpath=$(pwd)/run cmake -GNinja -DENABLE_CCACHE=ON $CMAKE_ARGS ..
    - printf "\e[0Ksection_end:%s:cmake_section\r\e[0K" "$( date +%s)"
    - ninja
    - ninja install
  after_script:
    # The cache should be large enough to be useful but it shouldn't take
    # too long to restore+save each run.
    - ccache --max-size $( du --summarize --block-size=1M "$CI_PROJECT_DIR/build" | awk '{printf ("%dM", $1 * 1.5)}' )

# Common settings for RPM package builds. Consumes the tarball produced
# by the 'Source Package' job.
.build-rpm:
  extends: .build-linux
  rules: !reference [.if-2x-daily-schedule]
  before_script:
    # Hack to let ninja make full use of the system on Fedora and Rocky.
    - export RPM_BUILD_NCPUS=$(( $( getconf _NPROCESSORS_ONLN ) + 2 ))
    - git config --global user.email "you@example.com"
    - git config --global user.name "Your Name"
    - mkdir build
    - cd build
    - ../tools/make-version.py --set-release ..
    - mv -v ../wireshark-*.tar.* .
  artifacts:
    paths:
      - build/packaging/rpm/RPMS
    expire_in: 3 days
  needs:
    - 'Source Package'

# Common settings for RPM package install tests.
.test-rpm:
  extends: .test-linux
  rules: !reference [.if-2x-daily-schedule]

# Common settings for Windows builds: sets up the MSVC environment by
# importing vcvars64.bat's variables into PowerShell.
.build-windows:
  stage: build
  retry: 1
  before_script:
    - if (-Not (Test-Path C:\Development)) { New-Item -Path C:\Development -ItemType "directory" }
    - $env:WIRESHARK_BASE_DIR = "C:\Development"
    - $env:Configuration = "RelWithDebInfo"
    - $env:Path += ";C:\Program Files\CMake\bin"
    - $env:CMAKE_PREFIX_PATH = "C:\qt\6.2.4\msvc2019_64"
    # https://help.appveyor.com/discussions/questions/18777-how-to-use-vcvars64bat-from-powershell
    - cmd.exe /c "call `"C:\Program Files\Microsoft Visual Studio\2022\Community\VC\Auxiliary\Build\vcvars64.bat`" && set > %temp%\vcvars.txt"
    - Get-Content "$env:temp\vcvars.txt" | Foreach-Object { if ($_ -match "^(.*?)=(.*)$") { Set-Content "env:\$($matches[1])" $matches[2] } }
    # Testing / debugging only.
    # - cmd.exe /c "set CI_PIPELINE_SOURCE"
    # - cmd.exe /c "set CI_PROJECT_URL"
    #- dir c:\
    #- dir c:\qt
    #- $env:path.split(";")
    #- cmd.exe /c "set"
    #- Get-Location
    - mkdir build
    - cd build
  needs: []

# macOS runners are still beta:
# https://about.gitlab.com/blog/2021/08/23/build-cloud-for-macos-beta/
# https://docs.gitlab.com/ee/ci/runners/saas/macos/environment.html#vm-images
# https://gitlab.com/gitlab-org/ci-cd/shared-runners/images/macstadium/orka/-/blob/main/toolchain/monterey.yml
.build-macos:
  stage: build
  tags: [ saas-macos-medium-m1 ]
  image: macos-12-xcode-14 # https://docs.gitlab.com/ee/ci/runners/saas/macos/environment.html
  retry: 1
  # https://gould.cx/ted/blog/2017/06/10/ccache-for-Gitlab-CI/
  cache:
    key: ${CI_JOB_NAME}-master
    paths:
      - ccache/
  variables:
    HOMEBREW_COLOR: "1"
    HOMEBREW_DISPLAY_INSTALL_TIMES: "1"
    HOMEBREW_NO_AUTO_UPDATE: "1"
    HOMEBREW_NO_INSTALL_CLEANUP: "1"
    HOMEBREW_NO_INSTALL_UPGRADE: "1"
    HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: "1"
  before_script:
    # At the time of this writing (January 2023), the macOS SaaS builder has the following PATH:
    # /Users/gitlab/.asdf/shims:/Users/gitlab/.asdf/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/Library/Apple/usr/bin
    # The suite_clopts.case_tshark_extcap.test_tshark_extcap_interfaces test will fail because
    # it sets an alternate HOME, which results in asdf returning
    # unknown command: python3. Perhaps you have to reshim?
    # Make sure /usr/local/bin is first in order to work around asdf.
    - export PATH=/usr/local/bin:$PATH
    - printf "\e[0Ksection_start:%s:brew_section[collapsed=true]\r\e[0KInstalling prerequisites" "$( date +%s)"
    - ./tools/macos-setup-brew.sh --install-optional
    - printf "\e[0Ksection_end:%s:brew_section\r\e[0K" "$( date +%s)"
    - export PYTEST_ADDOPTS=--skip-missing-programs=dumpcap,rawshark
    - mkdir -p ccache
    - ccache --show-stats
    - NUM_COMMITS=$(curl $CI_API_V4_URL/projects/$CI_PROJECT_ID/merge_requests/$CI_MERGE_REQUEST_IID/commits | jq length)
    - echo "$NUM_COMMITS commit(s) in this MR"
    - mkdir build
    - cd build
  script:
    - printf "\e[0Ksection_start:%s:cmake_section[collapsed=true]\r\e[0KRunning CMake" "$( date +%s)"
    - cmake -G Ninja -DENABLE_CCACHE=ON -DTEST_EXTRA_ARGS=--disable-capture ..
    - printf "\e[0Ksection_end:%s:cmake_section\r\e[0K" "$( date +%s)"
    - ninja
    - ninja test-programs
    - pytest
  after_script:
    # The cache should be large enough to be useful but it shouldn't take
    # too long to restore+save each run.
    - ccache --max-size $( gdu --summarize --block-size=1M "$CI_PROJECT_DIR/build" | awk '{printf ("%dM", $1 * 1.5)}' )
  needs: []

# Create the release source tarball; other package jobs depend on it.
Source Package:
  extends: .build-ubuntu
  stage: .pre
  rules:
    - !reference [.if-merged]
    - !reference [.if-2x-daily-schedule]
  script:
    - ../tools/make-version.py --set-release ..
    - printf "\e[0Ksection_start:%s:cmake_section[collapsed=true]\r\e[0KRunning CMake" "$( date +%s)"
    - cmake -G Ninja $CMAKE_ARGS ..
    - printf "\e[0Ksection_end:%s:cmake_section\r\e[0K" "$( date +%s)"
    - cd $CI_PROJECT_DIR
    - build/packaging/source/git-export-release.sh -d .
  after_script:
    - if [ -n "$SCHEDULE_TYPE" ] ; then exit 0 ; fi
    # - ccache --max-size $( du --summarize --block-size=1M "$CI_PROJECT_DIR/build" | awk '{printf ("%dM", $1 * 1.5)}' )
    - stat --format="%n %s bytes" wireshark-*.tar.*
    - for digest in sha512 sha256 sha1 ; do openssl $digest wireshark-*.tar.* ; done
    # This will break if we produce multiple tarballs, which is arguably a good thing.
    - if [ -n "$S3_DESTINATION_DIST" ] ; then aws s3 cp wireshark-*.tar.* "$S3_DESTINATION_DIST/" ; fi
  artifacts:
    paths:
      - wireshark-*.tar.*

# Job to generate packages for Debian stable
Debian Stable APT Package:
  extends: .build-linux
  rules: !reference [.if-2x-daily-schedule]
  image: registry.gitlab.com/wireshark/wireshark-containers/debian-stable-dev
  script:
    - ln --symbolic --no-dereference --force packaging/debian
    - tools/make-version.py --set-release .
    # Shared GitLab runners limit the log size to 4M, so reduce verbosity. See
    # https://gitlab.com/gitlab-com/support-forum/issues/2790
    - export DEB_BUILD_OPTIONS="nocheck parallel=$(( $(getconf _NPROCESSORS_ONLN) + 2 ))"
    - export DH_QUIET=1
    - export MAKEFLAGS=--silent
    # Ignore changed symbols (on development branch).
    - export DPKG_GENSYMBOLS_CHECK_LEVEL=0
    - CC=/usr/lib/ccache/gcc CXX=/usr/lib/ccache/g++ dpkg-buildpackage -b --no-sign -jauto -zfast
    - lintian --suppress-tags library-not-linked-against-libc --display-experimental --display-info --pedantic --profile debian
    - mkdir debian-packages
    - mv ../*.deb debian-packages/
  artifacts:
    paths:
      - debian-packages/*.deb
    expire_in: 3 days

# Install the Debian packages built above and sanity-check tshark.
Debian Stable APT Test:
  extends: .test-linux
  rules: !reference [.if-2x-daily-schedule]
  image: registry.gitlab.com/wireshark/wireshark-containers/debian-stable-dev
  stage: test
  script:
    - DEBIAN_FRONTEND=noninteractive apt-get install ./debian-packages/*.deb -y
    - tshark --version
  variables:
    GIT_STRATEGY: none
  needs: [ 'Debian Stable APT Package' ]

Fedora RPM Package:
  extends: .build-rpm
  image: registry.gitlab.com/wireshark/wireshark-containers/fedora-dev
  script:
    # Shared GitLab runners limit the log size to 4M, so reduce verbosity. See
    # https://gitlab.com/gitlab-com/support-forum/issues/2790
    - export FORCE_CMAKE_NINJA_NON_VERBOSE=1
    - printf "\e[0Ksection_start:%s:cmake_section[collapsed=true]\r\e[0KRunning CMake" "$( date +%s)"
    - cmake3 -G Ninja ..
    - printf "\e[0Ksection_end:%s:cmake_section\r\e[0K" "$( date +%s)"
    - ninja wireshark_rpm

# Fedora RPM Test:
#   extends: .test-rpm
#   image: fedora
#   script:
#     - dnf install -y build/packaging/rpm/RPMS/x86_64/*.rpm
#     - tshark --version
#   needs: [ 'Fedora RPM Package' ]

# Cross-build the NSIS installer with MinGW-w64 in a Linux container.
Windows MinGW-w64 Package:
  stage: build
  image: registry.gitlab.com/wireshark/wireshark-containers/mingw-dev
  rules: !reference [.if-merged]
  tags:
    - docker
  cache:
    # XXX Use ${CI_JOB_NAME}-${CI_MERGE_REQUEST_TARGET_BRANCH_NAME} instead?
    key: ${CI_JOB_NAME}-master
    paths:
      - ccache/
  before_script:
    - mkdir -p ccache
    - ccache --show-stats
    - mkdir build
    - cd build
  script:
    - mingw64-cmake -G Ninja -DENABLE_CCACHE=Yes -DFETCH_lua=Yes ..
    - ninja
    - ninja user_guide_html
    - ninja wireshark_nsis_prep
    - ninja wireshark_nsis
  after_script:
    # The cache should be large enough to be useful but it shouldn't take
    # too long to restore+save each run.
    - ccache --max-size $( du --summarize --block-size=1M "$CI_PROJECT_DIR/build" | awk '{printf ("%dM", $1 * 1.5)}' )
  artifacts:
    paths:
      - build/packaging/nsis/wireshark-*.exe
    expire_in: 3 days
  needs: []

openSUSE 15.4 RPM Package:
  extends: .build-rpm
  image: registry.gitlab.com/wireshark/wireshark-containers/opensuse-15.4-dev
  script:
    - printf "\e[0Ksection_start:%s:cmake_section[collapsed=true]\r\e[0KRunning CMake" "$( date +%s)"
    - cmake -G Ninja -DUSE_qt6=OFF ..
    - printf "\e[0Ksection_end:%s:cmake_section\r\e[0K" "$( date +%s)"
    - ninja wireshark_rpm

openSUSE 15.4 RPM Test:
  extends: .test-rpm
  image: registry.gitlab.com/wireshark/wireshark-containers/opensuse-15.4-dev
  script:
    - zypper --no-gpg-checks --no-remote install -y build/packaging/rpm/RPMS/x86_64/*.rpm
    - tshark --version
  needs: [ 'openSUSE 15.4 RPM Package' ]

Rocky Linux 9 RPM Package:
  extends: .build-rpm
  image: registry.gitlab.com/wireshark/wireshark-containers/rockylinux-9-dev
  script:
    - printf "\e[0Ksection_start:%s:cmake_section[collapsed=true]\r\e[0KRunning CMake" "$( date +%s)"
    - cmake -G Ninja -DUSE_qt6=OFF ..
    - printf "\e[0Ksection_end:%s:cmake_section\r\e[0K" "$( date +%s)"
    - ninja wireshark_rpm

Rocky Linux 9 RPM Test:
  extends: .test-rpm
  image: registry.gitlab.com/wireshark/wireshark-containers/rockylinux-9-dev
  script:
    - dnf --nogpgcheck localinstall -y build/packaging/rpm/RPMS/x86_64/*.rpm
    - tshark --version
  needs: [ 'Rocky Linux 9 RPM Package' ]

Ubuntu APT Package:
  extends: .build-ubuntu
  rules: !reference [.if-2x-daily-schedule]
  script:
    # build-ubuntu puts us in `build`.
    - cd ..
    - ln --symbolic --no-dereference --force packaging/debian
    # Ignore changed symbols (on development branch).
    - export DPKG_GENSYMBOLS_CHECK_LEVEL=0
    - CC=/usr/lib/ccache/gcc CXX=/usr/lib/ccache/g++ MAKE=ninja dpkg-buildpackage -us -uc -rfakeroot -jauto -zfast
    - mkdir ubuntu-packages
    - mv ../*.deb ubuntu-packages/
  after_script:
    # dpkg-buildpackage builds in obj-<triplet>, so we need to override
    # .build-ubuntu. We also build more stuff, so decrease our multiplier.
    - ccache --max-size $( du --summarize --block-size=1M --total "$CI_PROJECT_DIR"/obj-* | awk '/total$/ {printf ("%dM", $1 * 1.25)}' )
  artifacts:
    paths:
      - ubuntu-packages/*.deb
    expire_in: 3 days

Ubuntu APT Test:
  extends: .test-linux
  rules: !reference [.if-2x-daily-schedule]
  image: registry.gitlab.com/wireshark/wireshark-containers/ubuntu-dev
  stage: test
  script:
    - DEBIAN_FRONTEND=noninteractive apt-get install ./ubuntu-packages/*.deb -y
    - tshark --version
  variables:
    GIT_STRATEGY: none
  needs: [ 'Ubuntu APT Package' ]

# Build, sign, and upload the full set of Windows x64 installers
# (NSIS, WiX/MSI, PortableApps, PDB zip).
Windows x64 Package:
  extends: .build-windows
  rules: !reference [.if-w-w-only-merged]
  tags:
    - wireshark-windows-x64-package
  before_script:
    - $env:WIRESHARK_BASE_DIR = "C:\Development"
    - $env:Configuration = "RelWithDebInfo"
    - $env:Path += ";C:\Program Files\Amazon\AWSCLIV2"
    - $env:CMAKE_PREFIX_PATH = "C:\Qt\6.5.2\msvc2019_64"
    # https://help.appveyor.com/discussions/questions/18777-how-to-use-vcvars64bat-from-powershell
    - cmd.exe /c "call `"C:\Program Files\Microsoft Visual Studio\2022\Community\VC\Auxiliary\Build\vcvars64.bat`" && set > %temp%\vcvars.txt"
    - Get-Content "$env:temp\vcvars.txt" | Foreach-Object { if ($_ -match "^(.*?)=(.*)$") { Set-Content "env:\$($matches[1])" $matches[2] } }
    - mkdir build
    - cd build
  script:
    - C:\Windows\py.exe ..\tools\make-version.py --set-release ..
    - cmake -G "Visual Studio 17 2022" -A x64 -DENABLE_LTO=off -DENABLE_SIGNED_NSIS=on ..
    - msbuild /verbosity:minimal "/consoleloggerparameters:PerformanceSummary;NoSummary" /maxcpucount Wireshark.sln
    - msbuild /verbosity:minimal /maxcpucount test-programs.vcxproj
    - msbuild /verbosity:minimal /maxcpucount wireshark_nsis_prep.vcxproj
    - msbuild /verbosity:minimal /maxcpucount wireshark_wix_prep.vcxproj
    - C:\gitlab-builds\bin\sign-files.ps1 -Recurse -Path run\RelWithDebInfo
    - msbuild /verbosity:minimal wireshark_nsis.vcxproj
    # No need for explicit signing of NSIS installer here. The signing is done by makensis.
    - msbuild /verbosity:minimal wireshark_wix.vcxproj
    - C:\gitlab-builds\bin\sign-files.ps1 -Path packaging\wix\Wireshark-*.msi
    - msbuild /verbosity:minimal wireshark_portableapps.vcxproj
    - C:\gitlab-builds\bin\sign-files.ps1 -Path packaging\portableapps\WiresharkPortable*.exe
    - $plugins = Get-ChildItem run\RelWithDebInfo\plugins\*\*.dll ; signtool verify /q /pa /all run\RelWithDebInfo\*.exe run\RelWithDebInfo\extcap\*.exe $plugins run\RelWithDebInfo\libwireshark.dll run\RelWithDebInfo\libwiretap.dll run\RelWithDebInfo\libwsutil.dll packaging\nsis\Wireshark-*-x64.exe packaging\wix\Wireshark-*-x64.msi packaging\portableapps\WiresharkPortable??_*.paf.exe
    - msbuild /verbosity:minimal pdb_zip_package.vcxproj
    - C:\gitlab-builds\bin\mse-scan.ps1
    - $packages = Get-ChildItem "packaging\nsis\Wireshark-*-x64.exe", "packaging\wix\Wireshark-*-x64.msi", "packaging\portableapps\WiresharkPortable??_*.paf.exe", "Wireshark-pdb-*x64.zip"
    - foreach ($package in $packages) { Write-Host $package.name $package.length "bytes" }
    - foreach ($package in $packages) { certutil -hashfile $package SHA256 }
    - |
      if (Test-Path env:S3_DESTINATION_WINDOWS_X64) {
        foreach ($package in $packages) {
          aws s3 cp "$package" "$env:S3_DESTINATION_WINDOWS_X64/"
        }
      }
    - C:\Windows\py.exe -m pytest

# Build, sign, and upload the Windows Arm64 installer and PDB zip.
Windows Arm64 Package:
  extends: .build-windows
  rules: !reference [.if-w-w-only-merged]
  tags:
    - wireshark-windows-arm64-package
  before_script:
    - $env:WIRESHARK_BASE_DIR = "C:\Development"
    - $env:Configuration = "RelWithDebInfo"
    #- $env:Path += ";C:\Program Files\Amazon\AWSCLIV2"
    - $env:CMAKE_PREFIX_PATH = "C:\Qt\6.5.2\msvc2019_arm64"
    # https://help.appveyor.com/discussions/questions/18777-how-to-use-vcvars64bat-from-powershell
    - cmd.exe /c "call `"C:\Program Files\Microsoft Visual Studio\2022\Community\VC\Auxiliary\Build\vcvarsarm64.bat`" && set > %temp%\vcvars.txt"
    - Get-Content "$env:temp\vcvars.txt" | Foreach-Object { if ($_ -match "^(.*?)=(.*)$") { Set-Content "env:\$($matches[1])" $matches[2] } }
    - mkdir build
    - cd build
  script:
    - C:\Windows\py.exe ..\tools\make-version.py --set-release ..
    - cmake -G "Visual Studio 17 2022" -A arm64 -DENABLE_LTO=off -DENABLE_SIGNED_NSIS=on ..
    - msbuild /verbosity:minimal "/consoleloggerparameters:PerformanceSummary;NoSummary" /maxcpucount Wireshark.sln
    - msbuild /verbosity:minimal /maxcpucount test-programs.vcxproj
    - msbuild /verbosity:minimal /maxcpucount wireshark_nsis_prep.vcxproj
    - C:\gitlab-builds\bin\sign-files.ps1 -Recurse -Path run\RelWithDebInfo
    - msbuild /verbosity:minimal wireshark_nsis.vcxproj
    - $plugins = Get-ChildItem run\RelWithDebInfo\plugins\*\*.dll ; signtool verify /q /pa /all run\RelWithDebInfo\*.exe run\RelWithDebInfo\extcap\*.exe $plugins run\RelWithDebInfo\libwireshark.dll run\RelWithDebInfo\libwiretap.dll run\RelWithDebInfo\libwsutil.dll packaging\nsis\Wireshark-*-arm64.exe
    - msbuild /verbosity:minimal pdb_zip_package.vcxproj
    - C:\gitlab-builds\bin\mse-scan.ps1
    - $packages = Get-ChildItem "packaging\nsis\Wireshark-*-arm64.exe", "Wireshark-pdb-*arm64.zip"
    - foreach ($package in $packages) { Write-Host $package.name $package.length "bytes" }
    - foreach ($package in $packages) { certutil -hashfile $package SHA256 }
    - |
      if (Test-Path env:S3_DESTINATION_WINDOWS_ARM64) {
        foreach ($package in $packages) {
          py.exe "C:\Program Files\Python311-arm64\Scripts\aws" s3 cp "$package" "$env:S3_DESTINATION_WINDOWS_ARM64/"
        }
      }
    - C:\Windows\py.exe -m pytest

# Build, notarize, and upload the Apple Silicon disk images.
macOS Arm Package:
  stage: build
  rules: !reference [.if-w-w-only-merged]
  variables:
    CODE_SIGN_IDENTITY: "Wireshark Foundation, Inc."
  tags:
    - wireshark-macos-arm-package
  retry: 1
  script:
    - export CMAKE_PREFIX_PATH=/usr/local/Qt-6.2.4
    - tools/make-version.py --set-release .
    - mkdir build
    - cd build
    - printf "\e[0Ksection_start:%s:cmake_section[collapsed=true]\r\e[0KRunning CMake" "$( date +%s)"
    - cmake -DENABLE_CCACHE=ON -DCMAKE_APPLE_SILICON_PROCESSOR=arm64 -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0 -DCMAKE_OSX_ARCHITECTURES=arm64 -DTEST_EXTRA_ARGS=--enable-release -G Ninja ..
    - printf "\e[0Ksection_end:%s:cmake_section\r\e[0K" "$( date +%s)"
    - ninja
    - package-prep
    - ninja wireshark_dmg
    - cd run
    - notarize-build
    - stat -f "%N %z bytes" Wireshark*Arm*.dmg
    - for digest in sha512 sha256 sha1 ; do openssl $digest Wireshark*Arm*.dmg ; done
    - |
      if [ -n "$S3_DESTINATION_MACOS_ARM64" ] ; then
        aws s3 cp Wireshark?[1-9]*Arm*.dmg "$S3_DESTINATION_MACOS_ARM64/"
        aws s3 cp Wireshark?dSYM*Arm*.dmg "$S3_DESTINATION_MACOS_ARM64/"
      fi
    - cd ..
    - ninja test-programs
    - python3 -m pytest
  needs: []

# Build, notarize, and upload the Intel macOS disk images.
macOS Intel Package:
  stage: build
  rules: !reference [.if-w-w-only-merged]
  variables:
    CODE_SIGN_IDENTITY: "Wireshark Foundation, Inc."
  tags:
    - wireshark-macos-intel-package
  retry: 1
  script:
    - export CMAKE_PREFIX_PATH=/usr/local/Qt-6.2.4
    - export PATH="$PATH:$HOME/bin"
    - tools/make-version.py --set-release .
    - mkdir build
    - cd build
    - printf "\e[0Ksection_start:%s:cmake_section[collapsed=true]\r\e[0KRunning CMake" "$( date +%s)"
    - cmake -DENABLE_CCACHE=ON -DCMAKE_OSX_DEPLOYMENT_TARGET=10.14 -DCMAKE_OSX_SYSROOT=macosx10.15 -DTEST_EXTRA_ARGS=--enable-release -G Ninja ..
    - printf "\e[0Ksection_end:%s:cmake_section\r\e[0K" "$( date +%s)"
    - ninja
    - package-prep
    - ninja wireshark_dmg
    - cd run
    - notarize-build
    - stat -f "%N %z bytes" Wireshark*Intel*.dmg
    - for digest in sha512 sha256 sha1 ; do openssl $digest Wireshark*Intel*.dmg ; done
    - |
      if [ -n "$S3_DESTINATION_MACOS_INTEL64" ] ; then
        aws s3 cp Wireshark?[1-9]*Intel*.dmg "$S3_DESTINATION_MACOS_INTEL64/"
        aws s3 cp Wireshark?dSYM*Intel*.dmg "$S3_DESTINATION_MACOS_INTEL64/"
      fi
    - cd ..
    - ninja test-programs
    - python3 -m pytest
  needs: []

# Build the User's Guide and Developer's Guide
Documentation:
  extends: .build-linux
  image: registry.gitlab.com/wireshark/wireshark-containers/ubuntu-dev
  rules:
    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_PROJECT_URL =~ /.*gitlab.com\/wireshark\/wireshark/'
      changes:
        - "docbook/**/*"
        - "epan/wslua/**/*"
    - if: '$CI_PIPELINE_SOURCE == "push"'
      when: manual
      allow_failure: true
  script:
    # XXX We might want to move this to wireshark-ubuntu-dev or debian-setup.sh.
    - DEBIAN_FRONTEND=noninteractive apt-get update
    - DEBIAN_FRONTEND=noninteractive apt-get --yes install ruby-coderay ruby-asciidoctor-pdf
    - NOKOGIRI_USE_SYSTEM_LIBRARIES=1 gem install asciidoctor-epub3
    - mkdir build
    - cd build
    - printf "\e[0Ksection_start:%s:cmake_section[collapsed=true]\r\e[0KRunning CMake" "$( date +%s)"
    - cmake -GNinja ..
    - printf "\e[0Ksection_end:%s:cmake_section\r\e[0K" "$( date +%s)"
    - ninja all_guides
    - cd docbook
    - for HTML_DIR in wsug_html wsug_html_chunked wsdg_html wsdg_html_chunked ; do zip -9 -r "$HTML_DIR.zip" "$HTML_DIR" ; done
  after_script:
    - mv -v build/docbook/ws[ud]g_html{,_chunked}.zip .
    - mv -v build/docbook/Wireshark*Guide.{epub,pdf} .
    - |
      if [ -n "$S3_DESTINATION_DOCS" ] ; then
        for DOC_FILE in ws[ud]g_html{,_chunked}.zip Wireshark*Guide.{epub,pdf} ; do
          aws s3 cp "$DOC_FILE" "$S3_DESTINATION_DOCS/"
        done
      fi
  artifacts:
    paths:
      - wsug_html.zip
      - wsug_html_chunked.zip
      - wsdg_html.zip
      - wsdg_html_chunked.zip
      - "Wireshark User's Guide.pdf"
      - "Wireshark Developer's Guide.pdf"
      - "Wireshark User's Guide.epub"
      - "Wireshark Developer's Guide.epub"
  needs: []


# https://docs.gitlab.com/ee/user/gitlab_com/index.html#linux-shared-runners

# Run pre-commit, commit-message, and license checks on merge requests;
# most other MR jobs depend on this one.
Commit Check:
  extends: .build-ubuntu
  rules: !reference [.if-merge-request]
  script:
    # build-ubuntu puts us in `build`.
    - cd ..
    - git status
    - bash ./tools/pre-commit "${CI_COMMIT_SHA}~$NUM_COMMITS"
    - tools/validate-commit.py
    - python3 tools/checklicenses.py

GCC Release Build:
  extends: .build-ubuntu
  rules: !reference [.if-merge-request]
  needs: [ 'Commit Check' ]
  script:
    # build-ubuntu puts us in `build`.
    - printf "\e[0Ksection_start:%s:cmake_section[collapsed=true]\r\e[0KRunning CMake" "$( date +%s)"
    # Test release build.
    - CC=gcc CXX=g++ cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_EXPORT_COMPILE_COMMANDS=on -DENABLE_CCACHE=ON -DENABLE_WERROR=ON -G Ninja ..
    - printf "\e[0Ksection_end:%s:cmake_section\r\e[0K" "$( date +%s)"
    - script --command ninja --flush --quiet --return ../gcc_report.txt
    - ansi2html < ../gcc_report.txt > ../gcc_report.html
    - ninja test-programs
    - chown -R user .
    - if [ -f run/dumpcap ]; then setcap cap_net_raw,cap_net_admin+eip run/dumpcap; fi
    - if [ -f run/dumpcap ]; then su user -c "run/dumpcap -D" ; fi
    - su user -c pytest-3
    # Test CMake install code and CPack config code. Select any one of the archive generators.
    - ninja user_guide_html
    - ninja developer_guide_html
    - cpack -G TZST .
    - ls wireshark-*.tar.zst{,.sha256}
  artifacts:
    paths:
      - gcc_report.html

# Clang build plus static/consistency checks (cppcheck, clang analyzer,
# regenerated ASN.1/PIDL dissectors, checkAPI, shellcheck, item checks).
Clang + Code Checks:
  extends: .build-ubuntu
  rules: !reference [.if-merge-request]
  variables:
    CC: "clang-$CLANG_VERSION"
    CXX: "clang++-$CLANG_VERSION"
  needs: [ 'Commit Check' ]
  script:
    # build-ubuntu puts us in `build`.
    - cd ..
    - mkdir cppcheck
    - ./tools/cppcheck/cppcheck.sh -l $NUM_COMMITS | tee cppcheck/cppcheck_report.txt
    - if [[ -s "cppcheck/cppcheck_report.txt" ]]; then ./tools/cppcheck/cppcheck.sh -l $NUM_COMMITS -x > cppcheck/cppcheck_report.xml ; fi
    - if [[ -s "cppcheck/cppcheck_report.txt" ]]; then cppcheck-htmlreport --file cppcheck/cppcheck_report.xml --report-dir cppcheck ; fi
    - cd build
    - printf "\e[0Ksection_start:%s:cmake_section[collapsed=true]\r\e[0KRunning CMake" "$( date +%s)"
    # We don't have an "All options" job, so build fuzzshark and tfshark here.
    - cmake -DENABLE_CHECKHF_CONFLICT=on -DCMAKE_EXPORT_COMPILE_COMMANDS=on -DBUILD_fuzzshark=ON -DBUILD_tfshark=On -DBUILD_logray=ON -DENABLE_CCACHE=ON -DENABLE_WERROR=ON -G Ninja ..
    - printf "\e[0Ksection_end:%s:cmake_section\r\e[0K" "$( date +%s)"
    - printf "\e[0Ksection_start:%s:asn1_section[collapsed=true]\r\e[0KRegenerating ASN.1 dissectors" "$( date +%s)"
    - ninja asn1
    - git diff --exit-code ${CI_COMMIT_SHA} ..
    - printf "\e[0Ksection_end:%s:asn1_section\r\e[0K" "$( date +%s)"
    - printf "\e[0Ksection_start:%s:pidl_section[collapsed=true]\r\e[0KRegenerating PIDL dissectors" "$( date +%s)"
    - ninja pidl-dissectors
    - git diff --exit-code ${CI_COMMIT_SHA} ..
    - printf "\e[0Ksection_end:%s:pidl_section\r\e[0K" "$( date +%s)"
    - mkdir ../html
    - script --command ninja --flush --quiet --return ../tmp_clang_report.txt
    - ansi2txt < ../tmp_clang_report.txt > ../clang_report.txt
    - ansi2html < ../tmp_clang_report.txt > ../html/clang_report.html
    - ./run/tshark -v 2> >(tee ../checkhf_conflict.txt)
    - ../tools/validate-clang-check.sh -c $CLANG_VERSION 2> >(tee ../tmp_clang_analyzer_check.txt)
    - ansi2txt < ../tmp_clang_analyzer_check.txt > ../clang_analyzer_check.txt
    - ansi2html < ../tmp_clang_analyzer_check.txt > ../html/clang_analyzer_check.html
    - ninja checkAPI
    - ninja shellcheck
    - cd ..
    - ./tools/check_typed_item_calls.py --consecutive --label --mask --check-bitmask-fields --commits $NUM_COMMITS | tee item_calls_check.txt
    - ./tools/check_tfs.py --commits $NUM_COMMITS | tee tfs_check.txt
    - ./tools/check_val_to_str.py --commits $NUM_COMMITS | tee val_to_str_check.txt
  artifacts:
    paths:
      - clang_report.txt
      - clang_analyzer_check.txt
      - cppcheck
      - item_calls_check.txt
      - tfs_check.txt
      - val_to_str_check.txt
      - checkhf_conflict.txt
      - html/

# Build with every optional dependency disabled (except the ifdemo plugin).
No options:
  extends: .build-ubuntu
  rules: !reference [.if-merge-request]
  needs: [ 'Commit Check' ]
  script: |
    cmake -GNinja -DENABLE_CCACHE=ON \
      -DENABLE_BROTLI=OFF -DENABLE_CAP=OFF -DENABLE_CHECKHF_CONFLICT=ON -DENABLE_GNUTLS=OFF \
      -DENABLE_KERBEROS=OFF -DENABLE_LIBXML2=OFF -DENABLE_ILBC=OFF -DENABLE_LUA=OFF -DENABLE_LZ4=OFF -DENABLE_MINIZIP=OFF \
      -DENABLE_NETLINK=OFF -DENABLE_NGHTTP2=OFF -DENABLE_PCAP=OFF -DENABLE_PLUGIN_IFDEMO=ON -DENABLE_PLUGINS=OFF \
      -DENABLE_SBC=OFF -DENABLE_SMI=OFF -DENABLE_SNAPPY=OFF -DENABLE_SPANDSP=OFF -DENABLE_ZLIB=OFF -DENABLE_ZSTD=OFF ..
    ninja

# Windows runners are still beta, at least technically:
# https://docs.gitlab.com/ee/user/gitlab_com/index.html#windows-shared-runners-beta
Windows Build:
  extends: .build-windows
  rules: !reference [.if-w-w-only-merge-request]
  tags:
    - wireshark-windows-merge-req
  needs: [ 'Commit Check' ]
  script:
    - cmake -G "Visual Studio 17 2022" -A x64 -DENABLE_LTO=off ..
    - msbuild /verbosity:minimal /maxcpucount Wireshark.sln
    - msbuild /verbosity:minimal /maxcpucount test-programs.vcxproj
    - C:\Windows\py.exe -m pytest

# Compile-check the Qt UI against Qt 5 (qtui only, no full build or tests).
Windows Qt5 Build:
  extends: .build-windows
  rules: !reference [.if-w-w-only-merge-request]
  tags:
    - wireshark-windows-merge-req
  needs: [ 'Commit Check' ]
  script:
    # Point CMake at the Qt 5 installation instead of the default Qt 6.
    - $env:CMAKE_PREFIX_PATH = "C:\qt\5.15.2\msvc2019_64"
    - cmake -G "Visual Studio 17 2022" -A x64 -DUSE_qt6=OFF -DENABLE_LTO=off ..
    - msbuild /verbosity:minimal /maxcpucount ui\qt\qtui.vcxproj
||
# macOS merge-request build; all steps come from the .build-macos template.
macOS Build:
  extends: .build-macos
  rules: !reference [.if-w-w-only-merge-request]
  needs: [ 'Commit Check' ]
||
# Adapted from https://www.synopsys.com/blogs/software-security/integrating-coverity-scan-with-gitlab-ci/
# and https://gitlab.gnome.org/GNOME/glib/-/blob/8f57a5b9/.gitlab-ci.yml#L481
Coverity GCC Scan:
  image: registry.gitlab.com/wireshark/wireshark-containers/ubuntu-dev
  rules:
    - if: '$CI_PIPELINE_SOURCE == "schedule" && $SCHEDULE_TYPE == "coverity-gcc"'
  stage: analysis
  needs: []
  variables:
    CC: gcc
    CXX: g++
    # cov-build doesn't handle GLIB_DEPRECATED_ENUMERATOR
    CFLAGS: '-DGLIB_DISABLE_DEPRECATION_WARNINGS'
    CXXFLAGS: '-DGLIB_DISABLE_DEPRECATION_WARNINGS'
  script:
    # Download the Coverity build tool and wrap a full Ninja build with it.
    - curl --output /tmp/cov-analysis-linux64.tar.gz --form project=$COVERITY_SCAN_PROJECT_NAME --form token=$COVERITY_SCAN_TOKEN https://scan.coverity.com/download/linux64
    - tar --directory=/tmp --extract --gzip --file /tmp/cov-analysis-linux64.tar.gz
    - mkdir build
    - cd build
    - cmake -G Ninja ..
    - /tmp/cov-analysis-linux64-*/bin/cov-build --return-emit-failures --dir ../cov-int ninja
    - cd ..
    - tar --create --gzip --file cov-int.tar.gz cov-int
    # Pass the artifact URL and version string to the Submit job below.
    - echo "export ARTIFACT_JOB_URL=$CI_JOB_URL" > job_environment_variables.sh
    - echo "export GIT_DESCRIPTION=$( git describe --tags )" >> job_environment_variables.sh
  artifacts:
    paths:
      - cov-int.tar.gz
      - job_environment_variables.sh
||
# Upload the GCC Coverity build produced by 'Coverity GCC Scan'.
Coverity GCC Submit:
  image: curlimages/curl
  rules:
    - if: '$CI_PIPELINE_SOURCE == "schedule" && $SCHEDULE_TYPE == "coverity-gcc"'
  stage: .post
  script:
    # Environment exported by the Scan job via its artifact.
    - . job_environment_variables.sh
    - echo $ARTIFACT_JOB_URL
    - echo $GIT_DESCRIPTION
    - curl --fail --data "project=$COVERITY_SCAN_PROJECT_NAME&token=$COVERITY_SCAN_TOKEN&email=$GITLAB_USER_EMAIL&url=$ARTIFACT_JOB_URL/artifacts/raw/cov-int.tar.gz&version=$GIT_DESCRIPTION&description=Ubuntu $GIT_DESCRIPTION $CI_COMMIT_REF_NAME:$CI_PIPELINE_ID" https://scan.coverity.com/builds
  needs: [ 'Coverity GCC Scan' ]
|
||
# Windows counterpart of 'Coverity GCC Scan'; recipes are PowerShell.
Coverity Visual C++ Scan:
  extends: .build-windows
  rules:
    - if: '$CI_PIPELINE_SOURCE == "schedule" && $SCHEDULE_TYPE == "coverity-visual-c++"'
  tags:
    - wireshark-windows-merge-req
  stage: analysis
  needs: []
  script:
    - $gitDescription = (( git describe --tags ) | Out-String).Trim()
    - C:\Windows\System32\curl --output $env:temp\cov-analysis-win64.zip --form project=$COVERITY_SCAN_PROJECT_NAME --form token=$COVERITY_SCAN_TOKEN https://scan.coverity.com/download/win64
    - C:\ProgramData\chocolatey\tools\7z x "$env:temp\cov-analysis-win64.zip" -y -r -o"$env:temp"
    - cmake -DTEST_EXTRA_ARGS=--enable-release -DENABLE_LTO=off -G "Visual Studio 17 2022" -A x64 ..
    # The extracted directory name carries the tool version, so find it.
    - $covAnalysisWin64 = (Get-ChildItem -Path $env:temp -Filter "cov-analysis-win64-*" -Directory)[0].FullName
    - Invoke-Expression "& $covAnalysisWin64\bin\cov-build.exe --return-emit-failures --dir ..\cov-int msbuild /verbosity:minimal `"/consoleloggerparameters:PerformanceSummary;NoSummary`" /maxcpucount:1 Wireshark.sln"
    - cd ..
    - C:\ProgramData\chocolatey\tools\7z a -tzip cov-int.zip cov-int
    # Pass the artifact URL and version string to the Submit job below.
    - '"export ARTIFACT_JOB_URL=$env:CI_JOB_URL" | Out-File -Encoding ascii job_environment_variables.sh'
    - '"export GIT_DESCRIPTION=$gitDescription" | Out-File -Encoding ascii -Append job_environment_variables.sh'
  artifacts:
    paths:
      - cov-int.zip
      - job_environment_variables.sh
|
||
# Upload the Visual C++ Coverity build produced by 'Coverity Visual C++ Scan'.
Coverity Visual C++ Submit:
  image: curlimages/curl
  rules:
    - if: '$CI_PIPELINE_SOURCE == "schedule" && $SCHEDULE_TYPE == "coverity-visual-c++"'
  stage: .post
  script:
    # The Scan job wrote this file on Windows; strip CRs before sourcing it.
    - sed -i -e 's/\r//' job_environment_variables.sh
    - . job_environment_variables.sh
    - echo $ARTIFACT_JOB_URL
    - echo $GIT_DESCRIPTION
    - curl --fail --data "project=$COVERITY_SCAN_PROJECT_NAME&token=$COVERITY_SCAN_TOKEN&email=$GITLAB_USER_EMAIL&url=$ARTIFACT_JOB_URL/artifacts/raw/cov-int.zip&version=$GIT_DESCRIPTION&description=Windows $GIT_DESCRIPTION $CI_COMMIT_REF_NAME:$CI_PIPELINE_ID" https://scan.coverity.com/builds
  needs: [ 'Coverity Visual C++ Scan' ]
|
||
# Run the Clang static analyzer over a full build and publish the HTML report.
Clang Static Analyzer:
  extends: .build-ubuntu
  rules: !reference [.if-daily-schedule]
  stage: analysis
  needs: []
  variables:
    CC: "clang-${CLANG_VERSION}"
    CXX: "clang++-${CLANG_VERSION}"
  script:
    - scan-build-${CLANG_VERSION} cmake -DCMAKE_BUILD_TYPE=Debug -DENABLE_WERROR=OFF -G Ninja ..
    - scan-build-${CLANG_VERSION} -o ../sbout ninja
    - cd ../sbout
    # scan-build writes its report into a date-stamped directory; rename it
    # to a predictable "scan-build-…" name before packaging.
    - RAW_DIR=$( find ../sbout -type d -name "20??-??-??-*" -printf "%P\n" | head )
    - SB_DIR="scan-build-$RAW_DIR"
    - mv "$RAW_DIR" "$SB_DIR"
    - if [ -d logs ] ; then mv logs $SB_DIR ; fi
    - chmod -R u=rwX,go=rX "$SB_DIR"
    - zip -9 -r "${SB_DIR}.zip" "$SB_DIR"
    - if [ -n "$S3_DESTINATION_ANALYSIS" ] ; then aws s3 cp "${SB_DIR}.zip" "$S3_DESTINATION_ANALYSIS/" ; fi
|
||
# Windows runners are still beta, at least technically:
# https://docs.gitlab.com/ee/user/gitlab_com/index.html#windows-shared-runners-beta
Visual Studio Code Analysis:
  extends: .build-windows
  tags:
    - wireshark-windows-dev
  rules:
    # The wireshark-windows-* tags are only available in wireshark/wireshark.
    - if: '$CI_PIPELINE_SOURCE == "schedule" && $SCHEDULE_TYPE == "daily" && $CI_PROJECT_URL =~ /.*gitlab.com\/wireshark\/wireshark/'
  script:
    # sarif-tools converts the MSVC analysis SARIF output to HTML.
    - py -m venv sarif-tools.venv
    - sarif-tools.venv\Scripts\pip.exe install sarif-tools
    - msiexec.exe /i https://awscli.amazonaws.com/AWSCLIV2.msi
    - $env:Path += ";C:\Program Files\Amazon\AWSCLIV2"
    # Exclude Qt and system headers from analysis.
    - $env:caexcludepath = "C:\Qt;$env:INCLUDE"
    - cmake -DENABLE_CODE_ANALYSIS=ON -G "Visual Studio 17 2022" -A x64 -DENABLE_LTO=off ..
    - msbuild /verbosity:minimal "/consoleloggerparameters:PerformanceSummary;NoSummary" /maxcpucount:2 Wireshark.sln
    - $report = "visual-c++-analyze-" + (Get-Date -format "yyyy-MM-dd") + ".html"
    - sarif-tools.venv\Scripts\sarif html --output $report
    - |
      if (Test-Path env:S3_DESTINATION_ANALYSIS) {
        aws s3 cp "$report" "$env:S3_DESTINATION_ANALYSIS/"
      }
|
||
# Build all doxygen docs
API Reference:
  extends: .build-ubuntu
  rules: !reference [.if-daily-schedule]
  script:
    - printf "\e[0Ksection_start:%s:cmake_section[collapsed=true]\r\e[0KRunning CMake" "$( date +%s)"
    - cmake -GNinja ..
    - printf "\e[0Ksection_end:%s:cmake_section\r\e[0K" "$( date +%s)"
    # Keep stdout and stderr separate: stderr (warnings/errors) goes to the
    # job log via tee, stdout is saved for the artifacts.
    - ninja wsar_html_zip 2>&1 > doxygen_output.txt | tee doxygen_errors.txt
  after_script:
    - mv build/wsar_html.zip .
    - mv build/doxygen_output.txt .
    - mv build/doxygen_errors.txt .
    - |
      if [ -n "$S3_DESTINATION_DOCS" ] ; then
        aws s3 cp wsar_html.zip "$S3_DESTINATION_DOCS/"
      fi
  artifacts:
    paths:
      - doxygen_errors.txt
      - doxygen_output.txt
      - wsar_html.zip
  needs: []
|
||
# Run several lines-of-code counters over the tree and regenerate the
# published "manuf" data file.
Code Lines and Data:
  extends: .build-ubuntu
  rules: !reference [.if-daily-schedule]
  stage: analysis
  variables:
    CLOC_OUT: /tmp/cloc.txt
    SCC_OUT: /tmp/scc.txt
    SLOC_OUT: /tmp/sloccount.txt
    TOKEI_OUT: /tmp/tokei.txt
  script:
    # sloccount and cloc come from apt; scc and tokei are fetched as
    # prebuilt release binaries.
    - DEBIAN_FRONTEND=noninteractive apt-get update
    - DEBIAN_FRONTEND=noninteractive apt-get --yes install sloccount cloc curl unzip
    - pushd /tmp
    - curl -L -O https://github.com/boyter/scc/releases/download/v3.0.0/scc-3.0.0-x86_64-unknown-linux.zip
    - unzip scc-3.0.0-x86_64-unknown-linux.zip
    - curl -L -O https://github.com/XAMPPRocky/tokei/releases/download/v12.1.2/tokei-x86_64-unknown-linux-gnu.tar.gz
    - tar -xf tokei-x86_64-unknown-linux-gnu.tar.gz
    - popd
    - printf "\e[0Ksection_start:%s:cmake_section[collapsed=true]\r\e[0KRunning CMake" "$( date +%s)"
    - cmake -G Ninja ..
    - printf "\e[0Ksection_end:%s:cmake_section\r\e[0K" "$( date +%s)"
    - ninja
    - cd ..
    - echo -n "cloc version:\ "
    - cloc --version
    - cloc --quiet . | tee $CLOC_OUT
    - /tmp/scc --version
    - /tmp/scc --not-match 'qt/.*.ts' . | tee $SCC_OUT
    - echo -n "SLOCCount version:\ "
    - sloccount --version
    - sloccount . | awk "/^Computing results/ { results=1 } { if (results) print }" | tee $SLOC_OUT
    - /tmp/tokei --version
    - /tmp/tokei --exclude 'qt/*.ts' . | tee $TOKEI_OUT
    # Write the manuf header, then append the table generated by tshark.
    - |
      cat > manuf <<FIN
      # This file was generated by TShark $(git describe --tags | sed -e 's/^v//') with the
      # command \`tshark -G manuf\`. Its canonical location is
      #
      # https://www.wireshark.org/download/automated/data/manuf
      #
      # The first column contains the MAC Address block (24, 28 or 36 bits wide,
      # per IEEE allocation sizes) and the second column contains the vendor name.
      #
      FIN
    - build/run/tshark -G manuf | sed -e 's,:00:00:00/24,,' >> manuf
    - |
      if [ -n "$S3_DESTINATION_ANALYSIS" ] ; then
        aws s3 cp "$CLOC_OUT" "$S3_DESTINATION_ANALYSIS/"
        aws s3 cp "$SCC_OUT" "$S3_DESTINATION_ANALYSIS/"
        aws s3 cp "$SLOC_OUT" "$S3_DESTINATION_ANALYSIS/"
        aws s3 cp "$TOKEI_OUT" "$S3_DESTINATION_ANALYSIS/"
      fi
    - |
      if [ -n "$S3_DESTINATION_DATA" ] ; then
        for DATA_FILE in manuf ; do
          aws s3 cp "$DATA_FILE" "$S3_DESTINATION_DATA/"
        done
      fi
  artifacts:
    paths:
      - manuf
  needs: []
|
||
# Fuzz TShark using ASAN and valgrind.
# Hidden template shared by the fuzz jobs below.
.fuzz-ubuntu:
  extends: .build-ubuntu
  retry: 0
  rules: !reference [.if-fuzz-schedule]
  tags:
    - wireshark-ubuntu-fuzz
  resource_group: fuzz-master
  variables:
    CC: "clang-$CLANG_VERSION"
    CXX: "clang++-$CLANG_VERSION"
    INSTALL_PREFIX: "$CI_PROJECT_DIR/_install"
    MIN_PLUGINS: 10
    MAX_PASSES: 15
  before_script:
    - DEBIAN_FRONTEND=noninteractive apt-get update
    # Use DPkg::options::="--force-overwrite" until
    # https://bugs.launchpad.net/ubuntu/+source/llvm-toolchain-15/+bug/2008755
    # https://github.com/llvm/llvm-project/issues/62104
    # are fixed.
    - DEBIAN_FRONTEND=noninteractive apt-get --yes --option DPkg::options::="--force-overwrite" install llvm-$CLANG_VERSION
    - mkdir -p ccache
    # Signal after_script, which runs in its own shell.
    - echo "export FUZZ_PASSED=true" > /tmp/fuzz_result.sh
    - mkdir /tmp/fuzz
    - mkdir build
    - cd build
  after_script:
    - . /tmp/fuzz_result.sh
    - if $FUZZ_PASSED ; then exit 0 ; fi
    - echo Fuzzing failed. Generating report.
    - FUZZ_CAPTURE=$( ls /tmp/fuzz/fuzz-*.pcap | head -n 1 )
    - FUZZ_ERRORS="/tmp/fuzz/$( basename "$FUZZ_CAPTURE" .pcap ).err"
    - printf "\nfuzz-test.sh stderr:\n" >> "$FUZZ_ERRORS"
    - cat fuzz-test.err >> "$FUZZ_ERRORS"
    - |
      if [ -n "$S3_DESTINATION_FUZZ" ] ; then
        aws s3 cp "$FUZZ_CAPTURE" "$S3_DESTINATION_FUZZ/"
        aws s3 cp "$FUZZ_ERRORS" "$S3_DESTINATION_FUZZ/"
      fi
    # The cache should be large enough to be useful but it shouldn't take
    # too long to restore+save each run.
    - ccache --max-size $( du --summarize --block-size=1M "$CI_PROJECT_DIR/build" | awk '{printf ("%dM", $1 * 1.5)}' )
|
||
# Fuzz the capture menagerie with an AddressSanitizer build.
ASan Menagerie Fuzz:
  extends: .fuzz-ubuntu
  stage: fuzz-asan
  variables:
    WIRESHARK_LOG_FATAL: "critical"
  script:
    - MAX_SECONDS=$(( 6 * 60 * 60 ))
    - printf "\e[0Ksection_start:%s:cmake_section[collapsed=true]\r\e[0KRunning CMake" "$( date +%s)"
    - cmake -G Ninja -DBUILD_wireshark=OFF -DCMAKE_BUILD_TYPE=Debug -DENABLE_ASAN=ON -DCMAKE_INSTALL_PREFIX=$INSTALL_PREFIX -DENABLE_CCACHE=ON -DENABLE_WERROR=Off ..
    - printf "\e[0Ksection_end:%s:cmake_section\r\e[0K" "$( date +%s)"
    - ninja
    - ninja install
    - cd ..
    # /var/menagerie contains captures harvested from wireshark.org's mailing list, wiki, issues, etc.
    # We have more captures than we can fuzz in $MAX_SECONDS, so we shuffle them each run.
    - ./tools/fuzz-test.sh -a -2 -P $MIN_PLUGINS -b $INSTALL_PREFIX/bin -d /tmp/fuzz -t $MAX_SECONDS $( shuf -e /var/menagerie/*/* ) 2> fuzz-test.err || echo "export FUZZ_PASSED=false" > /tmp/fuzz_result.sh
|
||
# Fuzz randomly generated packets with an AddressSanitizer build.
ASan randpkt Fuzz:
  extends: .fuzz-ubuntu
  stage: fuzz-randpkt
  variables:
    WIRESHARK_LOG_FATAL: "critical"
  script:
    # XXX Reuse fuzz-asan?
    - printf "\e[0Ksection_start:%s:cmake_section[collapsed=true]\r\e[0KRunning CMake" "$( date +%s)"
    - cmake -G Ninja -DBUILD_wireshark=OFF -DCMAKE_BUILD_TYPE=Debug -DENABLE_ASAN=ON -DCMAKE_INSTALL_PREFIX=$INSTALL_PREFIX -DENABLE_CCACHE=ON -DENABLE_WERROR=Off ..
    - printf "\e[0Ksection_end:%s:cmake_section\r\e[0K" "$( date +%s)"
    - ninja
    - ninja install
    - cd ..
    - ./tools/randpkt-test.sh -a -b $INSTALL_PREFIX/bin -d /tmp/fuzz -p $MAX_PASSES 2> fuzz-test.err || echo "export FUZZ_PASSED=false" > /tmp/fuzz_result.sh
  needs: [ 'ASan Menagerie Fuzz' ]
|
||
# Fuzz the capture menagerie under Valgrind (no ASan).
Valgrind Menagerie Fuzz:
  extends: .fuzz-ubuntu
  stage: fuzz-valgrind
  resource_group: fuzz-master-valgrind
  variables:
    # Use DWARF-4 debug info. Valgrind does not support Clang 14 with DWARF-5.
    # https://gitlab.com/wireshark/wireshark/-/issues/18191
    # https://www.mail-archive.com/valgrind-users@lists.sourceforge.net/msg07239.html
    CFLAGS: "-gdwarf-4"
    WIRESHARK_LOG_FATAL: "critical"
  script:
    - DEBIAN_FRONTEND=noninteractive apt-get update
    - DEBIAN_FRONTEND=noninteractive apt-get --yes install valgrind
    - MAX_SECONDS=$(( 3 * 60 * 60 ))
    - printf "\e[0Ksection_start:%s:cmake_section[collapsed=true]\r\e[0KRunning CMake" "$( date +%s)"
    - cmake -G Ninja -DBUILD_wireshark=OFF -DCMAKE_BUILD_TYPE=Debug -DENABLE_ASAN=OFF -DCMAKE_INSTALL_PREFIX=$INSTALL_PREFIX -DENABLE_CCACHE=ON -DENABLE_WERROR=Off ..
    - printf "\e[0Ksection_end:%s:cmake_section\r\e[0K" "$( date +%s)"
    - ninja
    - ninja install
    - cd ..
    # -g runs fuzz-test.sh under Valgrind instead of relying on sanitizers.
    - ./tools/fuzz-test.sh -g -P $MIN_PLUGINS -b $INSTALL_PREFIX/bin -d /tmp/fuzz -t $MAX_SECONDS $( shuf -e /var/menagerie/*/* ) 2> fuzz-test.err || echo "export FUZZ_PASSED=false" > /tmp/fuzz_result.sh
  needs: [ 'ASan randpkt Fuzz' ]
|
||
|
||
# Pull in GitLab's managed SAST jobs (see SAST_EXCLUDED_ANALYZERS above).
include:
  - template: Security/SAST.gitlab-ci.yml