Refactor Gitlab builds to use multiple stages (#3377)

* Remove unused jenkins stuff

* Install all dependencies in builder image

Instead of only target-specific dependencies.

* Use docker builder image for builds

* Optimize apt installations

* Move building of dependencies into separate stage

The build-depends-xxx jobs will create artifacts (depends/$HOST) which are
then pulled in by the build jobs with the help of "needs"

* Remove use of caches from develop branch

* Use gitlab specific extends instead of YAML anchors

* Move before_script of build_template into base_template

* Add hack for parallel installation of i686 and arm cross compilation

* Install python3-setuptools in builder image

* Remove unnecessary change-dir

* Use variables to pass BUILD_TARGET instead of relying on the job name

* Move integration tests into separate stage

* Don't use --quiet for integration tests on Gitlab
This commit is contained in:
Alexander Block 2020-03-27 22:58:51 +01:00 committed by GitHub
parent 3c90da86b3
commit ff6f391aea
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 210 additions and 379 deletions

View File

@ -3,67 +3,71 @@ image: "ubuntu:bionic"
variables:
DOCKER_DRIVER: overlay2
cache:
# Cache by branch/tag and job name
# Gitlab can't use caches from parent pipelines when doing the first build in a PR, so we use artifacts to copy
# caches into PRs
key: ${CI_COMMIT_REF_SLUG}-${CI_JOB_NAME}${CI_EXTERNAL_PULL_REQUEST_IID}
paths:
- $CI_PROJECT_DIR/cache
stages:
- builder-image
- build-depends
- build
- test
.build_template: &build_template
stage: build
builder-image:
stage: builder-image
image: docker:19.03.5
services:
- docker:19.03.5-dind
variables:
DOCKER_HOST: "tcp://docker:2375"
DOCKER_DRIVER: overlay2
DOCKER_TLS_CERTDIR: ""
before_script:
- export BUILD_TARGET="$CI_JOB_NAME"
- echo $CI_JOB_TOKEN | docker login -u gitlab-ci-token --password-stdin $CI_REGISTRY
script:
- cd ci
- docker pull $CI_REGISTRY_IMAGE:builder-$CI_COMMIT_REF_SLUG || true
- docker pull $CI_REGISTRY_IMAGE:builder-develop || true
- docker build --cache-from $CI_REGISTRY_IMAGE:builder-$CI_COMMIT_REF_SLUG --cache-from $CI_REGISTRY_IMAGE:builder-develop -t $CI_REGISTRY_IMAGE:builder-$CI_COMMIT_REF_SLUG -f Dockerfile.builder .
- docker push $CI_REGISTRY_IMAGE:builder-$CI_COMMIT_REF_SLUG
.build-depends-template:
stage: build-depends
image: $CI_REGISTRY_IMAGE:builder-$CI_COMMIT_REF_SLUG
variables:
SDK_URL: https://bitcoincore.org/depends-sources/sdks
OSX_SDK: "10.11"
MAKEJOBS: -j4
before_script:
- echo HOST=$HOST
- |
if [ "$HOST" = "x86_64-apple-darwin11" ]; then
echo "Downloading MacOS SDK"
mkdir -p depends/SDKs
mkdir -p depends/sdk-sources
if [ ! -f depends/sdk-sources/MacOSX${OSX_SDK}.sdk.tar.gz ]; then
curl --location --fail $SDK_URL/MacOSX${OSX_SDK}.sdk.tar.gz -o depends/sdk-sources/MacOSX${OSX_SDK}.sdk.tar.gz
fi
tar -C depends/SDKs -xf depends/sdk-sources/MacOSX${OSX_SDK}.sdk.tar.gz
fi
script:
- make $MAKEJOBS -C depends HOST=$HOST $DEP_OPTS
cache:
# Let all branches share the same cache, which is ok because the depends subsystem is able to handle this properly (it works with hashes of all scripts)
key: ${CI_JOB_NAME}
paths:
- $CI_PROJECT_DIR/depends/built
- $CI_PROJECT_DIR/depends/sdk-sources
artifacts:
name: depends
when: on_success
paths:
- $CI_PROJECT_DIR/depends/$HOST
- $CI_PROJECT_DIR/depends/SDKs
.base-template:
image: $CI_REGISTRY_IMAGE:builder-$CI_COMMIT_REF_SLUG
before_script:
- export CACHE_DIR=$CI_PROJECT_DIR/cache
- echo BUILD_TARGET=$BUILD_TARGET
- source ./ci/matrix.sh
# The ubuntu base image has apt configured to delete caches after each invocation, which is something that is not desirable for us
- rm /etc/apt/apt.conf.d/docker-clean
- apt-get update
- apt-get install -y wget unzip
# Init cache
- export CACHE_DIR=$CI_PROJECT_DIR/cache
- mkdir -p $CACHE_DIR
- |
if [ "$CI_COMMIT_REF_SLUG" != "develop" -a "$CI_COMMIT_TAG" == "" ]; then
if [ ! -d $CACHE_DIR/ccache ]; then
echo "Downloading cache from develop branch"
mkdir cache-artifact
cd cache-artifact
if wget --quiet -O cache-artifact.zip https://gitlab.com/$CI_PROJECT_NAMESPACE/$CI_PROJECT_NAME/-/jobs/artifacts/develop/download?job=$CI_JOB_NAME; then
unzip -q cache-artifact.zip
rm cache-artifact.zip
mv cache-artifact/* $CACHE_DIR/ || true
else
echo "Failed to download cache"
fi
cd ..
rm -rf cache-artifact
else
echo "Not touching cache (was initialized from previous build)"
fi
else
echo "Not touching cache (building develop branch or tag)"
fi
# Create missing cache dirs
- mkdir -p $CACHE_DIR/ccache && mkdir -p $CACHE_DIR/depends && mkdir -p $CACHE_DIR/sdk-sources && mkdir -p $CACHE_DIR/apt
# Keep this as it makes caching-related debugging easier
- ls -lah $CACHE_DIR && ls -lah $CACHE_DIR/depends && ls -lah $CACHE_DIR/ccache && ls -lah $CACHE_DIR/apt
- mv $CACHE_DIR/apt/* /var/cache/apt/archives/ || true
# Install base packages
- apt-get dist-upgrade -y
- apt-get install -y git g++ autotools-dev libtool m4 automake autoconf pkg-config zlib1g-dev libssl1.0-dev curl ccache bsdmainutils cmake
- apt-get install -y python3 python3-dev python3-pip
# jinja2 is needed for combine_logs.py
- pip3 install jinja2
# Setup some environment variables
- |
if [ "$CI_EXTERNAL_PULL_REQUEST_IID" != "" ]; then
@ -87,73 +91,146 @@ stages:
- echo PULL_REQUEST=$PULL_REQUEST COMMIT_RANGE=$COMMIT_RANGE HOST_SRC_DIR=$HOST_SRC_DIR CACHE_DIR=$CACHE_DIR
- echo "Commit log:" && git log --format=fuller -1
# Build dash_hash
- git clone https://github.com/dashpay/dash_hash
- cd dash_hash && python3 setup.py install
# Install build target specific packages
- echo PACKAGES=$PACKAGES
- if [ -n "$DPKG_ADD_ARCH" ]; then dpkg --add-architecture "$DPKG_ADD_ARCH" ; fi
- if [ -n "$PACKAGES" ]; then apt-get update && apt-get install -y --no-install-recommends --no-upgrade $PACKAGES; fi
# Move apt packages into cache
- mv /var/cache/apt/archives/* $CACHE_DIR/apt/ || true
# Make mingw use correct threading libraries
- update-alternatives --set i686-w64-mingw32-gcc /usr/bin/i686-w64-mingw32-gcc-posix || true
- update-alternatives --set i686-w64-mingw32-g++ /usr/bin/i686-w64-mingw32-g++-posix || true
- update-alternatives --set x86_64-w64-mingw32-gcc /usr/bin/x86_64-w64-mingw32-gcc-posix || true
- update-alternatives --set x86_64-w64-mingw32-g++ /usr/bin/x86_64-w64-mingw32-g++-posix || true
.build-template:
stage: build
extends: .base-template
script:
- export BUILD_TARGET="$CI_JOB_NAME"
- cd $CI_PROJECT_DIR
- ./ci/build_depends.sh
- ./ci/build_src.sh
- ./ci/test_unittests.sh
- ./ci/test_integrationtests.sh --extended --exclude pruning,dbcrash
after_script:
# Copy all cache files into cache-artifact so that they get uploaded. We only do this for develop so that artifacts
# stay minimal for PRs and branches (we never need them)
- mkdir -p $CI_PROJECT_DIR/cache-artifact
- mkdir -p $CI_PROJECT_DIR/testlogs
- |
if [ "$CI_COMMIT_REF_SLUG" = "develop" ]; then
cp -ra $CI_PROJECT_DIR/cache/* $CI_PROJECT_DIR/cache-artifact/
fi
# We're actually only interested in the develop branch creating the cache artifact, but there is no way to control this
# until https://gitlab.com/gitlab-org/gitlab-foss/issues/25478 gets implemented. Until then, we use an expiration time of
# 3 days and rely on daily builds to refresh the cache artifacts. We also keep non-develop artifacts at minimum size
- ./ci/test_unittests.sh # Run unit tests in build stage to avoid creating too many parallel jobs
cache:
# Let all branches share the same cache, which is ok because ccache is able to handle it
key: ${CI_JOB_NAME}
paths:
- $CI_PROJECT_DIR/cache/ccache
artifacts:
name: cache-artifact
name: binaries
when: always
paths:
- $CI_PROJECT_DIR/build-ci
expire_in: 3 days
.test-template:
stage: test
extends: .base-template
script:
- ./ci/test_integrationtests.sh --extended --exclude pruning,dbcrash
after_script:
- mkdir -p $CI_PROJECT_DIR/testlogs
artifacts:
name: testlogs
when: always
paths:
- $CI_PROJECT_DIR/cache-artifact
- $CI_PROJECT_DIR/testlogs
expire_in: 3 days
arm-linux:
<<: *build_template
###
win32:
<<: *build_template
arm-linux-gnueabihf:
extends: .build-depends-template
variables:
HOST: arm-linux-gnueabihf
win64:
<<: *build_template
i686-w64-mingw32:
extends: .build-depends-template
variables:
HOST: i686-w64-mingw32
linux32:
<<: *build_template
x86_64-w64-mingw32:
extends: .build-depends-template
variables:
HOST: x86_64-w64-mingw32
linux64:
<<: *build_template
i686-pc-linux-gnu:
extends: .build-depends-template
variables:
HOST: i686-pc-linux-gnu
linux64_nowallet:
<<: *build_template
x86_64-unknown-linux-gnu:
extends: .build-depends-template
variables:
HOST: x86_64-unknown-linux-gnu
linux64_release:
<<: *build_template
x86_64-unknown-linux-gnu-debug:
extends: .build-depends-template
variables:
HOST: x86_64-unknown-linux-gnu
DEP_OPTS: "DEBUG=1"
mac:
<<: *build_template
x86_64-apple-darwin11:
extends: .build-depends-template
variables:
HOST: x86_64-apple-darwin11
###
arm-linux-build:
extends: .build-template
needs:
- arm-linux-gnueabihf
variables:
BUILD_TARGET: arm-linux
win32-build:
extends: .build-template
needs:
- i686-w64-mingw32
variables:
BUILD_TARGET: win32
win64-build:
extends: .build-template
needs:
- x86_64-w64-mingw32
variables:
BUILD_TARGET: win64
linux32-build:
extends: .build-template
needs:
- i686-pc-linux-gnu
variables:
BUILD_TARGET: linux32
linux64-build:
extends: .build-template
needs:
- x86_64-unknown-linux-gnu-debug
variables:
BUILD_TARGET: linux64
linux64_nowallet-build:
extends: .build-template
needs:
- x86_64-unknown-linux-gnu
variables:
BUILD_TARGET: linux64_nowallet
linux64_release-build:
extends: .build-template
needs:
- x86_64-unknown-linux-gnu
variables:
BUILD_TARGET: linux64_release
mac-build:
extends: .build-template
needs:
- x86_64-apple-darwin11
variables:
BUILD_TARGET: mac
###
linux32-test:
extends: .test-template
needs:
- linux32-build
variables:
BUILD_TARGET: linux32
linux64-test:
extends: .test-template
needs:
- linux64-build
variables:
BUILD_TARGET: linux64

View File

@ -48,7 +48,7 @@ runtests: &runtests
- $DOCKER_RUN_IN_BUILDER ./ci/build_src.sh
- $DOCKER_RUN_IN_BUILDER ./ci/test_unittests.sh
- if [ "$TRAVIS_EVENT_TYPE" = "cron" ]; then extended="--extended --exclude pruning,dbcrash"; fi
- $DOCKER_RUN_IN_BUILDER ./ci/test_integrationtests.sh --jobs=3 ${extended}
- $DOCKER_RUN_IN_BUILDER ./ci/test_integrationtests.sh --quiet --jobs=3 ${extended}
builddocker: &builddocker
stage: build docker

100
Jenkinsfile vendored
View File

@ -1,100 +0,0 @@
// This Jenkinsfile will build a builder image and then run the actual build and tests inside this image
// It's very important to not execute any scripts outside of the builder container, as it's our protection against
// external developers bringing in harmful code into Jenkins.
// Jenkins will only run the build if this Jenkinsfile was not modified in an external pull request. Only branches
// which are part of the Dash repo will allow modification to the Jenkinsfile.
def targets = [
'win32',
'win64',
'linux32',
'linux64',
'linux64_nowallet',
'linux64_release',
'mac',
]
def tasks = [:]
for(int i = 0; i < targets.size(); i++) {
def target = targets[i]
tasks["${target}"] = {
node {
def BUILD_NUMBER = sh(returnStdout: true, script: 'echo $BUILD_NUMBER').trim()
def BRANCH_NAME = sh(returnStdout: true, script: 'echo $BRANCH_NAME').trim()
def UID = sh(returnStdout: true, script: 'id -u').trim()
def HOME = sh(returnStdout: true, script: 'echo $HOME').trim()
def pwd = sh(returnStdout: true, script: 'pwd').trim()
checkout scm
def env = [
"BUILD_TARGET=${target}",
"PULL_REQUEST=false",
"JOB_NUMBER=${BUILD_NUMBER}",
]
withEnv(env) {
def builderImageName="dash-builder-${target}"
def builderImage
stage("${target}/builder-image") {
builderImage = docker.build("${builderImageName}", "--build-arg BUILD_TARGET=${target} ci -f ci/Dockerfile.builder")
}
builderImage.inside("-t") {
// copy source into fixed path
// we must build under the same path every time as otherwise caches won't work properly
sh "cp -ra ${pwd}/. /dash-src/"
// restore cache
def hasCache = false
try {
copyArtifacts(projectName: "dashpay-dash/${BRANCH_NAME}", optional: true, selector: lastSuccessful(), filter: "ci-cache-${target}.tar.gz")
} catch (Exception e) {
}
if (fileExists("ci-cache-${target}.tar.gz")) {
hasCache = true
echo "Using cache from dashpay-dash/${BRANCH_NAME}"
} else {
try {
copyArtifacts(projectName: 'dashpay-dash/develop', optional: true, selector: lastSuccessful(), filter: "ci-cache-${target}.tar.gz");
} catch (Exception e) {
}
if (fileExists("ci-cache-${target}.tar.gz")) {
hasCache = true
echo "Using cache from dashpay-dash/develop"
}
}
if (hasCache) {
sh "cd /dash-src && tar xzf ${pwd}/ci-cache-${target}.tar.gz"
} else {
sh "mkdir -p /dash-src/ci-cache-${target}"
}
stage("${target}/depends") {
sh 'cd /dash-src && ./ci/build_depends.sh'
}
stage("${target}/build") {
sh 'cd /dash-src && ./ci/build_src.sh'
}
stage("${target}/test") {
sh 'cd /dash-src && ./ci/test_unittests.sh'
}
stage("${target}/test") {
sh 'cd /dash-src && ./ci/test_integrationtests.sh'
}
// archive cache and copy it into the jenkins workspace
sh "cd /dash-src && tar czfv ci-cache-${target}.tar.gz ci-cache-${target} && cp ci-cache-${target}.tar.gz ${pwd}/"
}
// upload cache
archiveArtifacts artifacts: "ci-cache-${target}.tar.gz", fingerprint: true
}
}
}
}
parallel tasks

View File

@ -1,129 +0,0 @@
def targets = [
'linux',
'win',
'osx',
]
def osslTarUrl = 'http://downloads.sourceforge.net/project/osslsigncode/osslsigncode/osslsigncode-1.7.1.tar.gz'
def osslPatchUrl = 'https://bitcoincore.org/cfields/osslsigncode-Backports-to-1.7.1.patch'
def SDK_URL='https://bitcoincore.org/depends-sources/sdks'
def OSX_SDK='10.11'
def proc = 4
def mem = 2000
def repositoryUrl = "https://github.com/dashpay/dash.git"
def tasks = [:]
for(int i = 0; i < targets.size(); i++) {
def target = targets[i]
tasks["${target}"] = {
node {
deleteDir() // cleanup workspace
def pwd = sh(returnStdout: true, script: 'pwd').trim()
def dockerGid = sh(returnStdout: true, script: "stat -c '%g' /var/run/docker.sock").trim()
def BRANCH_NAME = sh(returnStdout: true, script: 'echo $BRANCH_NAME').trim()
def commit = BRANCH_NAME
def hasCache = false
def gitianDescriptor
stage("${target}/prepare") {
dir('dash') {
checkout scm
gitianDescriptor = readYaml file: "contrib/gitian-descriptors/gitian-${target}.yml"
}
dir('gitian-builder') {
git url: 'https://github.com/dashpay/gitian-builder.git'
}
sh "mkdir -p dashcore-binaries"
if (target == "osx") {
dir('gitian-builder') {
sh 'mkdir -p inputs'
sh "curl --location --fail $SDK_URL/MacOSX${OSX_SDK}.sdk.tar.gz -o inputs/MacOSX${OSX_SDK}.sdk.tar.gz"
}
}
// restore cache
try {
copyArtifacts(projectName: "dashpay-dash-gitian-nightly/${BRANCH_NAME}", optional: true, selector: lastSuccessful(), filter: "cache-${gitianDescriptor.name}.tar.gz")
} catch (Exception e) {
}
if (fileExists("cache-${gitianDescriptor.name}.tar.gz")) {
hasCache = true
echo "Using cache from dashpay-dash-gitian-nightly/${BRANCH_NAME}"
} else {
try {
copyArtifacts(projectName: 'dashpay-dash-gitian-nightly/develop', optional: true, selector: lastSuccessful(), filter: "cache-${gitianDescriptor.name}.tar.gz");
} catch (Exception e) {
}
if (fileExists("cache-${gitianDescriptor.name}.tar.gz")) {
hasCache = true
echo "Using cache from dashpay-dash-gitian-nightly/develop"
}
}
}
def gitianImage
stage("${target}/builder-image") {
dir('dash') {
gitianImage = docker.build("dash-gitian:${env.BUILD_ID}", 'ci -f ci/Dockerfile.gitian-builder')
}
}
gitianImage.inside("--group-add ${dockerGid} -t -v \"/var/run/docker.sock:/var/run/docker.sock\"") {
sh "mkdir -p gitian-builder/cache"
if (hasCache) {
sh "cd gitian-builder/cache && tar xzfv ../../cache-${gitianDescriptor.name}.tar.gz"
}
stage("${target}/download") {
dir('gitian-builder') {
sh "mkdir -p inputs"
sh "cd inputs && curl -R -O ${osslPatchUrl}"
sh "cd inputs && curl -R -O ${osslTarUrl}"
sh "make -C ../dash/depends download SOURCES_PATH=`pwd`/cache/common"
}
}
stage("${target}/base-vm") {
dir('gitian-builder') {
sh "./bin/make-base-vm --suite bionic --arch amd64 --docker"
}
}
stage("${target}/gbuild") {
dir('gitian-builder') {
// make sure an old version is not running
sh "docker rm -fv gitian-target || true"
try {
sh """
tail -F var/install.log &
tail -F var/build.log &
USE_DOCKER=1 ./bin/gbuild -j ${proc} -m ${mem} --commit dash=${commit} --url dash=${repositoryUrl} ../dash/contrib/gitian-descriptors/gitian-${target}.yml
RET=\$?
# give the above tail calls enough time to print everything on failure
sleep 2s
exit \$RET
"""
} finally {
// make sure it doesn't run forever
sh "docker rm -fv gitian-target || true"
}
sh "mv build/out/dashcore-* ../dashcore-binaries/"
sh "mv build/out/src/dashcore-* ../dashcore-binaries/"
}
archiveArtifacts artifacts: 'dashcore-binaries/*', fingerprint: true
}
// TODO remove this in a few days (only needed to prune the old compressed file from Jenkins caches)
sh "cd gitian-builder/cache && find -name ccache.tar.gz | xargs rm -f"
sh "cd gitian-builder/cache && tar czfv ../../cache-${gitianDescriptor.name}.tar.gz common ${gitianDescriptor.name}"
archiveArtifacts artifacts: "cache-${gitianDescriptor.name}.tar.gz", fingerprint: true
}
}
}
}
parallel tasks

View File

@ -4,13 +4,13 @@ FROM ubuntu:bionic
# (zlib1g-dev and libssl-dev are needed for the Qt host binary builds, but should not be used by target binaries)
# We split this up into multiple RUN lines as we might need to retry multiple times on Travis. This way we allow better
# cache usage.
RUN apt-get update
RUN apt-get update && apt-get install -y git
RUN apt-get update && apt-get install -y g++
RUN apt-get update && apt-get install -y autotools-dev libtool m4 automake autoconf pkg-config
RUN apt-get update && apt-get install -y zlib1g-dev libssl1.0-dev curl ccache bsdmainutils cmake
RUN apt-get update && apt-get install -y python3 python3-dev
RUN apt-get update && apt-get install -y python3-pip
ENV APT_ARGS="-y --no-install-recommends --no-upgrade"
RUN apt-get update && apt-get install $APT_ARGS git wget unzip && rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install $APT_ARGS g++ && rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install $APT_ARGS autotools-dev libtool m4 automake autoconf pkg-config && rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install $APT_ARGS zlib1g-dev libssl1.0-dev curl ccache bsdmainutils cmake && rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install $APT_ARGS python3 python3-dev && rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install $APT_ARGS python3-pip python3-setuptools && rm -rf /var/lib/apt/lists/*
# Python stuff
RUN pip3 install pyzmq # really needed?
@ -29,15 +29,23 @@ ENV GROUP_ID ${GROUP_ID}
RUN groupadd -g ${GROUP_ID} dash
RUN useradd -u ${USER_ID} -g dash -s /bin/bash -m -d /dash dash
# Extra packages
ARG BUILD_TARGET=linux64
ADD matrix.sh /tmp/matrix.sh
RUN . /tmp/matrix.sh && \
if [ -n "$DPKG_ADD_ARCH" ]; then dpkg --add-architecture "$DPKG_ADD_ARCH" ; fi && \
if [ -n "$PACKAGES" ]; then apt-get update && apt-get install -y --no-install-recommends --no-upgrade $PACKAGES; fi
# Packages needed for all target builds
RUN dpkg --add-architecture i386
RUN apt-get update && apt-get install $APT_ARGS g++-7-multilib && rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install $APT_ARGS g++-arm-linux-gnueabihf && rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install $APT_ARGS g++-mingw-w64-i686 && rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install $APT_ARGS g++-mingw-w64-x86-64 && rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install $APT_ARGS wine-stable wine32 wine64 bc nsis && rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install $APT_ARGS python3-zmq && rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install $APT_ARGS imagemagick libcap-dev librsvg2-bin libz-dev libbz2-dev libtiff-tools && rm -rf /var/lib/apt/lists/*
# This is a hack. It is needed because gcc-multilib and g++-multilib are conflicting with g++-arm-linux-gnueabihf. This is
# due to gcc-multilib installing the following symbolic link, which is needed for -m32 support. However, this causes
# arm builds to also have the asm folder implicitly in the include search path. This is kind of ok, because the asm folder
# for arm has precedence.
RUN ln -s x86_64-linux-gnu/asm /usr/include/asm
# Make sure std::thread and friends are available
# Will fail on non-win builds, but we ignore this
RUN \
update-alternatives --set i686-w64-mingw32-gcc /usr/bin/i686-w64-mingw32-gcc-posix; \
update-alternatives --set i686-w64-mingw32-g++ /usr/bin/i686-w64-mingw32-g++-posix; \

View File

@ -1,17 +0,0 @@
FROM ubuntu:bionic
RUN apt-get update && apt-get install -y \
ruby curl make libltdl7 git apache2 apt-cacher-ng python-vm-builder ruby qemu-utils \
&& rm -rf /var/lib/apt/lists
ARG USER_ID=1000
ARG GROUP_ID=1000
# add user with specified (or default) user/group ids
ENV USER_ID ${USER_ID}
ENV GROUP_ID ${GROUP_ID}
RUN groupadd -g ${GROUP_ID} dash
RUN useradd -u ${USER_ID} -g dash -s /bin/bash -m -d /dash dash
WORKDIR /dash
USER dash

View File

@ -34,7 +34,6 @@ export RUN_INTEGRATIONTESTS=false
if [ "$BUILD_TARGET" = "arm-linux" ]; then
export HOST=arm-linux-gnueabihf
export PACKAGES="g++-arm-linux-gnueabihf"
export CHECK_DOC=1
# -Wno-psabi is to disable ABI warnings: "note: parameter passing for argument of type ... changed in GCC 7.1"
# This could be removed once the ABI change warning does not show up by default
@ -42,20 +41,17 @@ if [ "$BUILD_TARGET" = "arm-linux" ]; then
elif [ "$BUILD_TARGET" = "win32" ]; then
export HOST=i686-w64-mingw32
export DPKG_ADD_ARCH="i386"
export PACKAGES="python3 nsis g++-mingw-w64-i686 wine-stable wine32 bc"
export BITCOIN_CONFIG="--enable-gui --enable-reduce-exports --disable-miner"
export DIRECT_WINE_EXEC_TESTS=true
export RUN_UNITTESTS=true
elif [ "$BUILD_TARGET" = "win64" ]; then
export HOST=x86_64-w64-mingw32
export DPKG_ADD_ARCH="i386"
export PACKAGES="python3 nsis g++-mingw-w64-x86-64 wine-stable wine64 bc"
export BITCOIN_CONFIG="--enable-gui --enable-reduce-exports --disable-miner"
export DIRECT_WINE_EXEC_TESTS=true
export RUN_UNITTESTS=true
elif [ "$BUILD_TARGET" = "linux32" ]; then
export HOST=i686-pc-linux-gnu
export PACKAGES="g++-multilib bc python3-zmq"
export BITCOIN_CONFIG="--enable-zmq --enable-glibc-back-compat --enable-reduce-exports --enable-stacktraces LDFLAGS=-static-libstdc++"
export USE_SHELL="/bin/dash"
export PYZMQ=true
@ -63,7 +59,6 @@ elif [ "$BUILD_TARGET" = "linux32" ]; then
export RUN_INTEGRATIONTESTS=true
elif [ "$BUILD_TARGET" = "linux64" ]; then
export HOST=x86_64-unknown-linux-gnu
export PACKAGES="bc python3-zmq"
export DEP_OPTS="NO_UPNP=1 DEBUG=1"
export BITCOIN_CONFIG="--enable-zmq --enable-glibc-back-compat --enable-reduce-exports --enable-stacktraces"
export CPPFLAGS="-DDEBUG_LOCKORDER -DENABLE_DASH_DEBUG"
@ -72,20 +67,17 @@ elif [ "$BUILD_TARGET" = "linux64" ]; then
export RUN_INTEGRATIONTESTS=true
elif [ "$BUILD_TARGET" = "linux64_nowallet" ]; then
export HOST=x86_64-unknown-linux-gnu
export PACKAGES="python3"
export DEP_OPTS="NO_WALLET=1"
export BITCOIN_CONFIG="--enable-glibc-back-compat --enable-reduce-exports"
export RUN_UNITTESTS=true
elif [ "$BUILD_TARGET" = "linux64_release" ]; then
export HOST=x86_64-unknown-linux-gnu
export PACKAGES="bc python3-zmq"
export DEP_OPTS="NO_UPNP=1"
export BITCOIN_CONFIG="--enable-zmq --enable-glibc-back-compat --enable-reduce-exports"
export PYZMQ=true
export RUN_UNITTESTS=true
elif [ "$BUILD_TARGET" = "mac" ]; then
export HOST=x86_64-apple-darwin11
export PACKAGES="cmake imagemagick libcap-dev librsvg2-bin libz-dev libbz2-dev libtiff-tools"
export BITCOIN_CONFIG="--enable-gui --enable-reduce-exports --disable-miner"
export OSX_SDK=10.11
export GOAL="deploy"

View File

@ -18,7 +18,7 @@ export LD_LIBRARY_PATH=$BUILD_DIR/depends/$HOST/lib
cd build-ci/dashcore-$BUILD_TARGET
set +e
./test/functional/test_runner.py --ci --coverage --quiet --failfast --nocleanup --tmpdir=$(pwd)/testdatadirs $PASS_ARGS
./test/functional/test_runner.py --ci --coverage --failfast --nocleanup --tmpdir=$(pwd)/testdatadirs $PASS_ARGS
RESULT=$?
set -e