Add support for Gitlab CI (#3149)

* Add .gitlab-ci.yml

* Use | instead of > for multiline commands

This honors newlines and makes ; separators unnecessary
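
For illustration (a minimal sketch, not part of this diff): with >, newlines are folded
into spaces, so consecutive commands need ; separators:

    script:
      - >
        echo first;
        echo second

With |, newlines are preserved and each command runs on its own line:

    script:
      - |
        echo first
        echo second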

* Use ubuntu:bionic as base image

* Move cache initialization before apt-get installs

* Cache apt packages

* Move installation of wget and unzip up as we need them for the cache

* Prevent apt from deleting caches

* Collect test logs into artifact

* Make combine_logs.py always look for the template in the correct dir

* Move final cache stuff into after_script

* Reintroduce PYTHON_DEBUG=1, but only for .travis.yml

* Install jinja2 in Travis builder image

* Enable ChainLocks after quorums have been created

Creating 4 quorums causes a lot of blocks to be created and signed by
ChainLocks, which then causes timeouts later.

* Increase timeout in wallet-dump.py test

The first dumpwallet call is sometimes quite slow, which then makes the
later dumpwallet call throw a wallet-locked exception.
Author: Alexander Block, 2019-10-16 11:48:46 +02:00 (committed by GitHub)
parent b4e19f8dd5
commit 24fee30513
8 changed files with 173 additions and 5 deletions

.gitlab-ci.yml (new file)

@@ -0,0 +1,140 @@
image: "ubuntu:bionic"

variables:
  DOCKER_DRIVER: overlay2

cache:
  # Cache by branch/tag and job name
  # Gitlab can't use caches from parent pipelines when doing the first build in a PR, so we use artifacts to copy
  # caches into PRs
  key: ${CI_COMMIT_REF_SLUG}-${CI_JOB_NAME}${CI_EXTERNAL_PULL_REQUEST_IID}
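  # For illustration (hypothetical values): a develop-branch build of the linux64 job would get the
  # key "develop-linux64", while external PR 123 from branch "fix-foo" would get "fix-foo-linux64123"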
  paths:
    - $CI_PROJECT_DIR/cache

stages:
  - build

.build_template: &build_template
  stage: build
  before_script:
    - export BUILD_TARGET="$CI_JOB_NAME"
    - echo BUILD_TARGET=$BUILD_TARGET
    - source ./ci/matrix.sh
    # The ubuntu base image has apt configured to delete caches after each invocation, which is not desirable for us
    - rm /etc/apt/apt.conf.d/docker-clean
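    # (for context: docker-clean is the stock apt config shipped in Docker images that deletes
    # downloaded .deb files after every apt run, which would defeat the apt package cache below)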
    - apt-get update
    - apt-get install -y wget unzip
    # Init cache
    - export CACHE_DIR=$CI_PROJECT_DIR/cache
    - mkdir -p $CACHE_DIR
    - |
      if [ "$CI_COMMIT_REF_SLUG" != "develop" -a "$CI_COMMIT_TAG" == "" ]; then
        if [ ! -d $CACHE_DIR/ccache ]; then
          echo "Downloading cache from develop branch"
          if wget -O cache-artifact.zip https://gitlab.com/$CI_PROJECT_NAMESPACE/$CI_PROJECT_NAME/-/jobs/artifacts/develop/download?job=$CI_JOB_NAME; then
            unzip cache-artifact.zip
            rm cache-artifact.zip
            mv cache-artifact/* $CACHE_DIR/
          else
            echo "Failed to download cache"
          fi
        else
          echo "Not touching cache (was initialized from previous build)"
        fi
      else
        echo "Not touching cache (building develop branch or tag)"
      fi
    # Create missing cache dirs
    - mkdir -p $CACHE_DIR/ccache && mkdir -p $CACHE_DIR/depends && mkdir -p $CACHE_DIR/sdk-sources && mkdir -p $CACHE_DIR/apt
    # Keep this as it makes caching related debugging easier
    - ls -lah $CACHE_DIR && ls -lah $CACHE_DIR/depends && ls -lah $CACHE_DIR/ccache && ls -lah $CACHE_DIR/apt
    - mv $CACHE_DIR/apt/* /var/cache/apt/archives/ || true
    # Install base packages
    - apt-get dist-upgrade -y
    - apt-get install -y git g++ autotools-dev libtool m4 automake autoconf pkg-config zlib1g-dev libssl1.0-dev curl ccache bsdmainutils cmake
    - apt-get install -y python3 python3-dev python3-pip
    # jinja2 is needed for combine_logs.py
    - pip3 install jinja2
    # Setup some environment variables
    - if [ "$CI_EXTERNAL_PULL_REQUEST_IID" != "" ]; then export PULL_REQUEST="true"; else export PULL_REQUEST="false"; fi
    - export COMMIT_RANGE="$CI_COMMIT_BEFORE_SHA..$CI_COMMIT_SHA"
    - export JOB_NUMBER="$CI_JOB_ID"
    - export HOST_SRC_DIR=$CI_PROJECT_DIR
    - echo PULL_REQUEST=$PULL_REQUEST COMMIT_RANGE=$COMMIT_RANGE HOST_SRC_DIR=$HOST_SRC_DIR CACHE_DIR=$CACHE_DIR
    - echo "Commit log:" && git log --format=fuller -1
    # Build dash_hash
    - git clone https://github.com/dashpay/dash_hash
    - cd dash_hash && python3 setup.py install
    # Install build target specific packages
    - echo PACKAGES=$PACKAGES
    - if [ -n "$DPKG_ADD_ARCH" ]; then dpkg --add-architecture "$DPKG_ADD_ARCH" ; fi
    - if [ -n "$PACKAGES" ]; then apt-get update && apt-get install -y --no-install-recommends --no-upgrade $PACKAGES; fi
    # Move apt packages into cache
    - mv /var/cache/apt/archives/* $CACHE_DIR/apt/ || true
    # Make mingw use correct threading libraries
    - update-alternatives --set i686-w64-mingw32-gcc /usr/bin/i686-w64-mingw32-gcc-posix || true
    - update-alternatives --set i686-w64-mingw32-g++ /usr/bin/i686-w64-mingw32-g++-posix || true
    - update-alternatives --set x86_64-w64-mingw32-gcc /usr/bin/x86_64-w64-mingw32-gcc-posix || true
    - update-alternatives --set x86_64-w64-mingw32-g++ /usr/bin/x86_64-w64-mingw32-g++-posix || true
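    # (for context: mingw's default win32 threading model lacks std::thread support, hence the posix
    # variants; "|| true" keeps this from failing on build targets where mingw is not installed)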
  script:
    - export BUILD_TARGET="$CI_JOB_NAME"
    - cd $CI_PROJECT_DIR
    - ./ci/build_depends.sh
    - ./ci/build_src.sh
    - ./ci/test_unittests.sh
    - ./ci/test_integrationtests.sh
  after_script:
    # Copy all cache files into cache-artifact so that they get uploaded. We only do this for develop so that artifacts
    # stay minimal for PRs and branches (we never need them)
    - mkdir $CI_PROJECT_DIR/cache-artifact
    - mkdir -p $CI_PROJECT_DIR/testlogs
    - |
      if [ "$CI_COMMIT_REF_SLUG" = "develop" ]; then
        cp -ra $CACHE_DIR/* $CI_PROJECT_DIR/cache-artifact/
      fi
  # We're actually only interested in the develop branch creating the cache artifact, but there is no way to control this
  # until https://gitlab.com/gitlab-org/gitlab-foss/issues/25478 gets implemented. Until then, we use an expiration time of
  # 3 days and rely on daily builds to refresh the cache artifacts. We also keep non-develop artifacts at minimum size
  artifacts:
    name: cache-artifact
    when: always
    paths:
      - $CI_PROJECT_DIR/cache-artifact
      - $CI_PROJECT_DIR/testlogs
    expire_in: 3 days
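
# Each job below only sets its name; everything else is inherited from the shared template via the
# YAML merge key. The job name doubles as the build target, since before_script exports
# BUILD_TARGET="$CI_JOB_NAME" and sources ci/matrix.sh with it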
arm-linux:
  <<: *build_template

win32:
  <<: *build_template

win64:
  <<: *build_template

linux32:
  <<: *build_template

linux64:
  <<: *build_template

linux64_nowallet:
  <<: *build_template

linux64_release:
  <<: *build_template

mac:
  <<: *build_template

.travis.yml

@@ -125,6 +125,7 @@ install:
- export HOST_SRC_DIR=$TRAVIS_BUILD_DIR
- export HOST_CACHE_DIR=$HOME/cache
- export TRAVIS_COMMIT_LOG=`git log --format=fuller -1`
- export PYTHON_DEBUG=1
- source ./ci/matrix.sh
- mkdir -p $HOST_CACHE_DIR/docker && mkdir -p $HOST_CACHE_DIR/ccache && mkdir -p $HOST_CACHE_DIR/depends && mkdir -p $HOST_CACHE_DIR/sdk-sources
# Keep this as it makes caching related debugging easier

Dockerfile (Travis builder image)

@@ -14,6 +14,7 @@ RUN apt-get update && apt-get install -y python3-pip
# Python stuff
RUN pip3 install pyzmq # really needed?
RUN pip3 install jinja2
# dash_hash
RUN git clone https://github.com/dashpay/dash_hash

ci/matrix.sh

@@ -27,7 +27,6 @@ export DOCKER_RUN_IN_BUILDER="docker run -t --rm -w $SRC_DIR $DOCKER_RUN_ARGS $B
# Default values for targets
export GOAL="install"
export SDK_URL=${SDK_URL:-https://bitcoincore.org/depends-sources/sdks}
export PYTHON_DEBUG=1
export MAKEJOBS="-j4"
export RUN_UNITTESTS=false

ci/test_integrationtests.sh

@@ -17,4 +17,30 @@ export LD_LIBRARY_PATH=$BUILD_DIR/depends/$HOST/lib
cd build-ci/dashcore-$BUILD_TARGET
./test/functional/test_runner.py --coverage --quiet $PASS_ARGS
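# set +e lets a failing test run continue into the log collection below; the runner's exit code is
# captured in RESULT and re-raised at the very end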
set +e
./test/functional/test_runner.py --coverage --quiet --nocleanup --tmpdir=$(pwd)/testdatadirs $PASS_ARGS
RESULT=$?
set -e
echo "Collecting logs..."
BASEDIR=$(ls testdatadirs)
if [ "$BASEDIR" != "" ]; then
  mkdir testlogs
  for d in $(ls testdatadirs/$BASEDIR | grep -v '^cache$'); do
    mkdir testlogs/$d
    ./test/functional/combine_logs.py -c ./testdatadirs/$BASEDIR/$d > ./testlogs/$d/combined.log
    ./test/functional/combine_logs.py --html ./testdatadirs/$BASEDIR/$d > ./testlogs/$d/combined.html
    cd testdatadirs/$BASEDIR/$d
    LOGFILES="$(find . -name 'debug.log' -or -name "test_framework.log")"
    cd ../../..
    for f in $LOGFILES; do
      d2="testlogs/$d/$(dirname $f)"
      mkdir -p $d2
      cp testdatadirs/$BASEDIR/$d/$f $d2/
    done
  done
fi
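# ../../ resolves from build-ci/dashcore-$BUILD_TARGET back to the project root, where the testlogs
# dir matches the artifacts paths in .gitlab-ci.yml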
mv testlogs ../../
exit $RESULT

test/functional/combine_logs.py

@@ -106,7 +106,7 @@ def print_logs(log_events, color=False, html=False):
        except ImportError:
            print("jinja2 not found. Try `pip install jinja2`")
            sys.exit(1)
        print(jinja2.Environment(loader=jinja2.FileSystemLoader('./'))
        print(jinja2.Environment(loader=jinja2.FileSystemLoader(os.path.dirname(os.path.abspath(__file__))))
              .get_template('combined_log_template.html')
              .render(title="Combined Logs from testcase", log_events=[event._asdict() for event in log_events]))

test/functional/llmq-chainlocks.py

@@ -29,13 +29,14 @@ class LLMQChainLocksTest(DashTestFramework):
        sync_blocks(self.nodes, timeout=60*5)
        self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
        self.nodes[0].spork("SPORK_19_CHAINLOCKS_ENABLED", 0)
        self.wait_for_sporks_same()
        self.log.info("Mining 4 quorums")
        for i in range(4):
            self.mine_quorum()
        self.nodes[0].spork("SPORK_19_CHAINLOCKS_ENABLED", 0)
        self.log.info("Mine single block, wait for chainlock")
        self.nodes[0].generate(1)
        self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash())

test/functional/wallet-dump.py

@@ -100,7 +100,7 @@ class WalletDumpTest(BitcoinTestFramework):
        #encrypt wallet, restart, unlock and dump
        self.nodes[0].node_encrypt_wallet('test')
        self.start_node(0)
        self.nodes[0].walletpassphrase('test', 10)
        self.nodes[0].walletpassphrase('test', 30)
        # Should be a no-op:
        self.nodes[0].keypoolrefill()
        self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.encrypted.dump")