diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
new file mode 100644
index 0000000000..91e02dce64
--- /dev/null
+++ b/.gitlab-ci.yml
@@ -0,0 +1,140 @@
+image: "ubuntu:bionic"
+
+variables:
+  DOCKER_DRIVER: overlay2
+
+cache:
+  # Cache by branch/tag and job name
+  # Gitlab can't use caches from parent pipelines when doing the first build in a PR, so we use artifacts to copy
+  # caches into PRs
+  key: ${CI_COMMIT_REF_SLUG}-${CI_JOB_NAME}${CI_EXTERNAL_PULL_REQUEST_IID}
+  paths:
+    - $CI_PROJECT_DIR/cache
+
+stages:
+  - build
+
+.build_template: &build_template
+  stage: build
+  before_script:
+    - export BUILD_TARGET="$CI_JOB_NAME"
+    - echo BUILD_TARGET=$BUILD_TARGET
+    - source ./ci/matrix.sh
+
+    # The ubuntu base image has apt configured to delete caches after each invocation, which is something that is not desirable for us
+    - rm /etc/apt/apt.conf.d/docker-clean
+    - apt-get update
+    - apt-get install -y wget unzip
+
+    # Init cache
+    - export CACHE_DIR=$CI_PROJECT_DIR/cache
+    - mkdir -p $CACHE_DIR
+    - |
+      if [ "$CI_COMMIT_REF_SLUG" != "develop" -a "$CI_COMMIT_TAG" == "" ]; then
+        if [ ! -d $CACHE_DIR/ccache ]; then
+          echo "Downloading cache from develop branch"
+          if wget -O cache-artifact.zip https://gitlab.com/$CI_PROJECT_NAMESPACE/$CI_PROJECT_NAME/-/jobs/artifacts/develop/download?job=$CI_JOB_NAME; then
+            unzip cache-artifact.zip
+            rm cache-artifact.zip
+            mv cache-artifact/* $CACHE_DIR/
+          else
+            echo "Failed to download cache"
+          fi
+        else
+          echo "Not touching cache (was initialized from previous build)"
+        fi
+      else
+        echo "Not touching cache (building develop branch or tag)"
+      fi
+    # Create missing cache dirs
+    - mkdir -p $CACHE_DIR/ccache && mkdir -p $CACHE_DIR/depends && mkdir -p $CACHE_DIR/sdk-sources && mkdir -p $CACHE_DIR/apt
+    # Keep this as it makes caching related debugging easier
+    - ls -lah $CACHE_DIR && ls -lah $CACHE_DIR/depends && ls -lah $CACHE_DIR/ccache && ls -lah $CACHE_DIR/apt
+    - mv $CACHE_DIR/apt/* /var/cache/apt/archives/ || true
+
+    # Install base packages
+    - apt-get dist-upgrade -y
+    - apt-get install -y git g++ autotools-dev libtool m4 automake autoconf pkg-config zlib1g-dev libssl1.0-dev curl ccache bsdmainutils cmake
+    - apt-get install -y python3 python3-dev python3-pip
+
+    # jinja2 is needed for combine_logs.py
+    - pip3 install jinja2
+
+    # Setup some environment variables
+    - if [ "$CI_EXTERNAL_PULL_REQUEST_IID" != "" ]; then export PULL_REQUEST="true"; else export PULL_REQUEST="false"; fi
+    - export COMMIT_RANGE="$CI_COMMIT_BEFORE_SHA..$CI_COMMIT_SHA"
+    - export JOB_NUMBER="$CI_JOB_ID"
+    - export HOST_SRC_DIR=$CI_PROJECT_DIR
+    - echo PULL_REQUEST=$PULL_REQUEST COMMIT_RANGE=$COMMIT_RANGE HOST_SRC_DIR=$HOST_SRC_DIR CACHE_DIR=$CACHE_DIR
+    - echo "Commit log:" && git log --format=fuller -1
+
+    # Build dash_hash
+    - git clone https://github.com/dashpay/dash_hash
+    - cd dash_hash && python3 setup.py install
+
+    # Install build target specific packages
+    - echo PACKAGES=$PACKAGES
+    - if [ -n "$DPKG_ADD_ARCH" ]; then dpkg --add-architecture "$DPKG_ADD_ARCH" ; fi
+    - if [ -n "$PACKAGES" ]; then apt-get update && apt-get install -y --no-install-recommends --no-upgrade $PACKAGES; fi
+
+    # Move apt packages into cache
+    - mv /var/cache/apt/archives/* $CACHE_DIR/apt/ || true
+
+    # Make mingw use correct threading libraries
+    - update-alternatives --set i686-w64-mingw32-gcc /usr/bin/i686-w64-mingw32-gcc-posix || true
+    - update-alternatives --set i686-w64-mingw32-g++ /usr/bin/i686-w64-mingw32-g++-posix || true
+    - update-alternatives --set x86_64-w64-mingw32-gcc /usr/bin/x86_64-w64-mingw32-gcc-posix || true
+    - update-alternatives --set x86_64-w64-mingw32-g++ /usr/bin/x86_64-w64-mingw32-g++-posix || true
+
+  script:
+    - export BUILD_TARGET="$CI_JOB_NAME"
+    - cd $CI_PROJECT_DIR
+    - ./ci/build_depends.sh
+    - ./ci/build_src.sh
+    - ./ci/test_unittests.sh
+    - ./ci/test_integrationtests.sh
+
+  after_script:
+    # Copy all cache files into cache-artifact so that they get uploaded. We only do this for develop so that artifacts
+    # stay minimal for PRs and branches (we never need them)
+    - mkdir $CI_PROJECT_DIR/cache-artifact
+    - mkdir -p $CI_PROJECT_DIR/testlogs
+    - |
+      if [ "$CI_COMMIT_REF_SLUG" = "develop" ]; then
+        cp -ra $CACHE_DIR/* $CI_PROJECT_DIR/cache-artifact/
+      fi
+
+  # We're actually only interested in the develop branch creating the cache artifact, but there is no way to control this
+  # until https://gitlab.com/gitlab-org/gitlab-foss/issues/25478 gets implemented. Until then, we use an expiration time of
+  # 3 days and rely on daily builds to refresh the cache artifacts. We also keep non-develop artifacts at minimum size
+  artifacts:
+    name: cache-artifact
+    when: always
+    paths:
+      - $CI_PROJECT_DIR/cache-artifact
+      - $CI_PROJECT_DIR/testlogs
+    expire_in: 3 days
+
+arm-linux:
+  <<: *build_template
+
+win32:
+  <<: *build_template
+
+win64:
+  <<: *build_template
+
+linux32:
+  <<: *build_template
+
+linux64:
+  <<: *build_template
+
+linux64_nowallet:
+  <<: *build_template
+
+linux64_release:
+  <<: *build_template
+
+mac:
+  <<: *build_template
diff --git a/.travis.yml b/.travis.yml
index fd37ae9c95..5083fa990f 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -125,6 +125,7 @@
     - export HOST_SRC_DIR=$TRAVIS_BUILD_DIR
     - export HOST_CACHE_DIR=$HOME/cache
     - export TRAVIS_COMMIT_LOG=`git log --format=fuller -1`
+    - export PYTHON_DEBUG=1
     - source ./ci/matrix.sh
     - mkdir -p $HOST_CACHE_DIR/docker && mkdir -p $HOST_CACHE_DIR/ccache && mkdir -p $HOST_CACHE_DIR/depends && mkdir -p $HOST_CACHE_DIR/sdk-sources
     # Keep this as it makes caching related debugging easier
diff --git a/ci/Dockerfile.builder b/ci/Dockerfile.builder
index dd128b475d..357ef01992 100644
--- a/ci/Dockerfile.builder
+++ b/ci/Dockerfile.builder
@@ -14,6 +14,7 @@ RUN apt-get update && apt-get install -y python3-pip
 
 # Python stuff
 RUN pip3 install pyzmq # really needed?
+RUN pip3 install jinja2
 
 # dash_hash
 RUN git clone https://github.com/dashpay/dash_hash
diff --git a/ci/matrix.sh b/ci/matrix.sh
index 216482c023..870c403877 100755
--- a/ci/matrix.sh
+++ b/ci/matrix.sh
@@ -27,7 +27,6 @@ export DOCKER_RUN_IN_BUILDER="docker run -t --rm -w $SRC_DIR $DOCKER_RUN_ARGS $B
 
 # Default values for targets
 export GOAL="install"
 export SDK_URL=${SDK_URL:-https://bitcoincore.org/depends-sources/sdks}
-export PYTHON_DEBUG=1
 export MAKEJOBS="-j4"
 
 export RUN_UNITTESTS=false
diff --git a/ci/test_integrationtests.sh b/ci/test_integrationtests.sh
index 456a6d2aa1..ceff7967bf 100755
--- a/ci/test_integrationtests.sh
+++ b/ci/test_integrationtests.sh
@@ -17,4 +17,30 @@ export LD_LIBRARY_PATH=$BUILD_DIR/depends/$HOST/lib
 
 cd build-ci/dashcore-$BUILD_TARGET
 
-./test/functional/test_runner.py --coverage --quiet $PASS_ARGS
+set +e
+./test/functional/test_runner.py --coverage --quiet --nocleanup --tmpdir=$(pwd)/testdatadirs $PASS_ARGS
+RESULT=$?
+set -e
+
+echo "Collecting logs..."
+BASEDIR=$(ls testdatadirs)
+if [ "$BASEDIR" != "" ]; then
+  mkdir testlogs
+  for d in $(ls testdatadirs/$BASEDIR | grep -v '^cache$'); do
+    mkdir testlogs/$d
+    ./test/functional/combine_logs.py -c ./testdatadirs/$BASEDIR/$d > ./testlogs/$d/combined.log
+    ./test/functional/combine_logs.py --html ./testdatadirs/$BASEDIR/$d > ./testlogs/$d/combined.html
+    cd testdatadirs/$BASEDIR/$d
+    LOGFILES="$(find . -name 'debug.log' -or -name "test_framework.log")"
+    cd ../../..
+    for f in $LOGFILES; do
+      d2="testlogs/$d/$(dirname $f)"
+      mkdir -p $d2
+      cp testdatadirs/$BASEDIR/$d/$f $d2/
+    done
+  done
+fi
+
+mv testlogs ../../
+
+exit $RESULT
diff --git a/test/functional/combine_logs.py b/test/functional/combine_logs.py
index 3ca74ea35e..ec70e2b4e8 100755
--- a/test/functional/combine_logs.py
+++ b/test/functional/combine_logs.py
@@ -106,7 +106,7 @@ def print_logs(log_events, color=False, html=False):
         except ImportError:
             print("jinja2 not found. Try `pip install jinja2`")
             sys.exit(1)
-        print(jinja2.Environment(loader=jinja2.FileSystemLoader('./'))
+        print(jinja2.Environment(loader=jinja2.FileSystemLoader(os.path.dirname(os.path.abspath(__file__))))
                     .get_template('combined_log_template.html')
                     .render(title="Combined Logs from testcase",
                             log_events=[event._asdict() for event in log_events]))
diff --git a/test/functional/llmq-chainlocks.py b/test/functional/llmq-chainlocks.py
index 423aab565a..ae0e1811d5 100755
--- a/test/functional/llmq-chainlocks.py
+++ b/test/functional/llmq-chainlocks.py
@@ -29,13 +29,14 @@ class LLMQChainLocksTest(DashTestFramework):
         sync_blocks(self.nodes, timeout=60*5)
 
         self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
-        self.nodes[0].spork("SPORK_19_CHAINLOCKS_ENABLED", 0)
         self.wait_for_sporks_same()
 
         self.log.info("Mining 4 quorums")
         for i in range(4):
             self.mine_quorum()
 
+        self.nodes[0].spork("SPORK_19_CHAINLOCKS_ENABLED", 0)
+
         self.log.info("Mine single block, wait for chainlock")
         self.nodes[0].generate(1)
         self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash())
diff --git a/test/functional/wallet-dump.py b/test/functional/wallet-dump.py
index cc0d8649fb..40bdbd2784 100755
--- a/test/functional/wallet-dump.py
+++ b/test/functional/wallet-dump.py
@@ -100,7 +100,7 @@ class WalletDumpTest(BitcoinTestFramework):
         #encrypt wallet, restart, unlock and dump
         self.nodes[0].node_encrypt_wallet('test')
         self.start_node(0)
-        self.nodes[0].walletpassphrase('test', 10)
+        self.nodes[0].walletpassphrase('test', 30)
         # Should be a no-op:
         self.nodes[0].keypoolrefill()
         self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.encrypted.dump")
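
Note on the combine_logs.py hunk above: ci/test_integrationtests.sh now invokes combine_logs.py from build-ci/dashcore-$BUILD_TARGET rather than from test/functional, so a jinja2 loader rooted at the current working directory would no longer find combined_log_template.html. A minimal standalone sketch of the two loader behaviors follows; it is illustrative only, not part of the patch, and assumes jinja2 is installed and the template sits next to the script:

import os
import jinja2

# Loader rooted at the caller's working directory: get_template() raises
# jinja2.TemplateNotFound when this script is run from a build directory.
cwd_env = jinja2.Environment(loader=jinja2.FileSystemLoader('./'))

# Loader rooted at the directory containing this script (the approach the
# patch switches to): template lookup works regardless of the caller's cwd.
script_dir = os.path.dirname(os.path.abspath(__file__))
script_env = jinja2.Environment(loader=jinja2.FileSystemLoader(script_dir))

# get_template() resolves the name against the loader's search path.
template = script_env.get_template('combined_log_template.html')
print(template.render(title="Combined Logs from testcase", log_events=[]))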