diff --git a/.gitignore b/.gitignore index 3e6febd34b89..38f57495783a 100644 --- a/.gitignore +++ b/.gitignore @@ -11,6 +11,7 @@ src/dashd src/dash-cli src/dash-tx src/test/test_dash +src/test/test_dash_fuzzy src/qt/test/test_dash-qt src/bench/bench_dash @@ -88,7 +89,6 @@ Dash-Qt.app # Unit-tests Makefile.test dash-qt_test -src/test/buildenv.py # Resources cpp qrc_*.cpp @@ -109,9 +109,8 @@ coverage_percent.txt linux-coverage-build linux-build win32-build -qa/pull-tester/tests_config.py -qa/pull-tester/tests_config.ini -qa/cache/* +test/config.ini +test/cache/* !src/leveldb*/Makefile diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 000000000000..7b7e8ab20e71 --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,159 @@ +image: "ubuntu:bionic" + +variables: + DOCKER_DRIVER: overlay2 + +cache: + # Cache by branch/tag and job name + # Gitlab can't use caches from parent pipelines when doing the first build in a PR, so we use artifacts to copy + # caches into PRs + key: ${CI_COMMIT_REF_SLUG}-${CI_JOB_NAME}${CI_EXTERNAL_PULL_REQUEST_IID} + paths: + - $CI_PROJECT_DIR/cache + +stages: + - build + +.build_template: &build_template + stage: build + before_script: + - export BUILD_TARGET="$CI_JOB_NAME" + - echo BUILD_TARGET=$BUILD_TARGET + - source ./ci/matrix.sh + + # The ubuntu base image has apt configured to delete caches after each invocation, which is something that is not desirable for us + - rm /etc/apt/apt.conf.d/docker-clean + - apt-get update + - apt-get install -y wget unzip + + # Init cache + - export CACHE_DIR=$CI_PROJECT_DIR/cache + - mkdir -p $CACHE_DIR + - | + if [ "$CI_COMMIT_REF_SLUG" != "develop" -a "$CI_COMMIT_TAG" == "" ]; then + if [ ! 
-d $CACHE_DIR/ccache ]; then + echo "Downloading cache from develop branch" + mkdir cache-artifact + cd cache-artifact + if wget --quiet -O cache-artifact.zip https://gitlab.com/$CI_PROJECT_NAMESPACE/$CI_PROJECT_NAME/-/jobs/artifacts/develop/download?job=$CI_JOB_NAME; then + unzip -q cache-artifact.zip + rm cache-artifact.zip + mv cache-artifact/* $CACHE_DIR/ || true + else + echo "Failed to download cache" + fi + cd .. + rm -rf cache-artifact + else + echo "Not touching cache (was initialized from previous build)" + fi + else + echo "Not touching cache (building develop branch or tag)" + fi + # Create missing cache dirs + - mkdir -p $CACHE_DIR/ccache && mkdir -p $CACHE_DIR/depends && mkdir -p $CACHE_DIR/sdk-sources && mkdir -p $CACHE_DIR/apt + # Keep this as it makes caching related debugging easier + - ls -lah $CACHE_DIR && ls -lah $CACHE_DIR/depends && ls -lah $CACHE_DIR/ccache && ls -lah $CACHE_DIR/apt + - mv $CACHE_DIR/apt/* /var/cache/apt/archives/ || true + + # Install base packages + - apt-get dist-upgrade -y + - apt-get install -y git g++ autotools-dev libtool m4 automake autoconf pkg-config zlib1g-dev libssl1.0-dev curl ccache bsdmainutils cmake + - apt-get install -y python3 python3-dev python3-pip + + # jinja2 is needed for combine_logs.py + - pip3 install jinja2 + + # Setup some environment variables + - | + if [ "$CI_EXTERNAL_PULL_REQUEST_IID" != "" ]; then + export PULL_REQUEST="true" + else + # CI_EXTERNAL_PULL_REQUEST_IID is false every time until https://gitlab.com/gitlab-org/gitlab/issues/5667 is done + # Until then, we're using https://github.com/brndnmtthws/labhub atm to mirror Github pull requests as branches into Gitlab, + # which allows us to use Gitlab CI for Github. The following check detects such mirrored branches. 
+ if [[ $CI_COMMIT_REF_NAME =~ ^pr-[^/]*/[^/]*/[^/]*/[^/]*$ ]]; then + export PULL_REQUEST="true" + # CI_COMMIT_BEFORE_SHA is also invalid until #5667 is implemented, so we need to figure it out by ourself + git fetch origin develop + export CI_COMMIT_BEFORE_SHA="$(git merge-base origin/develop HEAD)" + else + export PULL_REQUEST="false" + fi + fi + - export COMMIT_RANGE="$CI_COMMIT_BEFORE_SHA..$CI_COMMIT_SHA" + - export JOB_NUMBER="$CI_JOB_ID" + - export HOST_SRC_DIR=$CI_PROJECT_DIR + - echo PULL_REQUEST=$PULL_REQUEST COMMIT_RANGE=$COMMIT_RANGE HOST_SRC_DIR=$HOST_SRC_DIR CACHE_DIR=$CACHE_DIR + - echo "Commit log:" && git log --format=fuller -1 + + # Build dash_hash + - git clone https://github.com/dashpay/dash_hash + - cd dash_hash && python3 setup.py install + + # Install build target specific packages + - echo PACKAGES=$PACKAGES + - if [ -n "$DPKG_ADD_ARCH" ]; then dpkg --add-architecture "$DPKG_ADD_ARCH" ; fi + - if [ -n "$PACKAGES" ]; then apt-get update && apt-get install -y --no-install-recommends --no-upgrade $PACKAGES; fi + + # Move apt packages into cache + - mv /var/cache/apt/archives/* $CACHE_DIR/apt/ || true + + # Make mingw use correct threading libraries + - update-alternatives --set i686-w64-mingw32-gcc /usr/bin/i686-w64-mingw32-gcc-posix || true + - update-alternatives --set i686-w64-mingw32-g++ /usr/bin/i686-w64-mingw32-g++-posix || true + - update-alternatives --set x86_64-w64-mingw32-gcc /usr/bin/x86_64-w64-mingw32-gcc-posix || true + - update-alternatives --set x86_64-w64-mingw32-g++ /usr/bin/x86_64-w64-mingw32-g++-posix || true + + script: + - export BUILD_TARGET="$CI_JOB_NAME" + - cd $CI_PROJECT_DIR + - ./ci/build_depends.sh + - ./ci/build_src.sh + - ./ci/test_unittests.sh + - ./ci/test_integrationtests.sh --extended --exclude pruning,dbcrash + + after_script: + # Copy all cache files into cache-artifact so that they get uploaded. 
We only do this for develop so that artifacts + # stay minimal for PRs and branches (we never need them) + - mkdir -p $CI_PROJECT_DIR/cache-artifact + - mkdir -p $CI_PROJECT_DIR/testlogs + - | + if [ "$CI_COMMIT_REF_SLUG" = "develop" ]; then + cp -ra $CI_PROJECT_DIR/cache/* $CI_PROJECT_DIR/cache-artifact/ + fi + + # We're actually only interested in the develop branch creating the cache artifact, but there is no way to control this + # until https://gitlab.com/gitlab-org/gitlab-foss/issues/25478 gets implemented. Until then, we use an expiration time of + # 3 days and rely on daily builds to refresh the cache artifacts. We also keep non-develop artifacts at minimum size + artifacts: + name: cache-artifact + when: always + paths: + - $CI_PROJECT_DIR/cache-artifact + - $CI_PROJECT_DIR/testlogs + expire_in: 3 days + +arm-linux: + <<: *build_template + +win32: + <<: *build_template + +win64: + <<: *build_template + +linux32: + <<: *build_template + +linux64: + <<: *build_template + +linux64_nowallet: + <<: *build_template + +linux64_release: + <<: *build_template + +mac: + <<: *build_template diff --git a/.travis.yml b/.travis.yml index cb4663243061..5083fa990fc8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,7 +6,7 @@ sudo: required dist: trusty os: linux -language: generic +language: minimal services: - docker @@ -47,7 +47,8 @@ runtests: &runtests - $DOCKER_RUN_IN_BUILDER ./ci/build_depends.sh - $DOCKER_RUN_IN_BUILDER ./ci/build_src.sh - $DOCKER_RUN_IN_BUILDER ./ci/test_unittests.sh - - $DOCKER_RUN_IN_BUILDER ./ci/test_integrationtests.sh --jobs=3 + - if [ "$TRAVIS_EVENT_TYPE" = "cron" ]; then extended="--extended --exclude pruning,dbcrash"; fi + - $DOCKER_RUN_IN_BUILDER ./ci/test_integrationtests.sh --jobs=3 ${extended} builddocker: &builddocker stage: build docker @@ -119,10 +120,12 @@ before_install: install: # Our scripts try to be Travis agnostic - export PULL_REQUEST="$TRAVIS_PULL_REQUEST" + - export COMMIT_RANGE="$TRAVIS_COMMIT_RANGE" - export 
JOB_NUMBER="$TRAVIS_JOB_NUMBER" - export HOST_SRC_DIR=$TRAVIS_BUILD_DIR - export HOST_CACHE_DIR=$HOME/cache - export TRAVIS_COMMIT_LOG=`git log --format=fuller -1` + - export PYTHON_DEBUG=1 - source ./ci/matrix.sh - mkdir -p $HOST_CACHE_DIR/docker && mkdir -p $HOST_CACHE_DIR/ccache && mkdir -p $HOST_CACHE_DIR/depends && mkdir -p $HOST_CACHE_DIR/sdk-sources # Keep this as it makes caching related debugging easier diff --git a/CMakeLists.txt b/CMakeLists.txt index 1fff48643e30..670d3e3391eb 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -33,32 +33,45 @@ if(DEFINED DEPENDS_PREFIX) endif() add_definitions( + -DENABLE_CRASH_HOOKS=1 -DENABLE_WALLET=1 ) file(GLOB SOURCE_FILES + src/*.cpp + src/*.h src/bench/*.cpp src/bench/*.h src/bls/*.cpp src/bls/*.h src/compat/*.cpp src/compat/*.h - src/consensus/*.h src/consensus/*.cpp + src/consensus/*.h src/crypto/*.c - src/crypto/*.h src/crypto/*.cpp + src/crypto/*.h + src/evo/*.cpp + src/evo/*.h src/leveldb/db/*.cc src/leveldb/db/*.h src/leveldb/include/*.h + src/llmq/*.cpp + src/llmq/*.h + src/masternode/*.cpp + src/masternode/*.h src/policy/*.cpp src/policy/*.h src/primitives/*.cpp src/primitives/*.h - src/qt/test/*.cpp - src/qt/test/*.h + src/privatesend/*.cpp + src/privatesend/*.h src/qt/*.cpp src/qt/*.h + src/qt/test/*.cpp + src/qt/test/*.h + src/rpc/*.cpp + src/rpc/*.h src/script/*.cpp src/script/*.h src/secp256k1/include/*.h @@ -67,19 +80,11 @@ file(GLOB SOURCE_FILES src/univalue/include/*.h src/univalue/lib/*.cpp src/univalue/lib/*.h - src/wallet/test/*.cpp src/wallet/*.cpp src/wallet/*.h + src/wallet/test/*.cpp src/zmq/*.cpp src/zmq/*.h - src/*.cpp - src/*.h - src/evo/*.h - src/evo/*.cpp - src/llmq/*.h - src/llmq/*.cpp - src/rpc/*.cpp - src/rpc/*.h ) add_executable(dash ${SOURCE_FILES}) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 078e0ec756b1..eb738380aef0 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -37,8 +37,8 @@ fixes or code moves with actual code changes. 
Commit messages should be verbose by default consisting of a short subject line (50 chars max), a blank line and detailed explanatory text as separate -paragraph(s); unless the title alone is self-explanatory (like "Corrected typo -in init.cpp") then a single title line is sufficient. Commit messages should be +paragraph(s), unless the title alone is self-explanatory (like "Corrected typo +in init.cpp") in which case a single title line is sufficient. Commit messages should be helpful to people reading your code in the future, so explain the reasoning for your decisions. Further explanation [here](http://chris.beams.io/posts/git-commit/). @@ -170,13 +170,13 @@ judge the general consensus of contributors. In general, all pull requests must: - - have a clear use case, fix a demonstrable bug or serve the greater good of + - Have a clear use case, fix a demonstrable bug or serve the greater good of the project (for example refactoring for modularisation); - - be well peer reviewed; - - have unit tests and functional tests where appropriate; - - follow [code style guidelines](/doc/developer-notes.md); - - not break the existing test suite; - - where bugs are fixed, where possible, there should be unit tests + - Be well peer reviewed; + - Have unit tests and functional tests where appropriate; + - Follow [code style guidelines](/doc/developer-notes.md); + - Not break the existing test suite; + - Where bugs are fixed, where possible, there should be unit tests demonstrating the bug and also proving the fix. This helps prevent regression. Patches that change Dash consensus rules are considerably more involved than @@ -225,6 +225,36 @@ discussed extensively on the mailing list and IRC, be accompanied by a widely discussed BIP and have a generally widely perceived technical consensus of being a worthwhile change based on the judgement of the maintainers. 
+### Finding Reviewers + +The review process is normally fairly responsive on the Dash Core repository, however +this might not always be the case. If you find that you've been waiting +for a pull request to be given attention for several months, there may be a number +of reasons for this, some of which you can do something about: + + - It may be because of a feature freeze due to an upcoming release. During this time, + only bug fixes are taken into consideration. If your pull request is a new feature, + it will not be prioritized until the release is over. Wait for release. + - It may be because the changes you are suggesting do not appeal to people. Rather than + nits and critique, which require effort and means they care enough to spend time on your + contribution, thundering silence is a good sign of widespread (mild) dislike of a given change + (because people don't assume *others* won't actually like the proposal). Don't take + that personally, though! Instead, take another critical look at what you are suggesting + and see if it: changes too much, is too broad, doesn't adhere to the + [developer notes](doc/developer-notes.md), is dangerous or insecure, is messily written, etc. + Identify and address any of the issues you find. Then ask e.g. on the forum or on a community + discord if someone could give their opinion on the concept itself. + - It may be because your code is too complex for all but a few people. And those people + may not have realized your pull request even exists. A great way to find people who + are qualified and care about the code you are touching is the + [Git Blame feature](https://help.github.com/articles/tracing-changes-in-a-file/). Simply + find the person touching the code you are touching before you and see if you can find + them and give them a nudge. Don't be incessant about the nudging though. + - Finally, if all else fails, ask on discord or elsewhere for someone to give your pull request + a look. 
If you think you've been waiting an unreasonably long amount of time (month+) for + no particular reason (few lines changed, etc), this is totally fine. Try to return the favor + when someone else is asking for feedback on their code, and universe balances out. + Release Policy -------------- diff --git a/COPYING b/COPYING index 3662d621d104..b4bd185bc9e2 100644 --- a/COPYING +++ b/COPYING @@ -1,7 +1,8 @@ The MIT License (MIT) -Copyright (c) 2009-2019 The Bitcoin Core developers -Copyright (c) 2014-2019 The Dash Core developers +Copyright (c) 2009-2020 The Bitcoin Core developers +Copyright (c) 2009-2020 Bitcoin Developers +Copyright (c) 2014-2020 The Dash Core developers Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/Makefile.am b/Makefile.am index ce09ef59eaf9..f42a3df3ffbe 100644 --- a/Makefile.am +++ b/Makefile.am @@ -44,6 +44,9 @@ DIST_CONTRIB = $(top_srcdir)/contrib/dash-cli.bash-completion \ $(top_srcdir)/contrib/dash-tx.bash-completion \ $(top_srcdir)/contrib/dashd.bash-completion \ $(top_srcdir)/contrib/init +DIST_SHARE = \ + $(top_srcdir)/share/genbuild.sh \ + $(top_srcdir)/share/rpcuser BIN_CHECKS=$(top_srcdir)/contrib/devtools/symbol-check.py \ $(top_srcdir)/contrib/devtools/security-check.py @@ -59,10 +62,10 @@ OSX_PACKAGING = $(OSX_DEPLOY_SCRIPT) $(OSX_FANCY_PLIST) $(OSX_INSTALLER_ICONS) \ $(top_srcdir)/contrib/macdeploy/detached-sig-apply.sh \ $(top_srcdir)/contrib/macdeploy/detached-sig-create.sh -COVERAGE_INFO = baseline_filtered_combined.info baseline.info \ - leveldb_baseline.info test_dash_filtered.info total_coverage.info \ - baseline_filtered.info rpc_test.info rpc_test_filtered.info \ - leveldb_baseline_filtered.info test_dash_coverage.info test_dash.info +COVERAGE_INFO = baseline.info \ + test_dash_filtered.info total_coverage.info \ + baseline_filtered.info functional_test.info functional_test_filtered.info \ + 
test_dash_coverage.info test_dash.info dist-hook: -$(GIT) archive --format=tar HEAD -- src/clientversion.cpp | $(AMTAR) -C $(top_distdir) -xf - @@ -166,52 +169,45 @@ $(BITCOIN_CLI_BIN): FORCE $(MAKE) -C src $(@F) if USE_LCOV +LCOV_FILTER_PATTERN=-p "/usr/include/" -p "src/leveldb/" -p "src/bench/" -p "src/univalue" -p "src/crypto/ctaes" -p "src/secp256k1" baseline.info: $(LCOV) -c -i -d $(abs_builddir)/src -o $@ baseline_filtered.info: baseline.info - $(LCOV) -r $< "/usr/include/*" -o $@ + $(abs_builddir)/contrib/filter-lcov.py $(LCOV_FILTER_PATTERN) $< $@ + $(LCOV) -a $@ $(LCOV_OPTS) -o $@ -leveldb_baseline.info: baseline_filtered.info - $(LCOV) -c -i -d $(abs_builddir)/src/leveldb -b $(abs_builddir)/src/leveldb -o $@ - -leveldb_baseline_filtered.info: leveldb_baseline.info - $(LCOV) -r $< "/usr/include/*" -o $@ - -baseline_filtered_combined.info: leveldb_baseline_filtered.info baseline_filtered.info - $(LCOV) -a leveldb_baseline_filtered.info -a baseline_filtered.info -o $@ - -test_dash.info: baseline_filtered_combined.info +test_dash.info: baseline_filtered.info $(MAKE) -C src/ check - $(LCOV) -c -d $(abs_builddir)/src -t test_dash -o $@ - $(LCOV) -z -d $(abs_builddir)/src - $(LCOV) -z -d $(abs_builddir)/src/leveldb + $(LCOV) -c $(LCOV_OPTS) -d $(abs_builddir)/src -t test_dash -o $@ + $(LCOV) -z $(LCOV_OPTS) -d $(abs_builddir)/src test_dash_filtered.info: test_dash.info - $(LCOV) -r $< "/usr/include/*" -o $@ + $(abs_builddir)/contrib/filter-lcov.py $(LCOV_FILTER_PATTERN) $< $@ + $(LCOV) -a $@ $(LCOV_OPTS) -o $@ -rpc_test.info: test_dash_filtered.info - -@TIMEOUT=15 python qa/pull-tester/rpc-tests.py $(EXTENDED_RPC_TESTS) - $(LCOV) -c -d $(abs_builddir)/src --t rpc-tests -o $@ - $(LCOV) -z -d $(abs_builddir)/src - $(LCOV) -z -d $(abs_builddir)/src/leveldb +functional_test.info: test_dash_filtered.info + -@TIMEOUT=15 test/functional/test_runner.py $(EXTENDED_FUNCTIONAL_TESTS) + $(LCOV) -c $(LCOV_OPTS) -d $(abs_builddir)/src --t functional-tests -o $@ + $(LCOV) -z 
$(LCOV_OPTS) -d $(abs_builddir)/src -rpc_test_filtered.info: rpc_test.info - $(LCOV) -r $< "/usr/include/*" -o $@ +functional_test_filtered.info: functional_test.info + $(abs_builddir)/contrib/filter-lcov.py $(LCOV_FILTER_PATTERN) $< $@ + $(LCOV) -a $@ $(LCOV_OPTS) -o $@ -test_dash_coverage.info: baseline_filtered_combined.info test_dash_filtered.info - $(LCOV) -a baseline_filtered.info -a leveldb_baseline_filtered.info -a test_dash_filtered.info -o $@ +test_dash_coverage.info: baseline_filtered.info test_dash_filtered.info + $(LCOV) -a $(LCOV_OPTS) baseline_filtered.info -a test_dash_filtered.info -o $@ -total_coverage.info: baseline_filtered_combined.info test_dash_filtered.info rpc_test_filtered.info - $(LCOV) -a baseline_filtered.info -a leveldb_baseline_filtered.info -a test_dash_filtered.info -a rpc_test_filtered.info -o $@ | $(GREP) "\%" | $(AWK) '{ print substr($$3,2,50) "/" $$5 }' > coverage_percent.txt +total_coverage.info: test_dash_filtered.info functional_test_filtered.info + $(LCOV) -a $(LCOV_OPTS) baseline_filtered.info -a test_dash_filtered.info -a functional_test_filtered.info -o $@ | $(GREP) "\%" | $(AWK) '{ print substr($$3,2,50) "/" $$5 }' > coverage_percent.txt test_dash.coverage/.dirstamp: test_dash_coverage.info - $(GENHTML) -s $< -o $(@D) + $(GENHTML) -s $(LCOV_OPTS) $< -o $(@D) @touch $@ total.coverage/.dirstamp: total_coverage.info - $(GENHTML) -s $< -o $(@D) + $(GENHTML) -s $(LCOV_OPTS) $< -o $(@D) @touch $@ cov: test_dash.coverage/.dirstamp total.coverage/.dirstamp @@ -220,7 +216,47 @@ endif dist_noinst_SCRIPTS = autogen.sh -EXTRA_DIST = $(top_srcdir)/share/genbuild.sh qa/pull-tester/rpc-tests.py qa/rpc-tests $(DIST_CONTRIB) $(DIST_DOCS) $(WINDOWS_PACKAGING) $(OSX_PACKAGING) $(BIN_CHECKS) +EXTRA_DIST = $(DIST_SHARE) test/functional/test_runner.py test/functional $(DIST_CONTRIB) $(DIST_DOCS) $(WINDOWS_PACKAGING) $(OSX_PACKAGING) $(BIN_CHECKS) + +EXTRA_DIST += \ + test/util/bitcoin-util-test.py \ + test/util/data/bitcoin-util-test.json \ + 
test/util/data/blanktxv1.hex \ + test/util/data/blanktxv1.json \ + test/util/data/blanktxv2.hex \ + test/util/data/blanktxv2.json \ + test/util/data/tt-delin1-out.hex \ + test/util/data/tt-delin1-out.json \ + test/util/data/tt-delout1-out.hex \ + test/util/data/tt-delout1-out.json \ + test/util/data/tt-locktime317000-out.hex \ + test/util/data/tt-locktime317000-out.json \ + test/util/data/tx394b54bb.hex \ + test/util/data/txcreate1.hex \ + test/util/data/txcreate1.json \ + test/util/data/txcreate2.hex \ + test/util/data/txcreate2.json \ + test/util/data/txcreatedata1.hex \ + test/util/data/txcreatedata1.json \ + test/util/data/txcreatedata2.hex \ + test/util/data/txcreatedata2.json \ + test/util/data/txcreatedata_seq0.hex \ + test/util/data/txcreatedata_seq0.json \ + test/util/data/txcreatedata_seq1.hex \ + test/util/data/txcreatedata_seq1.json \ + test/util/data/txcreatemultisig1.hex \ + test/util/data/txcreatemultisig1.json \ + test/util/data/txcreatemultisig2.hex \ + test/util/data/txcreatemultisig2.json \ + test/util/data/txcreateoutpubkey1.hex \ + test/util/data/txcreateoutpubkey1.json \ + test/util/data/txcreatescript1.hex \ + test/util/data/txcreatescript1.json \ + test/util/data/txcreatescript2.hex \ + test/util/data/txcreatescript2.json \ + test/util/data/txcreatesignv1.hex \ + test/util/data/txcreatesignv1.json \ + test/util/data/txcreatesignv2.hex CLEANFILES = $(OSX_DMG) $(BITCOIN_WIN_INSTALLER) @@ -229,5 +265,5 @@ CLEANFILES = $(OSX_DMG) $(BITCOIN_WIN_INSTALLER) DISTCHECK_CONFIGURE_FLAGS = --enable-man clean-local: - rm -rf coverage_percent.txt test_dash.coverage/ total.coverage/ qa/tmp/ cache/ $(OSX_APP) - rm -rf qa/pull-tester/__pycache__ + rm -rf coverage_percent.txt test_dash.coverage/ total.coverage/ test/tmp/ cache/ $(OSX_APP) + rm -rf test/functional/__pycache__ diff --git a/README.md b/README.md index 102349e65550..b39ee1fe7d69 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ -Dash Core staging tree 0.14.0 -=============================== 
+Dash Core staging tree 0.15 +=========================== `master:` [![Build Status](https://travis-ci.org/dashpay/dash.svg?branch=master)](https://travis-ci.org/dashpay/dash) `develop:` [![Build Status](https://travis-ci.org/dashpay/dash.svg?branch=develop)](https://travis-ci.org/dashpay/dash/branches) @@ -7,9 +7,9 @@ https://www.dash.org What is Dash? ----------------- +------------- -Dash is an experimental digital currency that enables anonymous, instant +Dash is an experimental digital currency that enables instant, private payments to anyone, anywhere in the world. Dash uses peer-to-peer technology to operate with no central authority: managing transactions and issuing money are carried out collectively by the network. Dash Core is the name of the open @@ -49,9 +49,9 @@ submit new unit tests for old code. Unit tests can be compiled and run (assuming they weren't disabled in configure) with: `make check`. Further details on running and extending unit tests can be found in [/src/test/README.md](/src/test/README.md). -There are also [regression and integration tests](/qa) of the RPC interface, written +There are also [regression and integration tests](/test), written in Python, that are run automatically on the build server. -These tests can be run (if the [test dependencies](/qa) are installed) with: `qa/pull-tester/rpc-tests.py` +These tests can be run (if the [test dependencies](/test) are installed) with: `test/functional/test_runner.py` The Travis CI system makes sure that every pull request is built for Windows, Linux, and OS X, and that unit/sanity tests are run automatically. 
diff --git a/build-aux/m4/bitcoin_find_bdb48.m4 b/build-aux/m4/bitcoin_find_bdb48.m4 index 980f1e8f19cd..b9bf7bf46e54 100644 --- a/build-aux/m4/bitcoin_find_bdb48.m4 +++ b/build-aux/m4/bitcoin_find_bdb48.m4 @@ -12,7 +12,7 @@ AC_DEFUN([BITCOIN_FIND_BDB48],[ bdbpath=X bdb48path=X bdbdirlist= - for _vn in 4.8 48 4 5 ''; do + for _vn in 4.8 48 4 5 5.3 ''; do for _pfx in b lib ''; do bdbdirlist="$bdbdirlist ${_pfx}db${_vn}" done diff --git a/build-aux/m4/bitcoin_qt.m4 b/build-aux/m4/bitcoin_qt.m4 index e11c9b90ea6b..f9d63864eb8e 100644 --- a/build-aux/m4/bitcoin_qt.m4 +++ b/build-aux/m4/bitcoin_qt.m4 @@ -130,6 +130,8 @@ AC_DEFUN([BITCOIN_QT_CONFIGURE],[ if test "x$bitcoin_cv_need_acc_widget" = "xyes"; then _BITCOIN_QT_CHECK_STATIC_PLUGINS([Q_IMPORT_PLUGIN(AccessibleFactory)], [-lqtaccessiblewidgets]) fi + _BITCOIN_QT_CHECK_STATIC_PLUGINS([Q_IMPORT_PLUGIN(QMinimalIntegrationPlugin)],[-lqminimal]) + AC_DEFINE(QT_QPA_PLATFORM_MINIMAL, 1, [Define this symbol if the minimal qt platform exists]) if test x$TARGET_OS = xwindows; then _BITCOIN_QT_CHECK_STATIC_PLUGINS([Q_IMPORT_PLUGIN(QWindowsIntegrationPlugin)],[-lqwindows]) AC_DEFINE(QT_QPA_PLATFORM_WINDOWS, 1, [Define this symbol if the qt platform is windows]) @@ -412,17 +414,17 @@ AC_DEFUN([_BITCOIN_QT_FIND_LIBS_WITH_PKGCONFIG],[ qt4_modules="QtCore QtGui QtNetwork" BITCOIN_QT_CHECK([ if test x$bitcoin_qt_want_version = xqt5 || ( test x$bitcoin_qt_want_version = xauto && test x$auto_priority_version = xqt5 ); then - PKG_CHECK_MODULES([QT], [$qt5_modules], [QT_INCLUDES="$QT_CFLAGS"; have_qt=yes],[have_qt=no]) + PKG_CHECK_MODULES([QT5], [$qt5_modules], [QT_INCLUDES="$QT5_CFLAGS"; QT_LIBS="$QT5_LIBS" have_qt=yes],[have_qt=no]) elif test x$bitcoin_qt_want_version = xqt4 || ( test x$bitcoin_qt_want_version = xauto && test x$auto_priority_version = xqt4 ); then - PKG_CHECK_MODULES([QT], [$qt4_modules], [QT_INCLUDES="$QT_CFLAGS"; have_qt=yes], [have_qt=no]) + PKG_CHECK_MODULES([QT4], [$qt4_modules], [QT_INCLUDES="$QT4_CFLAGS"; 
QT_LIBS="$QT4_LIBS" ; have_qt=yes], [have_qt=no]) fi dnl qt version is set to 'auto' and the preferred version wasn't found. Now try the other. if test x$have_qt = xno && test x$bitcoin_qt_want_version = xauto; then if test x$auto_priority_version = xqt5; then - PKG_CHECK_MODULES([QT], [$qt4_modules], [QT_INCLUDES="$QT_CFLAGS"; have_qt=yes; QT_LIB_PREFIX=Qt; bitcoin_qt_got_major_vers=4], [have_qt=no]) + PKG_CHECK_MODULES([QT4], [$qt4_modules], [QT_INCLUDES="$QT4_CFLAGS"; QT_LIBS="$QT4_LIBS" ; have_qt=yes; QT_LIB_PREFIX=Qt; bitcoin_qt_got_major_vers=4], [have_qt=no]) else - PKG_CHECK_MODULES([QT], [$qt5_modules], [QT_INCLUDES="$QT_CFLAGS"; have_qt=yes; QT_LIB_PREFIX=Qt5; bitcoin_qt_got_major_vers=5], [have_qt=no]) + PKG_CHECK_MODULES([QT5], [$qt5_modules], [QT_INCLUDES="$QT5_CFLAGS"; QT_LIBS="$QT5_LIBS" ; have_qt=yes; QT_LIB_PREFIX=Qt5; bitcoin_qt_got_major_vers=5], [have_qt=no]) fi fi if test x$have_qt != xyes; then diff --git a/ci/Dockerfile.builder b/ci/Dockerfile.builder index dd128b475d21..357ef0199237 100644 --- a/ci/Dockerfile.builder +++ b/ci/Dockerfile.builder @@ -14,6 +14,7 @@ RUN apt-get update && apt-get install -y python3-pip # Python stuff RUN pip3 install pyzmq # really needed? 
+RUN pip3 install jinja2 # dash_hash RUN git clone https://github.com/dashpay/dash_hash diff --git a/ci/build_src.sh b/ci/build_src.sh index 2a659e7efc8a..6422b0f2f3d5 100755 --- a/ci/build_src.sh +++ b/ci/build_src.sh @@ -12,9 +12,11 @@ unset DISPLAY export CCACHE_COMPRESS=${CCACHE_COMPRESS:-1} export CCACHE_SIZE=${CCACHE_SIZE:-400M} +if [ "$PULL_REQUEST" != "false" ]; then contrib/devtools/commit-script-check.sh $COMMIT_RANGE; fi + #if [ "$CHECK_DOC" = 1 ]; then contrib/devtools/check-doc.py; fi TODO reenable after all Bitcoin PRs have been merged and docs fully fixed -depends/$HOST/native/bin/ccache --max-size=$CCACHE_SIZE +ccache --max-size=$CCACHE_SIZE if [ -n "$USE_SHELL" ]; then export CONFIG_SHELL="$USE_SHELL" diff --git a/ci/matrix.sh b/ci/matrix.sh index 5afcbee0b549..f10812b73daa 100755 --- a/ci/matrix.sh +++ b/ci/matrix.sh @@ -20,42 +20,47 @@ export CACHE_DIR=${CACHE_DIR:-$HOST_CACHE_DIR} export CCACHE_DIR=$CACHE_DIR/ccache export DOCKER_RUN_VOLUME_ARGS="-v $HOST_SRC_DIR:$SRC_DIR -v $HOST_CACHE_DIR:$CACHE_DIR" -export DOCKER_RUN_ENV_ARGS="-e SRC_DIR=$SRC_DIR -e CACHE_DIR=$CACHE_DIR -e PULL_REQUEST=$PULL_REQUEST -e JOB_NUMBER=$JOB_NUMBER -e BUILD_TARGET=$BUILD_TARGET" +export DOCKER_RUN_ENV_ARGS="-e SRC_DIR=$SRC_DIR -e CACHE_DIR=$CACHE_DIR -e PULL_REQUEST=$PULL_REQUEST -e COMMIT_RANGE=$COMMIT_RANGE -e JOB_NUMBER=$JOB_NUMBER -e BUILD_TARGET=$BUILD_TARGET" export DOCKER_RUN_ARGS="$DOCKER_RUN_VOLUME_ARGS $DOCKER_RUN_ENV_ARGS" export DOCKER_RUN_IN_BUILDER="docker run -t --rm -w $SRC_DIR $DOCKER_RUN_ARGS $BUILDER_IMAGE_NAME" # Default values for targets export GOAL="install" export SDK_URL=${SDK_URL:-https://bitcoincore.org/depends-sources/sdks} -export PYTHON_DEBUG=1 export MAKEJOBS="-j4" +export RUN_UNITTESTS=false +export RUN_INTEGRATIONTESTS=false + if [ "$BUILD_TARGET" = "arm-linux" ]; then export HOST=arm-linux-gnueabihf export PACKAGES="g++-arm-linux-gnueabihf" export CHECK_DOC=1 - export BITCOIN_CONFIG="--enable-glibc-back-compat 
--enable-reduce-exports" + # -Wno-psabi is to disable ABI warnings: "note: parameter passing for argument of type ... changed in GCC 7.1" + # This could be removed once the ABI change warning does not show up by default + export BITCOIN_CONFIG="--enable-glibc-back-compat --enable-reduce-exports CXXFLAGS=-Wno-psabi" elif [ "$BUILD_TARGET" = "win32" ]; then export HOST=i686-w64-mingw32 export DPKG_ADD_ARCH="i386" export PACKAGES="python3 nsis g++-mingw-w64-i686 wine-stable wine32 bc" export BITCOIN_CONFIG="--enable-gui --enable-reduce-exports --disable-miner" export DIRECT_WINE_EXEC_TESTS=true - export RUN_TESTS=true + export RUN_UNITTESTS=true elif [ "$BUILD_TARGET" = "win64" ]; then export HOST=x86_64-w64-mingw32 export DPKG_ADD_ARCH="i386" export PACKAGES="python3 nsis g++-mingw-w64-x86-64 wine-stable wine64 bc" export BITCOIN_CONFIG="--enable-gui --enable-reduce-exports --disable-miner" export DIRECT_WINE_EXEC_TESTS=true - export RUN_TESTS=true + export RUN_UNITTESTS=true elif [ "$BUILD_TARGET" = "linux32" ]; then export HOST=i686-pc-linux-gnu export PACKAGES="g++-multilib bc python3-zmq" export BITCOIN_CONFIG="--enable-zmq --enable-glibc-back-compat --enable-reduce-exports --enable-stacktraces LDFLAGS=-static-libstdc++" export USE_SHELL="/bin/dash" export PYZMQ=true - export RUN_TESTS=true + export RUN_UNITTESTS=true + export RUN_INTEGRATIONTESTS=true elif [ "$BUILD_TARGET" = "linux64" ]; then export HOST=x86_64-unknown-linux-gnu export PACKAGES="bc python3-zmq" @@ -63,18 +68,21 @@ elif [ "$BUILD_TARGET" = "linux64" ]; then export BITCOIN_CONFIG="--enable-zmq --enable-glibc-back-compat --enable-reduce-exports --enable-stacktraces" export CPPFLAGS="-DDEBUG_LOCKORDER -DENABLE_DASH_DEBUG" export PYZMQ=true - export RUN_TESTS=true + export RUN_UNITTESTS=true + export RUN_INTEGRATIONTESTS=true elif [ "$BUILD_TARGET" = "linux64_nowallet" ]; then export HOST=x86_64-unknown-linux-gnu export PACKAGES="python3" export DEP_OPTS="NO_WALLET=1" export 
BITCOIN_CONFIG="--enable-glibc-back-compat --enable-reduce-exports" + export RUN_UNITTESTS=true elif [ "$BUILD_TARGET" = "linux64_release" ]; then export HOST=x86_64-unknown-linux-gnu export PACKAGES="bc python3-zmq" export DEP_OPTS="NO_UPNP=1" export BITCOIN_CONFIG="--enable-zmq --enable-glibc-back-compat --enable-reduce-exports" export PYZMQ=true + export RUN_UNITTESTS=true elif [ "$BUILD_TARGET" = "mac" ]; then export HOST=x86_64-apple-darwin11 export PACKAGES="cmake imagemagick libcap-dev librsvg2-bin libz-dev libbz2-dev libtiff-tools" diff --git a/ci/test_integrationtests.sh b/ci/test_integrationtests.sh index 7b3573961919..ceff7967bf3d 100755 --- a/ci/test_integrationtests.sh +++ b/ci/test_integrationtests.sh @@ -8,7 +8,7 @@ PASS_ARGS="$@" source ./ci/matrix.sh -if [ "$RUN_TESTS" != "true" ]; then +if [ "$RUN_INTEGRATIONTESTS" != "true" ]; then echo "Skipping integration tests" exit 0 fi @@ -17,4 +17,30 @@ export LD_LIBRARY_PATH=$BUILD_DIR/depends/$HOST/lib cd build-ci/dashcore-$BUILD_TARGET -./qa/pull-tester/rpc-tests.py --coverage $PASS_ARGS +set +e +./test/functional/test_runner.py --coverage --quiet --nocleanup --tmpdir=$(pwd)/testdatadirs $PASS_ARGS +RESULT=$? +set -e + +echo "Collecting logs..." +BASEDIR=$(ls testdatadirs) +if [ "$BASEDIR" != "" ]; then + mkdir testlogs + for d in $(ls testdatadirs/$BASEDIR | grep -v '^cache$'); do + mkdir testlogs/$d + ./test/functional/combine_logs.py -c ./testdatadirs/$BASEDIR/$d > ./testlogs/$d/combined.log + ./test/functional/combine_logs.py --html ./testdatadirs/$BASEDIR/$d > ./testlogs/$d/combined.html + cd testdatadirs/$BASEDIR/$d + LOGFILES="$(find . -name 'debug.log' -or -name "test_framework.log")" + cd ../../.. 
+ for f in $LOGFILES; do + d2="testlogs/$d/$(dirname $f)" + mkdir -p $d2 + cp testdatadirs/$BASEDIR/$d/$f $d2/ + done + done +fi + +mv testlogs ../../ + +exit $RESULT diff --git a/ci/test_unittests.sh b/ci/test_unittests.sh index f5af7e39637e..1a6aecd2909e 100755 --- a/ci/test_unittests.sh +++ b/ci/test_unittests.sh @@ -6,7 +6,7 @@ set -e source ./ci/matrix.sh -if [ "$RUN_TESTS" != "true" ]; then +if [ "$RUN_UNITTESTS" != "true" ]; then echo "Skipping unit tests" exit 0 fi @@ -19,6 +19,7 @@ export WINEDEBUG=fixme-all export BOOST_TEST_LOG_LEVEL=test_suite cd build-ci/dashcore-$BUILD_TARGET + if [ "$DIRECT_WINE_EXEC_TESTS" = "true" ]; then # Inside Docker, binfmt isn't working so we can't trust in make invoking windows binaries correctly wine ./src/test/test_dash.exe diff --git a/configure.ac b/configure.ac index 49c898473eb0..25885d3c47c2 100644 --- a/configure.ac +++ b/configure.ac @@ -1,11 +1,11 @@ dnl require autoconf 2.60 (AS_ECHO/AS_ECHO_N) AC_PREREQ([2.60]) define(_CLIENT_VERSION_MAJOR, 0) -define(_CLIENT_VERSION_MINOR, 14) +define(_CLIENT_VERSION_MINOR, 15) define(_CLIENT_VERSION_REVISION, 0) -define(_CLIENT_VERSION_BUILD, 5) +define(_CLIENT_VERSION_BUILD, 0) define(_CLIENT_VERSION_IS_RELEASE, true) -define(_COPYRIGHT_YEAR, 2019) +define(_COPYRIGHT_YEAR, 2020) define(_COPYRIGHT_HOLDERS,[The %s developers]) define(_COPYRIGHT_HOLDERS_SUBSTITUTION,[[Dash Core]]) AC_INIT([Dash Core],[_CLIENT_VERSION_MAJOR._CLIENT_VERSION_MINOR._CLIENT_VERSION_REVISION],[https://github.com/dashpay/dash/issues],[dashcore],[https://dash.org/]) @@ -19,6 +19,12 @@ BITCOIN_GUI_NAME=dash-qt BITCOIN_CLI_NAME=dash-cli BITCOIN_TX_NAME=dash-tx +dnl Unless the user specified ARFLAGS, force it to be cr +AC_ARG_VAR(ARFLAGS, [Flags for the archiver, defaults to if not set]) +if test "x${ARFLAGS+set}" != "xset"; then + ARFLAGS="cr" +fi + AC_CANONICAL_HOST AH_TOP([#ifndef DASH_CONFIG_H]) @@ -87,6 +93,7 @@ AC_PATH_PROG(HEXDUMP,hexdump) AC_PATH_TOOL(READELF, readelf) AC_PATH_TOOL(CPPFILT, c++filt) 
AC_PATH_TOOL(OBJCOPY, objcopy) +AC_PATH_TOOL(DSYMUTIL, dsymutil) AC_ARG_VAR(PYTHONPATH, Augments the default search path for python module files) @@ -124,10 +131,10 @@ AC_ARG_ENABLE(bench, [use_bench=$enableval], [use_bench=yes]) -AC_ARG_ENABLE([extended-rpc-tests], - AS_HELP_STRING([--enable-extended-rpc-tests],[enable expensive RPC tests when using lcov (default no)]), - [use_extended_rpc_tests=$enableval], - [use_extended_rpc_tests=no]) +AC_ARG_ENABLE([extended-functional-tests], + AS_HELP_STRING([--enable-extended-functional-tests],[enable expensive functional tests when using lcov (default no)]), + [use_extended_functional_tests=$enableval], + [use_extended_functional_tests=no]) AC_ARG_WITH([qrencode], [AS_HELP_STRING([--with-qrencode], @@ -158,6 +165,12 @@ AC_ARG_ENABLE([lcov], [enable lcov testing (default is no)])], [use_lcov=yes], [use_lcov=no]) + +AC_ARG_ENABLE([lcov-branch-coverage], + [AS_HELP_STRING([--enable-lcov-branch-coverage], + [enable lcov testing branch coverage (default is no)])], + [use_lcov_branch=yes], + [use_lcov_branch=no]) AC_ARG_ENABLE([glibc-back-compat], [AS_HELP_STRING([--enable-glibc-back-compat], @@ -165,6 +178,16 @@ AC_ARG_ENABLE([glibc-back-compat], [use_glibc_compat=$enableval], [use_glibc_compat=no]) +AC_ARG_ENABLE([asm], + [AS_HELP_STRING([--disable-asm], + [disable assembly routines (enabled by default)])], + [use_asm=$enableval], + [use_asm=yes]) + +if test "x$use_asm" = xyes; then + AC_DEFINE(USE_ASM, 1, [Define this symbol to build in assembly routines]) +fi + AC_ARG_WITH([system-univalue], [AS_HELP_STRING([--with-system-univalue], [Build with system UniValue (default is no)])], @@ -192,12 +215,12 @@ AC_ARG_ENABLE([debug], [enable_debug=$enableval], [enable_debug=no]) -# Enable exception stacktraces -AC_ARG_ENABLE([stacktraces], - [AS_HELP_STRING([--enable-stacktraces], - [gather and print exception stack traces (default is no)])], - [enable_stacktraces=$enableval], - [enable_stacktraces=no]) +# Enable crash hooks 
+AC_ARG_ENABLE([crash-hooks], + [AS_HELP_STRING([--enable-crash-hooks], + [hook into exception/signal/assert handling to gather stack traces (default is no)])], + [enable_crashhooks=$enableval], + [enable_crashhooks=no]) # Enable in-wallet miner AC_ARG_ENABLE([miner], @@ -229,9 +252,10 @@ if test "x$enable_debug" = xyes; then if test "x$GXX" = xyes; then CXXFLAGS="$CXXFLAGS -g3 -O0" fi -elif test "x$enable_stacktraces" = xyes; then - # Enable debug information but don't turn off optimization - # (stacktraces will be suboptimal, but better than nothing) +else + # We always enable at at least -g1 debug info to support proper stacktraces in crash infos + # Stacktraces will be suboptimal due to optimization, but better than nothing. Also, -fno-omit-frame-pointer + # mitigates this a little bit if test "x$GCC" = xyes; then CFLAGS="$CFLAGS -g1 -fno-omit-frame-pointer" fi @@ -241,17 +265,15 @@ elif test "x$enable_stacktraces" = xyes; then fi fi -AM_CONDITIONAL([ENABLE_STACKTRACES], [test x$enable_stacktraces = xyes]) -if test "x$enable_stacktraces" = xyes; then - AC_DEFINE(ENABLE_STACKTRACES, 1, [Define this symbol if stacktraces should be enabled]) +AM_CONDITIONAL([ENABLE_CRASH_HOOKS], [test x$enable_crashhooks = xyes]) +if test "x$enable_crashhooks" = xyes; then + AC_DEFINE(ENABLE_CRASH_HOOKS, 1, [Define this symbol if crash hooks should be enabled]) fi AX_CHECK_LINK_FLAG([-Wl,-wrap=__cxa_allocate_exception], [LINK_WRAP_SUPPORTED=yes],,,) -AX_CHECK_COMPILE_FLAG([-rdynamic], [RDYNAMIC_SUPPORTED=yes],,,) -AM_CONDITIONAL([STACKTRACE_WRAPPED_CXX_ABI],[test x$LINK_WRAP_SUPPORTED = xyes]) -AM_CONDITIONAL([RDYNAMIC_SUPPORTED],[test x$RDYNAMIC_SUPPORTED = xyes]) +AM_CONDITIONAL([CRASH_HOOKS_WRAPPED_CXX_ABI],[test x$LINK_WRAP_SUPPORTED = xyes]) if test x$LINK_WRAP_SUPPORTED = "xyes"; then - AC_DEFINE(STACKTRACE_WRAPPED_CXX_ABI, 1, [Define this symbol to use wrapped CXX ABIs for exception stacktraces])], + AC_DEFINE(CRASH_HOOKS_WRAPPED_CXX_ABI, 1, [Define this symbol to use 
wrapped CXX ABIs for exception stacktraces]) fi # Needed for MinGW targets when debug symbols are enabled as compiled objects get very large @@ -271,7 +293,6 @@ if test "x$CXXFLAGS_overridden" = "xno"; then AX_CHECK_COMPILE_FLAG([-Wformat],[CXXFLAGS="$CXXFLAGS -Wformat"],,[[$CXXFLAG_WERROR]]) AX_CHECK_COMPILE_FLAG([-Wvla],[CXXFLAGS="$CXXFLAGS -Wvla"],,[[$CXXFLAG_WERROR]]) AX_CHECK_COMPILE_FLAG([-Wformat-security],[CXXFLAGS="$CXXFLAGS -Wformat-security"],,[[$CXXFLAG_WERROR]]) - AX_CHECK_COMPILE_FLAG([-Wshadow],[CXXFLAGS="$CXXFLAGS -Wshadow"],,[[$CXXFLAG_WERROR]]) ## Some compilers (gcc) ignore unknown -Wno-* options, but warn about all ## unknown options if any other warning is produced. Test the -Wfoo case, and @@ -282,6 +303,93 @@ if test "x$CXXFLAGS_overridden" = "xno"; then AX_CHECK_COMPILE_FLAG([-Wdeprecated-register],[CXXFLAGS="$CXXFLAGS -Wno-deprecated-register"],,[[$CXXFLAG_WERROR]]) AX_CHECK_COMPILE_FLAG([-Wimplicit-fallthrough],[CXXFLAGS="$CXXFLAGS -Wno-implicit-fallthrough"],,[[$CXXFLAG_WERROR]]) fi + +enable_hwcrc32=no +enable_sse41=no +enable_avx2=no +enable_shani=no + +if test "x$use_asm" = "xyes"; then + +# Check for optional instruction set support. Enabling these does _not_ imply that all code will +# be compiled with them, rather that specific objects/libs may use them after checking for runtime +# compatibility. 
+AX_CHECK_COMPILE_FLAG([-msse4.2],[[SSE42_CXXFLAGS="-msse4.2"]],,[[$CXXFLAG_WERROR]]) +AX_CHECK_COMPILE_FLAG([-msse4.1],[[SSE41_CXXFLAGS="-msse4.1"]],,[[$CXXFLAG_WERROR]]) +AX_CHECK_COMPILE_FLAG([-mavx -mavx2],[[AVX2_CXXFLAGS="-mavx -mavx2"]],,[[$CXXFLAG_WERROR]]) +AX_CHECK_COMPILE_FLAG([-msse4 -msha],[[SHANI_CXXFLAGS="-msse4 -msha"]],,[[$CXXFLAG_WERROR]]) + +TEMP_CXXFLAGS="$CXXFLAGS" +CXXFLAGS="$CXXFLAGS $SSE42_CXXFLAGS" +AC_MSG_CHECKING(for assembler crc32 support) +AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[ + #include <stdint.h> + #if defined(_MSC_VER) + #include <intrin.h> + #elif defined(__GNUC__) && defined(__SSE4_2__) + #include <nmmintrin.h> + #endif + ]],[[ + uint64_t l = 0; + l = _mm_crc32_u8(l, 0); + l = _mm_crc32_u32(l, 0); + l = _mm_crc32_u64(l, 0); + return l; + ]])], + [ AC_MSG_RESULT(yes); enable_hwcrc32=yes], + [ AC_MSG_RESULT(no)] +) +CXXFLAGS="$TEMP_CXXFLAGS" + +TEMP_CXXFLAGS="$CXXFLAGS" +CXXFLAGS="$CXXFLAGS $SSE41_CXXFLAGS" +AC_MSG_CHECKING(for SSE4.1 intrinsics) +AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[ + #include <stdint.h> + #include <immintrin.h> + ]],[[ + __m128i l = _mm_set1_epi32(0); + return _mm_extract_epi32(l, 3); + ]])], + [ AC_MSG_RESULT(yes); enable_sse41=yes; AC_DEFINE(ENABLE_SSE41, 1, [Define this symbol to build code that uses SSE4.1 intrinsics]) ], + [ AC_MSG_RESULT(no)] +) +CXXFLAGS="$TEMP_CXXFLAGS" + +TEMP_CXXFLAGS="$CXXFLAGS" +CXXFLAGS="$CXXFLAGS $AVX2_CXXFLAGS" +AC_MSG_CHECKING(for AVX2 intrinsics) +AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[ + #include <stdint.h> + #include <immintrin.h> + ]],[[ + __m256i l = _mm256_set1_epi32(0); + return _mm256_extract_epi32(l, 7); + ]])], + [ AC_MSG_RESULT(yes); enable_avx2=yes; AC_DEFINE(ENABLE_AVX2, 1, [Define this symbol to build code that uses AVX2 intrinsics]) ], + [ AC_MSG_RESULT(no)] +) +CXXFLAGS="$TEMP_CXXFLAGS" + +TEMP_CXXFLAGS="$CXXFLAGS" +CXXFLAGS="$CXXFLAGS $SHANI_CXXFLAGS" +AC_MSG_CHECKING(for SHA-NI intrinsics) +AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[ + #include <stdint.h> + #include <immintrin.h> + ]],[[ + __m128i i = _mm_set1_epi32(0); + __m128i j = _mm_set1_epi32(1); + __m128i k =
_mm_set1_epi32(2); + return _mm_extract_epi32(_mm_sha256rnds2_epu32(i, i, k), 0); + ]])], + [ AC_MSG_RESULT(yes); enable_shani=yes; AC_DEFINE(ENABLE_SHANI, 1, [Define this symbol to build code that uses SHA-NI intrinsics]) ], + [ AC_MSG_RESULT(no)] +) +CXXFLAGS="$TEMP_CXXFLAGS" + +fi + CPPFLAGS="$CPPFLAGS -DHAVE_BUILD_INFO -D__STDC_FORMAT_MACROS" AC_ARG_WITH([utils], @@ -459,8 +567,8 @@ if test x$use_pkgconfig = xyes; then ]) fi -if test x$use_extended_rpc_tests != xno; then - AC_SUBST(EXTENDED_RPC_TESTS, -extended) +if test x$use_extended_functional_tests != xno; then + AC_SUBST(EXTENDED_FUNCTIONAL_TESTS, --extended) fi if test x$use_lcov = xyes; then @@ -481,6 +589,12 @@ if test x$use_lcov = xyes; then [AC_MSG_ERROR("lcov testing requested but --coverage linker flag does not work")]) AX_CHECK_COMPILE_FLAG([--coverage],[CXXFLAGS="$CXXFLAGS --coverage"], [AC_MSG_ERROR("lcov testing requested but --coverage flag does not work")]) + AC_DEFINE(USE_COVERAGE, 1, [Define this symbol if coverage is enabled]) + CXXFLAGS="$CXXFLAGS -Og" +fi + +if test x$use_lcov_branch != xno; then + AC_SUBST(LCOV_OPTS, "$LCOV_OPTS --rc lcov_branch_coverage=1") fi dnl Check for endianness @@ -576,7 +690,7 @@ if test x$TARGET_OS = xdarwin; then AX_CHECK_LINK_FLAG([[-Wl,-dead_strip]], [LDFLAGS="$LDFLAGS -Wl,-dead_strip"]) fi -AC_CHECK_HEADERS([endian.h sys/endian.h byteswap.h stdio.h stdlib.h unistd.h strings.h sys/types.h sys/stat.h sys/select.h sys/prctl.h]) +AC_CHECK_HEADERS([endian.h sys/endian.h byteswap.h stdio.h stdlib.h unistd.h strings.h sys/types.h sys/stat.h sys/select.h sys/prctl.h execinfo.h]) AC_CHECK_DECLS([strnlen]) @@ -595,6 +709,8 @@ AC_CHECK_DECLS([bswap_16, bswap_32, bswap_64],,, #include <byteswap.h> #endif]) +AC_CHECK_DECLS([__builtin_clz, __builtin_clzl, __builtin_clzll]) + dnl Check for MSG_NOSIGNAL AC_MSG_CHECKING(for MSG_NOSIGNAL) AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <sys/socket.h>]], @@ -603,6 +719,14 @@ AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <sys/socket.h>]], [ AC_MSG_RESULT(no)] ) +dnl
Check for MSG_DONTWAIT +AC_MSG_CHECKING(for MSG_DONTWAIT) +AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <sys/socket.h>]], + [[ int f = MSG_DONTWAIT; ]])], + [ AC_MSG_RESULT(yes); AC_DEFINE(HAVE_MSG_DONTWAIT, 1,[Define this symbol if you have MSG_DONTWAIT]) ], + [ AC_MSG_RESULT(no)] +) + dnl Check for mallopt(M_ARENA_MAX) (to set glibc arenas) AC_MSG_CHECKING(for mallopt M_ARENA_MAX) AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <malloc.h>]], @@ -611,6 +735,14 @@ AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <malloc.h>]], [ AC_MSG_RESULT(no)] ) +dnl Check for malloc_info (for memory statistics information in getmemoryinfo) +AC_MSG_CHECKING(for getmemoryinfo) +AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <malloc.h>]], + [[ int f = malloc_info(0, NULL); ]])], + [ AC_MSG_RESULT(yes); AC_DEFINE(HAVE_MALLOC_INFO, 1,[Define this symbol if you have malloc_info]) ], + [ AC_MSG_RESULT(no)] +) + AC_MSG_CHECKING([for visibility attribute]) AC_LINK_IFELSE([AC_LANG_SOURCE([ int foo_def( void ) __attribute__((visibility("default"))); @@ -645,6 +777,14 @@ AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <unistd.h>]], [ AC_MSG_RESULT(no)] ) +AC_MSG_CHECKING(for getentropy via random.h) +AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <unistd.h> + #include <sys/random.h>]], + [[ getentropy(nullptr, 32) ]])], + [ AC_MSG_RESULT(yes); AC_DEFINE(HAVE_GETENTROPY_RAND, 1,[Define this symbol if the BSD getentropy system call is available with sys/random.h]) ], + [ AC_MSG_RESULT(no)] +) + AC_MSG_CHECKING(for sysctl KERN_ARND) AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <sys/types.h> #include <sys/sysctl.h>]], @@ -654,6 +794,13 @@ AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <sys/types.h> [ AC_MSG_RESULT(no)] ) +# ensure backtrace() is found, check -lexecinfo if necessary +if test x$TARGET_OS != xwindows; then + AC_SEARCH_LIBS([backtrace], [execinfo], [], [ + AC_MSG_ERROR([Unable to find backtrace()]) + ]) +fi + # Check for reduced exports if test x$use_reduce_exports = xyes; then AX_CHECK_COMPILE_FLAG([-fvisibility=hidden],[RE_CXXFLAGS="-fvisibility=hidden"], @@ -706,6 +853,10 @@ AX_BOOST_PROGRAM_OPTIONS
AX_BOOST_THREAD AX_BOOST_CHRONO +dnl Boost 1.56 through 1.62 allow using std::atomic instead of its own atomic +dnl counter implementations. In 1.63 and later the std::atomic approach is default. +m4_pattern_allow(DBOOST_AC_USE_STD_ATOMIC) dnl otherwise it's treated like a macro +BOOST_CPPFLAGS="-DBOOST_SP_USE_STD_ATOMIC -DBOOST_AC_USE_STD_ATOMIC $BOOST_CPPFLAGS" if test x$use_reduce_exports = xyes; then AC_MSG_CHECKING([for working boost reduced exports]) @@ -1121,6 +1272,18 @@ else AC_MSG_RESULT([no]) fi +# When compiled natively on MacOS, we need to specify -flat to avoid producing a dSYM bundle +# When cross-compiled on linux, we're using a different version of the tool that only supports flat symbol files +AC_MSG_CHECKING([whether dsymutil needs -flat]) +if test x$DSYMUTIL != x && ($DSYMUTIL --help | grep -q \\-flat); then +AC_MSG_RESULT([yes]) + DSYMUTIL_FLAT="$DSYMUTIL -flat" +else + AC_MSG_RESULT([no]) + DSYMUTIL_FLAT="$DSYMUTIL" +fi +AC_MSG_RESULT($dsymutil_needs_flat) + if test x$build_bitcoin_utils$build_bitcoin_libs$build_bitcoind$bitcoin_enable_qt$use_bench$use_tests = xnononononono; then AC_MSG_ERROR([No targets! 
Please specify at least one of: --with-utils --with-libs --with-daemon --with-gui --enable-bench or --enable-tests]) fi @@ -1137,6 +1300,11 @@ AM_CONDITIONAL([USE_QRCODE], [test x$use_qr = xyes]) AM_CONDITIONAL([USE_LCOV],[test x$use_lcov = xyes]) AM_CONDITIONAL([GLIBC_BACK_COMPAT],[test x$use_glibc_compat = xyes]) AM_CONDITIONAL([HARDEN],[test x$use_hardening = xyes]) +AM_CONDITIONAL([ENABLE_HWCRC32],[test x$enable_hwcrc32 = xyes]) +AM_CONDITIONAL([ENABLE_SSE41],[test x$enable_sse41 = xyes]) +AM_CONDITIONAL([ENABLE_AVX2],[test x$enable_avx2 = xyes]) +AM_CONDITIONAL([ENABLE_SHANI],[test x$enable_shani = xyes]) +AM_CONDITIONAL([USE_ASM],[test x$use_asm = xyes]) AC_DEFINE(CLIENT_VERSION_MAJOR, _CLIENT_VERSION_MAJOR, [Major version]) AC_DEFINE(CLIENT_VERSION_MINOR, _CLIENT_VERSION_MINOR, [Minor version]) @@ -1169,6 +1337,10 @@ AC_SUBST(HARDENED_CPPFLAGS) AC_SUBST(HARDENED_LDFLAGS) AC_SUBST(PIC_FLAGS) AC_SUBST(PIE_FLAGS) +AC_SUBST(SSE42_CXXFLAGS) +AC_SUBST(SSE41_CXXFLAGS) +AC_SUBST(AVX2_CXXFLAGS) +AC_SUBST(SHANI_CXXFLAGS) AC_SUBST(LIBTOOL_APP_LDFLAGS) AC_SUBST(USE_UPNP) AC_SUBST(USE_QRCODE) @@ -1184,10 +1356,13 @@ AC_SUBST(EVENT_PTHREADS_LIBS) AC_SUBST(ZMQ_LIBS) AC_SUBST(PROTOBUF_LIBS) AC_SUBST(QR_LIBS) -AC_CONFIG_FILES([Makefile src/Makefile doc/man/Makefile share/setup.nsi share/qt/Info.plist src/test/buildenv.py]) -AC_CONFIG_FILES([qa/pull-tester/tests_config.ini],[chmod +x qa/pull-tester/tests_config.ini]) +AC_SUBST(DSYMUTIL_FLAT) +AC_CONFIG_FILES([Makefile src/Makefile doc/man/Makefile share/setup.nsi share/qt/Info.plist test/config.ini]) AC_CONFIG_FILES([contrib/devtools/split-debug.sh],[chmod +x contrib/devtools/split-debug.sh]) -AC_CONFIG_LINKS([qa/pull-tester/rpc-tests.py:qa/pull-tester/rpc-tests.py]) +AC_CONFIG_FILES([doc/Doxyfile]) +AC_CONFIG_LINKS([contrib/filter-lcov.py:contrib/filter-lcov.py]) +AC_CONFIG_LINKS([test/functional/test_runner.py:test/functional/test_runner.py]) +AC_CONFIG_LINKS([test/util/bitcoin-util-test.py:test/util/bitcoin-util-test.py]) 
dnl boost's m4 checks do something really nasty: they export these vars. As a dnl result, they leak into secp256k1's configure and crazy things happen. @@ -1216,7 +1391,7 @@ if test x$need_bundled_univalue = xyes; then AC_CONFIG_SUBDIRS([src/univalue]) fi -ac_configure_args="${ac_configure_args} --disable-shared --with-pic --with-bignum=no --enable-module-recovery" +ac_configure_args="${ac_configure_args} --disable-shared --with-pic --with-bignum=no --enable-module-recovery --disable-jni" AC_CONFIG_SUBDIRS([src/secp256k1]) AC_OUTPUT @@ -1235,8 +1410,8 @@ esac dnl Replace the BUILDDIR path with the correct Windows path if compiling on Native Windows case ${OS} in *Windows*) - sed 's/BUILDDIR="\/\([[a-z]]\)/BUILDDIR="\1:/' qa/pull-tester/tests_config.py > qa/pull-tester/tests_config-2.py - mv qa/pull-tester/tests_config-2.py qa/pull-tester/tests_config.py + sed 's/BUILDDIR="\/\([[a-z]]\)/BUILDDIR="\1:/' test/config.ini > test/config-2.ini + mv test/config-2.ini test/config.ini ;; esac @@ -1252,8 +1427,9 @@ echo " with zmq = $use_zmq" echo " with test = $use_tests" echo " with bench = $use_bench" echo " with upnp = $use_upnp" +echo " use asm = $use_asm" echo " debug enabled = $enable_debug" -echo " stacktraces enabled = $enable_stacktraces" +echo " crash hooks enabled = $enable_crashhooks" echo " miner enabled = $enable_miner" echo " werror = $enable_werror" echo @@ -1266,4 +1442,5 @@ echo " CPPFLAGS = $CPPFLAGS" echo " CXX = $CXX" echo " CXXFLAGS = $CXXFLAGS" echo " LDFLAGS = $LDFLAGS" +echo " ARFLAGS = $ARFLAGS" echo diff --git a/contrib/README.md b/contrib/README.md index 19d04640f4ab..624965895e8d 100644 --- a/contrib/README.md +++ b/contrib/README.md @@ -12,7 +12,6 @@ Tool to verify that every merge commit was signed by a developer using the above Construct a linear, no-fork, best version of the blockchain. ### [Qos](/contrib/qos) ### - A Linux bash script that will set up traffic control (tc) to limit the outgoing bandwidth for connections to the Dash network. 
This means one can have an always-on dashd instance running, and another local dashd/dash-qt instance which connects to this node and receives blocks from it. ### [Seeds](/contrib/seeds) ### @@ -29,7 +28,7 @@ for Debian-based Linux systems. If you compile dashd/dash-qt yourself, there are Notes on getting Gitian builds up and running using KVM. ### [Gitian-keys](/contrib/gitian-keys) -PGP keys used for signing Bitcoin Core [Gitian release](/doc/release-process.md) results. +PGP keys used for signing Dash Core [Gitian release](/doc/release-process.md) results. ### [MacDeploy](/contrib/macdeploy) ### Scripts and notes for Mac builds. diff --git a/contrib/debian/control b/contrib/debian/control index 2f9caa8457a7..98e3c358af61 100644 --- a/contrib/debian/control +++ b/contrib/debian/control @@ -34,7 +34,7 @@ Package: dashd Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends} Description: peer-to-peer network based digital currency - daemon - Dash is an experimental new digital currency that enables anonymous, instant + Dash is an experimental new digital currency that enables instant, private payments to anyone, anywhere in the world. Dash uses peer-to-peer technology to operate with no central authority: managing transactions and issuing money are carried out collectively by the network. Dash Core @@ -47,7 +47,7 @@ Package: dash-qt Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends} Description: peer-to-peer network based digital currency - Qt GUI - Dash is an experimental new digital currency that enables anonymous, instant + Dash is an experimental new digital currency that enables instant, private payments to anyone, anywhere in the world. Dash uses peer-to-peer technology to operate with no central authority: managing transactions and issuing money are carried out collectively by the network. 
Dash Core @@ -59,7 +59,7 @@ Package: dash-tx Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends} Description: peer-to-peer digital currency - standalone transaction tool - Dash is an experimental new digital currency that enables anonymous, instant + Dash is an experimental new digital currency that enables instant, private payments to anyone, anywhere in the world. Dash uses peer-to-peer technology to operate with no central authority: managing transactions and issuing money are carried out collectively by the network. Dash Core diff --git a/contrib/debian/examples/dash.conf b/contrib/debian/examples/dash.conf index 2ea173018440..fd2ee3c65d00 100644 --- a/contrib/debian/examples/dash.conf +++ b/contrib/debian/examples/dash.conf @@ -131,6 +131,13 @@ # be validated sooner. #paytxfee=0.00 +# Enable pruning to reduce storage requirements by deleting old blocks. +# This mode is incompatible with -txindex and -rescan. +# 0 = default (no pruning). +# 1 = allows manual pruning via RPC. +# >=945 = target to stay under in MiB. +#prune=945 + # User interface options # Start Dash minimized diff --git a/contrib/devtools/README.md b/contrib/devtools/README.md index 7530192fb6f4..839308697381 100644 --- a/contrib/devtools/README.md +++ b/contrib/devtools/README.md @@ -112,7 +112,7 @@ For example: ./github-merge.py 3077 (in any git repository) will help you merge pull request #3077 for the -bitcoin/bitcoin repository. +dashpay/dash repository. What it does: * Fetch master and the pull request. 
@@ -132,14 +132,14 @@ Setup --------- Configuring the github-merge tool for the bitcoin repository is done in the following way: - git config githubmerge.repository bitcoin/bitcoin + git config githubmerge.repository dashpay/dash git config githubmerge.testcmd "make -j4 check" (adapt to whatever you want to use for testing) git config --global user.signingkey mykeyid (if you want to GPG sign) optimize-pngs.py ================ -A script to optimize png files in the bitcoin +A script to optimize png files in the dash repository (requires pngcrush). security-check.py and test-security-check.py diff --git a/contrib/devtools/check-doc.py b/contrib/devtools/check-doc.py index 4bd65ad947ea..77ab7c32fae7 100755 --- a/contrib/devtools/check-doc.py +++ b/contrib/devtools/check-doc.py @@ -21,7 +21,7 @@ REGEX_ARG = re.compile(r'(?:map(?:Multi)?Args(?:\.count\(|\[)|Get(?:Bool)?Arg\()\"(\-[^\"]+?)\"') REGEX_DOC = re.compile(r'HelpMessageOpt\(\"(\-[^\"=]+?)(?:=|\")') # list unsupported, deprecated and duplicate args as they need no documentation -SET_DOC_OPTIONAL = set(['-rpcssl', '-benchmark', '-h', '-help', '-socks', '-tor', '-debugnet', '-whitelistalwaysrelay', '-blockminsize', '-sendfreetransactions']) +SET_DOC_OPTIONAL = set(['-rpcssl', '-benchmark', '-h', '-help', '-socks', '-tor', '-debugnet', '-whitelistalwaysrelay', '-blockminsize', '-dbcrashratio', '-forcecompactdb']) def main(): used = check_output(CMD_GREP_ARGS, shell=True, universal_newlines=True) diff --git a/contrib/devtools/commit-script-check.sh b/contrib/devtools/commit-script-check.sh new file mode 100755 index 000000000000..1c9dbc7f68ff --- /dev/null +++ b/contrib/devtools/commit-script-check.sh @@ -0,0 +1,46 @@ +#!/bin/sh +# Copyright (c) 2017 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+ +# This simple script checks for commits beginning with: scripted-diff: +# If found, looks for a script between the lines -BEGIN VERIFY SCRIPT- and +# -END VERIFY SCRIPT-. If no ending is found, it reads until the end of the +# commit message. + +# The resulting script should exactly transform the previous commit into the current +# one. Any remaining diff signals an error. + +if test "x$1" = "x"; then + echo "Usage: $0 ..." + exit 1 +fi + +RET=0 +PREV_BRANCH=`git name-rev --name-only HEAD` +PREV_HEAD=`git rev-parse HEAD` +for i in `git rev-list --reverse $1`; do + if git rev-list -n 1 --pretty="%s" $i | grep -q "^scripted-diff:"; then + git checkout --quiet $i^ || exit + SCRIPT="`git rev-list --format=%b -n1 $i | sed '/^-BEGIN VERIFY SCRIPT-$/,/^-END VERIFY SCRIPT-$/{//!b};d'`" + if test "x$SCRIPT" = "x"; then + echo "Error: missing script for: $i" + echo "Failed" + RET=1 + else + echo "Running script for: $i" + echo "$SCRIPT" + eval "$SCRIPT" + git --no-pager diff --exit-code $i && echo "OK" || (echo "Failed"; false) || RET=1 + fi + git reset --quiet --hard HEAD + else + if git rev-list "--format=%b" -n1 $i | grep -q '^-\(BEGIN\|END\)[ a-zA-Z]*-$'; then + echo "Error: script block marker but no scripted-diff in title" + echo "Failed" + RET=1 + fi + fi +done +git checkout --quiet $PREV_BRANCH 2>/dev/null || git checkout --quiet $PREV_HEAD +exit $RET diff --git a/contrib/devtools/copyright_header.py b/contrib/devtools/copyright_header.py index 2444d5dd8180..ac518d2dfa1a 100755 --- a/contrib/devtools/copyright_header.py +++ b/contrib/devtools/copyright_header.py @@ -33,7 +33,7 @@ 'src/tinyformat.h', 'src/leveldb/util/env_win.cc', 'src/crypto/ctaes/bench.c', - 'qa/rpc-tests/test_framework/bignum.py', + 'test/functional/test_framework/bignum.py', # python init: '*__init__.py', ] diff --git a/contrib/devtools/github-merge.py b/contrib/devtools/github-merge.py index 3fee39143dad..c664cf81fa9f 100755 --- a/contrib/devtools/github-merge.py +++ 
b/contrib/devtools/github-merge.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright (c) 2016 The Bitcoin Core developers +# Copyright (c) 2016-2017 Bitcoin Core Developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. @@ -127,6 +127,9 @@ def tree_sha512sum(commit='HEAD'): raise IOError('Non-zero return value executing git cat-file') return overall.hexdigest() +def print_merge_details(pull, title, branch, base_branch, head_branch): + print('%s#%s%s %s %sinto %s%s' % (ATTR_RESET+ATTR_PR,pull,ATTR_RESET,title,ATTR_RESET+ATTR_PR,branch,ATTR_RESET)) + subprocess.check_call([GIT,'log','--graph','--topo-order','--pretty=format:'+COMMIT_FORMAT,base_branch+'..'+head_branch]) def parse_arguments(): epilog = ''' @@ -171,7 +174,8 @@ def main(): info = retrieve_pr_info(repo,pull) if info is None: exit(1) - title = info['title'] + title = info['title'].strip() + body = info['body'].strip() # precedence order for destination branch argument: # - command line argument # - githubmerge.branch setting @@ -226,6 +230,7 @@ def main(): firstline = 'Merge #%s' % (pull,) message = firstline + '\n\n' message += subprocess.check_output([GIT,'log','--no-merges','--topo-order','--pretty=format:%h %s (%an)',base_branch+'..'+head_branch]).decode('utf-8') + message += '\n\nPull request description:\n\n ' + body.replace('\n', '\n ') + '\n' try: subprocess.check_call([GIT,'merge','-q','--commit','--no-edit','--no-ff','-m',message.encode('utf-8'),head_branch]) except subprocess.CalledProcessError as e: @@ -256,8 +261,7 @@ def main(): printf("ERROR: Cannot update message.",file=stderr) exit(4) - print('%s#%s%s %s %sinto %s%s' % (ATTR_RESET+ATTR_PR,pull,ATTR_RESET,title,ATTR_RESET+ATTR_PR,branch,ATTR_RESET)) - subprocess.check_call([GIT,'log','--graph','--topo-order','--pretty=format:'+COMMIT_FORMAT,base_branch+'..'+head_branch]) + print_merge_details(pull, title, branch, base_branch, head_branch) print() # 
Run test command if configured. @@ -276,12 +280,6 @@ def main(): print("Difference with github ignored.",file=stderr) else: exit(6) - reply = ask_prompt("Press 'd' to accept the diff.") - if reply.lower() == 'd': - print("Diff accepted.",file=stderr) - else: - print("ERROR: Diff rejected.",file=stderr) - exit(6) else: # Verify the result manually. print("Dropping you on a shell so you can try building/testing the merged source.",file=stderr) @@ -290,12 +288,6 @@ def main(): if os.path.isfile('/etc/debian_version'): # Show pull number on Debian default prompt os.putenv('debian_chroot',pull) subprocess.call([BASH,'-i']) - reply = ask_prompt("Type 'm' to accept the merge.") - if reply.lower() == 'm': - print("Merge accepted.",file=stderr) - else: - print("ERROR: Merge rejected.",file=stderr) - exit(7) second_sha512 = tree_sha512sum() if first_sha512 != second_sha512: @@ -303,16 +295,18 @@ def main(): exit(8) # Sign the merge commit. - reply = ask_prompt("Type 's' to sign off on the merge.") - if reply == 's': - try: - subprocess.check_call([GIT,'commit','-q','--gpg-sign','--amend','--no-edit']) - except subprocess.CalledProcessError as e: - print("Error signing, exiting.",file=stderr) + print_merge_details(pull, title, branch, base_branch, head_branch) + while True: + reply = ask_prompt("Type 's' to sign off on the above merge, or 'x' to reject and exit.").lower() + if reply == 's': + try: + subprocess.check_call([GIT,'commit','-q','--gpg-sign','--amend','--no-edit']) + break + except subprocess.CalledProcessError as e: + print("Error while signing, asking again.",file=stderr) + elif reply == 'x': + print("Not signing off on merge, exiting.",file=stderr) exit(1) - else: - print("Not signing off on merge, exiting.",file=stderr) - exit(1) # Put the result in branch. subprocess.check_call([GIT,'checkout','-q',branch]) @@ -326,9 +320,13 @@ def main(): subprocess.call([GIT,'branch','-q','-D',local_merge_branch],stderr=devnull) # Push the result. 
- reply = ask_prompt("Type 'push' to push the result to %s, branch %s." % (host_repo,branch)) - if reply.lower() == 'push': - subprocess.check_call([GIT,'push',host_repo,'refs/heads/'+branch]) + while True: + reply = ask_prompt("Type 'push' to push the result to %s, branch %s, or 'x' to exit without pushing." % (host_repo,branch)).lower() + if reply == 'push': + subprocess.check_call([GIT,'push',host_repo,'refs/heads/'+branch]) + break + elif reply == 'x': + exit(1) if __name__ == '__main__': main() diff --git a/contrib/devtools/optimize-pngs.py b/contrib/devtools/optimize-pngs.py index e8375a78bebd..548a9faa53a0 100755 --- a/contrib/devtools/optimize-pngs.py +++ b/contrib/devtools/optimize-pngs.py @@ -26,7 +26,12 @@ def content_hash(filename): pngcrush = 'pngcrush' git = 'git' -folders = ["src/qt/res/movies", "src/qt/res/icons", "src/qt/res/icons/crownium", "src/qt/res/icons/drkblue", "src/qt/res/icons/light", "src/qt/res/icons/light-retro", "src/qt/res/icons/trad", "src/qt/res/images", "src/qt/res/images/crownium", "src/qt/res/images/drkblue", "src/qt/res/images/light", "src/qt/res/images/light-retro", "src/qt/res/images/trad", "share/pixmaps"] +folders = [ + "src/qt/res/movies", + "src/qt/res/icons", + "src/qt/res/images", + "share/pixmaps" +] basePath = subprocess.check_output([git, 'rev-parse', '--show-toplevel'], universal_newlines=True).rstrip('\n') totalSaveBytes = 0 noHashChange = True diff --git a/contrib/filter-lcov.py b/contrib/filter-lcov.py new file mode 100755 index 000000000000..299377d69115 --- /dev/null +++ b/contrib/filter-lcov.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 + +import argparse + +parser = argparse.ArgumentParser(description='Remove the coverage data from a tracefile for all files matching the pattern.') +parser.add_argument('--pattern', '-p', action='append', help='the pattern of files to remove', required=True) +parser.add_argument('tracefile', help='the tracefile to remove the coverage data from') +parser.add_argument('outfile', 
help='filename for the output to be written to') + +args = parser.parse_args() +tracefile = args.tracefile +pattern = args.pattern +outfile = args.outfile + +in_remove = False +with open(tracefile, 'r') as f: + with open(outfile, 'w') as wf: + for line in f: + for p in pattern: + if line.startswith("SF:") and p in line: + in_remove = True + if not in_remove: + wf.write(line) + if line == 'end_of_record\n': + in_remove = False diff --git a/contrib/gitian-build.py b/contrib/gitian-build.py index 70569702178f..4a9971bdb2d9 100755 --- a/contrib/gitian-build.py +++ b/contrib/gitian-build.py @@ -51,10 +51,8 @@ def build(): os.chdir('gitian-builder') os.makedirs('inputs', exist_ok=True) - subprocess.check_call(['wget', '-N', '-P', 'inputs', 'https://downloads.sourceforge.net/project/osslsigncode/osslsigncode/osslsigncode-1.7.1.tar.gz']) - subprocess.check_call(['wget', '-N', '-P', 'inputs', 'https://bitcoincore.org/cfields/osslsigncode-Backports-to-1.7.1.patch']) - subprocess.check_output(["echo 'a8c4e9cafba922f89de0df1f2152e7be286aba73f78505169bc351a7938dd911 inputs/osslsigncode-Backports-to-1.7.1.patch' | sha256sum -c"], shell=True) - subprocess.check_output(["echo 'f9a8cdb38b9c309326764ebc937cba1523a3a751a7ab05df3ecc99d18ae466c9 inputs/osslsigncode-1.7.1.tar.gz' | sha256sum -c"], shell=True) + subprocess.check_call(['wget', '-O', 'inputs/osslsigncode-2.0.tar.gz', 'https://github.com/mtrojnar/osslsigncode/archive/2.0.tar.gz']) + subprocess.check_call(["echo '5a60e0a4b3e0b4d655317b2f12a810211c50242138322b16e7e01c6fbb89d92f inputs/osslsigncode-2.0.tar.gz' | sha256sum -c"], shell=True) subprocess.check_call(['make', '-C', '../dash/depends', 'download', 'SOURCES_PATH=' + os.getcwd() + '/cache/common']) if args.linux: diff --git a/contrib/gitian-descriptors/gitian-linux.yml b/contrib/gitian-descriptors/gitian-linux.yml index 6e8203be8226..65be59db206b 100755 --- a/contrib/gitian-descriptors/gitian-linux.yml +++ b/contrib/gitian-descriptors/gitian-linux.yml @@ -1,5 +1,5 @@ 
--- -name: "dash-linux-0.14" +name: "dash-linux-0.15" enable_cache: true suites: - "bionic" @@ -30,6 +30,7 @@ packages: - "python" - "python3" - "libxkbcommon0" +- "ccache" remotes: - "url": "https://github.com/dashpay/dash.git" "dir": "dash" @@ -38,7 +39,7 @@ script: | WRAP_DIR=$HOME/wrapped HOSTS="i686-pc-linux-gnu x86_64-linux-gnu arm-linux-gnueabihf aarch64-linux-gnu" - CONFIGFLAGS="--enable-glibc-back-compat --enable-reduce-exports --disable-bench --disable-gui-tests" + CONFIGFLAGS="--enable-glibc-back-compat --enable-reduce-exports --disable-bench --disable-gui-tests --enable-crash-hooks" FAKETIME_HOST_PROGS="" FAKETIME_PROGS="date ar ranlib nm" HOST_CFLAGS="-O2 -g" @@ -173,6 +174,9 @@ script: | find dashcore-* | sort | tar --no-recursion --mode='u+rw,go+r-w,a+X' --owner=0 --group=0 -c -T - | gzip -9n > ../$SOURCEDIST popd + # Workaround for tarball not building with the bare tag version (prep) + make -C src obj/build.h + ORIGPATH="$PATH" # Extract the release tarball into a dir for each host and build for i in ${HOSTS}; do @@ -183,6 +187,11 @@ script: | mkdir -p ${INSTALLPATH} tar --strip-components=1 -xf ../$SOURCEDIST + # Workaround for tarball not building with the bare tag version + echo '#!/bin/true' >share/genbuild.sh + mkdir src/obj + cp ../src/obj/build.h src/obj/ + CONFIG_SITE=${BASEPREFIX}/${i}/share/config.site ./configure --prefix=/ --disable-maintainer-mode --disable-dependency-tracking ${CONFIGFLAGS} CFLAGS="${HOST_CFLAGS}" CXXFLAGS="${HOST_CXXFLAGS}" LDFLAGS="${HOST_LDFLAGS}" make ${MAKEOPTS} make ${MAKEOPTS} -C src check-security diff --git a/contrib/gitian-descriptors/gitian-osx.yml b/contrib/gitian-descriptors/gitian-osx.yml index 2995b31939f1..886f5cd19e82 100644 --- a/contrib/gitian-descriptors/gitian-osx.yml +++ b/contrib/gitian-descriptors/gitian-osx.yml @@ -1,5 +1,5 @@ --- -name: "dash-osx-0.14" +name: "dash-osx-0.15" enable_cache: true suites: - "bionic" @@ -29,6 +29,7 @@ packages: - "python3-dev" - "python3-setuptools" - 
"fonts-tuffy" +- "ccache" remotes: - "url": "https://github.com/dashpay/dash.git" "dir": "dash" @@ -37,7 +38,7 @@ files: script: | WRAP_DIR=$HOME/wrapped HOSTS="x86_64-apple-darwin11" - CONFIGFLAGS="--enable-reduce-exports --disable-miner --disable-bench --disable-gui-tests GENISOIMAGE=$WRAP_DIR/genisoimage" + CONFIGFLAGS="--enable-reduce-exports --disable-miner --disable-bench --disable-gui-tests GENISOIMAGE=$WRAP_DIR/genisoimage --enable-crash-hooks" FAKETIME_HOST_PROGS="" FAKETIME_PROGS="ar ranlib date dmg genisoimage" @@ -133,6 +134,9 @@ script: | find dashcore-* | sort | tar --no-recursion --mode='u+rw,go+r-w,a+X' --owner=0 --group=0 -c -T - | gzip -9n > ../$SOURCEDIST popd + # Workaround for tarball not building with the bare tag version (prep) + make -C src obj/build.h + ORIGPATH="$PATH" # Extract the release tarball into a dir for each host and build for i in ${HOSTS}; do @@ -143,8 +147,14 @@ script: | mkdir -p ${INSTALLPATH} tar --strip-components=1 -xf ../$SOURCEDIST + # Workaround for tarball not building with the bare tag version + echo '#!/bin/true' >share/genbuild.sh + mkdir src/obj + cp ../src/obj/build.h src/obj/ + CONFIG_SITE=${BASEPREFIX}/${i}/share/config.site ./configure --prefix=/ --disable-maintainer-mode --disable-dependency-tracking ${CONFIGFLAGS} make ${MAKEOPTS} + make -C src osx_debug make install-strip DESTDIR=${INSTALLPATH} make osx_volname @@ -169,12 +179,15 @@ script: | find . -name "lib*.la" -delete find . -name "lib*.a" -delete rm -rf ${DISTNAME}/lib/pkgconfig - find ${DISTNAME} | sort | tar --no-recursion --mode='u+rw,go+r-w,a+X' --owner=0 --group=0 -c -T - | gzip -9n > ${OUTDIR}/${DISTNAME}-${i}.tar.gz + find .. 
-name *.dSYM -exec cp -ra {} ${DISTNAME}/bin \; + find ${DISTNAME} -not -path '*.dSYM*' | sort | tar --no-recursion --mode='u+rw,go+r-w,a+X' --owner=0 --group=0 -c -T - | gzip -9n > ${OUTDIR}/${DISTNAME}-${i}.tar.gz + find ${DISTNAME} -path '*.dSYM*' | sort | tar --no-recursion --mode='u+rw,go+r-w,a+X' --owner=0 --group=0 -c -T - | gzip -9n > ${OUTDIR}/${DISTNAME}-${i}-debug.tar.gz cd ../../ done mkdir -p $OUTDIR/src mv $SOURCEDIST $OUTDIR/src - mv ${OUTDIR}/${DISTNAME}-x86_64-*.tar.gz ${OUTDIR}/${DISTNAME}-osx64.tar.gz + mv ${OUTDIR}/${DISTNAME}-x86_64-apple-darwin11.tar.gz ${OUTDIR}/${DISTNAME}-osx64.tar.gz + mv ${OUTDIR}/${DISTNAME}-x86_64-apple-darwin11-debug.tar.gz ${OUTDIR}/${DISTNAME}-osx64-debug.tar.gz # Compress ccache (otherwise the assert file will get too huge) if [ "$CCACHE_DIR" != "" ]; then diff --git a/contrib/gitian-descriptors/gitian-win-signer.yml b/contrib/gitian-descriptors/gitian-win-signer.yml index 31225426a7ef..f068c6464a27 100644 --- a/contrib/gitian-descriptors/gitian-win-signer.yml +++ b/contrib/gitian-descriptors/gitian-win-signer.yml @@ -5,31 +5,31 @@ suites: architectures: - "amd64" packages: -# Once osslsigncode supports openssl 1.1, we can change this back to libssl-dev -- "libssl1.0-dev" +- "libssl-dev" - "autoconf" +- "automake" +- "libtool" +- "pkg-config" remotes: - "url": "https://github.com/dashpay/dash-detached-sigs.git" "dir": "signature" files: -- "osslsigncode-1.7.1.tar.gz" -- "osslsigncode-Backports-to-1.7.1.patch" +- "osslsigncode-2.0.tar.gz" - "dashcore-win-unsigned.tar.gz" script: | BUILD_DIR=`pwd` SIGDIR=${BUILD_DIR}/signature/win UNSIGNED_DIR=${BUILD_DIR}/unsigned - echo "f9a8cdb38b9c309326764ebc937cba1523a3a751a7ab05df3ecc99d18ae466c9 osslsigncode-1.7.1.tar.gz" | sha256sum -c - echo "a8c4e9cafba922f89de0df1f2152e7be286aba73f78505169bc351a7938dd911 osslsigncode-Backports-to-1.7.1.patch" | sha256sum -c + echo "5a60e0a4b3e0b4d655317b2f12a810211c50242138322b16e7e01c6fbb89d92f osslsigncode-2.0.tar.gz" | sha256sum -c 
mkdir -p ${UNSIGNED_DIR} tar -C ${UNSIGNED_DIR} -xf dashcore-win-unsigned.tar.gz - tar xf osslsigncode-1.7.1.tar.gz - cd osslsigncode-1.7.1 - patch -p1 < ${BUILD_DIR}/osslsigncode-Backports-to-1.7.1.patch + tar xf osslsigncode-2.0.tar.gz + cd osslsigncode-2.0 + ./autogen.sh ./configure --without-gsf --without-curl --disable-dependency-tracking make find ${UNSIGNED_DIR} -name "*-unsigned.exe" | while read i; do diff --git a/contrib/gitian-descriptors/gitian-win.yml b/contrib/gitian-descriptors/gitian-win.yml index ee1a17149802..bfce44de3f0b 100755 --- a/contrib/gitian-descriptors/gitian-win.yml +++ b/contrib/gitian-descriptors/gitian-win.yml @@ -1,5 +1,5 @@ --- -name: "dash-win-0.14" +name: "dash-win-0.15" enable_cache: true suites: - "bionic" @@ -24,6 +24,7 @@ packages: - "python" - "python3" - "rename" +- "ccache" remotes: - "url": "https://github.com/dashpay/dash.git" "dir": "dash" @@ -31,7 +32,7 @@ files: [] script: | WRAP_DIR=$HOME/wrapped HOSTS="i686-w64-mingw32 x86_64-w64-mingw32" - CONFIGFLAGS="--enable-reduce-exports --disable-miner --disable-bench --disable-gui-tests" + CONFIGFLAGS="--enable-reduce-exports --disable-miner --disable-bench --disable-gui-tests --enable-crash-hooks" FAKETIME_HOST_PROGS="ar ranlib nm windres strip objcopy" FAKETIME_PROGS="date makensis zip" HOST_CFLAGS="-O2 -g" @@ -164,6 +165,9 @@ script: | cp ../$SOURCEDIST $OUTDIR/src popd + # Workaround for tarball not building with the bare tag version (prep) + make -C src obj/build.h + ORIGPATH="$PATH" # Extract the release tarball into a dir for each host and build for i in ${HOSTS}; do @@ -174,6 +178,11 @@ script: | mkdir -p ${INSTALLPATH} tar --strip-components=1 -xf ../$SOURCEDIST + # Workaround for tarball not building with the bare tag version + echo '#!/bin/true' >share/genbuild.sh + mkdir src/obj + cp ../src/obj/build.h src/obj/ + CONFIG_SITE=${BASEPREFIX}/${i}/share/config.site ./configure --prefix=/ --disable-maintainer-mode --disable-dependency-tracking ${CONFIGFLAGS} 
CFLAGS="${HOST_CFLAGS}" CXXFLAGS="${HOST_CXXFLAGS}" make ${MAKEOPTS} make ${MAKEOPTS} -C src check-security diff --git a/contrib/gitian-keys/pasta.pgp b/contrib/gitian-keys/pasta.pgp new file mode 100644 index 000000000000..f53bcec2bfa6 --- /dev/null +++ b/contrib/gitian-keys/pasta.pgp @@ -0,0 +1,52 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQINBF1ULyUBEADFFliU0Hr+PRCQNT9/9ZEhZtLmMMu7tai3VCxhmrHrOpNJJHqX +f1/CeUyBhmCvXpKIpAAbH66l/Uc9GH5UgMZ19gMyGa3q3QJn9A6RR9ud4ALRg60P +fmYTAci+6Luko7bqTzkS+fYOUSy/LY57s5ANTpveE+iTsBd5grXczCxaYYnthKKA +ecmTs8GzQH8XEUgy6fduHcGySzMBj87daZBmPl2zninbTmOYkzev38HXFpr6KinJ +t3vRkhw4AOMSdgaTiNr6gALKoKLyCbhvHuDsVoDBQtIzBXtOeIGyzwBFdHlN2bFG +CcH2vWOzg/Yp1qYleWWV7KYHOVKcxrIycPM0tNueLlvrqVrI59QXMVRJHtBs8eQg +dH9rZNbO0vuv6rCP7e0nt2ACVT/fExdvrwuHHYZ/7IlwOBlFhab3QYpl/WWep2+X +95BSbDOXFrLWwEE9gND+douDG1DExVa3aSNXQJdi4/Mh7bMFiq2FsbXqu+TFSCTg +ae33WKl/AOmHVirgtipnq70PW9hHViaSg3rz0NyYHHczNVaCROHE8YdIM/bAmKY/ +IYVBXJtT+6Mn8N87isK2TR7zMM3FvDJ4Dsqm1UTGwtDvMtB0sNa5IROaUCHdlMFu +rG8n+Bq/oGBFjk9Ay/twH4uOpxyr91aGoGtytw/jhd1+LOb0TGhFGpdc8QARAQAB +tBtQYXN0YSA8cGFzdGFAZGFzaGJvb3N0Lm9yZz6JAlQEEwEIAD4WIQQpWQNi7IeK +gf08ICtSUnvtq+h5hAUCXVQvJQIbAwUJA8PHawULCQgHAgYVCgkICwIEFgIDAQIe +AQIXgAAKCRBSUnvtq+h5hMqeEACQteY571XK50dW1oQzjgPq5tVuchoRQI727pr7 +5145o2rOe0e0xrWzVNnhd9ZDzC4j8dh6wWVQWErHr+3Hhn8sCUW2PNU+o3GvhGR6 +aqPl0Oh5gt4wHZalrcUnZ5u/RtFbDmGilobdASL/mpZge8ymLBj2lKiRR2X/JQe/ +KAzr/7QW1zLh2oEUOOGVas6Ev+ziosAE0b3upGTHJFPQPMFv4za22MbeTKYeqyJ6 +W6LdQDDssC/RBQKZXj3pRweA6RQFGOqw44CbtIHuQu/PV8ZDTpE+v9cWAzoNCMcQ +2fm5tCM8zYytt3perbA3VPwZNXcsITcRpIS5FgoeOntgIwzzKVmY+4GD8uWM/DHt +JPxyry7LpSa8CNyx+oN+Z2qCChn03ycJzO3UFsaCMG/CMAEkLxbg0AcxNyQ8kvIG +lcEDLINaz1xuHAtAxqTQKMYCP1xtd5rhGOe1FkGfVYEJX97+JgMGa8+2nD5+A6wG +0+JaJllqzfXY1VhNoVmfS/hFPQ+t/84jNSGR5Kn956C5MvTK65VumH+NRE59kpt1 +nsIQNKu/v6fZUnbRtCFC05BSwIjoTzFvKXycJkCVjdSYARWkagki4bbFC1WZQuA9 +BOF5TOUAYt6zaEBfAJgjeRT71Mr03eNExXaLm9k/hmvapGpmtJQhLY6NKPm/ctyf +IaEz/bkCDQRdVC8lARAAu64IaLWAvMStxVsC/+NnwBBxYPef4Iq5gB5P1NgkmkD+ 
+tyohWVnzdN/hwVDX3BAXevF8M+y6MouUA9IxJRt2W9PK06ArTdwhFpiam2NAO5OO +UhuJ1F8eAhRQ5VvI8MbVttZKSk3LiCmXGSj5UUXEFKS1B7WztZVwqG6YswoAPwbN +erZuwYbH2gfa9LK+av1cdZ8tnDaVmZWL8z1xSCyfRa/UAtZht/CEoTvAwXJ6CxVU +BngIlqVnK0KvOrNzol2m5x4NgPcdtdDlrTQE+SpqTKjyroRe27D+atiO6pFG/TOT +kx4TWXR07YTeZQJT/fntV409daIxEgShD0md7nJ7rVYy8u+9Z4JLlt2mtnsUKHez +o1Axrlri05cewPVYQLuJND/5e2X9UzSTpY3NubQAtkD1PpM5JeCbslT9PcMnRuUy +dZbhn7ieW0b57uWpOpE11s2eIJ5ixSci4mSJE9kW+IcCic/PPoD1Rh2CvFTBPl/b +sw6Bzw64LMflPjgWkR7NVQb1DETfXo5C2A/QU6Z/o7O4JaAeAoGki/sCmeAi5W+F +1kcjPk/L/TXM6ZccMytVQOECYBOYVUxZ2VbhknKOcSFQcpk8bj2xsD1xX2EYhkXc +CQkvutIgHGz/dt6dtvcaaL85krWD/y8h68TTFjQXK0+g8gcpexfqTMcLnF7pqEEA +EQEAAYkCPAQYAQgAJhYhBClZA2Lsh4qB/TwgK1JSe+2r6HmEBQJdVC8lAhsMBQkD +w8drAAoJEFJSe+2r6HmEDzEP/A8H3JkeSa/03kWvudFloVbGbfvP+XkKvGnAZPGH +z3ne/SV2tcXljNgU15xHvLktI4GluEfJxRPUqvUal1zOR9hqpas0vX8gsf0r0d3o +m2DHCyMY8GscfDF05Y8fqf0nU5/oLDlwwp11IyW8BDLSwwANsTLZ1ysukfYc4hoo +pU71/wdAl85fae7I2QRduImWlMADfUtc9Orfb1tAhPtaCJVZj5vgfUNSZOTUJ73R +GbdL3Z2dc42lO3mRMyDkPdykkq0EgOo6zZLuHZQFhxTzWIWeUT8vWNjpkdTeRHLv +v3cwPRx1k1atrM+pE9YkhCg0EOMTcmN+FMekgnU+ee0cibn5wWOvE05zwRKYROx3 +4va2U6TUU6KkV3fFuq3qqkXaiMFauhI1lSFGgccg7BCNMhbBpOBkfGI3croFGSm2 +pTydJ87/+P9C9ecOZSqCE7Zt5IfDs/xV7DjxBK99Z5+RGxtsIpNlxpsUvlMSsxUN +hOWyiCKr6NIOfOzdLYDkhHcKMqWGmc1zC3HHHuZvX5u6orTyYXWqc8X5p3Kh7Qjf +/ChtN2P6SCOUQquEvpiY5J1TdmQSuoqHzg3ZrN+7EOKdnUH7y1KB7iTvgQ07lcHn +AMbkFDcpQA+tAMd99LVNSXh8urXhJ/AtxaJbNbCSvpkOGB4WHLy/V+JdomFC9Pb3 +oPei +=42dS +-----END PGP PUBLIC KEY BLOCK----- diff --git a/contrib/init/README.md b/contrib/init/README.md index 8bda937e2a3a..e0adc72f3df4 100644 --- a/contrib/init/README.md +++ b/contrib/init/README.md @@ -1,12 +1,12 @@ Sample configuration files for: - +``` SystemD: dashd.service Upstart: dashd.conf OpenRC: dashd.openrc dashd.openrcconf CentOS: dashd.init OS X: org.dash.dashd.plist - +``` have been made available to assist packagers in creating node packages here. See doc/init.md for more information. 
diff --git a/contrib/init/dashd.openrc b/contrib/init/dashd.openrc index e8e8515849d4..53ccdf7156c5 100644 --- a/contrib/init/dashd.openrc +++ b/contrib/init/dashd.openrc @@ -1,4 +1,4 @@ -#!/sbin/runscript +#!/sbin/openrc-run # backward compatibility for existing gentoo layout # diff --git a/contrib/linearize/README.md b/contrib/linearize/README.md index b83de046fe89..dd220b98d377 100644 --- a/contrib/linearize/README.md +++ b/contrib/linearize/README.md @@ -11,7 +11,8 @@ https://github.com/dashpay/dash_hash $ ./linearize-hashes.py linearize.cfg > hashlist.txt Required configuration file settings for linearize-hashes: -* RPC: `rpcuser`, `rpcpassword` +* RPC: `datadir` (Required if `rpcuser` and `rpcpassword` are not specified) +* RPC: `rpcuser`, `rpcpassword` (Required if `datadir` is not specified) Optional config file setting for linearize-hashes: * RPC: `host` (Default: `127.0.0.1`) diff --git a/contrib/linearize/example-linearize.cfg b/contrib/linearize/example-linearize.cfg index f1376bf34de3..3fea810ea68b 100644 --- a/contrib/linearize/example-linearize.cfg +++ b/contrib/linearize/example-linearize.cfg @@ -1,6 +1,7 @@ # bitcoind RPC settings (linearize-hashes) rpcuser=someuser rpcpassword=somepassword +#datadir=~/.bitcoin host=127.0.0.1 port=9998 diff --git a/contrib/linearize/linearize-hashes.py b/contrib/linearize/linearize-hashes.py index ebd9f1e75a0b..1a2f537443b6 100755 --- a/contrib/linearize/linearize-hashes.py +++ b/contrib/linearize/linearize-hashes.py @@ -16,6 +16,8 @@ import re import base64 import sys +import os +import os.path settings = {} @@ -93,6 +95,14 @@ def get_block_hashes(settings, max_blocks_per_call=10000): height += num_blocks +def get_rpc_cookie(): + # Open the cookie file + with open(os.path.join(os.path.expanduser(settings['datadir']), '.cookie'), 'r') as f: + combined = f.readline() + combined_split = combined.split(":") + settings['rpcuser'] = combined_split[0] + settings['rpcpassword'] = combined_split[1] + if __name__ == 
'__main__': if len(sys.argv) != 2: print("Usage: linearize-hashes.py CONFIG-FILE") @@ -122,8 +132,15 @@ def get_block_hashes(settings, max_blocks_per_call=10000): settings['max_height'] = 313000 if 'rev_hash_bytes' not in settings: settings['rev_hash_bytes'] = 'false' + + use_userpass = True + use_datadir = False if 'rpcuser' not in settings or 'rpcpassword' not in settings: - print("Missing username and/or password in cfg file", file=stderr) + use_userpass = False + if 'datadir' in settings and not use_userpass: + use_datadir = True + if not use_userpass and not use_datadir: + print("Missing datadir or username and/or password in cfg file", file=stderr) sys.exit(1) settings['port'] = int(settings['port']) @@ -133,4 +150,8 @@ def get_block_hashes(settings, max_blocks_per_call=10000): # Force hash byte format setting to be lowercase to make comparisons easier. settings['rev_hash_bytes'] = settings['rev_hash_bytes'].lower() + # Get the rpc user and pass from the cookie if the datadir is set + if use_datadir: + get_rpc_cookie() + get_block_hashes(settings) diff --git a/contrib/macdeploy/macdeployqtplus b/contrib/macdeploy/macdeployqtplus index 7a22d99bbc6c..4207cb7319b0 100755 --- a/contrib/macdeploy/macdeployqtplus +++ b/contrib/macdeploy/macdeployqtplus @@ -301,7 +301,6 @@ def copyFramework(framework, path, verbose): if os.path.exists(fromContentsDir): toContentsDir = os.path.join(path, framework.destinationVersionContentsDirectory) shutil.copytree(fromContentsDir, toContentsDir, symlinks=True) - contentslinkfrom = os.path.join(path, framework.destinationContentsDirectory) if verbose >= 3: print("Copied Contents:", fromContentsDir) print(" to:", toContentsDir) @@ -674,9 +673,8 @@ else: if verbose >= 2: print("+ Installing qt.conf +") -f = open(os.path.join(applicationBundle.resourcesPath, "qt.conf"), "wb") -f.write(qt_conf.encode()) -f.close() +with open(os.path.join(applicationBundle.resourcesPath, "qt.conf"), "wb") as f: + f.write(qt_conf.encode()) # 
------------------------------------------------ diff --git a/contrib/seeds/README.md b/contrib/seeds/README.md index 48c2f02dcf04..20d0a5041c9d 100644 --- a/contrib/seeds/README.md +++ b/contrib/seeds/README.md @@ -3,14 +3,16 @@ Utility to generate the seeds.txt list that is compiled into the client (see [src/chainparamsseeds.h](/src/chainparamsseeds.h) and other utilities in [contrib/seeds](/contrib/seeds)). -Be sure to update `MIN_PROTOCOL_VERSION` in `makeseeds.py` to include the current version. +The seeds compiled into the release are created from the current protx list, like this: -The seeds compiled into the release are created from the current masternode list, like this: - - dash-cli masternodelist full > mnlist.json - python3 makeseeds.py < mnlist.json > nodes_main.txt + dash-cli protx list valid 1 1185193 > protx_list.json + python3 makeseeds.py < protx_list.json > nodes_main.txt python3 generate-seeds.py . > ../../src/chainparamsseeds.h +Make sure to use a recent block height in the "protx list" call. After updating, create a PR and +specify which block height you used so that reviewers can re-run the same commands and verify +that the list is as expected. + ## Dependencies Ubuntu: diff --git a/contrib/seeds/makeseeds.py b/contrib/seeds/makeseeds.py index c4b8df45f11d..45ddc1ac0ca8 100755 --- a/contrib/seeds/makeseeds.py +++ b/contrib/seeds/makeseeds.py @@ -3,17 +3,13 @@ # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # -# Generate seeds.txt from masternode list +# Generate seeds.txt from "protx list valid 1" # NSEEDS=512 MAX_SEEDS_PER_ASN=4 -MIN_PROTOCOL_VERSION = 70213 -MAX_LAST_SEEN_DIFF = 60 * 60 * 24 * 1 # 1 day -MAX_LAST_PAID_DIFF = 60 * 60 * 24 * 30 # 1 month - # These are hosts that have been observed to be behaving strangely (e.g. # aggressively connecting to every node). 
SUSPICIOUS_HOSTS = { @@ -31,17 +27,14 @@ PATTERN_IPV6 = re.compile(r"^\[([0-9a-z:]+)\]:(\d+)$") PATTERN_ONION = re.compile(r"^([abcdefghijklmnopqrstuvwxyz234567]{16}\.onion):(\d+)$") -def parseline(line): - # line format: status protocol payee lastseen activeseconds lastpaidtime lastpaidblock IP - sline = line.split() - - m = PATTERN_IPV4.match(sline[7]) +def parseip(ip): + m = PATTERN_IPV4.match(ip) sortkey = None ip = None if m is None: - m = PATTERN_IPV6.match(sline[7]) + m = PATTERN_IPV6.match(ip) if m is None: - m = PATTERN_ONION.match(sline[7]) + m = PATTERN_ONION.match(ip) if m is None: return None else: @@ -70,13 +63,6 @@ def parseline(line): port = int(m.group(6)) return { - "status": sline[0], - "protocol": int(sline[1]), - "payee": sline[2], - "lastseen": int(sline[3]), - "activeseconds": int(sline[4]), - "lastpaidtime": int(sline[5]), - "lastpaidblock": int(sline[6]), "net": net, "ip": ipstr, "port": port, @@ -84,12 +70,26 @@ def parseline(line): "sortkey": sortkey } -def filtermultiport(ips): - '''Filter out hosts with more nodes per IP''' +def filtermulticollateralhash(mns): + '''Filter out MNs sharing the same collateral hash''' hist = collections.defaultdict(list) - for ip in ips: - hist[ip['sortkey']].append(ip) - return [value[0] for (key,value) in list(hist.items()) if len(value)==1] + for mn in mns: + hist[mn['collateralHash']].append(mn) + return [mn for mn in mns if len(hist[mn['collateralHash']]) == 1] + +def filtermulticollateraladdress(mns): + '''Filter out MNs sharing the same collateral address''' + hist = collections.defaultdict(list) + for mn in mns: + hist[mn['collateralAddress']].append(mn) + return [mn for mn in mns if len(hist[mn['collateralAddress']]) == 1] + +def filtermultipayoutaddress(mns): + '''Filter out MNs sharing the same payout address''' + hist = collections.defaultdict(list) + for mn in mns: + hist[mn['state']['payoutAddress']].append(mn) + return [mn for mn in mns if len(hist[mn['state']['payoutAddress']]) == 1] def 
resolveasn(resolver, ip): asn = int([x.to_text() for x in resolver.query('.'.join(reversed(ip.split('.'))) + '.origin.asn.cymru.com', 'TXT').response.answer][0].split('\"')[1].split(' ')[0]) @@ -138,29 +138,23 @@ def filterbyasn(ips, max_per_asn, max_total): return result def main(): + # This expects a json as outputted by "protx list valid 1" if len(sys.argv) > 1: with open(sys.argv[1], 'r') as f: - js = json.load(f) + mns = json.load(f) else: - js = json.load(sys.stdin) - ips = [parseline(line) for collateral, line in js.items()] - - cur_time = int(time.time()) - - # Skip entries with valid address. - ips = [ip for ip in ips if ip is not None] - # Enforce ENABLED state - ips = [ip for ip in ips if ip['status'] == "ENABLED"] - # Enforce minimum protocol version - ips = [ip for ip in ips if ip['protocol'] >= MIN_PROTOCOL_VERSION] - # Require at least 2 week uptime - ips = [ip for ip in ips if cur_time - ip['lastseen'] < MAX_LAST_SEEN_DIFF] - # Require to be paid recently - ips = [ip for ip in ips if cur_time - ip['lastpaidtime'] < MAX_LAST_PAID_DIFF] - # Sort by availability (and use lastpaidtime as tie breaker) - ips.sort(key=lambda x: (x['activeseconds'], x['lastpaidtime'], x['ip']), reverse=True) - # Filter out hosts with multiple ports, these are likely abusive - ips = filtermultiport(ips) + mns = json.load(sys.stdin) + + # Skip PoSe banned MNs + mns = [mn for mn in mns if mn['state']['PoSeBanHeight'] == -1] + # Skip MNs with < 10000 confirmations + mns = [mn for mn in mns if mn['confirmations'] >= 10000] + # Filter out MNs which are definitely from the same person/operator + mns = filtermulticollateralhash(mns) + mns = filtermulticollateraladdress(mns) + mns = filtermultipayoutaddress(mns) + # Extract IPs + ips = [parseip(mn['state']['service']) for mn in mns] # Look up ASNs and limit results, both per ASN and globally. ips = filterbyasn(ips, MAX_SEEDS_PER_ASN, NSEEDS) # Sort the results by IP address (for deterministic output). 
diff --git a/contrib/seeds/nodes_main.txt b/contrib/seeds/nodes_main.txt index 8881999e662c..d56a09728f27 100644 --- a/contrib/seeds/nodes_main.txt +++ b/contrib/seeds/nodes_main.txt @@ -1,275 +1,250 @@ -5.9.99.80:9999 +2.56.213.220:9999 +2.56.213.221:9999 +3.89.82.153:9999 +5.40.8.67:9999 +5.101.78.47:9999 5.132.191.109:9999 -5.132.191.211:9999 +5.132.191.112:9999 5.132.191.213:9999 -5.132.191.216:9999 +5.132.191.214:9999 5.134.115.37:9999 -5.189.171.64:9999 -5.200.23.33:9999 -5.200.53.96:9999 -18.221.96.149:9999 +5.187.2.26:9999 +5.189.166.213:9999 +13.67.76.10:9999 +13.94.173.222:9999 +18.211.5.180:9999 +23.106.123.152:9999 +23.152.0.95:9999 +23.152.0.197:9999 23.175.0.111:9999 -23.175.0.112:9999 23.175.0.113:9999 23.175.0.114:9999 -23.182.128.21:9999 -23.182.128.23:9999 -23.182.128.24:9999 -23.182.128.25:9999 -23.227.160.76:9999 -23.227.163.23:9999 +23.175.0.116:9999 23.227.163.52:9999 -27.102.114.61:9999 +23.253.213.225:9999 +31.10.97.36:9999 31.31.73.82:9999 -34.196.96.99:9999 -34.199.130.69:9999 +31.178.4.50:9999 +31.192.110.71:9999 +34.83.128.64:9999 +34.83.233.120:9999 +34.209.237.242:9999 34.225.101.97:9999 -35.167.192.165:9999 -35.196.242.227:9999 -35.197.187.1:9999 +34.255.85.157:9999 +35.199.145.175:9999 +35.247.94.64:9999 +37.18.227.56:9999 37.97.227.21:9999 37.120.161.145:9999 37.157.199.210:9999 -37.221.192.220:9999 -37.221.194.42:9999 -37.221.194.43:9999 -40.113.6.44:9999 +38.127.169.147:9999 43.229.77.46:9999 -45.32.206.185:9999 -45.76.64.6:9999 -46.28.109.195:9999 -46.37.158.177:9999 -46.163.166.57:9999 -47.52.229.72:9999 +45.32.243.128:9999 +45.33.57.231:9999 +45.63.107.90:9999 +45.76.39.241:9999 +46.21.155.50:9999 +46.101.135.60:9999 +46.163.166.50:9999 47.75.49.228:9999 +47.88.51.15:9999 47.90.14.156:9999 -47.98.66.94:9999 -47.104.96.207:9999 -47.244.102.81:9999 -51.15.42.143:9999 -51.38.105.36:9999 -51.38.128.251:9999 -51.255.32.213:9999 -52.79.112.22:9999 -52.187.135.187:9999 -52.235.36.187:9999 -52.242.37.8:9999 -54.70.231.195:9999 
-54.172.145.162:9999 -62.138.3.214:9999 -62.138.8.7:9999 -62.138.8.103:9999 -62.146.13.50:9999 -63.142.253.85:9999 -64.34.218.48:9999 +47.91.204.190:9999 +47.110.197.5:9999 +51.15.172.192:9999 +51.91.57.8:9999 +51.144.71.213:9999 +52.33.34.63:9999 +52.72.237.43:9999 +54.169.246.41:9999 +62.75.203.240:9999 +62.75.203.242:9999 +62.75.207.166:9999 64.140.157.114:9999 64.140.159.226:9999 -66.23.242.86:9999 -66.23.242.89:9999 -66.23.242.90:9999 -66.23.242.91:9999 -66.172.11.85:9999 -66.172.27.70:9999 -69.51.13.90:9999 -69.51.13.101:9999 -69.51.20.70:9999 -69.51.20.71:9999 -74.207.228.180:9999 -75.127.4.248:9999 -76.74.219.148:9999 +66.45.245.213:9999 +66.172.12.86:9999 +66.244.243.68:9999 +66.244.243.69:9999 +66.244.243.70:9999 +69.10.35.39:9999 +69.10.54.244:9999 +74.118.137.70:9999 +77.37.240.140:9999 77.81.226.146:9999 +77.81.234.111:9999 78.41.207.70:9999 78.83.19.0:9999 -78.133.169.70:9999 -80.120.27.150:9999 -81.169.205.220:9999 -81.169.229.84:9999 +78.94.32.197:9999 +78.108.216.95:9999 +78.108.216.248:9999 +80.209.238.217:9999 +80.211.207.138:9999 +80.211.221.139:9999 +80.211.241.233:9999 +80.211.245.51:9999 +80.211.246.225:9999 +80.240.132.231:9999 81.171.2.245:9999 82.118.227.52:9999 -82.165.29.103:9999 -82.211.21.128:9999 -82.211.21.131:9999 -82.211.21.136:9999 -82.211.21.139:9999 -84.234.96.82:9999 -84.234.96.86:9999 -84.234.96.87:9999 -85.25.199.215:9999 -85.184.254.29:9999 -85.184.254.181:9999 -85.217.170.206:9999 -85.217.171.243:9999 -85.255.1.65:9999 -85.255.4.212:9999 -87.106.63.52:9999 +82.165.76.29:9999 +82.211.21.23:9999 +82.211.21.179:9999 +82.211.21.240:9999 +82.211.25.34:9999 +83.96.169.215:9999 +85.25.97.244:9999 +85.25.138.47:9999 +85.206.165.89:9999 87.117.253.55:9999 -89.36.220.241:9999 -89.40.0.14:9999 +89.36.220.128:9999 +89.39.106.15:9999 89.40.114.69:9999 -89.238.181.74:9999 +89.40.127.116:9999 +89.40.127.187:9999 +89.45.67.54:9999 +91.132.145.51:9999 +91.219.237.111:9999 91.219.239.82:9999 -91.219.239.83:9999 -92.63.57.28:9999 
-92.63.57.120:9999 -93.90.205.80:9999 -93.104.213.208:9999 +92.60.37.50:9999 93.158.216.153:9999 -94.156.174.166:9999 -94.176.239.62:9999 -94.177.170.223:9999 -94.177.224.225:9999 -94.177.225.115:9999 +93.186.255.44:9999 +94.16.117.161:9999 +94.176.237.88:9999 94.177.225.210:9999 -94.177.232.33:9999 -94.177.250.61:9999 +94.177.230.42:9999 +94.177.253.17:9999 95.43.139.162:9999 -95.181.226.47:9999 -95.183.50.97:9999 +95.183.51.15:9999 95.183.51.98:9999 -95.183.51.141:9999 -95.183.53.134:9999 -95.216.11.57:9999 -95.216.147.47:9999 -96.126.96.164:9999 -101.37.96.0:9999 -101.37.96.96:9999 -104.196.169.81:9999 -104.223.6.164:9999 -107.6.140.35:9999 -107.6.140.55:9999 -107.6.175.150:9999 -107.155.118.137:9999 +95.183.53.39:9999 +95.183.53.128:9999 +95.211.196.34:9999 +95.215.45.225:9999 +95.217.255.194:9999 +103.218.240.67:9999 +104.149.36.119:9999 +104.160.42.222:9999 +104.206.240.12:9999 +104.216.5.104:9999 107.191.101.212:9999 -108.61.224.22:9999 -108.61.224.33:9999 -110.10.25.76:9999 +109.123.102.122:9999 +109.237.24.123:9999 115.159.86.118:9999 -122.106.55.214:9999 -122.114.173.218:9999 +116.62.140.10:9999 +116.203.81.220:9999 +118.31.38.232:9999 +119.23.29.18:9999 +120.92.112.179:9999 123.193.64.166:9999 130.185.251.69:9999 130.185.251.113:9999 133.130.102.22:9999 -139.99.193.23:9999 -148.66.50.42:9999 +134.249.148.62:9999 +137.74.194.15:9999 +144.76.31.203:9999 +144.76.238.2:9999 +144.202.16.80:9999 +145.14.158.235:9999 +145.239.163.130:9999 +146.185.175.206:9999 151.236.10.109:9999 154.127.57.240:9999 -154.127.57.254:9999 -154.127.59.27:9999 -154.127.60.8:9999 -159.69.155.94:9999 -159.89.32.12:9999 -159.100.19.166:9999 -159.100.19.167:9999 -159.203.20.131:9999 -163.44.167.144:9999 +162.250.124.61:9999 163.44.167.237:9999 -163.44.168.191:9999 +163.44.169.29:9999 +163.44.171.51:9999 +163.172.96.100:9999 +163.172.128.187:9999 +167.71.51.205:9999 167.88.15.97:9999 -168.235.96.203:9999 -168.235.96.205:9999 -168.235.99.47:9999 +168.63.153.192:9999 +168.235.74.53:9999 
+168.235.74.65:9999 +168.235.99.41:9999 170.75.162.60:9999 -170.75.162.219:9999 170.75.163.108:9999 -172.81.177.42:9999 -172.86.121.148:9999 -172.104.64.13:9999 +172.81.132.245:9999 +172.81.180.210:9999 +172.86.120.107:9999 +172.105.222.151:9999 172.110.5.98:9999 172.110.6.169:9999 +173.212.218.130:9999 +173.212.221.13:9999 173.212.241.50:9999 -176.122.20.207:9999 -178.62.160.29:9999 -178.209.50.30:9999 +176.10.97.105:9999 +176.223.137.49:9999 +178.33.189.154:9999 +178.157.91.176:9999 +178.157.91.179:9999 178.238.42.7:9999 +180.68.191.77:9999 +185.2.83.231:9999 185.22.174.37:9999 +185.25.51.117:9999 185.26.126.250:9999 -185.28.101.91:9999 -185.28.101.133:9999 -185.35.64.217:9999 -185.35.67.117:9999 +185.34.40.36:9999 +185.35.67.185:9999 185.43.210.125:9999 -185.58.194.113:9999 185.58.224.234:9999 -185.64.104.221:9999 185.64.104.222:9999 185.64.104.223:9999 -185.106.122.46:9999 -185.119.85.30:9999 -185.133.37.105:9999 -185.139.237.154:9999 -185.141.26.106:9999 +185.136.170.231:9999 +185.141.62.97:9999 185.142.212.144:9999 -185.156.178.125:9999 -185.156.178.216:9999 -185.156.178.218:9999 -185.156.178.222:9999 -185.165.168.21:9999 185.165.168.22:9999 -185.165.168.23:9999 185.165.168.25:9999 +185.165.168.224:9999 +185.165.168.243:9999 185.168.8.144:9999 +185.177.59.140:9999 +185.180.221.100:9999 +185.183.96.80:9999 185.183.97.131:9999 -185.185.40.13:9999 -185.203.117.229:9999 -185.212.44.250:9999 +185.183.97.136:9999 +185.212.131.112:9999 +185.212.131.209:9999 185.213.37.1:9999 185.213.37.6:9999 -185.217.1.99:9999 -185.217.1.100:9999 -185.243.112.48:9999 -185.243.112.80:9999 -185.243.112.187:9999 -185.243.112.221:9999 -185.253.189.18:9999 -185.253.189.66:9999 -185.253.189.70:9999 -185.253.189.80:9999 -188.166.69.88:9999 -188.227.74.171:9999 -188.227.74.193:9999 +185.213.37.11:9999 +185.228.83.30:9999 +185.243.113.149:9999 +185.253.189.16:9999 +185.253.189.22:9999 +185.253.189.27:9999 +185.253.189.114:9999 +188.227.16.83:9999 +188.227.18.74:9999 +188.227.75.67:9999 
+190.2.149.236:9999 190.4.184.180:9999 -190.10.8.250:9999 +191.101.20.93:9999 192.81.130.228:9999 -192.161.182.104:9999 -192.161.182.108:9999 -192.161.182.109:9999 -192.227.143.41:9999 -192.227.228.196:9999 192.250.230.17:9999 -193.29.187.85:9999 -193.234.224.72:9999 -193.234.224.100:9999 -193.234.224.103:9999 -193.234.224.145:9999 -194.99.20.100:9999 -195.154.105.109:9999 -195.154.105.125:9999 -198.20.101.116:9999 -198.23.128.75:9999 -198.53.74.14:9999 -198.61.190.142:9999 -199.201.110.195:9999 +193.37.215.58:9999 +193.187.182.106:9999 +193.187.182.109:9999 +193.187.183.183:9999 +193.187.183.185:9999 +194.36.189.6:9999 +195.154.222.45:9999 +195.181.210.17:9999 +195.181.211.64:9999 +196.251.250.217:9999 +199.192.17.103:9999 200.122.128.172:9999 -202.71.98.123:9999 -202.71.98.124:9999 -202.71.98.125:9999 -202.71.98.126:9999 +200.122.181.44:9999 204.16.243.106:9999 204.16.245.98:9999 -209.177.88.87:9999 -209.177.91.203:9999 -212.24.96.16:9999 -212.24.96.26:9999 -212.47.235.253:9999 -212.227.201.47:9999 -212.237.37.240:9999 +212.24.96.159:9999 +212.116.121.11:9999 +212.116.121.73:9999 +212.116.121.105:9999 +212.224.118.7:9999 +212.227.201.203:9999 +212.237.12.215:9999 213.64.197.95:9999 -213.136.80.93:9999 -213.136.80.159:9999 -216.107.217.62:9999 -216.189.145.161:9999 216.189.147.178:9999 -217.61.2.53:9999 -221.156.133.110:9999 -222.231.59.30:9999 +216.189.151.94:9999 +217.61.97.99:9999 diff --git a/contrib/seeds/nodes_test.txt b/contrib/seeds/nodes_test.txt index 06e4b94633aa..aff7a7f9dc50 100644 --- a/contrib/seeds/nodes_test.txt +++ b/contrib/seeds/nodes_test.txt @@ -1,35 +1,21 @@ -18.202.52.170:20008 -18.202.52.170:20004 -18.202.52.170:20000 -18.202.52.170:20012 -18.202.52.170:20016 -18.202.52.170:20020 -34.255.15.20:20007 -34.255.15.20:20003 -34.255.15.20:20015 -34.255.15.20:20011 -34.255.15.20:20019 -34.255.15.20:20023 -52.50.208.53:20009 -52.50.208.53:20005 -52.50.208.53:20001 -52.50.208.53:20013 -52.50.208.53:20017 -52.50.208.53:20021 -63.33.238.85:20010 
-63.33.238.85:20006 -63.33.238.85:20002 -63.33.238.85:20014 -63.33.238.85:20018 -63.33.238.85:20022 -145.239.235.16:19999 -145.239.235.17:19999 -145.239.235.18:19999 -145.239.235.19:19999 -145.239.235.20:19999 -145.239.235.21:19999 -145.239.235.22:19999 -145.239.235.23:19999 -145.239.235.24:19999 -145.239.235.25:19999 +3.213.227.101:19999 +35.185.202.219:19999 +45.32.237.76:19999 +51.68.175.79:19999 +52.35.83.81:19999 +52.204.225.60:19999 +95.183.53.17:10011 +95.183.53.128:10001 +106.12.73.74:19999 +108.61.189.144:19999 +108.61.192.47:19999 +109.235.71.56:19999 +134.209.90.112:19999 +134.209.231.79:19999 +140.82.59.51:10003 +144.76.66.20:19999 +144.217.86.47:19999 +165.22.213.149:19999 178.62.203.249:19999 +185.213.37.1:19999 +185.213.37.2:19999 diff --git a/contrib/testgen/gen_base58_test_vectors.py b/contrib/testgen/gen_base58_test_vectors.py index ccdcfa5269c1..0146fffc6abe 100755 --- a/contrib/testgen/gen_base58_test_vectors.py +++ b/contrib/testgen/gen_base58_test_vectors.py @@ -13,7 +13,7 @@ # Released under MIT License import os from itertools import islice -from base58 import b58encode, b58decode, b58encode_chk, b58decode_chk, b58chars +from base58 import b58encode_chk, b58decode_chk, b58chars import random from binascii import b2a_hex diff --git a/contrib/verify-commits/gpg.sh b/contrib/verify-commits/gpg.sh index b01e2a6d39e5..abd8f5fd9fe3 100755 --- a/contrib/verify-commits/gpg.sh +++ b/contrib/verify-commits/gpg.sh @@ -46,6 +46,11 @@ for LINE in $(echo "$GPG_RES"); do REVSIG=true GOODREVSIG="[GNUPG:] GOODSIG ${LINE#* * *}" ;; + "[GNUPG:] EXPKEYSIG "*) + [ "$BITCOIN_VERIFY_COMMITS_ALLOW_REVSIG" != 1 ] && exit 1 + REVSIG=true + GOODREVSIG="[GNUPG:] GOODSIG ${LINE#* * *}" + ;; esac done if ! 
$VALID; then diff --git a/contrib/verifybinaries/README.md b/contrib/verifybinaries/README.md index ed3e14fb6c11..3ffe0a2f2895 100644 --- a/contrib/verifybinaries/README.md +++ b/contrib/verifybinaries/README.md @@ -26,6 +26,14 @@ The script returns 0 if everything passes the checks. It returns 1 if either the ./verify.sh bitcoin-core-0.13.0-rc3 ``` +If you only want to download the binaries of certain platform, add the corresponding suffix, e.g.: + +```sh +./verify.sh bitcoin-core-0.11.2-osx +./verify.sh 0.12.0-linux +./verify.sh bitcoin-core-0.13.0-rc3-win64 +``` + If you do not want to keep the downloaded binaries, specify anything as the second parameter. ```sh diff --git a/contrib/verifybinaries/verify.sh b/contrib/verifybinaries/verify.sh index e20770c96a96..409f517c9fb2 100755 --- a/contrib/verifybinaries/verify.sh +++ b/contrib/verifybinaries/verify.sh @@ -3,7 +3,8 @@ # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. -### This script attempts to download the signature file SHA256SUMS.asc from bitcoin.org +### This script attempts to download the signature file SHA256SUMS.asc from +### bitcoincore.org and bitcoin.org and compares them. ### It first checks if the signature passes, and then downloads the files specified in ### the file, and checks if the hashes of these files match those that are specified ### in the signature file. 
@@ -22,7 +23,9 @@ TMPFILE="hashes.tmp" SIGNATUREFILENAME="SHA256SUMS.asc" RCSUBDIR="test" -BASEDIR="https://bitcoin.org/bin/" +HOST1="https://bitcoincore.org" +HOST2="https://bitcoin.org" +BASEDIR="/bin/" VERSIONPREFIX="bitcoin-core-" RCVERSIONSTRING="rc" @@ -42,13 +45,36 @@ if [ -n "$1" ]; then VERSION="$VERSIONPREFIX$1" fi - #now let's see if the version string contains "rc", and strip it off if it does - # and simultaneously add RCSUBDIR to BASEDIR, where we will look for SIGNATUREFILENAME - if [[ $VERSION == *"$RCVERSIONSTRING"* ]]; then - BASEDIR="$BASEDIR${VERSION/%-$RCVERSIONSTRING*}/" - BASEDIR="$BASEDIR$RCSUBDIR.$RCVERSIONSTRING${VERSION: -1}/" + STRIPPEDLAST="${VERSION%-*}" + + #now let's see if the version string contains "rc" or a platform name (e.g. "osx") + if [[ "$STRIPPEDLAST-" == "$VERSIONPREFIX" ]]; then + BASEDIR="$BASEDIR$VERSION/" else + # let's examine the last part to see if it's rc and/or platform name + STRIPPEDNEXTTOLAST="${STRIPPEDLAST%-*}" + if [[ "$STRIPPEDNEXTTOLAST-" == "$VERSIONPREFIX" ]]; then + + LASTSUFFIX="${VERSION##*-}" + VERSION="$STRIPPEDLAST" + + if [[ $LASTSUFFIX == *"$RCVERSIONSTRING"* ]]; then + RCVERSION="$LASTSUFFIX" + else + PLATFORM="$LASTSUFFIX" + fi + + else + RCVERSION="${STRIPPEDLAST##*-}" + PLATFORM="${VERSION##*-}" + + VERSION="$STRIPPEDNEXTTOLAST" + fi + BASEDIR="$BASEDIR$VERSION/" + if [[ $RCVERSION == *"$RCVERSIONSTRING"* ]]; then + BASEDIR="$BASEDIR$RCSUBDIR.$RCVERSION/" + fi fi SIGNATUREFILE="$BASEDIR$SIGNATUREFILENAME" @@ -58,7 +84,7 @@ else fi #first we fetch the file containing the signature -WGETOUT=$(wget -N "$BASEDIR$SIGNATUREFILENAME" 2>&1) +WGETOUT=$(wget -N "$HOST1$BASEDIR$SIGNATUREFILENAME" 2>&1) #and then see if wget completed successfully if [ $? -ne 0 ]; then @@ -69,6 +95,22 @@ if [ $? -ne 0 ]; then exit 2 fi +WGETOUT=$(wget -N -O "$SIGNATUREFILENAME.2" "$HOST2$BASEDIR$SIGNATUREFILENAME" 2>&1) +if [ $? -ne 0 ]; then + echo "bitcoin.org failed to provide signature file, but bitcoincore.org did?" 
+ echo "wget output:" + echo "$WGETOUT"|sed 's/^/\t/g' + clean_up $SIGNATUREFILENAME + exit 3 +fi + +SIGFILEDIFFS="$(diff $SIGNATUREFILENAME $SIGNATUREFILENAME.2)" +if [ "$SIGFILEDIFFS" != "" ]; then + echo "bitcoin.org and bitcoincore.org signature files were not equal?" + clean_up $SIGNATUREFILENAME $SIGNATUREFILENAME.2 + exit 4 +fi + #then we check it GPGOUT=$(gpg --yes --decrypt --output "$TMPFILE" "$SIGNATUREFILENAME" 2>&1) @@ -88,17 +130,27 @@ if [ $RET -ne 0 ]; then echo "gpg output:" echo "$GPGOUT"|sed 's/^/\t/g' - clean_up $SIGNATUREFILENAME $TMPFILE + clean_up $SIGNATUREFILENAME $SIGNATUREFILENAME.2 $TMPFILE exit "$RET" fi +if [ -n "$PLATFORM" ]; then + grep $PLATFORM $TMPFILE > "$TMPFILE-plat" + TMPFILESIZE=$(stat -c%s "$TMPFILE-plat") + if [ $TMPFILESIZE -eq 0 ]; then + echo "error: no files matched the platform specified" && exit 3 + fi + mv "$TMPFILE-plat" $TMPFILE +fi + #here we extract the filenames from the signature file FILES=$(awk '{print $2}' "$TMPFILE") #and download these one by one for file in $FILES do - wget --quiet -N "$BASEDIR$file" + echo "Downloading $file" + wget --quiet -N "$HOST1$BASEDIR$file" done #check hashes @@ -116,7 +168,7 @@ fi if [ -n "$2" ]; then echo "Clean up the binaries" - clean_up $FILES $SIGNATUREFILENAME $TMPFILE + clean_up $FILES $SIGNATUREFILENAME $SIGNATUREFILENAME.2 $TMPFILE else echo "Keep the binaries in $WORKINGDIR" clean_up $TMPFILE diff --git a/contrib/zmq/zmq_sub.py b/contrib/zmq/zmq_sub.py index a45b00d76dbd..09cbe5ce2290 100644 --- a/contrib/zmq/zmq_sub.py +++ b/contrib/zmq/zmq_sub.py @@ -7,7 +7,7 @@ ZMQ example using python3's asyncio Dash should be started with the command line arguments: - dashd-testnet -daemon \ + dashd -testnet -daemon \ -zmqpubrawtx=tcp://127.0.0.1:28332 \ -zmqpubrawblock=tcp://127.0.0.1:28332 \ -zmqpubhashtx=tcp://127.0.0.1:28332 \ @@ -51,8 +51,10 @@ def __init__(self): self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "hashinstantsenddoublespend") 
self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawblock") self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawchainlock") + self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawchainlocksig") self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawtx") self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawtxlock") + self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawtxlocksig") self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawgovernancevote") self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawgovernanceobject") self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawinstantsenddoublespend") @@ -95,12 +97,18 @@ async def handle(self) : elif topic == b"rawchainlock": print('- RAW CHAINLOCK ('+sequence+') -') print(binascii.hexlify(body[:80]).decode("utf-8")) + elif topic == b"rawchainlocksig": + print('- RAW CHAINLOCK SIG ('+sequence+') -') + print(binascii.hexlify(body[:80]).decode("utf-8")) elif topic == b"rawtx": print('- RAW TX ('+sequence+') -') print(binascii.hexlify(body).decode("utf-8")) elif topic == b"rawtxlock": print('- RAW TX LOCK ('+sequence+') -') print(binascii.hexlify(body).decode("utf-8")) + elif topic == b"rawtxlocksig": + print('- RAW TX LOCK SIG ('+sequence+') -') + print(binascii.hexlify(body).decode("utf-8")) elif topic == b"rawgovernancevote": print('- RAW GOVERNANCE VOTE ('+sequence+') -') print(binascii.hexlify(body).decode("utf-8")) diff --git a/contrib/zmq/zmq_sub3.4.py b/contrib/zmq/zmq_sub3.4.py index 2987be1e7c0e..19aa5996d81f 100644 --- a/contrib/zmq/zmq_sub3.4.py +++ b/contrib/zmq/zmq_sub3.4.py @@ -55,8 +55,10 @@ def __init__(self): self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "hashinstantsenddoublespend") self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawblock") self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawchainlock") + self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawchainlocksig") self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawtx") 
self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawtxlock") + self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawtxlocksig") self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawgovernancevote") self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawgovernanceobject") self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawinstantsenddoublespend") @@ -67,7 +69,7 @@ def handle(self) : msg = yield from self.zmqSubSocket.recv_multipart() topic = msg[0] body = msg[1] - sequence = "Unknown"; + sequence = "Unknown" if len(msg[-1]) == 4: msgSequence = struct.unpack('/-/-/.../-.` @@ -55,27 +57,27 @@ https://github.com/bitcoin/bips/blob/master/bip-0064.mediawiki Example: ``` -$ curl localhost:18332/rest/getutxos/checkmempool/b2cdfd7b89def827ff8af7cd9bff7627ff72e5e8b0f71210f92ea7a4000c5d75-0.json 2>/dev/null | json_pp +$ curl localhost:19998/rest/getutxos/checkmempool/b2cdfd7b89def827ff8af7cd9bff7627ff72e5e8b0f71210f92ea7a4000c5d75-0.json 2>/dev/null | json_pp { - "chaintipHash" : "00000000fb01a7f3745a717f8caebee056c484e6e0bfe4a9591c235bb70506fb", "chainHeight" : 325347, + "chaintipHash" : "00000000fb01a7f3745a717f8caebee056c484e6e0bfe4a9591c235bb70506fb", + "bitmap": "1", "utxos" : [ { + "txvers" : 1 + "height" : 2147483647, + "value" : 8.8687, "scriptPubKey" : { - "addresses" : [ - "mi7as51dvLJsizWnTMurtRmrP8hG2m1XvD" - ], - "type" : "pubkeyhash", + "asm" : "OP_DUP OP_HASH160 1c7cebb529b86a04c683dfa87be49de35bcf589e OP_EQUALVERIFY OP_CHECKSIG", "hex" : "76a9141c7cebb529b86a04c683dfa87be49de35bcf589e88ac", "reqSigs" : 1, - "asm" : "OP_DUP OP_HASH160 1c7cebb529b86a04c683dfa87be49de35bcf589e OP_EQUALVERIFY OP_CHECKSIG" - }, - "value" : 8.8687, - "height" : 2147483647, - "txvers" : 1 + "type" : "pubkeyhash", + "addresses" : [ + "mi7as51dvLJsizWnTMurtRmrP8hG2m1XvD" + ] + } } - ], - "bitmap" : "1" + ] } ``` @@ -87,6 +89,8 @@ Only supports JSON as output format. 
* size : (numeric) the number of transactions in the TX mempool * bytes : (numeric) size of the TX mempool in bytes * usage : (numeric) total TX mempool memory usage +* maxmempool : (numeric) maximum memory usage for the mempool in bytes +* mempoolminfee : (numeric) minimum feerate (DASH per KB) for tx to be accepted `GET /rest/mempool/contents.json` @@ -95,4 +99,4 @@ Only supports JSON as output format. Risks ------------- -Running a web browser on the same node with a REST enabled bitcoind can be a risk. Accessing prepared XSS websites could read out tx/block data of your node by placing links like `