Mirror of https://github.com/dashpay/dash.git (synced 2024-12-25 12:02:48 +01:00)

Merge pull request #5334 from PastaPastaPasta/develop-trivial-2023-04-17

backport: trivial backports 2023 04 17

Commit f9ca078a82
@@ -98,10 +98,6 @@ host_prefix=$($(host_arch)_$(host_os)_prefix)
 build_prefix=$(host_prefix)/native
 build_host=$(build)
 
-AT_$(V):=
-AT_:=@
-AT:=$(AT_$(V))
-
 all: install
 
 include hosts/$(host_os).mk
@@ -168,12 +164,12 @@ include funcs.mk
 final_build_id_long+=$(shell $(build_SHA256SUM) config.site.in)
 final_build_id+=$(shell echo -n "$(final_build_id_long)" | $(build_SHA256SUM) | cut -c-$(HASH_LENGTH))
 $(host_prefix)/.stamp_$(final_build_id): $(native_packages) $(packages)
-$(AT)rm -rf $(@D)
-$(AT)mkdir -p $(@D)
-$(AT)echo copying packages: $^
-$(AT)echo to: $(@D)
-$(AT)cd $(@D); $(foreach package,$^, tar xf $($(package)_cached); )
-$(AT)touch $@
+rm -rf $(@D)
+mkdir -p $(@D)
+echo copying packages: $^
+echo to: $(@D)
+cd $(@D); $(foreach package,$^, tar xf $($(package)_cached); )
+touch $@
 
 # $PATH is not preserved between ./configure and make by convention. Its
 # modification and overriding at ./configure time is (as I understand it)
@@ -200,8 +196,8 @@ $(host_prefix)/.stamp_$(final_build_id): $(native_packages) $(packages)
 # we expect them to be available in $PATH at all times, more specificity does
 # not hurt.
 $(host_prefix)/share/config.site : config.site.in $(host_prefix)/.stamp_$(final_build_id)
-$(AT)@mkdir -p $(@D)
-$(AT)sed -e 's|@HOST@|$(host)|' \
+@mkdir -p $(@D)
+sed -e 's|@HOST@|$(host)|' \
 -e 's|@CC@|$(host_CC)|' \
 -e 's|@CXX@|$(host_CXX)|' \
 -e 's|@AR@|$(host_AR)|' \
@@ -222,7 +218,7 @@ $(host_prefix)/share/config.site : config.site.in $(host_prefix)/.stamp_$(final_
 -e 's|@no_natpmp@|$(NO_NATPMP)|' \
 -e 's|@debug@|$(DEBUG)|' \
 $< > $@
-$(AT)touch $@
+touch $@
 
 
 define check_or_remove_cached
@@ -271,3 +267,4 @@ $(foreach package,$(all_packages),$(eval $(call ext_add_stages,$(package))))
 
 .PHONY: install cached clean clean-all download-one download-osx download-linux download-win download check-packages check-sources
 .PHONY: FORCE
+$(V).SILENT:
@@ -168,53 +168,53 @@ endef
 
 define int_add_cmds
 $($(1)_fetched):
-$(AT)mkdir -p $$(@D) $(SOURCES_PATH)
-$(AT)rm -f $$@
-$(AT)touch $$@
-$(AT)cd $$(@D); $(call $(1)_fetch_cmds,$(1))
-$(AT)cd $($(1)_source_dir); $(foreach source,$($(1)_all_sources),$(build_SHA256SUM) $(source) >> $$(@);)
-$(AT)touch $$@
+mkdir -p $$(@D) $(SOURCES_PATH)
+rm -f $$@
+touch $$@
+cd $$(@D); $(call $(1)_fetch_cmds,$(1))
+cd $($(1)_source_dir); $(foreach source,$($(1)_all_sources),$(build_SHA256SUM) $(source) >> $$(@);)
+touch $$@
 $($(1)_extracted): | $($(1)_fetched)
-$(AT)echo Extracting $(1)...
-$(AT)mkdir -p $$(@D)
-$(AT)cd $$(@D); $(call $(1)_extract_cmds,$(1))
-$(AT)touch $$@
+echo Extracting $(1)...
+mkdir -p $$(@D)
+cd $$(@D); $(call $(1)_extract_cmds,$(1))
+touch $$@
 $($(1)_preprocessed): | $($(1)_extracted)
-$(AT)echo Preprocessing $(1)...
-$(AT)mkdir -p $$(@D) $($(1)_patch_dir)
-$(AT)$(foreach patch,$($(1)_patches),cd $(PATCHES_PATH)/$(1); cp $(patch) $($(1)_patch_dir) ;)
-$(AT)cd $$(@D); $(call $(1)_preprocess_cmds, $(1))
-$(AT)touch $$@
+echo Preprocessing $(1)...
+mkdir -p $$(@D) $($(1)_patch_dir)
+$(foreach patch,$($(1)_patches),cd $(PATCHES_PATH)/$(1); cp $(patch) $($(1)_patch_dir) ;)
+cd $$(@D); $(call $(1)_preprocess_cmds, $(1))
+touch $$@
 $($(1)_configured): | $($(1)_dependencies) $($(1)_preprocessed)
-$(AT)echo Configuring $(1)...
-$(AT)rm -rf $(host_prefix); mkdir -p $(host_prefix)/lib; cd $(host_prefix); $(foreach package,$($(1)_all_dependencies), tar --no-same-owner -xf $($(package)_cached); )
-$(AT)mkdir -p $$(@D)
-$(AT)+cd $$(@D); $($(1)_config_env) $(call $(1)_config_cmds, $(1))
-$(AT)touch $$@
+echo Configuring $(1)...
+rm -rf $(host_prefix); mkdir -p $(host_prefix)/lib; cd $(host_prefix); $(foreach package,$($(1)_all_dependencies), tar --no-same-owner -xf $($(package)_cached); )
+mkdir -p $$(@D)
++cd $$(@D); $($(1)_config_env) $(call $(1)_config_cmds, $(1))
+touch $$@
 $($(1)_built): | $($(1)_configured)
-$(AT)echo Building $(1)...
-$(AT)mkdir -p $$(@D)
-$(AT)+cd $$(@D); $($(1)_build_env) $(call $(1)_build_cmds, $(1))
-$(AT)touch $$@
+echo Building $(1)...
+mkdir -p $$(@D)
++cd $$(@D); $($(1)_build_env) $(call $(1)_build_cmds, $(1))
+touch $$@
 $($(1)_staged): | $($(1)_built)
-$(AT)echo Staging $(1)...
-$(AT)mkdir -p $($(1)_staging_dir)/$(host_prefix)
-$(AT)cd $($(1)_build_dir); $($(1)_stage_env) $(call $(1)_stage_cmds, $(1))
-$(AT)rm -rf $($(1)_extract_dir)
-$(AT)touch $$@
+echo Staging $(1)...
+mkdir -p $($(1)_staging_dir)/$(host_prefix)
+cd $($(1)_build_dir); $($(1)_stage_env) $(call $(1)_stage_cmds, $(1))
+rm -rf $($(1)_extract_dir)
+touch $$@
 $($(1)_postprocessed): | $($(1)_staged)
-$(AT)echo Postprocessing $(1)...
-$(AT)cd $($(1)_staging_prefix_dir); $(call $(1)_postprocess_cmds)
-$(AT)touch $$@
+echo Postprocessing $(1)...
+cd $($(1)_staging_prefix_dir); $(call $(1)_postprocess_cmds)
+touch $$@
 $($(1)_cached): | $($(1)_dependencies) $($(1)_postprocessed)
-$(AT)echo Caching $(1)...
-$(AT)cd $$($(1)_staging_dir)/$(host_prefix); find . | sort | tar --no-recursion -czf $$($(1)_staging_dir)/$$(@F) -T -
-$(AT)mkdir -p $$(@D)
-$(AT)rm -rf $$(@D) && mkdir -p $$(@D)
-$(AT)mv $$($(1)_staging_dir)/$$(@F) $$(@)
-$(AT)rm -rf $($(1)_staging_dir)
+echo Caching $(1)...
+cd $$($(1)_staging_dir)/$(host_prefix); find . | sort | tar --no-recursion -czf $$($(1)_staging_dir)/$$(@F) -T -
+mkdir -p $$(@D)
+rm -rf $$(@D) && mkdir -p $$(@D)
+mv $$($(1)_staging_dir)/$$(@F) $$(@)
+rm -rf $($(1)_staging_dir)
 $($(1)_cached_checksum): $($(1)_cached)
-$(AT)cd $$(@D); $(build_SHA256SUM) $$(<F) > $$(@)
+cd $$(@D); $(build_SHA256SUM) $$(<F) > $$(@)
 
 .PHONY: $(1)
 $(1): | $($(1)_cached_checksum)
@@ -83,6 +83,10 @@ INFO: seed corpus: files: 991 min: 1b max: 1858b total: 288291b rss: 150Mb
 …
 ```
 
+## Run without sanitizers for increased throughput
+
+Fuzzing on a harness compiled with `--with-sanitizers=address,fuzzer,undefined` is good for finding bugs. However, the very slow execution even under libFuzzer will limit the ability to find new coverage. A good approach is to perform occasional long runs without the additional bug-detectors (configure `--with-sanitizers=fuzzer`) and then merge new inputs into a corpus as described in the qa-assets repo (https://github.com/bitcoin-core/qa-assets/blob/main/.github/PULL_REQUEST_TEMPLATE.md). Patience is useful; even with improved throughput, libFuzzer may need days and 10s of millions of executions to reach deep/hard targets.
+
 ## Reproduce a fuzzer crash reported by the CI
 
 - `cd` into the `qa-assets` directory and update it with `git pull qa-assets`
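The new doc section above trades extra bug detection for raw throughput. As a rough standalone illustration (a toy target, not the project's fuzz harness; file names and directories are hypothetical), the same libFuzzer entry point can be built with or without the extra sanitizers, and inputs found by the fast binary can then be folded back into the corpus with libFuzzer's `-merge=1`:

```cpp
// toy_target.cpp -- minimal libFuzzer target (illustration only).
//
// Slow, thorough build with the extra bug detectors:
//   clang++ -g -O1 -fsanitize=fuzzer,address,undefined toy_target.cpp -o toy_slow
// Fast build for coverage exploration (fuzzer instrumentation only):
//   clang++ -g -O1 -fsanitize=fuzzer toy_target.cpp -o toy_fast
//
// Long run with the fast binary, then merge anything new into the corpus:
//   ./toy_fast new_inputs_dir corpus_dir
//   ./toy_fast -merge=1 corpus_dir new_inputs_dir
#include <cstddef>
#include <cstdint>

extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size)
{
    // Trivial stand-in for a real parser under test.
    if (size >= 4 && data[0] == 'F' && data[1] == 'U' && data[2] == 'Z' && data[3] == 'Z') {
        volatile int sink = data[0]; // keep the comparison from being optimized away
        (void)sink;
    }
    return 0;
}
```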
@@ -80,7 +80,7 @@ details.
 ### Build and attest to build outputs:
 
 Follow the relevant Guix README.md sections:
-- [Performing a build](/contrib/guix/README.md#performing-a-build)
+- [Building](/contrib/guix/README.md#building)
 - [Attesting to build outputs](/contrib/guix/README.md#attesting-to-build-outputs)
 
 ### Verify other builders' signatures to your own. (Optional)
@@ -412,7 +412,7 @@ public:
         }
 
         // Generate report header.
-        std::string result{strprintf("%s %s%s - %i%s\n\n", PACKAGE_NAME, FormatFullVersion(), ChainToString(), networkinfo["protocolversion"].get_int(), networkinfo["subversion"].get_str())};
+        std::string result{strprintf("%s client %s%s - server %i%s\n\n", PACKAGE_NAME, FormatFullVersion(), ChainToString(), networkinfo["protocolversion"].get_int(), networkinfo["subversion"].get_str())};
 
         // Report detailed peer connections list sorted by direction and minimum ping time.
         if (DetailsRequested() && !m_peers.empty()) {
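The one-line change above makes the report header say which version string belongs to the CLI client and which to the node it queried. A hypothetical rendering of the two format strings, using plain `printf` and made-up values in place of `PACKAGE_NAME`, `FormatFullVersion()`, `ChainToString()` and the `getnetworkinfo` fields:

```cpp
#include <cstdio>

int main()
{
    // Made-up stand-ins for the values used in the report header.
    const char* package_name = "Dash Core";                 // PACKAGE_NAME
    const char* client_version = "v19.0.0";                 // FormatFullVersion()
    const char* chain = " testnet";                         // ChainToString()
    const int server_protocol = 70227;                      // networkinfo["protocolversion"]
    const char* server_subversion = "/Dash Core:18.2.2/";   // networkinfo["subversion"]

    std::printf("old: %s %s%s - %i%s\n", package_name, client_version, chain, server_protocol, server_subversion);
    std::printf("new: %s client %s%s - server %i%s\n", package_name, client_version, chain, server_protocol, server_subversion);
}
```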
@@ -196,28 +196,35 @@ void TorControlConnection::eventcb(struct bufferevent *bev, short what, void *ct
 
 bool TorControlConnection::Connect(const std::string& tor_control_center, const ConnectionCB& _connected, const ConnectionCB& _disconnected)
 {
-    if (b_conn)
+    if (b_conn) {
         Disconnect();
-    // Parse tor_control_center address:port
-    struct sockaddr_storage connect_to_addr;
-    int connect_to_addrlen = sizeof(connect_to_addr);
-    if (evutil_parse_sockaddr_port(tor_control_center.c_str(),
-        (struct sockaddr*)&connect_to_addr, &connect_to_addrlen)<0) {
+    }
+
+    CService control_service;
+    if (!Lookup(tor_control_center, control_service, 9051, fNameLookup)) {
+        LogPrintf("tor: Failed to look up control center %s\n", tor_control_center);
+        return false;
+    }
+
+    struct sockaddr_storage control_address;
+    socklen_t control_address_len = sizeof(control_address);
+    if (!control_service.GetSockAddr(reinterpret_cast<struct sockaddr*>(&control_address), &control_address_len)) {
         LogPrintf("tor: Error parsing socket address %s\n", tor_control_center);
         return false;
     }
 
     // Create a new socket, set up callbacks and enable notification bits
     b_conn = bufferevent_socket_new(base, -1, BEV_OPT_CLOSE_ON_FREE);
-    if (!b_conn)
+    if (!b_conn) {
         return false;
+    }
     bufferevent_setcb(b_conn, TorControlConnection::readcb, nullptr, TorControlConnection::eventcb, this);
     bufferevent_enable(b_conn, EV_READ|EV_WRITE);
     this->connected = _connected;
     this->disconnected = _disconnected;
 
     // Finally, connect to tor_control_center
-    if (bufferevent_socket_connect(b_conn, (struct sockaddr*)&connect_to_addr, connect_to_addrlen) < 0) {
+    if (bufferevent_socket_connect(b_conn, reinterpret_cast<struct sockaddr*>(&control_address), control_address_len) < 0) {
         LogPrintf("tor: Error connecting to address %s\n", tor_control_center);
         return false;
     }
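The rewritten Connect() resolves the control-center address through Lookup()/GetSockAddr() instead of requiring a numeric IP:port that evutil_parse_sockaddr_port() can parse, so a hostname such as localhost now works. A standalone sketch of that difference using plain POSIX calls (not the project's CService/Lookup API; the host and port values here are illustrative):

```cpp
#include <arpa/inet.h>
#include <netdb.h>
#include <netinet/in.h>
#include <iostream>
#include <string>

int main()
{
    const std::string host = "localhost"; // illustrative control-center host
    const std::string port = "9051";      // Tor's default control port

    // Numeric-only parsing: rejects hostnames, much like the removed
    // evutil_parse_sockaddr_port() path.
    in_addr numeric{};
    const bool numeric_ok = inet_pton(AF_INET, host.c_str(), &numeric) == 1;
    std::cout << "numeric parse of '" << host << "': " << (numeric_ok ? "ok" : "rejected") << "\n";

    // Resolver-based lookup: roughly what the new Lookup()/GetSockAddr() path
    // provides, yielding a sockaddr that bufferevent_socket_connect() can use.
    addrinfo hints{};
    hints.ai_family = AF_UNSPEC;
    hints.ai_socktype = SOCK_STREAM;
    addrinfo* res = nullptr;
    if (getaddrinfo(host.c_str(), port.c_str(), &hints, &res) == 0) {
        std::cout << "resolved '" << host << ":" << port << "' to a " << res->ai_addrlen << "-byte sockaddr\n";
        freeaddrinfo(res);
    } else {
        std::cout << "lookup failed\n";
    }
}
```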
@@ -112,7 +112,7 @@ void SplitHostPort(std::string in, uint16_t& portOut, std::string& hostOut)
     // if a : is found, and it either follows a [...], or no other : is in the string, treat it as port separator
     bool fHaveColon = colon != in.npos;
     bool fBracketed = fHaveColon && (in[0] == '[' && in[colon - 1] == ']'); // if there is a colon, and in[0]=='[', colon is not 0, so in[colon-1] is safe
-    bool fMultiColon = fHaveColon && (in.find_last_of(':', colon - 1) != in.npos);
+    bool fMultiColon{fHaveColon && colon != 0 && (in.find_last_of(':', colon - 1) != in.npos)};
     if (fHaveColon && (colon == 0 || fBracketed || !fMultiColon)) {
         uint16_t n;
         if (ParseUInt16(in.substr(colon + 1), &n)) {
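The added `colon != 0` guard matters when the only colon is the first character, e.g. `":8333"`: `colon - 1` on a `size_t` wraps around, so the old expression scans the whole string and rediscovers the same colon. A small standalone check (not the project's test code):

```cpp
#include <cassert>
#include <cstddef>
#include <iostream>
#include <string>

int main()
{
    const std::string in{":8333"}; // empty host, single colon at index 0
    const std::size_t colon = in.find_last_of(':');
    assert(colon == 0);

    // Old expression: colon - 1 wraps to SIZE_MAX, so find_last_of() searches
    // the entire string and finds the colon at index 0 again.
    const bool old_multi = in.find_last_of(':', colon - 1) != std::string::npos;

    // New expression: the colon != 0 guard short-circuits before the wraparound.
    const bool new_multi = colon != 0 && in.find_last_of(':', colon - 1) != std::string::npos;

    std::cout << "old fMultiColon: " << old_multi << "\n"; // 1 (spurious)
    std::cout << "new fMultiColon: " << new_multi << "\n"; // 0
}
```

The surrounding condition shown in the hunk already treats `colon == 0` as a port separator, so the guard avoids relying on the unsigned wraparound rather than changing how such inputs are split.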
@@ -5496,7 +5496,7 @@ bool LoadMempool(CTxMemPool& pool, CChainState& active_chainstate)
             pool.PrioritiseTransaction(tx->GetHash(), amountdelta);
         }
         TxValidationState state;
-        if (nTime + nExpiryTimeout > nNow) {
+        if (nTime > nNow - nExpiryTimeout) {
             LOCK(cs_main);
             assert(std::addressof(::ChainstateActive()) == std::addressof(active_chainstate));
             AcceptToMemoryPoolWithTime(chainparams, pool, active_chainstate, state, tx, nTime,
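One plausible reason for rearranging the expiry check: `nTime` is read from `mempool.dat`, so `nTime + nExpiryTimeout` can overflow a signed 64-bit value for a hostile timestamp, while `nNow - nExpiryTimeout` only combines locally produced numbers. A standalone sketch under that assumption (the constants are illustrative; 336 hours is the usual default mempool expiry):

```cpp
#include <cstdint>
#include <iostream>
#include <limits>

int main()
{
    const int64_t nNow = 1700000000;                 // locally measured "now" (illustrative)
    const int64_t nExpiryTimeout = 336LL * 60 * 60;  // expiry window in seconds (illustrative default)
    const int64_t nTime = std::numeric_limits<int64_t>::max(); // hostile timestamp read from disk

    // Old form: nTime + nExpiryTimeout would overflow int64_t here, which is
    // undefined behaviour in C++ and exactly what a UBSan build would flag.
    // const bool keep = nTime + nExpiryTimeout > nNow;

    // New form: the subtraction involves only trusted, modest values.
    const bool keep = nTime > nNow - nExpiryTimeout;
    std::cout << "keep transaction: " << keep << "\n"; // 1
}
```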
@@ -120,6 +120,24 @@ class PruneTest(BitcoinTestFramework):
 
         self.sync_blocks(self.nodes[0:5])
 
+    def test_invalid_command_line_options(self):
+        self.nodes[0].assert_start_raises_init_error(
+            expected_msg='Error: Prune cannot be configured with a negative value.',
+            extra_args=['-prune=-1'],
+        )
+        self.nodes[0].assert_start_raises_init_error(
+            expected_msg='Error: Prune configured below the minimum of 550 MiB. Please use a higher number.',
+            extra_args=['-prune=549'],
+        )
+        self.nodes[0].assert_start_raises_init_error(
+            expected_msg='Error: Prune mode is incompatible with -txindex.',
+            extra_args=['-prune=550', '-txindex'],
+        )
+        self.nodes[0].assert_start_raises_init_error(
+            expected_msg='Error: Prune mode is incompatible with -coinstatsindex.',
+            extra_args=['-prune=550', '-coinstatsindex'],
+        )
+
     def test_height_min(self):
         assert os.path.isfile(os.path.join(self.prunedir, "blk00000.dat")), "blk00000.dat is missing, pruning too early"
         self.log.info("Success")
@@ -461,6 +479,9 @@ class PruneTest(BitcoinTestFramework):
             self.log.info("Stopping pruned node%d" % i)
             self.stop_node(i, expected_stderr=EXPECTED_STDERR_NO_GOV_PRUNE)
 
+        self.log.info("Test invalid pruning command line options")
+        self.test_invalid_command_line_options()
+
         self.log.info("Done")
 
 if __name__ == '__main__':
@@ -145,7 +145,7 @@ class TestBitcoinCli(BitcoinTestFramework):
         assert_equal(cli_get_info['balance'], amounts[1])
 
         self.log.info("Test -getinfo with -rpcwallet=unloaded wallet returns no balances")
-        cli_get_info = self.nodes[0].cli('-getinfo', rpcwallet3).send_cli()
+        cli_get_info_keys = self.nodes[0].cli('-getinfo', rpcwallet3).send_cli().keys()
         assert 'balance' not in cli_get_info_keys
         assert 'balances' not in cli_get_info_keys
 
@@ -39,4 +39,4 @@ race:CZMQAbstractPublishNotifier::SendZmqMessage
 race:epoll_ctl
 
 # https://github.com/bitcoin/bitcoin/issues/23366
-race:std::__1::ios_base::width
+race:std::__1::ios_base::*