Skip to content

Commit

Permalink
Merge bitcoin/bitcoin#31417: test: Avoid F541 (f-string without any placeholders)
Browse files Browse the repository at this point in the history

fae7639 test: Avoid F541 (f-string without any placeholders) (MarcoFalke)

Pull request description:

  An extra `f` string-prefix is mostly harmless, but could be confusing or hint at a mistake where a format argument was forgotten.

  Try to avoid the confusion and mistakes by applying the `F541` linter rule.

ACKs for top commit:
  lucasbalieiro:
    **Tested ACK** [fae7639](bitcoin/bitcoin@fae7639)
  danielabrozzoni:
    ACK fae7639
  tdb3:
    Code review ACK fae7639

Tree-SHA512: 4992a74fcf0c19b32e4d95f7333e087b4269b5c5259c556789fb86721617db81c7a4fe210ae136c92824976f07f71ad0f374655e7008b1967c02c73324862d9a
  • Loading branch information
fanquake committed Dec 6, 2024
2 parents eb2ebe6 + fae7639 commit 1a35447
Show file tree
Hide file tree
Showing 19 changed files with 30 additions and 29 deletions.
6 changes: 3 additions & 3 deletions contrib/tracing/log_raw_p2p_msgs.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,9 +117,9 @@


def print_message(event, inbound):
print(f"%s %s msg '%s' from peer %d (%s, %s) with %d bytes: %s" %
(
f"Warning: incomplete message (only %d out of %d bytes)!" % (
print("{} {} msg '{}' from peer {} ({}, {}) with {} bytes: {}".format(

"Warning: incomplete message (only {} out of {} bytes)!".format(
len(event.msg), event.msg_size) if len(event.msg) < event.msg_size else "",
"inbound" if inbound else "outbound",
event.msg_type.decode("utf-8"),
Expand Down
2 changes: 1 addition & 1 deletion test/functional/feature_anchors.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ def run_test(self):
self.nodes[0].addconnection(ONION_ADDR, 'block-relay-only', v2transport=False)

self.log.debug("Stop node")
with self.nodes[0].assert_debug_log([f"DumpAnchors: Flush 1 outbound block-relay-only peer addresses to anchors.dat"]):
with self.nodes[0].assert_debug_log(["DumpAnchors: Flush 1 outbound block-relay-only peer addresses to anchors.dat"]):
self.stop_node(0)
# Manually close keep_alive proxy connection
onion_proxy.stop()
Expand Down
6 changes: 3 additions & 3 deletions test/functional/feature_assumeutxo.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,8 +165,8 @@ def expected_error(log_msg="", error_msg=""):
with self.nodes[0].assert_debug_log([log_msg]):
self.nodes[0].assert_start_raises_init_error(expected_msg=error_msg)

expected_error_msg = f"Error: A fatal internal error occurred, see debug.log for details: Assumeutxo data not found for the given blockhash '7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a'."
error_details = f"Assumeutxo data not found for the given blockhash"
expected_error_msg = "Error: A fatal internal error occurred, see debug.log for details: Assumeutxo data not found for the given blockhash '7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a7a'."
error_details = "Assumeutxo data not found for the given blockhash"
expected_error(log_msg=error_details, error_msg=expected_error_msg)

# resurrect node again
Expand Down Expand Up @@ -417,7 +417,7 @@ def check_dump_output(output):

assert_equal(n0.getblockchaininfo()["blocks"], FINAL_HEIGHT)

self.log.info(f"Check that dumptxoutset works for past block heights")
self.log.info("Check that dumptxoutset works for past block heights")
# rollback defaults to the snapshot base height
dump_output2 = n0.dumptxoutset('utxos2.dat', "rollback")
check_dump_output(dump_output2)
Expand Down
4 changes: 2 additions & 2 deletions test/functional/feature_config_args.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ def test_negated_config(self):

self.log.debug('Verifying garbage in config can be detected')
with open(conf_path, 'a', encoding='utf-8') as conf:
conf.write(f'garbage\n')
conf.write('garbage\n')
self.nodes[0].assert_start_raises_init_error(
extra_args=['-regtest'],
expected_msg='Error: Error reading configuration file: parse error on line 1: garbage',
Expand All @@ -98,7 +98,7 @@ def test_config_file_parser(self):

# Check that startup fails if conf= is set in bitcoin.conf or in an included conf file
bad_conf_file_path = self.nodes[0].datadir_path / "bitcoin_bad.conf"
util.write_config(bad_conf_file_path, n=0, chain='', extra_config=f'conf=some.conf\n')
util.write_config(bad_conf_file_path, n=0, chain='', extra_config='conf=some.conf\n')
conf_in_config_file_err = 'Error: Error reading configuration file: conf cannot be set in the configuration file; use includeconf= if you want to include additional config files'
self.nodes[0].assert_start_raises_init_error(
extra_args=[f'-conf={bad_conf_file_path}'],
Expand Down
2 changes: 1 addition & 1 deletion test/functional/feature_dersig.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@ def run_test(self):
block.hashMerkleRoot = block.calc_merkle_root()
block.solve()

with self.nodes[0].assert_debug_log(expected_msgs=[f'Block validation error: mandatory-script-verify-flag-failed (Non-canonical DER signature)']):
with self.nodes[0].assert_debug_log(expected_msgs=['Block validation error: mandatory-script-verify-flag-failed (Non-canonical DER signature)']):
peer.send_and_ping(msg_block(block))
assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
peer.sync_with_ping()
Expand Down
2 changes: 1 addition & 1 deletion test/functional/feature_framework_miniwallet.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def test_tx_padding(self):

def test_wallet_tagging(self):
"""Verify that tagged wallet instances are able to send funds."""
self.log.info(f"Test tagged wallet instances...")
self.log.info("Test tagged wallet instances...")
node = self.nodes[0]
untagged_wallet = self.wallets[0][1]
for i in range(10):
Expand Down
4 changes: 2 additions & 2 deletions test/functional/feature_loadblock.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,8 +51,8 @@ def run_test(self):
cfg.write(f"port={node_url.port}\n")
cfg.write(f"host={node_url.hostname}\n")
cfg.write(f"output_file={bootstrap_file}\n")
cfg.write(f"max_height=100\n")
cfg.write(f"netmagic=fabfb5da\n")
cfg.write("max_height=100\n")
cfg.write("netmagic=fabfb5da\n")
cfg.write(f"input={blocks_dir}\n")
cfg.write(f"genesis={genesis_block}\n")
cfg.write(f"hashlist={hash_list.name}\n")
Expand Down
2 changes: 1 addition & 1 deletion test/functional/interface_rest.py
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,7 @@ def run_test(self):
self.test_rest_request(f"/getutxos/{spending[0]}-+1", ret_type=RetType.OBJ, status=400)
self.test_rest_request(f"/getutxos/{spending[0]}--1", ret_type=RetType.OBJ, status=400)
self.test_rest_request(f"/getutxos/{spending[0]}aa-1234", ret_type=RetType.OBJ, status=400)
self.test_rest_request(f"/getutxos/aa-1234", ret_type=RetType.OBJ, status=400)
self.test_rest_request("/getutxos/aa-1234", ret_type=RetType.OBJ, status=400)

# Test limits
long_uri = '/'.join([f"{txid}-{n_}" for n_ in range(20)])
Expand Down
4 changes: 2 additions & 2 deletions test/functional/interface_usdt_utxocache.py
Original file line number Diff line number Diff line change
Expand Up @@ -393,15 +393,15 @@ def handle_utxocache_flush(_, data, __):
bpf = BPF(text=utxocache_flushes_program, usdt_contexts=[ctx], debug=0, cflags=["-Wno-error=implicit-function-declaration"])
bpf["utxocache_flush"].open_perf_buffer(handle_utxocache_flush)

self.log.info(f"prune blockchain to trigger a flush for pruning")
self.log.info("prune blockchain to trigger a flush for pruning")
expected_flushes.append({"mode": "NONE", "for_prune": True, "size": 0})
self.nodes[0].pruneblockchain(315)

bpf.perf_buffer_poll(timeout=500)
bpf.cleanup()

self.log.info(
f"check that we don't expect additional flushes and that the handle_* function succeeded")
"check that we don't expect additional flushes and that the handle_* function succeeded")
assert_equal(0, len(expected_flushes))
assert_equal(EXPECTED_HANDLE_FLUSH_SUCCESS, handle_flush_succeeds)

Expand Down
2 changes: 1 addition & 1 deletion test/functional/mempool_datacarrier.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def set_test_params(self):
[],
["-datacarrier=0"],
["-datacarrier=1", f"-datacarriersize={MAX_OP_RETURN_RELAY - 1}"],
["-datacarrier=1", f"-datacarriersize=2"],
["-datacarrier=1", "-datacarriersize=2"],
]

def test_null_data_transaction(self, node: TestNode, data, success: bool) -> None:
Expand Down
2 changes: 1 addition & 1 deletion test/functional/mempool_truc.py
Original file line number Diff line number Diff line change
Expand Up @@ -231,7 +231,7 @@ def test_nondefault_package_limits(self):
assert_greater_than_or_equal(TRUC_CHILD_MAX_VSIZE, tx_v3_child_large2["tx"].get_vsize())
assert_greater_than(tx_v3_parent_large2["tx"].get_vsize() + tx_v3_child_large2["tx"].get_vsize(), 10000)

assert_raises_rpc_error(-26, f"too-long-mempool-chain, exceeds ancestor size limit", node.sendrawtransaction, tx_v3_child_large2["hex"])
assert_raises_rpc_error(-26, "too-long-mempool-chain, exceeds ancestor size limit", node.sendrawtransaction, tx_v3_child_large2["hex"])
self.check_mempool([tx_v3_parent_large2["txid"]])

@cleanup(extra_args=["-datacarriersize=1000"])
Expand Down
2 changes: 1 addition & 1 deletion test/functional/p2p_handshake.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ def run_test(self):
DESIRABLE_SERVICE_FLAGS_PRUNED, expect_disconnect=False)

self.log.info("Check that feeler connections get disconnected immediately")
with node.assert_debug_log([f"feeler connection completed"]):
with node.assert_debug_log(["feeler connection completed"]):
self.add_outbound_connection(node, "feeler", NODE_NONE, wait_for_disconnect=True)

self.log.info("Check that connecting to ourself leads to immediate disconnect")
Expand Down
6 changes: 3 additions & 3 deletions test/functional/rpc_dumptxoutset.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,14 +67,14 @@ def run_test(self):
assert_raises_rpc_error(
-8, "Couldn't open file {}.incomplete for writing".format(invalid_path), node.dumptxoutset, invalid_path, "latest")

self.log.info(f"Test that dumptxoutset with unknown dump type fails")
self.log.info("Test that dumptxoutset with unknown dump type fails")
assert_raises_rpc_error(
-8, 'Invalid snapshot type "bogus" specified. Please specify "rollback" or "latest"', node.dumptxoutset, 'utxos.dat', "bogus")

self.log.info(f"Test that dumptxoutset failure does not leave the network activity suspended when it was on previously")
self.log.info("Test that dumptxoutset failure does not leave the network activity suspended when it was on previously")
self.check_expected_network(node, True)

self.log.info(f"Test that dumptxoutset failure leaves the network activity suspended when it was off")
self.log.info("Test that dumptxoutset failure leaves the network activity suspended when it was off")
node.setnetworkactive(False)
self.check_expected_network(node, False)
node.setnetworkactive(True)
Expand Down
4 changes: 2 additions & 2 deletions test/functional/test_framework/messages.py
Original file line number Diff line number Diff line change
Expand Up @@ -327,7 +327,7 @@ def deserialize_v2(self, f):
elif self.net == self.NET_CJDNS:
self.ip = socket.inet_ntop(socket.AF_INET6, addr_bytes)
else:
raise Exception(f"Address type not supported")
raise Exception("Address type not supported")

self.port = int.from_bytes(f.read(2), "big")

Expand All @@ -354,7 +354,7 @@ def serialize_v2(self):
elif self.net == self.NET_CJDNS:
r += socket.inet_pton(socket.AF_INET6, self.ip)
else:
raise Exception(f"Address type not supported")
raise Exception("Address type not supported")
r += self.port.to_bytes(2, "big")
return r

Expand Down
2 changes: 1 addition & 1 deletion test/functional/test_framework/socks5.py
Original file line number Diff line number Diff line change
Expand Up @@ -195,7 +195,7 @@ def handle(self):
if not self.serv.keep_alive:
self.conn.close()
else:
logger.debug(f"Keeping client connection alive")
logger.debug("Keeping client connection alive")

class Socks5Server():
def __init__(self, conf):
Expand Down
2 changes: 1 addition & 1 deletion test/functional/wallet_backup.py
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,7 @@ def test_pruned_wallet_backup(self):
node.pruneblockchain(250)
# The backup should be updated with the latest height (locator) for
# the backup to load successfully this close to the prune height
node.restorewallet(f'pruned', node.datadir_path / 'wallet_pruned.bak')
node.restorewallet('pruned', node.datadir_path / 'wallet_pruned.bak')

def run_test(self):
self.log.info("Generating initial blockchain")
Expand Down
2 changes: 1 addition & 1 deletion test/functional/wallet_fast_rescan.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ def run_test(self):
assert_equal(len(descriptors), NUM_DESCRIPTORS)
w.backupwallet(WALLET_BACKUP_FILENAME)

self.log.info(f"Create txs sending to end range address of each descriptor, triggering top-ups")
self.log.info("Create txs sending to end range address of each descriptor, triggering top-ups")
for i in range(NUM_BLOCKS):
self.log.info(f"Block {i+1}/{NUM_BLOCKS}")
for desc_info in w.listdescriptors()['descriptors']:
Expand Down
4 changes: 2 additions & 2 deletions test/functional/wallet_multisig_descriptor_psbt.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,8 +53,8 @@ def participants_create_multisigs(self, external_xpubs, internal_xpubs):
for i, node in enumerate(self.nodes):
node.createwallet(wallet_name=f"{self.name}_{i}", blank=True, descriptors=True, disable_private_keys=True)
multisig = node.get_wallet_rpc(f"{self.name}_{i}")
external = multisig.getdescriptorinfo(f"wsh(sortedmulti({self.M},{f','.join(external_xpubs)}))")
internal = multisig.getdescriptorinfo(f"wsh(sortedmulti({self.M},{f','.join(internal_xpubs)}))")
external = multisig.getdescriptorinfo(f"wsh(sortedmulti({self.M},{','.join(external_xpubs)}))")
internal = multisig.getdescriptorinfo(f"wsh(sortedmulti({self.M},{','.join(internal_xpubs)}))")
result = multisig.importdescriptors([
{ # receiving addresses (internal: False)
"desc": external["descriptor"],
Expand Down
1 change: 1 addition & 0 deletions test/lint/test_runner/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -226,6 +226,7 @@ fn lint_py_lint() -> LintResult {
"F405", // foo_function may be undefined, or defined from star imports: bar_module
"F406", // "from module import *" only allowed at module level
"F407", // an undefined __future__ feature name was imported
"F541", // f-string without any placeholders
"F601", // dictionary key name repeated with different values
"F602", // dictionary key variable name repeated with different values
"F621", // too many expressions in an assignment with star-unpacking
Expand Down

0 comments on commit 1a35447

Please sign in to comment.