cleanup(clippy): Use inline format strings (#5489)

* Inline format strings using an automated clippy fix

```sh
cargo clippy --fix --all-features --all-targets -- -A clippy::all -W clippy::uninlined_format_args
cargo fmt --all
```
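
For context, this is the kind of rewrite `clippy::uninlined_format_args` performs. A minimal sketch with made-up identifiers (not taken from the Zebra codebase):

```rust
fn main() {
    let hash = "c812f880cf";
    let height = 1_850_000;

    // Before the fix: positional arguments are passed separately.
    let old = format!("block {} at height {}", hash, height);

    // After `cargo clippy --fix`: bare identifiers are captured inline
    // (supported since rustc 1.58).
    let new = format!("block {hash} at height {height}");

    assert_eq!(old, new);
}
```

Note that the automated fix only inlines bare identifiers: expressions such as `fields.len()` or `self.state.command()` keep their positional `{}` arguments, which is why some call sites in the diff below are only partially rewritten.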

* Remove unused & and &mut using an automated clippy fix

```sh
cargo clippy --fix --all-features --all-targets -- -A clippy::all -W clippy::needless_borrow
```
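
Assuming the second pass used `clippy::needless_borrow` (the lint that flags `&` and `&mut` borrows the compiler immediately dereferences), here is a minimal illustrative sketch with made-up names, not code from the Zebra diff below:

```rust
fn print_value(v: &i32) {
    println!("{v}");
}

fn main() {
    let x = 5;

    // Before the fix: `&&x` creates an extra reference that the compiler
    // immediately dereferences back down to `&i32`.
    print_value(&&x);

    // After `cargo clippy --fix`: the needless borrow is removed.
    print_value(&x);
}
```

The hunks below that drop a leading `&` or `&mut` (for example `&mut rng` becoming `rng`, and `&cache_dir` becoming `cache_dir`) come from this second automated fix.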
teor 2022-10-27 23:25:18 +10:00 committed by GitHub
parent 730f6c1f55
commit c812f880cf
GPG Key ID: 4AEE18F83AFDEB23
42 changed files with 133 additions and 136 deletions

View File

@@ -40,7 +40,7 @@ proptest! {
 fn block_hash_display_fromstr_roundtrip(hash in any::<Hash>()) {
 let _init_guard = zebra_test::init();
-let display = format!("{}", hash);
+let display = format!("{hash}");
 let parsed = display.parse::<Hash>().expect("hash should parse");
 prop_assert_eq!(hash, parsed);
 }

View File

@@ -30,7 +30,7 @@ fn blockheaderhash_debug() {
 let hash = Hash(sha_writer.finish());
 assert_eq!(
-format!("{:?}", hash),
+format!("{hash:?}"),
 "block::Hash(\"3166411bd5343e0b284a108f39a929fbbb62619784f8c6dafe520703b5b446bf\")"
 );
 }
@@ -44,7 +44,7 @@ fn blockheaderhash_from_blockheader() {
 let hash = Hash::from(&blockheader);
 assert_eq!(
-format!("{:?}", hash),
+format!("{hash:?}"),
 "block::Hash(\"d1d6974bbe1d4d127c889119b2fc05724c67588dc72708839727586b8c2bc939\")"
 );

View File

@@ -27,7 +27,7 @@ pub fn humantime_seconds(duration: impl Into<Duration>) -> String {
 let duration = humantime::format_duration(duration);
-format!("{}", duration)
+format!("{duration}")
 }
 /// Returns a human-friendly formatted string for the whole number of milliseconds in `duration`.
@@ -40,5 +40,5 @@ pub fn humantime_milliseconds(duration: impl Into<Duration>) -> String {
 let duration = humantime::format_duration(duration_secs + duration_millis);
-format!("{}", duration)
+format!("{duration}")
 }

View File

@@ -38,7 +38,7 @@ impl PartialEq for HistoryTreeError {
 fn eq(&self, other: &Self) -> bool {
 // Workaround since subtypes do not implement Eq.
 // This is only used for tests anyway.
-format!("{:?}", self) == format!("{:?}", other)
+format!("{self:?}") == format!("{other:?}")
 }
 }

View File

@@ -110,8 +110,8 @@ impl Arbitrary for OutputInTransactionV4 {
 /// crate does not provide an Arbitrary implementation for it.
 fn spendauth_verification_key_bytes() -> impl Strategy<Value = ValidatingKey> {
 prop::array::uniform32(any::<u8>()).prop_map(|bytes| {
-let mut rng = ChaChaRng::from_seed(bytes);
-let sk = redjubjub::SigningKey::<redjubjub::SpendAuth>::new(&mut rng);
+let rng = ChaChaRng::from_seed(bytes);
+let sk = redjubjub::SigningKey::<redjubjub::SpendAuth>::new(rng);
 redjubjub::VerificationKey::<redjubjub::SpendAuth>::from(&sk)
 .try_into()
 .unwrap()

View File

@@ -76,6 +76,6 @@
 let input = b"hello";
 let checksum = Checksum::from(&input[..]);
-assert_eq!(format!("{:?}", checksum), "Sha256dChecksum(\"9595c9df\")");
+assert_eq!(format!("{checksum:?}"), "Sha256dChecksum(\"9595c9df\")");
 }
 }

View File

@@ -64,7 +64,7 @@ fn memo_fmt() {
 but its just short enough!",
 ));
-assert_eq!(format!("{:?}", memo),
+assert_eq!(format!("{memo:?}"),
 "Memo(\"thiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiis iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiis aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa veeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeryyyyyyyyyyyyyyyyyyyyyyyyyy looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong meeeeeeeeeeeeeeeeeeemooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo but its just short enough!\")"
 );

View File

@@ -38,7 +38,7 @@ proptest! {
 fn transaction_hash_struct_display_roundtrip(hash in any::<Hash>()) {
 let _init_guard = zebra_test::init();
-let display = format!("{}", hash);
+let display = format!("{hash}");
 let parsed = display.parse::<Hash>().expect("hash should parse");
 prop_assert_eq!(hash, parsed);
 }
@@ -48,7 +48,7 @@
 let _init_guard = zebra_test::init();
 if let Ok(parsed) = hash.parse::<Hash>() {
-let display = format!("{}", parsed);
+let display = format!("{parsed}");
 prop_assert_eq!(hash, display);
 }
 }
@@ -66,7 +66,7 @@
 fn transaction_auth_digest_struct_display_roundtrip(auth_digest in any::<AuthDigest>()) {
 let _init_guard = zebra_test::init();
-let display = format!("{}", auth_digest);
+let display = format!("{auth_digest}");
 let parsed = display.parse::<AuthDigest>().expect("auth digest should parse");
 prop_assert_eq!(auth_digest, parsed);
 }
@@ -76,7 +76,7 @@
 let _init_guard = zebra_test::init();
 if let Ok(parsed) = auth_digest.parse::<AuthDigest>() {
-let display = format!("{}", parsed);
+let display = format!("{parsed}");
 prop_assert_eq!(auth_digest, display);
 }
 }
@@ -85,7 +85,7 @@
 fn transaction_wtx_id_struct_display_roundtrip(wtx_id in any::<WtxId>()) {
 let _init_guard = zebra_test::init();
-let display = format!("{}", wtx_id);
+let display = format!("{wtx_id}");
 let parsed = display.parse::<WtxId>().expect("wide transaction ID should parse");
 prop_assert_eq!(wtx_id, parsed);
 }
@@ -95,7 +95,7 @@
 let _init_guard = zebra_test::init();
 if let Ok(parsed) = wtx_id.parse::<WtxId>() {
-let display = format!("{}", parsed);
+let display = format!("{parsed}");
 prop_assert_eq!(wtx_id, display);
 }
 }

View File

@@ -51,7 +51,7 @@ fn transactionhash_struct_from_str_roundtrip() {
 .unwrap();
 assert_eq!(
-format!("{:?}", hash),
+format!("{hash:?}"),
 r#"transaction::Hash("3166411bd5343e0b284a108f39a929fbbb62619784f8c6dafe520703b5b446bf")"#
 );
 assert_eq!(
@@ -69,7 +69,7 @@ fn auth_digest_struct_from_str_roundtrip() {
 .unwrap();
 assert_eq!(
-format!("{:?}", digest),
+format!("{digest:?}"),
 r#"AuthDigest("3166411bd5343e0b284a108f39a929fbbb62619784f8c6dafe520703b5b446bf")"#
 );
 assert_eq!(
@@ -87,7 +87,7 @@ fn wtx_id_struct_from_str_roundtrip() {
 .unwrap();
 assert_eq!(
-format!("{:?}", wtx_id),
+format!("{wtx_id:?}"),
 r#"WtxId { id: transaction::Hash("3166411bd5343e0b284a108f39a929fbbb62619784f8c6dafe520703b5b446bf"), auth_digest: AuthDigest("0000000000000000000000000000000000000000000000000000000000000001") }"#
 );
 assert_eq!(
@@ -850,7 +850,7 @@ fn zip143_sighash() -> Result<()> {
 ),
 );
 let expected = hex::encode(test.sighash);
-assert_eq!(expected, result, "test #{}: sighash does not match", i);
+assert_eq!(expected, result, "test #{i}: sighash does not match");
 }
 Ok(())
@@ -886,7 +886,7 @@ fn zip243_sighash() -> Result<()> {
 ),
 );
 let expected = hex::encode(test.sighash);
-assert_eq!(expected, result, "test #{}: sighash does not match", i);
+assert_eq!(expected, result, "test #{i}: sighash does not match");
 }
 Ok(())
@@ -916,7 +916,7 @@ fn zip244_sighash() -> Result<()> {
 None,
 ));
 let expected = hex::encode(test.sighash_shielded);
-assert_eq!(expected, result, "test #{}: sighash does not match", i);
+assert_eq!(expected, result, "test #{i}: sighash does not match");
 if let Some(sighash_all) = test.sighash_all {
 let result = hex::encode(transaction.sighash(
@@ -926,7 +926,7 @@ fn zip244_sighash() -> Result<()> {
 test.transparent_input.map(|idx| idx as _),
 ));
 let expected = hex::encode(sighash_all);
-assert_eq!(expected, result, "test #{}: sighash does not match", i);
+assert_eq!(expected, result, "test #{i}: sighash does not match");
 }
 }

View File

@@ -270,7 +270,7 @@ mod tests {
 let t_addr = pub_key.to_address(Network::Mainnet);
-assert_eq!(format!("{}", t_addr), "t1bmMa1wJDFdbc2TiURQP5BbBz6jHjUBuHq");
+assert_eq!(format!("{t_addr}"), "t1bmMa1wJDFdbc2TiURQP5BbBz6jHjUBuHq");
 }
 #[test]
@@ -285,7 +285,7 @@ mod tests {
 let t_addr = pub_key.to_address(Network::Testnet);
-assert_eq!(format!("{}", t_addr), "tmTc6trRhbv96kGfA99i7vrFwb5p7BVFwc3");
+assert_eq!(format!("{t_addr}"), "tmTc6trRhbv96kGfA99i7vrFwb5p7BVFwc3");
 }
 #[test]
@@ -296,7 +296,7 @@ mod tests {
 let t_addr = script.to_address(Network::Mainnet);
-assert_eq!(format!("{}", t_addr), "t3Y5pHwfgHbS6pDjj1HLuMFxhFFip1fcJ6g");
+assert_eq!(format!("{t_addr}"), "t3Y5pHwfgHbS6pDjj1HLuMFxhFFip1fcJ6g");
 }
 #[test]
@@ -307,7 +307,7 @@ mod tests {
 let t_addr = script.to_address(Network::Testnet);
-assert_eq!(format!("{}", t_addr), "t2L51LcmpA43UMvKTw2Lwtt9LMjwyqU2V1P");
+assert_eq!(format!("{t_addr}"), "t2L51LcmpA43UMvKTw2Lwtt9LMjwyqU2V1P");
 }
 #[test]
@@ -316,7 +316,7 @@ mod tests {
 let t_addr: Address = "t3Vz22vK5z2LcKEdg16Yv4FFneEL1zg9ojd".parse().unwrap();
-assert_eq!(format!("{}", t_addr), "t3Vz22vK5z2LcKEdg16Yv4FFneEL1zg9ojd");
+assert_eq!(format!("{t_addr}"), "t3Vz22vK5z2LcKEdg16Yv4FFneEL1zg9ojd");
 }
 #[test]
@@ -326,7 +326,7 @@ mod tests {
 let t_addr: Address = "t3Vz22vK5z2LcKEdg16Yv4FFneEL1zg9ojd".parse().unwrap();
 assert_eq!(
-format!("{:?}", t_addr),
+format!("{t_addr:?}"),
 "TransparentAddress { network: Mainnet, script_hash: \"7d46a730d31f97b1930d3368a967c309bd4d136a\" }"
 );
 }

View File

@@ -31,16 +31,16 @@ fn debug_format() {
 );
 let one = CompactDifficulty((1 << PRECISION) + (1 << 16));
 assert_eq!(
-format!("{:?}", one),
+format!("{one:?}"),
 "CompactDifficulty(0x01010000, Some(ExpandedDifficulty(\"0000000000000000000000000000000000000000000000000000000000000001\")))");
 let mant = CompactDifficulty(OFFSET as u32 * (1 << PRECISION) + UNSIGNED_MANTISSA_MASK);
 assert_eq!(
-format!("{:?}", mant),
+format!("{mant:?}"),
 "CompactDifficulty(0x037fffff, Some(ExpandedDifficulty(\"00000000000000000000000000000000000000000000000000000000007fffff\")))"
 );
 let exp = CompactDifficulty(((31 + OFFSET - 2) as u32) * (1 << PRECISION) + (1 << 16));
 assert_eq!(
-format!("{:?}", exp),
+format!("{exp:?}"),
 "CompactDifficulty(0x20010000, Some(ExpandedDifficulty(\"0100000000000000000000000000000000000000000000000000000000000000\")))"
 );

View File

@@ -7,6 +7,6 @@
 fn main() {
 let path = zebra_consensus::groth16::Groth16Parameters::directory();
 if let Some(path) = path.to_str() {
-println!("{}", path);
+println!("{path}");
 }
 }

View File

@@ -72,7 +72,7 @@ impl FromStr for CheckpointList {
 if let [height, hash] = fields[..] {
 checkpoint_list.push((height.parse()?, hash.parse()?));
 } else {
-Err(format!("Invalid checkpoint format: expected 2 space-separated fields but found {}: '{}'", fields.len(), checkpoint))?;
+Err(format!("Invalid checkpoint format: expected 2 space-separated fields but found {}: '{checkpoint}'", fields.len()))?;
 };
 }

View File

@@ -316,7 +316,7 @@ impl MustUseClientResponseSender {
 .as_ref()
 .map(|tx| tx.is_canceled())
 .unwrap_or_else(
-|| panic!("called is_canceled() after using oneshot sender: oneshot must be used exactly once: {:?}", self))
+|| panic!("called is_canceled() after using oneshot sender: oneshot must be used exactly once: {self:?}"))
 }
 }
@@ -471,7 +471,7 @@ impl Client {
 }
 // Heartbeat task stopped with panic.
 else if error.is_panic() {
-panic!("heartbeat task has panicked: {}", error);
+panic!("heartbeat task has panicked: {error}");
 }
 // Heartbeat task stopped with error.
 else {
@@ -497,7 +497,7 @@ impl Client {
 }
 Poll::Ready(Err(error)) => {
 // Connection task stopped unexpectedly with a panic.
-panic!("connection task has panicked: {}", error);
+panic!("connection task has panicked: {error}");
 }
 }
 }

View File

@@ -70,8 +70,8 @@ pub(super) enum Handler {
 impl fmt::Display for Handler {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 f.write_str(&match self {
-Handler::Finished(Ok(response)) => format!("Finished({})", response),
-Handler::Finished(Err(error)) => format!("Finished({})", error),
+Handler::Finished(Ok(response)) => format!("Finished({response})"),
+Handler::Finished(Err(error)) => format!("Finished({error})"),
 Handler::Ping(_) => "Ping".to_string(),
 Handler::Peers => "Peers".to_string(),
@@ -403,7 +403,7 @@ impl fmt::Display for State {
 f.write_str(&match self {
 State::AwaitingRequest => "AwaitingRequest".to_string(),
 State::AwaitingResponse { handler, .. } => {
-format!("AwaitingResponse({})", handler)
+format!("AwaitingResponse({handler})")
 }
 State::Failed => "Failed".to_string(),
 })
@@ -1327,7 +1327,7 @@ impl<S, Tx> Connection<S, Tx> {
 /// using `extra_state_info` as additional state information.
 fn update_state_metrics(&mut self, extra_state_info: impl Into<Option<String>>) {
 let current_metrics_state = if let Some(extra_state_info) = extra_state_info.into() {
-format!("{}::{}", self.state.command(), extra_state_info).into()
+format!("{}::{extra_state_info}", self.state.command()).into()
 } else {
 self.state.command()
 };

View File

@@ -50,7 +50,7 @@ async fn connection_run_loop_ok() {
 assert_eq!(result, None);
 let error = shared_error_slot.try_get_error();
-assert!(error.is_none(), "unexpected error: {:?}", error);
+assert!(error.is_none(), "unexpected error: {error:?}");
 assert!(!client_tx.is_closed());
 assert!(!peer_tx.is_closed());
@@ -79,7 +79,7 @@ async fn connection_run_loop_spawn_ok() {
 let mut connection_join_handle = tokio::spawn(connection.run(peer_rx));
 let error = shared_error_slot.try_get_error();
-assert!(error.is_none(), "unexpected error: {:?}", error);
+assert!(error.is_none(), "unexpected error: {error:?}");
 assert!(!client_tx.is_closed());
 assert!(!peer_tx.is_closed());
@@ -159,7 +159,7 @@ async fn connection_run_loop_message_ok() {
 );
 let error = shared_error_slot.try_get_error();
-assert!(error.is_none(), "unexpected error: {:?}", error);
+assert!(error.is_none(), "unexpected error: {error:?}");
 assert!(!client_tx.is_closed());
 assert!(!peer_tx.is_closed());
@@ -625,7 +625,7 @@ async fn connection_run_loop_receive_timeout() {
 // Receive timeouts don't close the connection
 let error = shared_error_slot.try_get_error();
-assert!(error.is_none(), "unexpected error: {:?}", error);
+assert!(error.is_none(), "unexpected error: {error:?}");
 assert!(!client_tx.is_closed());
 assert!(!peer_tx.is_closed());

View File

@@ -135,7 +135,7 @@ impl PeerError {
 PeerError::ConnectionSendTimeout => "ConnectionSendTimeout".into(),
 PeerError::ConnectionReceiveTimeout => "ConnectionReceiveTimeout".into(),
 // TODO: add error kinds or summaries to `SerializationError`
-PeerError::Serialization(inner) => format!("Serialization({})", inner).into(),
+PeerError::Serialization(inner) => format!("Serialization({inner})").into(),
 PeerError::DuplicateHandshake => "DuplicateHandshake".into(),
 PeerError::Overloaded => "Overloaded".into(),
 PeerError::NotFoundResponse(_) => "NotFoundResponse".into(),

View File

@@ -483,10 +483,10 @@ pub(crate) async fn open_listener(config: &Config) -> (TcpListener, SocketAddr)
 let listener = match listener_result {
 Ok(l) => l,
 Err(e) => panic!(
-"Opening Zcash network protocol listener {:?} failed: {:?}. \
+"Opening Zcash network protocol listener {:?} failed: {e:?}. \
 Hint: Check if another zebrad or zcashd process is running. \
 Try changing the network listen_addr in the Zebra config.",
-config.listen_addr, e,
+config.listen_addr,
 ),
 };
@@ -530,7 +530,7 @@ where
 next_handshake_res = handshakes.next() => match next_handshake_res {
 // The task has already sent the peer change to the peer set.
 Some(Ok(_)) => continue,
-Some(Err(task_panic)) => panic!("panic in inbound handshake task: {:?}", task_panic),
+Some(Err(task_panic)) => panic!("panic in inbound handshake task: {task_panic:?}"),
 None => unreachable!("handshakes never terminates, because it contains a future that never resolves"),
 },
@@ -765,7 +765,7 @@ where
 .map(move |res| match res {
 Ok(crawler_action) => crawler_action,
 Err(e) => {
-panic!("panic during handshaking with {:?}: {:?} ", candidate, e);
+panic!("panic during handshaking with {candidate:?}: {e:?} ");
 }
 })
 .in_current_span();

View File

@@ -49,7 +49,7 @@ fn parses_msg_addr_v1_ip() {
 {
 let deserialized: Message = codec
 .read_addr(&mut addr_v1_bytes.as_slice())
-.unwrap_or_else(|_| panic!("failed to deserialize AddrV1 case {}", case_idx));
+.unwrap_or_else(|_| panic!("failed to deserialize AddrV1 case {case_idx}"));
 if let Message::Addr(addrs) = deserialized {
 assert!(
@@ -114,7 +114,7 @@ fn parses_msg_addr_v1_empty() {
 {
 let deserialized: Message = codec
 .read_addr(&mut addr_v1_bytes.as_slice())
-.unwrap_or_else(|_| panic!("failed to deserialize AddrV1 case {}", case_idx));
+.unwrap_or_else(|_| panic!("failed to deserialize AddrV1 case {case_idx}"));
 if let Message::Addr(addrs) = deserialized {
 assert!(
@@ -148,7 +148,7 @@ fn parses_msg_addr_v2_ip() {
 {
 let deserialized: Message = codec
 .read_addrv2(&mut addr_v2_bytes.as_slice())
-.unwrap_or_else(|_| panic!("failed to deserialize AddrV2 case {}", case_idx));
+.unwrap_or_else(|_| panic!("failed to deserialize AddrV2 case {case_idx}"));
 if let Message::Addr(addrs) = deserialized {
 assert!(
@@ -238,7 +238,7 @@ fn parses_msg_addr_v2_empty() {
 {
 let deserialized: Message = codec
 .read_addrv2(&mut addr_v2_bytes.as_slice())
-.unwrap_or_else(|_| panic!("failed to deserialize AddrV2 case {}", case_idx));
+.unwrap_or_else(|_| panic!("failed to deserialize AddrV2 case {case_idx}"));
 if let Message::Addr(addrs) = deserialized {
 assert!(

View File

@@ -103,7 +103,7 @@ impl fmt::Display for Response {
 .unwrap_or_else(|| "None".into()),
 block.hash(),
 ),
-Missing(hash) => format!("Block {{ missing: {} }}", hash),
+Missing(hash) => format!("Block {{ missing: {hash} }}"),
 }
 }
 Response::Blocks(blocks) => format!(

View File

@@ -1049,7 +1049,7 @@ impl AddressStrings {
 .into_iter()
 .map(|address| {
 address.parse().map_err(|error| {
-Error::invalid_params(&format!("invalid address {address:?}: {error}"))
+Error::invalid_params(format!("invalid address {address:?}: {error}"))
 })
 })
 .collect::<Result<_>>()?;

View File

@@ -259,8 +259,8 @@ impl ChainTipSender {
 let height = tip.as_ref().map(|block| block.height);
 let hash = tip.as_ref().map(|block| block.hash);
-span.record(format!("{}_height", prefix).as_str(), &field::debug(height));
-span.record(format!("{}_hash", prefix).as_str(), &field::debug(hash));
+span.record(format!("{prefix}_height").as_str(), &field::debug(height));
+span.record(format!("{prefix}_hash").as_str(), &field::debug(hash));
 }
 }

View File

@@ -187,7 +187,7 @@
 .expect("block should deserialize");
 make_distinct_nullifiers(
-&mut joinsplit1
+joinsplit1
 .nullifiers
 .iter_mut()
 .chain(joinsplit2.nullifiers.iter_mut()),
@@ -244,7 +244,7 @@
 .expect("block should deserialize");
 make_distinct_nullifiers(
-&mut joinsplit1
+joinsplit1
 .nullifiers
 .iter_mut()
 .chain(joinsplit2.nullifiers.iter_mut()),
@@ -307,7 +307,7 @@
 .expect("block should deserialize");
 make_distinct_nullifiers(
-&mut joinsplit1
+joinsplit1
 .nullifiers
 .iter_mut()
 .chain(joinsplit2.nullifiers.iter_mut()),

View File

@@ -102,7 +102,7 @@ fn test_raw_rocksdb_column_families_with_network(network: Network) {
 .expect("test block is valid");
 let mut settings = insta::Settings::clone_current();
-settings.set_snapshot_suffix(format!("{}_{}", net_suffix, height));
+settings.set_snapshot_suffix(format!("{net_suffix}_{height}"));
 settings.bind(|| snapshot_raw_rocksdb_column_family_data(&state.db, &cf_names));
 }
@@ -141,12 +141,12 @@ fn snapshot_raw_rocksdb_column_family_data(db: &DiskDb, original_cf_names: &[Str
 assert_eq!(cf_data.len(), 0, "default column family is never used");
 } else if cf_data.is_empty() {
 // distinguish column family names from empty column families
-empty_column_families.push(format!("{}: no entries", cf_name));
+empty_column_families.push(format!("{cf_name}: no entries"));
 } else {
 // The note commitment tree snapshots will change if the trees do not have cached roots.
 // But we expect them to always have cached roots,
 // because those roots are used to populate the anchor column families.
-insta::assert_ron_snapshot!(format!("{}_raw_data", cf_name), cf_data);
+insta::assert_ron_snapshot!(format!("{cf_name}_raw_data"), cf_data);
 }
 let raw_cf_iter: rocksdb::DBRawIteratorWithThreadMode<DB> = cf_iter.into();

View File

@@ -196,7 +196,7 @@ fn test_block_and_transaction_data_with_network(network: Network) {
 .expect("test block is valid");
 let mut settings = insta::Settings::clone_current();
-settings.set_snapshot_suffix(format!("{}_{}", net_suffix, height));
+settings.set_snapshot_suffix(format!("{net_suffix}_{height}"));
 settings.bind(|| snapshot_block_and_transaction_data(&state));
 settings.bind(|| snapshot_transparent_address_data(&state, height));

View File

@@ -527,7 +527,7 @@ fn commitment_is_validated_for_network_upgrade(network: Network, network_upgrade
 crate::ValidateContextError::InvalidBlockCommitment(
 zebra_chain::block::CommitmentError::InvalidChainHistoryActivationReserved { .. },
 ) => {},
-_ => panic!("Error must be InvalidBlockCommitment::InvalidChainHistoryActivationReserved instead of {:?}", err),
+_ => panic!("Error must be InvalidBlockCommitment::InvalidChainHistoryActivationReserved instead of {err:?}"),
 };
 // Test committing the Heartwood activation block with the correct commitment

View File

@@ -60,7 +60,7 @@ impl CommandExt for Command {
 /// wrapper for `status` fn on `Command` that constructs informative error
 /// reports
 fn status2(&mut self) -> Result<TestStatus, Report> {
-let cmd = format!("{:?}", self);
+let cmd = format!("{self:?}");
 let status = self.status();
 let command = || cmd.clone().header("Command:");
@@ -79,19 +79,19 @@ impl CommandExt for Command {
 let output = output
 .wrap_err("failed to execute process")
-.with_section(|| format!("{:?}", self).header("Command:"))?;
+.with_section(|| format!("{self:?}").header("Command:"))?;
 Ok(TestOutput {
 dir: None,
 output,
-cmd: format!("{:?}", self),
+cmd: format!("{self:?}"),
 })
 }
 /// wrapper for `spawn` fn on `Command` that constructs informative error
 /// reports
 fn spawn2<T>(&mut self, dir: T) -> Result<TestChild<T>, Report> {
-let cmd = format!("{:?}", self);
+let cmd = format!("{self:?}");
 let child = self.spawn();
 let child = child
@@ -321,11 +321,11 @@ where
 if bypass_test_capture {
 // Send lines directly to the terminal (or process stdout file redirect).
 #[allow(clippy::explicit_write)]
-writeln!(std::io::stdout(), "{}", line).unwrap();
+writeln!(std::io::stdout(), "{line}").unwrap();
 } else {
 // If the test fails, the test runner captures and displays this output.
 // To show this output unconditionally, use `cargo test -- --nocapture`.
-println!("{}", line);
+println!("{line}");
 }
 // Some OSes require a flush to send all output to the terminal.
@@ -1070,7 +1070,7 @@ impl<T> TestOutput<T> {
 output_name,
 format!("contain {}", err_msg.to_string()),
 )
-.with_section(|| format!("{:?}", s).header("Match String:"))
+.with_section(|| format!("{s:?}").header("Match String:"))
 }
 /// Tests if standard output contains `s`.
@@ -1082,7 +1082,7 @@ impl<T> TestOutput<T> {
 "stdout",
 "contain the given string",
 )
-.with_section(|| format!("{:?}", s).header("Match String:"))
+.with_section(|| format!("{s:?}").header("Match String:"))
 }
 /// Tests if standard output matches `regex`.
@@ -1100,7 +1100,7 @@ impl<T> TestOutput<T> {
 "stdout",
 "matched the given regex",
 )
-.with_section(|| format!("{:?}", regex).header("Match Regex:"))
+.with_section(|| format!("{regex:?}").header("Match Regex:"))
 }
 /// Tests if any lines in standard output contain `s`.
@@ -1124,7 +1124,7 @@ impl<T> TestOutput<T> {
 "stdout",
 "matched the given regex",
 )
-.with_section(|| format!("{:?}", regex).header("Line Match Regex:"))
+.with_section(|| format!("{regex:?}").header("Line Match Regex:"))
 }
 /// Tests if standard error contains `s`.
@@ -1136,7 +1136,7 @@ impl<T> TestOutput<T> {
 "stderr",
 "contain the given string",
 )
-.with_section(|| format!("{:?}", s).header("Match String:"))
+.with_section(|| format!("{s:?}").header("Match String:"))
 }
 /// Tests if standard error matches `regex`.
@@ -1154,7 +1154,7 @@ impl<T> TestOutput<T> {
 "stderr",
 "matched the given regex",
 )
-.with_section(|| format!("{:?}", regex).header("Match Regex:"))
+.with_section(|| format!("{regex:?}").header("Match Regex:"))
 }
 /// Tests if any lines in standard error contain `s`.
@@ -1178,7 +1178,7 @@ impl<T> TestOutput<T> {
 "stderr",
 "matched the given regex",
 )
-.with_section(|| format!("{:?}", regex).header("Line Match Regex:"))
+.with_section(|| format!("{regex:?}").header("Line Match Regex:"))
 }
 /// Returns Ok if the program was killed, Err(Report) if exit was by another
@@ -1274,9 +1274,9 @@ impl<T> ContextFrom<&mut TestChild<T>> for Report {
 if let Some(stdout) = &mut source.stdout {
 for line in stdout {
 let line = line.unwrap_or_else(|error| {
-format!("failure reading test process logs: {:?}", error)
+format!("failure reading test process logs: {error:?}")
 });
-let _ = writeln!(&mut stdout_buf, "{}", line);
+let _ = writeln!(&mut stdout_buf, "{line}");
 }
 } else if let Some(child) = &mut source.child {
 if let Some(stdout) = &mut child.stdout {
@@ -1287,9 +1287,9 @@ impl<T> ContextFrom<&mut TestChild<T>> for Report {
 if let Some(stderr) = &mut source.stderr {
 for line in stderr {
 let line = line.unwrap_or_else(|error| {
-format!("failure reading test process logs: {:?}", error)
+format!("failure reading test process logs: {error:?}")
 });
-let _ = writeln!(&mut stderr_buf, "{}", line);
+let _ = writeln!(&mut stderr_buf, "{line}");
 }
 } else if let Some(child) = &mut source.child {
 if let Some(stderr) = &mut child.stderr {
@@ -1344,14 +1344,14 @@ impl ContextFrom<&ExitStatus> for Report {
 if let Some(code) = source.code() {
 return self.with_section(|| {
-format!("command exited {} with status code {}", how, code).header("Exit Status:")
+format!("command exited {how} with status code {code}").header("Exit Status:")
 });
 }
 #[cfg(unix)]
 if let Some(signal) = source.signal() {
 self.with_section(|| {
-format!("command terminated {} by signal {}", how, signal).header("Exit Status:")
+format!("command terminated {how} by signal {signal}").header("Exit Status:")
 })
 } else {
 unreachable!("on unix all processes either terminate via signal or with an exit code");

View File

@@ -114,18 +114,17 @@ where
 Err(eyre!(
 "response doesn't match transcript's expected response"
 ))
-.with_section(|| format!("{:?}", expected_rsp).header("Expected Response:"))
-.with_section(|| format!("{:?}", rsp).header("Found Response:"))?;
+.with_section(|| format!("{expected_rsp:?}").header("Expected Response:"))
+.with_section(|| format!("{rsp:?}").header("Found Response:"))?;
 }
 }
 (Ok(rsp), Err(error_checker)) => {
 let error = Err(eyre!("received a response when an error was expected"))
-.with_section(|| format!("{:?}", rsp).header("Found Response:"));
+.with_section(|| format!("{rsp:?}").header("Found Response:"));
 let error = match std::panic::catch_unwind(|| error_checker.mock()) {
-Ok(expected_err) => error.with_section(|| {
-format!("{:?}", expected_err).header("Expected Error:")
-}),
+Ok(expected_err) => error
+.with_section(|| format!("{expected_err:?}").header("Expected Error:")),
 Err(pi) => {
 let payload = pi
 .downcast_ref::<String>()
@@ -144,9 +143,7 @@ where
 (Err(e), Ok(expected_rsp)) => {
 Err(eyre!("received an error when a response was expected"))
 .with_error(|| ErrorCheckerError(e.into()))
-.with_section(|| {
-format!("{:?}", expected_rsp).header("Expected Response:")
-})?
+.with_section(|| format!("{expected_rsp:?}").header("Expected Response:"))?
 }
 (Err(e), Err(error_checker)) => {
 error_checker.check(e.into())?;
@@ -182,9 +179,9 @@ where
 ready(
 Err(eyre!("received unexpected request"))
 .with_section(|| {
-format!("{:?}", expected_request).header("Expected Request:")
+format!("{expected_request:?}").header("Expected Request:")
 })
-.with_section(|| format!("{:?}", request).header("Found Request:")),
+.with_section(|| format!("{request:?}").header("Found Request:")),
 )
 }
 }

View File

@@ -209,7 +209,7 @@ fn failure_regex_matches_stdout_failure_message() {
 .expect_stdout_line_matches("this regex should not match")
 .unwrap_err();
-let expected_error = format!("{:?}", expected_error);
+let expected_error = format!("{expected_error:?}");
 assert!(
 expected_error.contains("Logged a failure message"),
 "error did not contain expected failure message: {}",
@@ -248,7 +248,7 @@ fn failure_regex_matches_stderr_failure_message() {
 .expect_stderr_line_matches("this regex should not match")
 .unwrap_err();
-let expected_error = format!("{:?}", expected_error);
+let expected_error = format!("{expected_error:?}");
 assert!(
 expected_error.contains("Logged a failure message"),
 "error did not contain expected failure message: {}",
@@ -314,7 +314,7 @@ fn failure_regex_reads_multi_line_output_on_expect_line() {
 .expect_stdout_line_matches("this regex should not match")
 .unwrap_err();
-let expected_error = format!("{:?}", expected_error);
+let expected_error = format!("{expected_error:?}");
 assert!(
 expected_error.contains(
 "\
@@ -478,7 +478,7 @@ fn failure_regex_timeout_continuous_output() {
 .expect_stdout_line_matches("this regex should not match")
 .unwrap_err();
-let expected_error = format!("{:?}", expected_error);
+let expected_error = format!("{expected_error:?}");
 assert!(
 expected_error.contains("Logged a failure message"),
 "error did not contain expected failure message: {}",
@@ -544,7 +544,7 @@ fn failure_regex_iter_matches_stdout_failure_message() {
 .expect_stdout_line_matches("this regex should not match")
 .unwrap_err();
-let expected_error = format!("{:?}", expected_error);
+let expected_error = format!("{expected_error:?}");
 assert!(
 expected_error.contains("Logged a failure message"),
 "error did not contain expected failure message: {}",

View File

@@ -124,7 +124,7 @@ fn main() -> Result<()> {
 || height_gap.0 >= zebra_consensus::MAX_CHECKPOINT_HEIGHT_GAP as u32
 {
 // print to output
-println!("{} {}", height.0, hash);
+println!("{} {hash}", height.0);
 // reset counters
 cumulative_bytes = 0;

View File

@@ -79,7 +79,7 @@ pub fn app_version() -> Version {
 // it's the "git semver" format, which doesn't quite match SemVer 2.0
 [hash, commit_count, tag] => {
-let semver_fix = format!("{}+{}.{}", tag, commit_count, hash);
+let semver_fix = format!("{tag}+{commit_count}.{hash}");
 semver_fix.parse().unwrap_or_else(|_|
 panic!("Modified VERGEN_GIT_SEMVER {:?} -> {:?} -> {:?} must be valid. Note: CARGO_PKG_VERSION was {:?}.",
 vergen_git_semver,
@@ -280,7 +280,7 @@ impl Application for ZebradApp {
 let mut metadata_section = "Metadata:".to_string();
 for (k, v) in panic_metadata {
 builder = builder.add_issue_metadata(k, v.clone());
-write!(&mut metadata_section, "\n{}: {}", k, &v)
+write!(&mut metadata_section, "\n{k}: {}", &v)
 .expect("unexpected failure writing to string");
 }
@@ -340,7 +340,7 @@ impl Application for ZebradApp {
 std::panic::set_hook(Box::new(move |panic_info| {
 let panic_report = panic_hook.panic_report(panic_info);
-eprintln!("{}", panic_report);
+eprintln!("{panic_report}");
 #[cfg(feature = "sentry")]
 {
@@ -362,7 +362,7 @@ impl Application for ZebradApp {
 // when that crate is being used by itself?
 rayon::ThreadPoolBuilder::new()
 .num_threads(config.sync.parallel_cpu_threads)
-.thread_name(|thread_index| format!("rayon {}", thread_index))
+.thread_name(|thread_index| format!("rayon {thread_index}"))
 .build_global()
 .expect("unable to initialize rayon thread pool");

View File

@@ -54,7 +54,7 @@ impl Runnable for GenerateCmd {
 .expect("must be able to write output");
 }
 None => {
-println!("{}", output);
+println!("{output}");
 }
 }
 }

View File

@@ -107,7 +107,7 @@ impl EvictionList {
 let evicted_at = self
 .unique_entries
 .get(txid)
-.unwrap_or_else(|| panic!("all entries should exist in both ordered_entries and unique_entries, missing {:?} in unique_entries", txid));
+.unwrap_or_else(|| panic!("all entries should exist in both ordered_entries and unique_entries, missing {txid:?} in unique_entries"));
 if self.has_expired(evicted_at) {
 self.pop_front();
 } else {

View File

@@ -261,7 +261,7 @@ async fn mempool_queue_single() -> Result<(), Report> {
 evicted_count += 1
 }
 MempoolError::InMempool => in_mempool_count += 1,
-error => panic!("transaction should not be rejected with reason {:?}", error),
+error => panic!("transaction should not be rejected with reason {error:?}"),
 }
 }
 assert_eq!(in_mempool_count, transactions.len() - 1);

View File

@@ -1120,13 +1120,13 @@ where
 BlockDownloadVerifyError::Invalid {
 error: VerifyChainError::Block(VerifyBlockError::Commit(ref source)),
 ..
-} if format!("{:?}", source).contains("block is already committed to the state") => {
+} if format!("{source:?}").contains("block is already committed to the state") => {
 // TODO: improve this by checking the type (#2908)
 debug!(error = ?e, "block is already committed, possibly from a previous sync run, continuing");
 false
 }
 BlockDownloadVerifyError::DownloadFailed { ref error, .. }
-if format!("{:?}", error).contains("NotFound") =>
+if format!("{error:?}").contains("NotFound") =>
 {
 // Covers these errors:
 // - NotFoundResponse
@@ -1148,7 +1148,7 @@ where
 //
 // TODO: add a proper test and remove this
 // https://github.com/ZcashFoundation/zebra/issues/2909
-let err_str = format!("{:?}", e);
+let err_str = format!("{e:?}");
 if err_str.contains("AlreadyVerified")
 || err_str.contains("AlreadyInChain")
 || err_str.contains("block is already committed to the state")

View File

@@ -445,7 +445,7 @@ where
 let short_timeout_max = (max_checkpoint_height + FINAL_CHECKPOINT_BLOCK_VERIFY_TIMEOUT_LIMIT).expect("checkpoint block height is in valid range");
 if block_height >= max_checkpoint_height && block_height <= short_timeout_max {
 rsp = timeout(FINAL_CHECKPOINT_BLOCK_VERIFY_TIMEOUT, rsp)
-.map_err(|timeout| format!("initial fully verified block timed out: retrying: {:?}", timeout).into())
+.map_err(|timeout| format!("initial fully verified block timed out: retrying: {timeout:?}").into())
 .map(|nested_result| nested_result.and_then(convert::identity)).boxed();
 }

View File

@@ -232,7 +232,7 @@ pub async fn show_block_chain_progress(
 );
 }
 } else {
-let sync_percent = format!("{:.frac$} %", 0.0f64, frac = SYNC_PERCENT_FRAC_DIGITS,);
+let sync_percent = format!("{:.SYNC_PERCENT_FRAC_DIGITS$} %", 0.0f64,);
 if is_syncer_stopped {
 // We've stopped syncing blocks,

View File

@@ -970,7 +970,7 @@ fn sync_large_checkpoints_mempool_mainnet() -> Result<()> {
 #[tracing::instrument]
 fn create_cached_database(network: Network) -> Result<()> {
 let height = network.mandatory_checkpoint_height();
-let checkpoint_stop_regex = format!("{}.*CommitFinalized request", STOP_AT_HEIGHT_REGEX);
+let checkpoint_stop_regex = format!("{STOP_AT_HEIGHT_REGEX}.*CommitFinalized request");
 create_cached_database_height(
 network,
@@ -988,7 +988,7 @@ fn create_cached_database(network: Network) -> Result<()> {
 fn sync_past_mandatory_checkpoint(network: Network) -> Result<()> {
 let height = network.mandatory_checkpoint_height() + 1200;
 let full_validation_stop_regex =
-format!("{}.*best non-finalized chain root", STOP_AT_HEIGHT_REGEX);
+format!("{STOP_AT_HEIGHT_REGEX}.*best non-finalized chain root");
 create_cached_database_height(
 network,
@@ -1126,8 +1126,8 @@ async fn metrics_endpoint() -> Result<()> {
 // [Note on port conflict](#Note on port conflict)
 let port = random_known_port();
-let endpoint = format!("127.0.0.1:{}", port);
-let url = format!("http://{}", endpoint);
+let endpoint = format!("127.0.0.1:{port}");
+let url = format!("http://{endpoint}");
 // Write a configuration that has metrics endpoint_addr set
 let mut config = default_test_config()?;
@@ -1163,7 +1163,7 @@ async fn metrics_endpoint() -> Result<()> {
 std::str::from_utf8(&body).expect("unexpected invalid UTF-8 in metrics exporter response");
 // Make sure metrics was started
-output.stdout_line_contains(format!("Opened metrics endpoint at {}", endpoint).as_str())?;
+output.stdout_line_contains(format!("Opened metrics endpoint at {endpoint}").as_str())?;
 // [Note on port conflict](#Note on port conflict)
 output
@@ -1182,9 +1182,9 @@ async fn tracing_endpoint() -> Result<()> {
 // [Note on port conflict](#Note on port conflict)
 let port = random_known_port();
-let endpoint = format!("127.0.0.1:{}", port);
-let url_default = format!("http://{}", endpoint);
-let url_filter = format!("{}/filter", url_default);
+let endpoint = format!("127.0.0.1:{port}");
+let url_default = format!("http://{endpoint}");
+let url_filter = format!("{url_default}/filter");
 // Write a configuration that has tracing endpoint_addr option set
 let mut config = default_test_config()?;
@@ -1230,7 +1230,7 @@ async fn tracing_endpoint() -> Result<()> {
 let output = output.assert_failure()?;
 // Make sure tracing endpoint was started
-output.stdout_line_contains(format!("Opened tracing endpoint at {}", endpoint).as_str())?;
+output.stdout_line_contains(format!("Opened tracing endpoint at {endpoint}").as_str())?;
 // TODO: Match some trace level messages from output
 // Make sure the endpoint header is correct
@@ -1333,11 +1333,11 @@ async fn rpc_endpoint(parallel_cpu_threads: bool) -> Result<()> {
 // Check that we have at least 4 characters in the `build` field.
 let build = parsed["result"]["build"].as_str().unwrap();
-assert!(build.len() > 4, "Got {}", build);
+assert!(build.len() > 4, "Got {build}");
 // Check that the `subversion` field has "Zebra" in it.
 let subversion = parsed["result"]["subversion"].as_str().unwrap();
-assert!(subversion.contains("Zebra"), "Got {}", subversion);
+assert!(subversion.contains("Zebra"), "Got {subversion}");
 child.kill(false)?;
@@ -1755,7 +1755,7 @@ fn zebra_zcash_listener_conflict() -> Result<()> {
 // [Note on port conflict](#Note on port conflict)
 let port = random_known_port();
-let listen_addr = format!("127.0.0.1:{}", port);
+let listen_addr = format!("127.0.0.1:{port}");
 // Write a configuration that has our created network listen_addr
 let mut config = default_test_config()?;
@@ -1787,13 +1787,13 @@ fn zebra_metrics_conflict() -> Result<()> {
 // [Note on port conflict](#Note on port conflict)
 let port = random_known_port();
-let listen_addr = format!("127.0.0.1:{}", port);
+let listen_addr = format!("127.0.0.1:{port}");
 // Write a configuration that has our created metrics endpoint_addr
 let mut config = default_test_config()?;
 config.metrics.endpoint_addr = Some(listen_addr.parse().unwrap());
 let dir1 = testdir()?.with_config(&mut config)?;
-let regex1 = regex::escape(&format!(r"Opened metrics endpoint at {}", listen_addr));
+let regex1 = regex::escape(&format!(r"Opened metrics endpoint at {listen_addr}"));
 // From another folder create a configuration with the same endpoint.
 // `metrics.endpoint_addr` will be the same in the 2 nodes.
@@ -1816,13 +1816,13 @@ fn zebra_tracing_conflict() -> Result<()> {
 // [Note on port conflict](#Note on port conflict)
 let port = random_known_port();
-let listen_addr = format!("127.0.0.1:{}", port);
+let listen_addr = format!("127.0.0.1:{port}");
 // Write a configuration that has our created tracing endpoint_addr
 let mut config = default_test_config()?;
 config.tracing.endpoint_addr = Some(listen_addr.parse().unwrap());
 let dir1 = testdir()?.with_config(&mut config)?;
-let regex1 = regex::escape(&format!(r"Opened tracing endpoint at {}", listen_addr));
+let regex1 = regex::escape(&format!(r"Opened tracing endpoint at {listen_addr}"));
 // From another folder create a configuration with the same endpoint.
 // `tracing.endpoint_addr` will be the same in the 2 nodes.

View File

@@ -185,7 +185,7 @@ where
 if !config.state.ephemeral {
 let cache_dir = dir.join("state");
-fs::create_dir_all(&cache_dir)?;
+fs::create_dir_all(cache_dir)?;
 } else {
 fs::create_dir_all(dir)?;
 }

View File

@@ -123,7 +123,7 @@ pub async fn run() -> Result<()> {
 ?zebra_rpc_address,
 "spawned isolated zebrad with shorter chain, waiting for zebrad to open its RPC port..."
 );
-zebrad.expect_stdout_line_matches(&format!("Opened RPC endpoint at {}", zebra_rpc_address))?;
+zebrad.expect_stdout_line_matches(&format!("Opened RPC endpoint at {zebra_rpc_address}"))?;
 tracing::info!(
 ?zebra_rpc_address,
@@ -174,7 +174,7 @@ pub async fn run() -> Result<()> {
 let expected_response = wallet_grpc::SendResponse {
 error_code: 0,
-error_message: format!("\"{}\"", transaction_hash),
+error_message: format!("\"{transaction_hash}\""),
 };
 tracing::info!(?transaction_hash, "sending transaction...");

View File

@@ -107,7 +107,7 @@ pub async fn run() -> Result<()> {
 ?zebra_rpc_address,
 "launched zebrad, waiting for zebrad to open its RPC port..."
 );
-zebrad.expect_stdout_line_matches(&format!("Opened RPC endpoint at {}", zebra_rpc_address))?;
+zebrad.expect_stdout_line_matches(&format!("Opened RPC endpoint at {zebra_rpc_address}"))?;
 tracing::info!(
 ?zebra_rpc_address,

View File

@@ -227,7 +227,7 @@ pub fn sync_until(
 let mut child = tempdir.spawn_child(args!["start"])?.with_timeout(timeout);
-let network = format!("network: {},", network);
+let network = format!("network: {network},");
 if mempool_behavior.require_activation() {
 // require that the mempool activated,
@@ -383,7 +383,7 @@ pub fn create_cached_database_height(
 .with_timeout(FINISH_FULL_SYNC_TIMEOUT)
 .bypass_test_capture(true);
-let network = format!("network: {},", network);
+let network = format!("network: {network},");
 child.expect_stdout_line_matches(&network)?;
 child.expect_stdout_line_matches("starting legacy chain check")?;