Relay submissions more metrics #672
base: develop
Changes from all commits
4333d7c
7cd01b3
62ea801
0f10046
e24ff94
ef2f64c
d58a92e
@@ -5,6 +5,8 @@ pub mod rpc;
 pub mod sign_payload;
 pub mod submission;
 
+use crate::utils::{offset_datetime_to_timestamp_us, timestamp_now_us};
+
 use super::utils::u256decimal_serde_helper;
 
 use alloy_primitives::{Address, BlockHash, Bytes, U256};
@@ -18,8 +20,13 @@ use reqwest::{
 use serde::{Deserialize, Serialize};
 use serde_with::{serde_as, DisplayFromStr};
 use ssz::Encode;
-use std::{io::Write, str::FromStr};
+use std::{
+    io::Write,
+    str::FromStr,
+    time::{Duration, Instant},
+};
 use submission::{SubmitBlockRequestNoBlobs, SubmitBlockRequestWithMetadata};
+use tokio::task::spawn_blocking;
 use url::Url;
 
 pub use error::*;
@@ -28,6 +35,8 @@ pub use sign_payload::*;
 const TOTAL_PAYMENT_HEADER: &str = "Total-Payment";
 const BUNDLE_HASHES_HEADER: &str = "Bundle-Hashes";
 const TOP_BID_HEADER: &str = "Top-Bid";
+const SUBMIT_START_TIME_US: &str = "Submit-Start-Time-Us";
+const BLOCK_SEAL_TIME_US: &str = "Block-Seal-Time-Us";
 const BLOXROUTE_SHARE_HEADER: &str = "share";
 const BLOXROUTE_BUILDER_VALUE_HEADER: &str = "builder-value";
 
@@ -357,6 +366,22 @@ impl std::fmt::Debug for SubmitBlockErr {
     }
 }
 
+#[derive(Debug, Clone, Default)]
+pub struct RelaySubmitStats {
+    /// time spent between starting the submission and sending the request
+    pub send_preparation_time: Duration,
+    /// time spent gzip-compressing the payload; included in send_preparation_time
+    pub send_compression_time: Duration,
+    /// time spent writing the request
+    pub send_write_request_time: Duration,
+    /// time spent reading the response
+    pub send_read_request_time: Duration,
+    /// size in bytes of the original payload (before compression, if enabled)
+    pub original_payload_size: usize,
+    /// size in bytes of the sent payload (after compression, if enabled)
+    pub sent_payload_size: usize,
+}
+
 // Data API
 impl RelayClient {
     async fn get_one_delivered_payload(
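The new struct bundles the per-submission timings and payload sizes that the client now collects. As a rough illustration of how a caller might surface them (not part of this diff; the tracing-based logger and its field labels are assumptions):

```rust
// Hypothetical consumer of RelaySubmitStats; only the struct fields come from the PR.
fn log_submit_stats(relay: &str, stats: &RelaySubmitStats) {
    tracing::debug!(
        relay,
        preparation_us = stats.send_preparation_time.as_micros() as u64,
        compression_us = stats.send_compression_time.as_micros() as u64,
        write_us = stats.send_write_request_time.as_micros() as u64,
        read_us = stats.send_read_request_time.as_micros() as u64,
        original_bytes = stats.original_payload_size,
        sent_bytes = stats.sent_payload_size,
        "relay block submission finished"
    );
}
```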
@@ -508,7 +533,9 @@ impl RelayClient {
         gzip: bool,
         fake_relay: bool,
         cancellations: bool,
+        stats: &mut RelaySubmitStats,
     ) -> Result<Response, SubmitBlockErr> {
+        let preparation_start = Instant::now();
         let url = {
             let mut url = self.url.clone();
             url.set_path("/relay/v1/builder/blocks");
@@ -544,20 +571,28 @@
         self.add_auth_headers(&mut headers)
             .map_err(|_| SubmitBlockErr::InvalidHeader)?;
 
+        stats.original_payload_size = body_data.len();
+        let compression_start = Instant::now();
         // GZIP
         if gzip {
             headers.insert(
                 CONTENT_ENCODING,
                 HeaderValue::from_static(GZIP_CONTENT_ENCODING),
             );
-            let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
-            encoder
-                .write_all(&body_data)
-                .map_err(|e| SubmitBlockErr::RPCSerializationError(e.to_string()))?;
-            body_data = encoder
-                .finish()
-                .map_err(|e| SubmitBlockErr::RPCSerializationError(e.to_string()))?;
+            body_data = spawn_blocking(move || {
+                let mut encoder = GzEncoder::new(Vec::new(), Compression::fast());
+                encoder
+                    .write_all(&body_data)
+                    .map_err(|e| SubmitBlockErr::RPCSerializationError(e.to_string()))?;
+                encoder
+                    .finish()
+                    .map_err(|e| SubmitBlockErr::RPCSerializationError(e.to_string()))
+            })
+            .await
+            .map_err(|e| SubmitBlockErr::RPCSerializationError(e.to_string()))??;
Copilot review comment (nitpick): The double question mark operator (??) is used to handle both the spawn_blocking error and the compression error. This creates nested error handling that could be clearer if split into separate statements or using a more explicit error-handling approach.
         }
+        stats.sent_payload_size = body_data.len();
+        stats.send_compression_time = compression_start.elapsed();
 
         // Set bloxroute specific headers.
         if self.is_bloxroute {
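The nested `??` flagged in the Copilot comment above handles two failure modes at once: the blocking task failing to join and gzip itself failing. A sketch of the suggested split, reusing the names and error type from the hunk (a drop-in fragment under the same context, not tested against the PR):

```rust
// Run compression on a blocking thread and keep the join result separate.
let join_result = spawn_blocking(move || {
    let mut encoder = GzEncoder::new(Vec::new(), Compression::fast());
    encoder
        .write_all(&body_data)
        .map_err(|e| SubmitBlockErr::RPCSerializationError(e.to_string()))?;
    encoder
        .finish()
        .map_err(|e| SubmitBlockErr::RPCSerializationError(e.to_string()))
})
.await;

// Step 1: the blocking task itself failed (panicked or was cancelled).
let compressed =
    join_result.map_err(|e| SubmitBlockErr::RPCSerializationError(e.to_string()))?;
// Step 2: compression returned an error.
body_data = compressed?;
```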
@@ -613,12 +648,24 @@
                 };
                 builder = builder.header(BUNDLE_HASHES_HEADER, bundle_ids);
             }
 
+            builder = builder
+                .header(SUBMIT_START_TIME_US, timestamp_now_us().to_string())
+                .header(
+                    BLOCK_SEAL_TIME_US,
+                    offset_datetime_to_timestamp_us(submission_with_metadata.metadata.sealed_at),
+                );
         }
 
-        Ok(builder
+        stats.send_preparation_time = preparation_start.elapsed();
+        let send_start = Instant::now();
+        let response = builder
             .send()
             .await
-            .map_err(|e| RelayError::RequestError(e.into()))?)
+            .map_err(|e| RelayError::RequestError(e.into()))?;
+        stats.send_write_request_time = send_start.elapsed();
+
+        Ok(response)
     }
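The two new headers carry microsecond Unix timestamps: Submit-Start-Time-Us is stamped while the request is being prepared and Block-Seal-Time-Us comes from the bid metadata's sealed_at. The helpers live in crate::utils and are not shown in this diff; a plausible shape, assuming the time crate's OffsetDateTime, might be:

```rust
use time::OffsetDateTime;

/// Microseconds since the Unix epoch for a given OffsetDateTime (sketch only).
pub fn offset_datetime_to_timestamp_us(dt: OffsetDateTime) -> u64 {
    (dt.unix_timestamp_nanos() / 1_000) as u64
}

/// Current wall-clock time in microseconds since the Unix epoch (sketch only).
pub fn timestamp_now_us() -> u64 {
    offset_datetime_to_timestamp_us(OffsetDateTime::now_utc())
}
```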
    /// Submits the block (call_relay_submit_block) and processes some special errors.
@@ -629,30 +676,34 @@
         gzip: bool,
         fake_relay: bool,
         cancellations: bool,
-    ) -> Result<(), SubmitBlockErr> {
+    ) -> Result<RelaySubmitStats, SubmitBlockErr> {
+        let mut stats = RelaySubmitStats::default();
         let resp = self
-            .call_relay_submit_block(data, ssz, gzip, fake_relay, cancellations)
+            .call_relay_submit_block(data, ssz, gzip, fake_relay, cancellations, &mut stats)
             .await?;
 
+        let read_start = Instant::now();
         let status = resp.status();
 
+        // always read full body to reuse TCP connection
+        let body_result = resp.bytes().await;
+        stats.send_read_request_time = read_start.elapsed();
+
         if status == StatusCode::TOO_MANY_REQUESTS {
             return Err(RelayError::TooManyRequests.into());
         }
         if status == StatusCode::GATEWAY_TIMEOUT {
             return Err(RelayError::ConnectionError.into());
         }
 
-        let data = resp
-            .bytes()
-            .await
-            .map_err(|e| RelayError::RequestError(e.into()))?;
+        let data = body_result.map_err(|e| RelayError::RequestError(e.into()))?;
 
         if status == StatusCode::OK && data.as_ref() == b"" {
-            return Ok(());
+            return Ok(stats);
         }
 
         match serde_json::from_slice::<RelayResponse<()>>(&data) {
-            Ok(RelayResponse::Ok(_)) => Ok(()),
+            Ok(RelayResponse::Ok(_)) => Ok(stats),
             Ok(RelayResponse::Error(error)) => {
                 let msg = error.message.as_str();
                 match msg {
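Since submit_block now returns Result<RelaySubmitStats, SubmitBlockErr> instead of Result<(), _>, call sites get the timings for free. A sketch of an adapted call site (the relay handle, argument names, and log fields are placeholders, not code from this PR):

```rust
// Hypothetical call site inside an async fn that propagates SubmitBlockErr.
let stats = relay
    .submit_block(&data, ssz, gzip, fake_relay, cancellations)
    .await?;
tracing::info!(
    original_bytes = stats.original_payload_size,
    sent_bytes = stats.sent_payload_size,
    preparation_time = ?stats.send_preparation_time,
    write_time = ?stats.send_write_request_time,
    read_time = ?stats.send_read_request_time,
    "block submitted to relay"
);
```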
@@ -690,11 +741,11 @@
                 // bloxroute returns empty response in this format which we handle here because its not valid
                 // jsonrpc response
                 if data.as_ref() == b"{}\n" {
-                    return Ok(());
+                    return Ok(stats);
                 }
                 let data_string = String::from_utf8_lossy(&data).to_string();
                 if is_ignorable_relay_error(status, &data_string) {
-                    Ok(())
+                    Ok(stats)
                 } else {
                     Err(RelayError::UnknownRelayError(status, data_string).into())
                 }
@@ -725,6 +776,7 @@
 #[cfg(test)]
 mod tests {
     use submission::{BidMetadata, BidValueMetadata};
+    use time::OffsetDateTime;
 
     use super::{rpc::TestDataGenerator, *};
     use crate::mev_boost::{
@@ -884,6 +936,7 @@ mod tests {
                     top_competitor_bid: None,
                 },
                 order_ids: vec![],
+                sealed_at: OffsetDateTime::now_utc(),
             },
         };
         relay
Copilot review comment: The spawn_blocking call moves the entire body_data Vec to the blocking thread, which could be inefficient for large payloads. Consider using a streaming approach or at least moving only the encoder setup to the blocking thread while keeping the data processing async.
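For reference, the pattern the comment refers to, reduced to a self-contained sketch with the error type simplified to io::Error (this is not the PR's exact code). Moving the Vec into the closure transfers only ownership of the buffer (pointer, length, capacity); the payload bytes themselves are not copied to the blocking thread:

```rust
use flate2::{write::GzEncoder, Compression};
use std::io::{self, Write};
use tokio::task::spawn_blocking;

/// Gzip a payload on a blocking thread so the async executor is not stalled.
async fn gzip_on_blocking_thread(body: Vec<u8>) -> io::Result<Vec<u8>> {
    let join_result = spawn_blocking(move || {
        let mut encoder = GzEncoder::new(Vec::new(), Compression::fast());
        encoder.write_all(&body)?;
        encoder.finish()
    })
    .await;
    // A join error means the blocking task panicked or was cancelled.
    join_result.map_err(|e| io::Error::new(io::ErrorKind::Other, e))?
}
```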