fix check on max request size (#4250)

realbigsean 2023-05-02 19:14:02 -04:00 committed by GitHub
parent c1d47da02d
commit 9db6b39dc3

@@ -625,7 +625,7 @@ impl<T: BeaconChainTypes> Worker<T> {
         );
         // Should not send more than max request blocks
-        if req.count > MAX_REQUEST_BLOB_SIDECARS {
+        if req.count * T::EthSpec::max_blobs_per_block() as u64 > MAX_REQUEST_BLOB_SIDECARS {
             return self.send_error_response(
                 peer_id,
                 RPCResponseErrorCode::InvalidRequest,
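
For context on the first hunk: a BlobsByRange request's `count` is a number of blocks, and each block can carry up to `max_blobs_per_block()` blob sidecars, so the size check has to bound the product rather than the raw block count; otherwise a peer could legitimately-looking request far more sidecars than `MAX_REQUEST_BLOB_SIDECARS` is meant to allow. The standalone sketch below illustrates that bound under assumed values; the constants, the `BlobsByRangeRequest` stand-in, and the `exceeds_max_request_size` helper are hypothetical, not Lighthouse's actual definitions, and the sketch uses `saturating_mul` defensively where the patch uses a plain multiply.

// A minimal sketch of the corrected bound, with illustrative constants
// (the real values come from the chain spec / EthSpec in Lighthouse).
const MAX_BLOBS_PER_BLOCK: u64 = 6;
const MAX_REQUEST_BLOB_SIDECARS: u64 = 768;

// Simplified stand-in for the BlobsByRange request.
#[allow(dead_code)] // start_slot is unused in this sketch
struct BlobsByRangeRequest {
    start_slot: u64,
    count: u64, // number of slots/blocks requested, not blobs
}

// Returns true if the request could yield more blob sidecars than a single
// response is allowed to carry.
fn exceeds_max_request_size(req: &BlobsByRangeRequest) -> bool {
    // `count` is a block count; each block may carry up to
    // MAX_BLOBS_PER_BLOCK sidecars, so the limit applies to the product.
    req.count.saturating_mul(MAX_BLOBS_PER_BLOCK) > MAX_REQUEST_BLOB_SIDECARS
}

fn main() {
    // 200 blocks * 6 blobs = 1200 potential sidecars > 768: reject.
    let too_big = BlobsByRangeRequest { start_slot: 0, count: 200 };
    assert!(exceeds_max_request_size(&too_big));

    // 100 blocks * 6 blobs = 600 <= 768: allow.
    let ok = BlobsByRangeRequest { start_slot: 0, count: 100 };
    assert!(!exceeds_max_request_size(&ok));
    println!("bound check behaves as expected");
}
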
@@ -808,28 +808,15 @@ impl<T: BeaconChainTypes> Worker<T> {
             .slot()
             .unwrap_or_else(|_| self.chain.slot_clock.genesis_slot());
-        if blobs_sent < (req.count as usize) {
-            debug!(
-                self.log,
-                "BlobsByRange Response processed";
-                "peer" => %peer_id,
-                "msg" => "Failed to return all requested blobs",
-                "start_slot" => req.start_slot,
-                "current_slot" => current_slot,
-                "requested" => req.count,
-                "returned" => blobs_sent
-            );
-        } else {
-            debug!(
-                self.log,
-                "BlobsByRange Response processed";
-                "peer" => %peer_id,
-                "start_slot" => req.start_slot,
-                "current_slot" => current_slot,
-                "requested" => req.count,
-                "returned" => blobs_sent
-            );
-        }
+        debug!(
+            self.log,
+            "BlobsByRange Response processed";
+            "peer" => %peer_id,
+            "start_slot" => req.start_slot,
+            "current_slot" => current_slot,
+            "requested" => req.count,
+            "returned" => blobs_sent
+        );
         if send_response {
             // send the stream terminator