fix(api): fix bug about txPoolContentWithMinTip api #52

Merged: 5 commits, Oct 29, 2024
6 changes: 3 additions & 3 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion crates/ethereum/evm/src/execute.rs
@@ -379,7 +379,7 @@ where
// NOTE: we need to merge keep the reverts for the bundle retention
self.state.merge_transitions(BundleRetention::Reverts);

Ok(BlockExecutionOutput { state: self.state.take_bundle(), receipts, requests, gas_used })
Ok(BlockExecutionOutput { state: self.state.take_bundle(), receipts, requests, gas_used, target_list: vec![] })
}
}

1 change: 1 addition & 0 deletions crates/evm/Cargo.toml
@@ -19,6 +19,7 @@ revm-primitives.workspace = true
reth-prune-types.workspace = true
reth-storage-errors.workspace = true
reth-execution-types.workspace = true
alloy-rpc-types-eth.workspace = true

revm.workspace = true

50 changes: 47 additions & 3 deletions crates/evm/src/execute.rs
@@ -1,5 +1,6 @@
//! Traits for execution.

use alloy_rpc_types_eth::transaction::Transaction;
use reth_execution_types::ExecutionOutcome;
use reth_primitives::{BlockNumber, BlockWithSenders, Receipt, Request, U256};
use reth_prune_types::PruneModes;
@@ -90,6 +91,17 @@ pub trait BatchExecutor<DB> {
fn size_hint(&self) -> Option<usize>;
}

/// Result of the trigger
#[derive(Debug, Clone)]
pub struct TaskResult {
/// Transactions
pub txs: Vec<Transaction>,
/// Estimated gas used
pub estimated_gas_used: u64,
/// Bytes length
pub bytes_length: u64,
}

/// The output of an ethereum block.
///
/// Contains the state changes, transaction receipts, and total gas used in the block.
@@ -105,6 +117,8 @@ pub struct BlockExecutionOutput<T> {
pub requests: Vec<Request>,
/// The total gas used by the block.
pub gas_used: u64,
/// The target list.
pub target_list: Vec<TaskResult>,
}

/// A helper type for ethereum block inputs that consists of a block and the total difficulty.
@@ -118,12 +132,26 @@ pub struct BlockExecutionInput<'a, Block> {
pub enable_anchor: bool,
/// Enable skip invalid transaction.
pub enable_skip: bool,
/// Enable build transaction lists.
pub enable_build: bool,
/// Max compressed bytes.
pub max_bytes_per_tx_list: u64,
/// Max length of transactions list.
pub max_transactions_lists: u64,
}

impl<'a, Block> BlockExecutionInput<'a, Block> {
/// Creates a new input.
pub fn new(block: &'a mut Block, total_difficulty: U256) -> Self {
Self { block, total_difficulty, enable_anchor: true, enable_skip: true }
Self {
block,
total_difficulty,
enable_anchor: true,
enable_skip: true,
enable_build: false,
max_bytes_per_tx_list: 0,
max_transactions_lists: 0,
}
}
}

@@ -135,15 +163,31 @@ impl<'a, Block> From<(&'a mut Block, U256)> for BlockExecutionInput<'a, Block> {

impl<'a, Block> From<(&'a mut Block, U256, bool)> for BlockExecutionInput<'a, Block> {
fn from((block, total_difficulty, enable_anchor): (&'a mut Block, U256, bool)) -> Self {
Self { block, total_difficulty, enable_anchor, enable_skip: true }
Self {
block,
total_difficulty,
enable_anchor,
enable_skip: true,
enable_build: false,
max_bytes_per_tx_list: 0,
max_transactions_lists: 0,
}
}
}

impl<'a, Block> From<(&'a mut Block, U256, bool, bool)> for BlockExecutionInput<'a, Block> {
fn from(
(block, total_difficulty, enable_anchor, enable_skip): (&'a mut Block, U256, bool, bool),
) -> Self {
Self { block, total_difficulty, enable_anchor, enable_skip }
Self {
block,
total_difficulty,
enable_anchor,
enable_skip,
enable_build: false,
max_bytes_per_tx_list: 0,
max_transactions_lists: 0,
}
}
}

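With these changes the executor owns list building end to end: a caller opts in through the new BlockExecutionInput fields and reads the result back from the target_list field of BlockExecutionOutput, which is what the proposer change further down does. A minimal sketch under those assumptions; the helper names and limit values below are illustrative, not taken from this PR:

use reth_evm::execute::{BlockExecutionInput, BlockExecutionOutput, TaskResult};
use reth_primitives::U256;

// Sketch: request transaction-list building from the executor.
// The two limits are illustrative caps, not values from the diff.
fn list_building_input<Block>(block: &mut Block) -> BlockExecutionInput<'_, Block> {
    BlockExecutionInput {
        block,
        total_difficulty: U256::ZERO,
        enable_anchor: false,
        enable_skip: false,
        enable_build: true,
        max_bytes_per_tx_list: 120_000,
        max_transactions_lists: 1,
    }
}

// Sketch: pull the pre-built lists out of the execution output.
fn take_target_list<T>(output: BlockExecutionOutput<T>) -> Vec<TaskResult> {
    output.target_list
}

The tuple-based From conversions keep enable_build disabled and both limits at zero, so existing call sites that convert from (&mut Block, U256, ...) tuples compile and behave as before.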
3 changes: 0 additions & 3 deletions crates/taiko/consensus/proposer/Cargo.toml
@@ -21,8 +21,6 @@ reth-revm.workspace = true
reth-transaction-pool.workspace = true
reth-evm.workspace = true
reth-consensus.workspace = true
reth-rpc-types.workspace = true
reth-rpc-types-compat.workspace = true
reth-errors.workspace = true


@@ -32,5 +30,4 @@ tokio = { workspace = true, features = ["sync", "time"] }
tracing.workspace = true

# misc
flate2.workspace = true
alloy-rlp.workspace = true
102 changes: 17 additions & 85 deletions crates/taiko/consensus/proposer/src/lib.rs
@@ -12,26 +12,19 @@
html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256",
issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/"
)]
#![cfg_attr(not(test), warn(unused_crate_dependencies))]
#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))]

use flate2::write::ZlibEncoder;
use flate2::Compression;
use reth_chainspec::ChainSpec;
use reth_consensus::{Consensus, ConsensusError, PostExecutionInput};
use reth_errors::RethError;
use reth_execution_errors::{BlockExecutionError, BlockValidationError};
use reth_primitives::{
eip4844::calculate_excess_blob_gas, proofs, transaction::TransactionSignedList, Address, Block,
BlockWithSenders, Header, Requests, SealedBlock, SealedHeader, TransactionSigned, Withdrawals,
U256,
eip4844::calculate_excess_blob_gas, proofs, Address, Block, BlockWithSenders, Header, Requests,
SealedBlock, SealedHeader, TransactionSigned, Withdrawals, U256,
};
use reth_provider::{BlockReaderIdExt, StateProviderFactory};
use reth_revm::database::StateProviderDatabase;
use reth_rpc_types::Transaction;
use reth_transaction_pool::TransactionPool;
use std::{
io::{self, Write},
sync::Arc,
time::{SystemTime, UNIX_EPOCH},
};
@@ -42,7 +35,9 @@ mod client;
mod task;

pub use crate::client::ProposerClient;
use reth_evm::execute::{BlockExecutionOutput, BlockExecutorProvider, Executor};
use reth_evm::execute::{
BlockExecutionInput, BlockExecutionOutput, BlockExecutorProvider, Executor, TaskResult,
};
pub use task::ProposerTask;

/// A consensus implementation intended for local development and testing purposes.
@@ -157,17 +152,6 @@ pub struct TaskArgs {
tx: oneshot::Sender<Result<Vec<TaskResult>, RethError>>,
}

/// Result of the trigger
#[derive(Debug)]
pub struct TaskResult {
/// Transactions
pub txs: Vec<Transaction>,
/// Estimated gas used
pub estimated_gas_used: u64,
/// Bytes length
pub bytes_length: u64,
}

#[derive(Debug, Clone, Default)]
struct Storage;

@@ -303,70 +287,18 @@ impl Storage {
);

// execute the block
let BlockExecutionOutput { receipts, .. } =
executor.executor(&mut db).execute((&mut block, U256::ZERO, false).into())?;
let Block { body, .. } = block.block;

debug!(target: "taiko::proposer", transactions=?body, "after executing transactions");

let mut tx_lists = vec![];
let mut chunk_start = 0;
let mut last_compressed_buf = None;
let mut gas_used_start = 0;
for idx in 0..body.len() {
if let Some((txs_range, estimated_gas_used, compressed_buf)) = {
let compressed_buf = encode_and_compress_tx_list(&body[chunk_start..=idx])
.map_err(BlockExecutionError::other)?;

if compressed_buf.len() > max_bytes_per_tx_list as usize {
// the first transaction in chunk is too large, so we need to skip it
if idx == chunk_start {
gas_used_start = receipts[idx].cumulative_gas_used;
chunk_start += 1;
// the first transaction in chunk is too large, so we need to skip it
None
} else {
// current chunk reaches the max_transactions_lists or max_bytes_per_tx_list
// and use previous transaction's data
let estimated_gas_used =
receipts[idx - 1].cumulative_gas_used - gas_used_start;
gas_used_start = receipts[idx - 1].cumulative_gas_used;
let range = chunk_start..idx;
chunk_start = idx;
Some((range, estimated_gas_used, last_compressed_buf.clone()))
}
}
// reach the limitation of max_transactions_lists or max_bytes_per_tx_list
else if idx - chunk_start + 1 == max_transactions_lists as usize {
let estimated_gas_used = receipts[idx].cumulative_gas_used - gas_used_start;
gas_used_start = receipts[idx].cumulative_gas_used;
let range = chunk_start..idx + 1;
chunk_start = idx + 1;
Some((range, estimated_gas_used, Some(compressed_buf)))
} else {
last_compressed_buf = Some(compressed_buf);
None
}
} {
tx_lists.push(TaskResult {
txs: body[txs_range]
.iter()
.cloned()
.map(|tx| reth_rpc_types_compat::transaction::from_signed(tx).unwrap())
.collect(),
estimated_gas_used,
bytes_length: compressed_buf.map_or(0, |b| b.len() as u64),
});
}
}
let block_input = BlockExecutionInput {
block: &mut block,
total_difficulty: U256::ZERO,
enable_anchor: false,
enable_skip: false,
enable_build: true,
max_bytes_per_tx_list,
max_transactions_lists,
};
let BlockExecutionOutput { target_list, .. } =
executor.executor(&mut db).execute(block_input.into())?;

Ok(tx_lists)
Ok(target_list)
}
}

fn encode_and_compress_tx_list(txs: &[TransactionSigned]) -> io::Result<Vec<u8>> {
let encoded_buf = alloy_rlp::encode(TransactionSignedList(txs));
let mut encoder = ZlibEncoder::new(Vec::new(), Compression::default());
encoder.write_all(&encoded_buf)?;
encoder.finish()
}
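The per-chunk compression helper is deleted from the proposer along with its flate2 dependency; with flate2 and reth-rpc-types-compat added to crates/taiko/evm below, the RLP-encode-and-zlib-compress step presumably moves into the Taiko executor, which now builds the lists during execution. A minimal sketch of that step, mirroring the removed encode_and_compress_tx_list and assuming alloy-rlp is also available in that crate:

use std::io::{self, Write};

use flate2::{write::ZlibEncoder, Compression};
use reth_primitives::{transaction::TransactionSignedList, TransactionSigned};

// Sketch of the size check presumably applied per candidate chunk:
// RLP-encode the signed transactions, zlib-compress them, and compare
// the compressed length against max_bytes_per_tx_list.
fn compressed_tx_list_len(txs: &[TransactionSigned]) -> io::Result<usize> {
    let encoded = alloy_rlp::encode(TransactionSignedList(txs));
    let mut encoder = ZlibEncoder::new(Vec::new(), Compression::default());
    encoder.write_all(&encoded)?;
    Ok(encoder.finish()?.len())
}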
6 changes: 1 addition & 5 deletions crates/taiko/consensus/proposer/src/task.rs
@@ -96,6 +96,7 @@ where
});
local_txs.extend(remote_txs);
debug!(target: "taiko::proposer", txs = ?local_txs.len(), "Proposer filter best transactions");

// miner returned a set of transaction that we feed to the producer
this.queued.push_back((trigger_args, local_txs));
};
@@ -111,7 +112,6 @@ where

let client = this.provider.clone();
let chain_spec = Arc::clone(&this.chain_spec);
let pool = this.pool.clone();
let executor = this.block_executor.clone();

// Create the mining future that creates a block, notifies the engine that drives
@@ -144,10 +144,6 @@ where
max_transactions_lists,
base_fee,
);
if res.is_ok() {
// clear all transactions from pool
pool.remove_transactions(txs.iter().map(|tx| tx.hash()).collect());
}
let _ = tx.send(res);
}));
}
2 changes: 2 additions & 0 deletions crates/taiko/evm/Cargo.toml
@@ -19,6 +19,7 @@ reth-revm = { workspace = true, features = ["taiko"] }
reth-prune-types.workspace = true
reth-execution-types.workspace = true
reth-consensus.workspace = true
reth-rpc-types-compat.workspace = true

# Taiko
taiko-reth-beacon-consensus.workspace = true
@@ -32,6 +33,7 @@ alloy-sol-types.workspace = true

# Misc
tracing.workspace = true
flate2.workspace = true

[dev-dependencies]
reth-testing-utils.workspace = true