Compare commits

No commits in common. "e2085d8b212ca294432efef27fd1324217d01968" and "daf261de8d480fe6306b47fb1444a9beeaadfab9" have entirely different histories.

4 changed files with 3 additions and 22 deletions
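All four files touch the same lightweight instrumentation pattern: timers and gauges are registered once as `lazy_static` statics through the project's `metrics` crate, and call sites record elapsed time with `update_since` (or a throughput value with `update`). As a reading aid, the sketch below shows that pattern in isolation; the metric names and the timed function are hypothetical, and the `metrics` API is assumed to match only the calls visible in the hunks (`register_timer`, `Timer::update_since`, `GaugeUsize::register`, `Gauge::update`).

    // Illustrative sketch only; the EXAMPLE_* names and example_operation are
    // hypothetical, and the `metrics` API is assumed from the hunks below.
    use std::sync::Arc;
    use std::time::Instant;

    use metrics::{register_timer, Gauge, GaugeUsize, Timer};

    lazy_static::lazy_static! {
        pub static ref EXAMPLE_TIMER: Arc<dyn Timer> = register_timer("example_operation");
        pub static ref EXAMPLE_SPEED_IN_BYTES: Arc<dyn Gauge<usize>> =
            GaugeUsize::register("example_operation_speed_in_bytes");
    }

    fn example_operation(payload: &[u8]) {
        let start_time = Instant::now();
        // ... the work being measured ...
        // Bytes per millisecond, guarding against a zero elapsed time.
        EXAMPLE_SPEED_IN_BYTES
            .update((payload.len() as u64 / start_time.elapsed().as_millis().max(1) as u64) as usize);
        EXAMPLE_TIMER.update_since(start_time);
    }

Read left to right, the comparison mostly deletes these hooks: the per-transaction timers and the bytes-per-millisecond gauge are dropped, and the remaining `STORE_PUT_TX` timer appears to be renamed and recorded immediately after the `store.put_tx` call.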

View File

@@ -1,11 +1,7 @@
use std::sync::Arc;
-use metrics::{register_timer, Gauge, GaugeUsize, Timer};
+use metrics::{register_timer, Timer};
lazy_static::lazy_static! {
-pub static ref LOG_MANAGER_HANDLE_DATA_TRANSACTION: Arc<dyn Timer> = register_timer("log_manager_handle_data_transaction");
-pub static ref STORE_PUT_TX: Arc<dyn Timer> = register_timer("log_entry_sync_manager_put_tx_inner");
-pub static ref STORE_PUT_TX_SPEED_IN_BYTES: Arc<dyn Gauge<usize>> = GaugeUsize::register("log_entry_sync_manager_put_tx_speed_in_bytes");
+pub static ref STORE_PUT_TX: Arc<dyn Timer> = register_timer("log_entry_sync_store_put_tx");
}

View File

@@ -408,7 +408,6 @@ impl LogSyncManager {
}
LogFetchProgress::Transaction((tx, block_number)) => {
let mut stop = false;
-let start_time = Instant::now();
match self.put_tx(tx.clone()).await {
Some(false) => stop = true,
Some(true) => {
@@ -442,8 +441,6 @@ impl LogSyncManager {
// no receivers will be created.
warn!("log sync broadcast error, error={:?}", e);
}
-metrics::LOG_MANAGER_HANDLE_DATA_TRANSACTION.update_since(start_time);
}
LogFetchProgress::Reverted(reverted) => {
self.process_reverted(reverted).await;
@@ -456,6 +453,7 @@ impl LogSyncManager {
async fn put_tx_inner(&mut self, tx: Transaction) -> bool {
let start_time = Instant::now();
let result = self.store.put_tx(tx.clone());
+metrics::STORE_PUT_TX.update_since(start_time);
if let Err(e) = result {
error!("put_tx error: e={:?}", e);
@@ -516,7 +514,6 @@ impl LogSyncManager {
// Check if the computed data root matches on-chain state.
// If the call fails, we won't check the root here and return `true` directly.
let flow_contract = self.log_fetcher.flow_contract();
match flow_contract
.get_flow_root_by_tx_seq(tx.seq.into())
.call()
@@ -548,10 +545,6 @@ impl LogSyncManager {
}
}
-metrics::STORE_PUT_TX_SPEED_IN_BYTES
-.update((tx.size / start_time.elapsed().as_millis() as u64) as usize);
-metrics::STORE_PUT_TX.update_since(start_time);
true
}
}

View File

@@ -192,7 +192,6 @@ impl LogStoreChunkWrite for LogManager {
chunks: ChunkArray,
maybe_file_proof: Option<FlowProof>,
) -> Result<bool> {
-let start_time = Instant::now();
let mut merkle = self.merkle.write();
let tx = self
.tx_store
@@ -225,7 +224,6 @@ impl LogStoreChunkWrite for LogManager {
)?;
self.flow_store.put_mpt_node_list(updated_node_list)?;
}
-metrics::PUT_CHUNKS.update_since(start_time);
Ok(true)
}
@@ -256,7 +254,6 @@ impl LogStoreWrite for LogManager {
/// `put_tx` for the last tx when we restart the node to ensure that it succeeds.
///
fn put_tx(&self, tx: Transaction) -> Result<()> {
-let start_time = Instant::now();
let mut merkle = self.merkle.write();
debug!("put_tx: tx={:?}", tx);
let expected_seq = self.tx_store.next_tx_seq();
@@ -286,7 +283,6 @@ impl LogStoreWrite for LogManager {
self.copy_tx_and_finalize(old_tx_seq, vec![tx.seq])?;
}
}
-metrics::PUT_TX.update_since(start_time);
Ok(())
}

View File

@@ -3,10 +3,6 @@ use std::sync::Arc;
use metrics::{register_timer, Timer};
lazy_static::lazy_static! {
-pub static ref PUT_TX: Arc<dyn Timer> = register_timer("log_store_put_tx");
-pub static ref PUT_CHUNKS: Arc<dyn Timer> = register_timer("log_store_put_chunks");
pub static ref TX_STORE_PUT: Arc<dyn Timer> = register_timer("log_store_tx_store_put_tx");
pub static ref CHECK_TX_COMPLETED: Arc<dyn Timer> =