add detailed metrics in slow operations

Peter Zhang 2024-10-25 11:59:26 +08:00
parent 24e5663e63
commit 8fa9929003
4 changed files with 26 additions and 9 deletions
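
Every change below follows the same measurement pattern: capture an Instant before a slow operation and report the elapsed time to a pre-registered timer once the operation completes. A minimal sketch of that pattern, using only the register_timer / Timer::update_since API visible in the diff (the timer name and function here are illustrative, not part of this commit):

// Sketch of the timing pattern this commit applies. Only register_timer,
// Arc<dyn Timer>, and update_since are taken from the diff; the timer name
// and the function are hypothetical.
use std::sync::Arc;
use std::time::Instant;

use metrics::{register_timer, Timer};

lazy_static::lazy_static! {
    pub static ref EXAMPLE_SLOW_OP: Arc<dyn Timer> =
        register_timer("log_store_example_slow_op");
}

fn example_slow_op() -> anyhow::Result<()> {
    let start_time = Instant::now();
    // ... the slow work being measured ...
    EXAMPLE_SLOW_OP.update_since(start_time);
    Ok(())
}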

View File

@@ -1,13 +1,13 @@
-use super::load_chunk::EntryBatch;
-use super::log_manager::{COL_PAD_DATA_LIST, COL_PAD_DATA_SYNC_HEIGH};
-use super::seal_task_manager::SealTaskManager;
-use super::{MineLoadChunk, SealAnswer, SealTask};
 use crate::config::ShardConfig;
 use crate::error::Error;
+use crate::log_store::load_chunk::EntryBatch;
 use crate::log_store::log_manager::{
-    bytes_to_entries, COL_ENTRY_BATCH, COL_FLOW_MPT_NODES, PORA_CHUNK_SIZE,
+    bytes_to_entries, COL_ENTRY_BATCH, COL_FLOW_MPT_NODES, PORA_CHUNK_SIZE, COL_PAD_DATA_LIST, COL_PAD_DATA_SYNC_HEIGH
+};
+use crate::log_store::seal_task_manager::SealTaskManager;
+use crate::log_store::{
+    metrics, FlowRead, FlowSeal, FlowWrite, MineLoadChunk, SealAnswer, SealTask,
 };
-use crate::log_store::{FlowRead, FlowSeal, FlowWrite};
 use crate::{try_option, ZgsKeyValueDB};
 use any::Any;
 use anyhow::{anyhow, bail, Result};
@@ -21,6 +21,7 @@ use ssz_derive::{Decode as DeriveDecode, Encode as DeriveEncode};
 use std::fmt::Debug;
 use std::sync::Arc;
+use std::time::Instant;
 use std::{any, cmp};
 use tracing::{debug, error, trace};
 use zgs_spec::{BYTES_PER_SECTOR, SEALS_PER_LOAD, SECTORS_PER_LOAD, SECTORS_PER_SEAL};
@@ -47,6 +48,7 @@ impl FlowStore {
         batch_index: usize,
         subtree_list: Vec<(usize, usize, DataRoot)>,
     ) -> Result<()> {
+        let start_time = Instant::now();
         let mut batch = self
             .data_db
             .get_entry_batch(batch_index as u64)?
@@ -54,7 +56,7 @@ impl FlowStore {
         batch.set_subtree_list(subtree_list);
         self.data_db
             .put_entry_raw(vec![(batch_index as u64, batch)])?;
+        metrics::INSERT_SUBTREE_LIST.update_since(start_time);
         Ok(())
     }

View File

@@ -32,8 +32,6 @@ use std::time::{Duration, Instant};
 use tracing::{debug, error, info, instrument, trace, warn};
 use crate::log_store::metrics;
-use crate::log_store::tx_store::BlockHashAndSubmissionIndex;
-use crate::log_store::{FlowSeal, MineLoadChunk, SealAnswer, SealTask};
 /// 256 Bytes
 pub const ENTRY_SIZE: usize = 256;
@@ -933,6 +931,7 @@ impl LogManager {
     #[instrument(skip(self, merkle))]
     fn pad_tx(&self, tx_seq: u64, tx_start_index: u64, merkle: &mut MerkleManager) -> Result<()> {
         // Check if we need to pad the flow.
+        let start_time = Instant::now();
         let mut tx_start_flow_index =
             merkle.last_chunk_start_index() + merkle.last_chunk_merkle.leaves() as u64;
         let pad_size = tx_start_index - tx_start_flow_index;
@@ -1013,6 +1012,8 @@
         );
         self.flow_store.put_pad_data(&pad_list, tx_seq)?;
+        metrics::PAD_TX.update_since(start_time);
         Ok(())
     }

View File

@@ -5,10 +5,21 @@ use metrics::{register_timer, Timer};
 lazy_static::lazy_static! {
     pub static ref TX_STORE_PUT: Arc<dyn Timer> = register_timer("log_store_tx_store_put_tx");
+    pub static ref CHECK_TX_COMPLETED: Arc<dyn Timer> =
+        register_timer("log_store_log_manager_check_tx_completed");
     pub static ref APPEND_SUBTREE_LIST: Arc<dyn Timer> =
         register_timer("log_store_log_manager_append_subtree_list");
     pub static ref COPY_TX_AND_FINALIZE: Arc<dyn Timer> =
         register_timer("log_store_log_manager_copy_tx_and_finalize");
+    pub static ref PAD_TX: Arc<dyn Timer> = register_timer("log_store_log_manager_pad_tx");
+    pub static ref PUT_BATCH_ROOT_LIST: Arc<dyn Timer> = register_timer("log_store_flow_store_put_batch_root_list");
+    pub static ref INSERT_SUBTREE_LIST: Arc<dyn Timer> =
+        register_timer("log_store_log_manager_insert_subtree_list");
+    pub static ref PUT_MPT_NODE: Arc<dyn Timer> = register_timer("log_store_log_manager_put_mpt_node");
 }
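
Two of the timers registered above (PUT_BATCH_ROOT_LIST, PUT_MPT_NODE) have no call sites in the hunks captured here; presumably they are wired into the corresponding FlowStore write paths the same way INSERT_SUBTREE_LIST is wired in the first file. An illustrative sketch of such a call site (the method name, signature, and body are assumptions, not taken from this commit):

// Hypothetical FlowStore write path reporting to the new PUT_MPT_NODE timer,
// mirroring the INSERT_SUBTREE_LIST change shown earlier.
fn put_mpt_node_list(&self, nodes: Vec<(usize, usize, DataRoot)>) -> Result<()> {
    let start_time = Instant::now();
    // ... persist the MPT nodes through self.data_db ...
    metrics::PUT_MPT_NODE.update_since(start_time);
    Ok(())
}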

View File

@@ -213,7 +213,10 @@ impl TransactionStore {
     }
     pub fn check_tx_completed(&self, tx_seq: u64) -> Result<bool> {
+        let start_time = Instant::now();
         let status = self.get_tx_status(tx_seq)?;
+        metrics::CHECK_TX_COMPLETED.update_since(start_time);
         Ok(matches!(status, Some(TxStatus::Finalized)))
     }