Mirror of https://github.com/0glabs/0g-storage-node.git
Synced 2024-11-20 15:05:19 +00:00

Commit ff9810e1ca (parent 29fcc415a6): Output debug log with serde_json and fix debug root db key.

Cargo.lock (generated)
@@ -6892,11 +6892,12 @@ dependencies = [
 [[package]]
 name = "serde_json"
-version = "1.0.118"
+version = "1.0.128"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d947f6b3163d8857ea16c4fa0dd4840d52f3041039a85decd46867eb1abef2e4"
+checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8"
 dependencies = [
  "itoa",
+ "memchr",
  "ryu",
  "serde",
 ]
@@ -7236,6 +7237,7 @@ dependencies = [
 "rand 0.8.5",
 "rayon",
 "serde",
+ "serde_json",
 "shared_types",
 "static_assertions",
 "tempdir",
@@ -28,6 +28,7 @@ tiny-keccak = "*"
 itertools = "0.13.0"
 serde = { version = "1.0.197", features = ["derive"] }
 parking_lot = "0.12.3"
+serde_json = "1.0.127"

 [dev-dependencies]
 tempdir = "0.3.7"
@@ -587,7 +587,10 @@ impl FlowDBStore
     fn get_batch_root(&self, batch_index: u64) -> Result<Option<DataRoot>> {
         Ok(self
             .kvdb
-            .get(COL_ENTRY_BATCH_ROOT, &batch_index.to_be_bytes())?
+            .get(
+                COL_ENTRY_BATCH_ROOT,
+                &encode_batch_root_key(batch_index as usize, 1),
+            )?
             .map(|v| DataRoot::from_slice(&v)))
     }
 }
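Note on the `get_batch_root` fix above: the reader looked the batch root up under the raw big-endian batch index while the writer stores roots under a composite key, so the debug lookup always missed. The sketch below is only an illustration of that failure mode; `encode_batch_root_key` here is a stand-in written for the example (the real encoding lives elsewhere in the storage crate and is not shown in this diff), and the column is faked with a `HashMap`.

    use std::collections::HashMap;

    // Hypothetical key encoding, for the example only; the real scheme may differ.
    fn encode_batch_root_key(batch_index: usize, subtree_depth: usize) -> Vec<u8> {
        let mut key = Vec::with_capacity(16);
        key.extend_from_slice(&batch_index.to_be_bytes());
        key.extend_from_slice(&subtree_depth.to_be_bytes());
        key
    }

    fn main() {
        // Stand-in for the COL_ENTRY_BATCH_ROOT column.
        let mut col: HashMap<Vec<u8>, Vec<u8>> = HashMap::new();
        let batch_index: u64 = 7;

        // Writer side: root stored under the composite (index, depth) key.
        col.insert(encode_batch_root_key(batch_index as usize, 1), vec![0xab; 32]);

        // Old reader: raw index key -> miss, even though the root exists.
        assert!(col.get(batch_index.to_be_bytes().as_slice()).is_none());

        // Fixed reader: same composite key as the writer -> hit.
        assert!(col.get(&encode_batch_root_key(batch_index as usize, 1)).is_some());
    }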
@@ -3,6 +3,7 @@ use std::ops::{Deref, DerefMut};
 use ssz::{Decode, DecodeError, Encode};

 use bitmaps::{Bitmap, Bits, BitsImpl};
+use serde::{Deserialize, Deserializer, Serialize, Serializer};

 #[derive(Default, Debug)]
 pub struct WrappedBitmap<const N: usize>(pub Bitmap<N>)
@@ -72,6 +73,24 @@ where
     }
 }

+impl<'a> Deserialize<'a> for WrappedBitmap<64> {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'a>,
+    {
+        Ok(Self(Bitmap::from_value(u64::deserialize(deserializer)?)))
+    }
+}
+
+impl Serialize for WrappedBitmap<64> {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        serializer.serialize_u64(self.0.into_value())
+    }
+}
+
 pub trait TruncateBitmap {
     fn truncate(&mut self, index: u16);
 }
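The manual impls above persist the 64-bit bitmap as a plain u64 rather than as a struct, so its JSON form is just a number. A minimal standalone sketch of the same round-trip, assuming the `bitmaps` crate's `into_value`/`from_value` API that the diff itself uses; it declares a local wrapper so it compiles on its own (the repo's `WrappedBitmap` also carries SSZ and truncation logic):

    use bitmaps::Bitmap;
    use serde::{Deserialize, Deserializer, Serialize, Serializer};

    // Local stand-in for the repo's WrappedBitmap<64>, only for this demo.
    pub struct WrappedBitmap64(pub Bitmap<64>);

    impl Serialize for WrappedBitmap64 {
        fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
            // Bitmap<64> stores its bits in a u64, so serialize the raw value.
            serializer.serialize_u64(self.0.into_value())
        }
    }

    impl<'a> Deserialize<'a> for WrappedBitmap64 {
        fn deserialize<D: Deserializer<'a>>(deserializer: D) -> Result<Self, D::Error> {
            Ok(Self(Bitmap::from_value(u64::deserialize(deserializer)?)))
        }
    }

    fn main() {
        let mut bitmap = Bitmap::<64>::new();
        bitmap.set(0, true);
        bitmap.set(3, true);

        let json = serde_json::to_string(&WrappedBitmap64(bitmap)).unwrap();
        assert_eq!(json, "9"); // bits 0 and 3 set -> 0b1001
        let back: WrappedBitmap64 = serde_json::from_str(&json).unwrap();
        assert_eq!(back.0.into_value(), 9);
    }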
@@ -1,4 +1,5 @@
 use anyhow::{bail, Result};
+use serde::{Deserialize, Serialize};
 use shared_types::{bytes_to_chunks, DataRoot};
 use ssz_derive::{Decode, Encode};
 use std::fmt::{Debug, Formatter};
@@ -6,14 +7,14 @@ use std::mem;
 use tracing::error;
 use zgs_spec::{BYTES_PER_LOAD, BYTES_PER_SECTOR, SECTORS_PER_LOAD, SECTORS_PER_SEAL};

-#[derive(Debug)]
+#[derive(Debug, Deserialize, Serialize)]
 pub enum EntryBatchData {
     Complete(Vec<u8>),
     /// All `PartialBatch`s are ordered based on `start_index`.
     Incomplete(IncompleteData),
 }

-#[derive(Default, Debug, Encode, Decode)]
+#[derive(Default, Debug, Encode, Decode, Deserialize, Serialize)]
 pub struct IncompleteData {
     pub subtrees: Vec<Subtree>,
     pub known_data: Vec<PartialBatch>,
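With plain derives and no extra serde attributes, an enum such as `EntryBatchData` uses serde's default externally tagged representation, so the JSON debug dump wraps the payload in the variant name. A rough sketch of what that looks like for a simplified enum (the field layout here is illustrative, not the crate's exact types):

    use serde::Serialize;

    // Simplified stand-ins; the real types carry more fields.
    #[derive(Serialize)]
    struct IncompleteDataLite {
        subtrees: Vec<u64>,
        known_data: Vec<u64>,
    }

    #[derive(Serialize)]
    enum EntryBatchDataLite {
        Complete(Vec<u8>),
        Incomplete(IncompleteDataLite),
    }

    fn main() {
        // Externally tagged (serde's default): {"Complete":[1,2,3]}
        let complete = EntryBatchDataLite::Complete(vec![1, 2, 3]);
        println!("{}", serde_json::to_string(&complete).unwrap());

        // {"Incomplete":{"subtrees":[4],"known_data":[]}}
        let incomplete = EntryBatchDataLite::Incomplete(IncompleteDataLite {
            subtrees: vec![4],
            known_data: vec![],
        });
        println!("{}", serde_json::to_string(&incomplete).unwrap());
    }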
@@ -57,14 +58,14 @@ impl IncompleteData
     }
 }

-#[derive(Default, Debug, Encode, Decode)]
+#[derive(Default, Debug, Encode, Decode, Deserialize, Serialize)]
 pub struct Subtree {
     pub start_sector: usize,
     pub subtree_height: usize,
     pub root: DataRoot,
 }

-#[derive(PartialEq, Eq)]
+#[derive(PartialEq, Eq, Deserialize, Serialize)]
 pub struct PartialBatch {
     /// Offset in this batch.
     pub(super) start_sector: usize,
@@ -3,15 +3,16 @@ mod chunk_data;
 mod seal;
 mod serde;

+use ::serde::{Deserialize, Serialize};
 use std::cmp::min;

-use anyhow::Result;
+use anyhow::{bail, Result};
 use ethereum_types::H256;
 use ssz_derive::{Decode, Encode};

 use crate::log_store::log_manager::data_to_merkle_leaves;
 use crate::try_option;
-use append_merkle::{Algorithm, MerkleTreeRead, Sha3Algorithm};
+use append_merkle::MerkleTreeRead;
 use shared_types::{ChunkArray, DataRoot, Merkle};
 use tracing::trace;
 use zgs_spec::{
@@ -23,7 +24,7 @@ use super::SealAnswer;
 use chunk_data::EntryBatchData;
 use seal::SealInfo;

-#[derive(Debug, Encode, Decode)]
+#[derive(Debug, Encode, Decode, Deserialize, Serialize)]
 pub struct EntryBatch {
     seal: SealInfo,
     // the inner data
@@ -243,6 +244,15 @@ impl EntryBatch
         for subtree in self.data.get_subtree_list() {
             trace!(?subtree, "get subtree, leaves={}", merkle.leaves());
             if subtree.start_sector != merkle.leaves() {
+                println!(
+                    "start_sector={} leaves={}",
+                    subtree.start_sector,
+                    merkle.leaves()
+                );
+                let subtree_size = 1 << (subtree.subtree_height - 1);
+                if subtree.start_sector % subtree_size != 0 {
+                    bail!("error");
+                }
                 let leaf_data = try_option!(
                     self.get_unsealed_data(merkle.leaves(), subtree.start_sector - merkle.leaves())
                 );
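The new guard above rejects a subtree whose `start_sector` is not aligned to the subtree's width. In this codebase a subtree of height h spans `1 << (h - 1)` sectors, so its first sector must be a multiple of that span before it can be appended to the Merkle tree. A small self-contained sketch of the same arithmetic (local stand-in struct, field names mirror the diff):

    use anyhow::{bail, Result};

    // Mirrors the fields used by the check in the diff.
    struct SubtreeLite {
        start_sector: usize,
        subtree_height: usize,
    }

    // A subtree of height h spans 1 << (h - 1) sectors, so its start sector
    // must be aligned to that span.
    fn check_alignment(subtree: &SubtreeLite) -> Result<()> {
        let subtree_size = 1usize << (subtree.subtree_height - 1);
        if subtree.start_sector % subtree_size != 0 {
            bail!(
                "unaligned subtree: start_sector={} size={}",
                subtree.start_sector,
                subtree_size
            );
        }
        Ok(())
    }

    fn main() -> Result<()> {
        // Height 7 -> span 64 sectors: 64 is aligned, 96 is not.
        check_alignment(&SubtreeLite { start_sector: 64, subtree_height: 7 })?;
        assert!(check_alignment(&SubtreeLite { start_sector: 96, subtree_height: 7 }).is_err());
        Ok(())
    }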
@@ -257,13 +267,13 @@ impl EntryBatch
             merkle.append_list(data_to_merkle_leaves(&leaf_data).expect("aligned"));
         }
         // TODO(zz): Optimize.
-        for index in 0..merkle.leaves() {
-            if merkle.leaf_at(index)?.is_none() {
-                if let Some(leaf_data) = self.get_unsealed_data(index, 1) {
-                    merkle.fill_leaf(index, Sha3Algorithm::leaf(&leaf_data));
-                }
-            }
-        }
+        // for index in 0..merkle.leaves() {
+        //     if merkle.leaf_at(index)?.is_none() {
+        //         if let Some(leaf_data) = self.get_unsealed_data(index, 1) {
+        //             merkle.fill_leaf(index, Sha3Algorithm::leaf(&leaf_data));
+        //         }
+        //     }
+        // }
         Ok(Some(merkle))
     }
 }
@@ -271,10 +281,14 @@ impl EntryBatch
 #[cfg(test)]
 mod tests {
     use super::{EntryBatch, SealAnswer};
+    use crate::log_store::load_chunk::chunk_data::EntryBatchData::Incomplete;
+    use crate::log_store::load_chunk::chunk_data::{IncompleteData, PartialBatch, Subtree};
+    use append_merkle::MerkleTreeRead;
     use ethereum_types::H256;
     use zgs_spec::{
         BYTES_PER_SEAL, BYTES_PER_SECTOR, SEALS_PER_LOAD, SECTORS_PER_LOAD, SECTORS_PER_SEAL,
     };

     const LOAD_INDEX: u64 = 1;
     fn seal(
         batch: &mut EntryBatch,
@@ -1,4 +1,5 @@
 use ethereum_types::H256;
+use serde::{Deserialize, Serialize};
 use ssz_derive::{Decode as DeriveDecode, Encode as DeriveEncode};
 use static_assertions::const_assert;
 use tracing::info;
@@ -8,7 +9,7 @@ use zgs_spec::{SEALS_PER_LOAD, SECTORS_PER_LOAD, SECTORS_PER_SEAL};

 use super::bitmap::WrappedBitmap;

-#[derive(Debug, DeriveEncode, DeriveDecode)]
+#[derive(Debug, DeriveEncode, DeriveDecode, Deserialize, Serialize)]
 pub struct SealContextInfo {
     /// The context digest for this seal group
     context_digest: H256,
@@ -19,9 +20,10 @@ pub struct SealContextInfo
 type ChunkSealBitmap = WrappedBitmap<SEALS_PER_LOAD>;
 const_assert!(SEALS_PER_LOAD <= u128::BITS as usize);

-#[derive(Debug, Default, DeriveEncode, DeriveDecode)]
+#[derive(Debug, Default, DeriveEncode, DeriveDecode, Deserialize, Serialize)]
 pub struct SealInfo {
     // a bitmap specify which sealing chunks have been sealed
+    #[serde(skip)]
     bitmap: ChunkSealBitmap,
     // the batch_offset (seal chunks) of the EntryBatch this seal info belongs to
     load_index: u64,
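On the `#[serde(skip)]` above: the bitmap field never reaches the new `WrappedBitmap` serde impls; a skipped field is simply omitted on serialize and rebuilt from `Default` on deserialize, which works because `WrappedBitmap` derives `Default`. A minimal sketch of that behaviour with stand-in types:

    use serde::{Deserialize, Serialize};

    // Stand-in for the skipped ChunkSealBitmap field.
    #[derive(Default, Debug, PartialEq)]
    struct BitmapLite(u128);

    #[derive(Default, Debug, Serialize, Deserialize)]
    struct SealInfoLite {
        #[serde(skip)] // omitted from JSON; rebuilt via Default on deserialize
        bitmap: BitmapLite,
        load_index: u64,
    }

    fn main() {
        let info = SealInfoLite { bitmap: BitmapLite(0b1011), load_index: 5 };
        let json = serde_json::to_string(&info).unwrap();
        assert_eq!(json, r#"{"load_index":5}"#); // bitmap never appears
        let back: SealInfoLite = serde_json::from_str(&json).unwrap();
        assert_eq!(back.bitmap, BitmapLite::default()); // skipped field reset to default
    }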
@@ -778,16 +778,16 @@ impl LogManager
         };
         let r = entry_proof(&top_proof, &sub_proof);
         if r.is_err() {
-            let raw_batch = self.flow_store.get_raw_batch(seg_index as u64)?;
+            let raw_batch = self.flow_store.get_raw_batch(seg_index as u64)?.unwrap();
             let db_root = self.flow_store.get_batch_root(seg_index as u64)?;
             error!(
                 ?r,
-                ?raw_batch,
                 ?db_root,
                 ?seg_index,
-                "gen proof error: top_leaves={}, last={}",
+                "gen proof error: top_leaves={}, last={}, raw_batch={}",
                 merkle.pora_chunks_merkle.leaves(),
-                merkle.last_chunk_merkle.leaves()
+                merkle.last_chunk_merkle.leaves(),
+                serde_json::to_string(&raw_batch).unwrap(),
             );
         }
         r
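The reworked `error!` call above serializes the raw batch to JSON and splices it into the message instead of relying on `Debug` formatting via `?raw_batch`, giving a machine-readable dump when proof generation fails (at the cost of panicking via `.unwrap()` if the batch is missing or fails to serialize). A self-contained sketch of the same logging pattern with a stand-in payload type; the `tracing_subscriber::fmt()` setup is an assumption for the demo and is not part of this diff:

    use serde::Serialize;
    use tracing::error;

    // Stand-in for the serialized batch; the real EntryBatch has many more fields.
    #[derive(Debug, Serialize)]
    struct RawBatchLite {
        start_sector: usize,
        data_len: usize,
    }

    fn main() {
        tracing_subscriber::fmt().init();

        let raw_batch = RawBatchLite { start_sector: 128, data_len: 4096 };
        let seg_index = 7usize;

        // Same shape as the diff: structured fields plus a JSON dump in the message.
        error!(
            ?seg_index,
            "gen proof error: raw_batch={}",
            serde_json::to_string(&raw_batch).unwrap()
        );
    }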