Skip to content

Commit

Permalink
add red move
Browse files — browse the repository at this point in the history
  • Loading branch information
jackzhhuang committed Oct 16, 2024
1 parent 35018c9 commit 0a135ad
Show file tree
Hide file tree
Showing 5 changed files with 78 additions and 34 deletions.
7 changes: 4 additions & 3 deletions chain/mock/src/mock_chain.rs
Original file line number Diff line number Diff line change
Expand Up @@ -266,7 +266,7 @@ impl MockChain {

let MineNewDagBlockInfo {
tips: pruned_tips,
blue_blocks,
ghostdata,
pruning_point,
} = self
.head
Expand All @@ -275,14 +275,15 @@ impl MockChain {

debug!(
"tips: {:?}, blue_blocks: {:?}, pruning_point: {:?}",
pruned_tips, blue_blocks, pruning_point
pruned_tips, ghostdata.mergeset_blues, pruning_point
);

let (template, _) = self.head.create_block_template_by_header(
*self.miner.address(),
selected_header,
vec![],
blue_blocks
ghostdata
.mergeset_blues
.get(1..)
.unwrap_or(&[])
.iter()
Expand Down
33 changes: 18 additions & 15 deletions chain/src/chain.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ use starcoin_chain_api::{
use starcoin_consensus::Consensus;
use starcoin_crypto::hash::PlainCryptoHash;
use starcoin_crypto::HashValue;
use starcoin_dag::blockdag::{BlockDAG, MineNewDagBlockInfo};
use starcoin_dag::blockdag::BlockDAG;
use starcoin_dag::consensusdb::consenses_state::DagState;
use starcoin_dag::consensusdb::prelude::StoreError;
use starcoin_dag::consensusdb::schemadb::GhostdagStoreReader;
Expand Down Expand Up @@ -315,24 +315,27 @@ impl BlockChain {
(self.dag().ghostdata(&tips)?, tips)
};

let MineNewDagBlockInfo {
tips,
blue_blocks,
pruning_point: _,
} = {
let blue_blocks = ghostdata.mergeset_blues.clone()[1..].to_vec();
MineNewDagBlockInfo {
tips,
blue_blocks,
pruning_point, // TODO: new test cases will need pass this field if they have some special requirements.
}
};
// let MineNewDagBlockInfo {
// tips,
// ghostdata,
// pruning_point: _,
// } = {
// let blue_blocks = ghostdata.mergeset_blues.clone()[1..].to_vec();
// MineNewDagBlockInfo {
// tips,
// ghostdata,
// pruning_point, // TODO: new test cases will need pass this field if they have some special requirements.
// }
// };

debug!(
"Blue blocks:{:?} in chain/create_block_template_by_header",
blue_blocks
ghostdata.mergeset_blues,
);
let blue_blocks = blue_blocks
let blue_blocks = ghostdata
.mergeset_blues
.as_ref()
.clone()
.into_iter()
.map(|block| self.storage.get_block_by_hash(block))
.collect::<Result<Vec<Option<Block>>>>()?
Expand Down
2 changes: 0 additions & 2 deletions flexidag/src/blockdag.rs
Original file line number Diff line number Diff line change
Expand Up @@ -581,10 +581,8 @@ impl BlockDAG {
merge_depth,
)?;
if merge_depth_root == Hash::zero() {
println!("jacktest: merge depth root is zero");
return anyhow::Ok((parents, ghostdata));
}
println!("jacktest: merge depth root: {:?}", merge_depth_root);
let mut kosherizing_blues: Option<Vec<Hash>> = None;
let mut bad_reds = Vec::new();

Expand Down
1 change: 1 addition & 0 deletions flexidag/src/prune/pruning_point_manager.rs
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,7 @@ impl<T: ReachabilityStoreReader + Clone> PruningPointManagerT<T> {
{
break;
}

if self.finality_score(next_pruning_ghostdata.blue_score)
> self.finality_score(latest_pruning_ghost_data.blue_score)
{
Expand Down
69 changes: 55 additions & 14 deletions flexidag/tests/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,22 +8,19 @@ use starcoin_dag::{
blockdag::{BlockDAG, MineNewDagBlockInfo},
consensusdb::{
consenses_state::{DagState, DagStateReader, DagStateStore},
consensus_block_depth::BlockDepthInfoStore,
schemadb::{
DbReachabilityStore, GhostdagStore, GhostdagStoreReader, ReachabilityStore,
ReachabilityStoreReader, RelationsStore, RelationsStoreReader,
DbReachabilityStore, GhostdagStoreReader, ReachabilityStore, ReachabilityStoreReader,
RelationsStore, RelationsStoreReader,
},
},
reachability::{inquirer, ReachabilityError},
types::{
ghostdata::{self, GhostdagData},
interval::Interval,
},
types::{ghostdata::GhostdagData, interval::Interval},
};
use starcoin_logger::prelude::{debug, info};
use starcoin_logger::prelude::debug;
use starcoin_types::{
block::{BlockHeader, BlockHeaderBuilder, BlockNumber},
blockhash::{BlockHashMap, HashKTypeMap, KType},
U256,
};

use std::{
Expand Down Expand Up @@ -742,6 +739,41 @@ fn add_and_print_with_ghostdata(
Ok(header)
}

/// Test helper: build a random block header with the given DAG linkage,
/// pruning point and difficulty, commit it to `dag`, and return it.
///
/// # Parameters
/// * `number` – block height for the new header.
/// * `parent` – selected-parent hash.
/// * `parents` – full DAG parents set (owned; moved into the builder).
/// * `origin` – origin hash forwarded to `BlockDAG::commit`.
/// * `pruning_point` – pruning point recorded in the header.
/// * `difficulty` – header difficulty.
/// * `dag` – the DAG the header is committed into.
///
/// # Errors
/// Propagates any error raised by `BlockDAG::commit`.
fn add_and_print_with_pruning_point_and_difficulty(
    number: BlockNumber,
    parent: Hash,
    parents: Vec<Hash>,
    origin: Hash,
    pruning_point: Hash,
    difficulty: U256,
    dag: &mut BlockDAG,
) -> anyhow::Result<BlockHeader> {
    let header_builder = BlockHeaderBuilder::random();
    let header = header_builder
        .with_parent_hash(parent)
        // `parents` is owned and not used again below, so move it instead of cloning.
        .with_parents_hash(parents)
        .with_number(number)
        .with_pruning_point(pruning_point)
        .with_difficulty(difficulty)
        .build();
    let start = Instant::now();
    // `commit` takes the header by value; clone so the original can be returned.
    dag.commit(header.to_owned(), origin)?;
    let duration = start.elapsed();
    println!(
        "commit header: {:?}, number: {:?}, duration: {:?}",
        header.id(),
        header.number(),
        duration
    );
    Ok(header)
}

fn add_and_print_with_pruning_point(
number: BlockNumber,
parent: Hash,
Expand All @@ -756,6 +788,7 @@ fn add_and_print_with_pruning_point(
.with_parents_hash(parents.clone())
.with_number(number)
.with_pruning_point(pruning_point)
.with_difficulty(100.into())
.build();
let start = Instant::now();
dag.commit(header.to_owned(), origin)?;
Expand Down Expand Up @@ -1339,11 +1372,13 @@ fn test_merge_bounded() -> anyhow::Result<()> {
&mut dag,
)?;

let block_main_2 = add_and_print(
let block_main_2 = add_and_print_with_pruning_point_and_difficulty(
2,
block1.id(),
vec![block1.id()],
genesis.parent_hash(),
Hash::zero(),
3000.into(),
&mut dag,
)?;
let block_main_3 = add_and_print(
Expand Down Expand Up @@ -1444,7 +1479,7 @@ fn test_merge_bounded() -> anyhow::Result<()> {
let (tips, ghostdata) =
dag.remove_bounded_merge_breaking_parents(tips, ghostdata, pruning_point, merge_depth)?;
assert_eq!(tips, vec![block_main_5.id()]);
assert_eq!(ghostdata, dag.ghostdata(&vec![block_main_5.id()])?);
assert_eq!(ghostdata, dag.ghostdata(&[block_main_5.id()])?);
dag.storage
.state_store
.write()
Expand Down Expand Up @@ -1507,11 +1542,17 @@ fn test_merge_bounded() -> anyhow::Result<()> {
merge_depth,
pruning_finality,
)?;
let fork = dag
let mut fork = dag
.ghost_dag_manager()
.find_selected_parent(vec![block_main_3.id(), block_main_3_1.id()])?;
assert_eq!(fork, merge_depth_root);

fork = if block_main_3.id() == fork {
block_main_3_1.id()
} else {
block_main_3.id()
};

// to test the filter
let block_red_4 = add_and_print(4, fork, vec![fork], genesis.parent_hash(), &mut dag)?;
let block_red_5 = add_and_print(
Expand All @@ -1522,13 +1563,13 @@ fn test_merge_bounded() -> anyhow::Result<()> {
&mut dag,
)?;

let ghostdata = dag.ghostdata(&vec![block_main_6.id(), block_red_5.id()])?;
let ghostdata = dag.ghostdata(&[block_main_6.id(), block_red_5.id()])?;
assert_eq!(
HashSet::from_iter(vec![block_red_4.id(), block_red_5.id()]),
ghostdata
.mergeset_reds
.as_ref()
.into_iter()
.iter()
.cloned()
.collect::<HashSet<_>>()
);
Expand Down Expand Up @@ -1558,7 +1599,7 @@ fn test_merge_bounded() -> anyhow::Result<()> {
HashSet::from_iter(vec![block_main_6.id(), block_red_5.id()])
);

let (tips, ghostdata) =
let (tips, _ghostdata) =
dag.remove_bounded_merge_breaking_parents(tips, ghostdata, pruning_point, merge_depth)?;
assert_eq!(tips.len(), 1);
assert_eq!(tips, vec![block_main_6.id()]);
Expand Down

0 comments on commit 0a135ad

Please sign in to comment.