diff --git a/Cargo.lock b/Cargo.lock index a87925ce8..6ac2bf0e7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -755,7 +755,7 @@ checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70" [[package]] name = "clar2wasm" version = "0.1.0" -source = "git+https://github.com/stacks-network/clarity-wasm.git?branch=main#6402754a71c3998b7febde69e06bbe582def9875" +source = "git+https://github.com/stacks-network/clarity-wasm.git?branch=main#1a1ab12dba3dfb243363fbf9ba955d7dab060936" dependencies = [ "chrono", "clap", diff --git a/components/clarinet-cli/src/frontend/cli.rs b/components/clarinet-cli/src/frontend/cli.rs index 6de2a289b..a7b8bfdb2 100644 --- a/components/clarinet-cli/src/frontend/cli.rs +++ b/components/clarinet-cli/src/frontend/cli.rs @@ -1072,7 +1072,7 @@ pub fn main() { contract.epoch, contract.clarity_version, ); - let mut analysis_db = AnalysisDatabase::new(&mut session.interpreter.datastore); + let mut analysis_db = AnalysisDatabase::new(&mut session.interpreter.clarity_datastore); let mut analysis_diagnostics = match analysis::run_analysis( &mut contract_analysis, &mut analysis_db, diff --git a/components/clarinet-sdk/node/tests/support-clarity-version.test.ts b/components/clarinet-sdk/node/tests/support-clarity-version.test.ts index 31a21aefe..0a58d3031 100644 --- a/components/clarinet-sdk/node/tests/support-clarity-version.test.ts +++ b/components/clarinet-sdk/node/tests/support-clarity-version.test.ts @@ -60,7 +60,7 @@ describe("the sdk handle all clarity version", () => { // `tenure-height` was introduced in clarity 3 let resOk3 = simnet.execute("(print tenure-height)"); - expect(resOk3.result).toStrictEqual(Cl.uint(1)); + expect(resOk3.result).toStrictEqual(Cl.uint(2)); // `block-height` was removed in clarity 3 expect(() => simnet.execute("(print block-height)")).toThrowError( diff --git a/components/clarity-events/src/bin.rs b/components/clarity-events/src/bin.rs index 0cc612133..67521260f 100644 --- a/components/clarity-events/src/bin.rs +++ b/components/clarity-events/src/bin.rs @@ -65,7 +65,7 @@ pub fn main() { }; { - let mut analysis_db = session.interpreter.datastore.as_analysis_db(); + let mut analysis_db = session.interpreter.clarity_datastore.as_analysis_db(); let cost_track = LimitedCostTracker::new_free(); let type_checker = TypeChecker::new(&mut analysis_db, cost_track, true); let settings = Settings::default(); diff --git a/components/clarity-repl/src/repl/clarity_values.rs b/components/clarity-repl/src/repl/clarity_values.rs index 1131556ed..29309d5fb 100644 --- a/components/clarity-repl/src/repl/clarity_values.rs +++ b/components/clarity-repl/src/repl/clarity_values.rs @@ -1,3 +1,5 @@ +use std::fmt::Write; + use clarity::vm::{ types::{CharType, SequenceData}, Value, @@ -22,47 +24,50 @@ pub fn uint8_to_value(mut value: &[u8]) -> Value { pub fn value_to_string(value: &Value) -> String { match value { - Value::Principal(principal_data) => { - format!("'{principal_data}") - } + Value::Principal(principal_data) => format!("'{}", principal_data), Value::Tuple(tup_data) => { - let mut data = Vec::new(); - for (name, value) in tup_data.data_map.iter() { - data.push(format!("{}: {}", &**name, value_to_string(value))) + let mut data = String::new(); + for (name, value) in &tup_data.data_map { + write!(&mut data, "{}: {}, ", name, value_to_string(value)).unwrap(); } - format!("{{ {} }}", data.join(", ")) + format!("{{ {} }}", data.trim_end_matches(", ")) } - Value::Optional(opt_data) => match opt_data.data { - Some(ref x) => format!("(some {})", 
value_to_string(x)), + Value::Optional(opt_data) => match &opt_data.data { + Some(x) => format!("(some {})", value_to_string(x)), None => "none".to_string(), }, - Value::Response(res_data) => match res_data.committed { - true => format!("(ok {})", value_to_string(&res_data.data)), - false => format!("(err {})", value_to_string(&res_data.data)), - }, - Value::Sequence(SequenceData::String(CharType::ASCII(data))) => { - format!("\"{}\"", String::from_utf8(data.data.clone()).unwrap()) + Value::Response(res_data) => { + let committed = if res_data.committed { "ok" } else { "err" }; + format!("({} {})", committed, value_to_string(&res_data.data)) } - Value::Sequence(SequenceData::String(CharType::UTF8(data))) => { - let mut result = String::new(); - for c in data.data.iter() { - if c.len() > 1 { - // escape extended charset - result.push_str(&format!("\\u{{{}}}", hash::to_hex(&c[..]))); - } else { - result.push(c[0] as char) - } - } - format!("u\"{result}\"") + Value::Sequence(SequenceData::String(CharType::ASCII(ascii_data))) => { + format!("\"{}\"", String::from_utf8_lossy(&ascii_data.data)) + } + Value::Sequence(SequenceData::String(CharType::UTF8(utf8_data))) => { + let result = utf8_data + .data + .iter() + .map(|c| { + if c.len() > 1 { + format!("\\u{{{}}}", hash::to_hex(&c[..])) + } else { + (c[0] as char).to_string() + } + }) + .collect::<Vec<String>>() + .join(""); + format!("u\"{}\"", result) } Value::Sequence(SequenceData::List(list_data)) => { - let mut data = Vec::new(); - for value in list_data.data.iter() { - data.push(value_to_string(value)) - } - format!("(list {})", data.join(" ")) + let data = list_data + .data + .iter() + .map(value_to_string) + .collect::<Vec<String>>() + .join(" "); + format!("(list {})", data) } - _ => format!("{value}"), + _ => format!("{}", value), } } diff --git a/components/clarity-repl/src/repl/datastore.rs b/components/clarity-repl/src/repl/datastore.rs index ffd5491b4..5ff6c44e3 100644 --- a/components/clarity-repl/src/repl/datastore.rs +++ b/components/clarity-repl/src/repl/datastore.rs @@ -21,6 +21,9 @@ use sha2::{Digest, Sha512_256}; use super::interpreter::BLOCK_LIMIT_MAINNET; +const SECONDS_BETWEEN_BURN_BLOCKS: u64 = 600; +const SECONDS_BETWEEN_STACKS_BLOCKS: u64 = 10; + fn epoch_to_peer_version(epoch: StacksEpochId) -> u8 { use clarity::consts::*; match epoch { @@ -37,29 +40,28 @@ fn epoch_to_peer_version(epoch: StacksEpochId) -> u8 { } #[derive(Clone, Debug)] -pub struct Datastore { - store: HashMap<StacksBlockId, HashMap<String, String>>, - block_id_lookup: HashMap<StacksBlockId, StacksBlockId>, - metadata: HashMap<(String, String), String>, +pub struct ClarityDatastore { open_chain_tip: StacksBlockId, current_chain_tip: StacksBlockId, - chain_height: u32, + store: HashMap<StacksBlockId, HashMap<String, String>>, + metadata: HashMap<(String, String), String>, + block_id_lookup: HashMap<StacksBlockId, StacksBlockId>, height_at_chain_tip: HashMap<StacksBlockId, u32>, } #[derive(Clone, Debug)] -pub struct BlockInfo { +struct BurnBlockInfo { + burn_block_time: u64, + burn_block_height: u32, +} + +#[derive(Clone, Debug)] +pub struct StacksBlockInfo { block_header_hash: BlockHeaderHash, burn_block_header_hash: BurnchainHeaderHash, consensus_hash: ConsensusHash, vrf_seed: VRFSeed, - burn_block_time: u64, - burn_block_height: u32, - miner: StacksAddress, - burnchain_tokens_spent_for_block: u128, - get_burnchain_tokens_spent_for_winning_block: u128, - tokens_earned_for_block: u128, - pox_payout_addrs: (Vec<TupleData>, u128), + stacks_block_time: u64, } #[derive(Clone, Debug)] @@ -71,19 +73,17 @@ pub struct StacksConstants { } #[derive(Clone, Debug)] -pub struct BurnDatastore { - store: HashMap<StacksBlockId, BlockInfo>, +pub struct Datastore { + genesis_id: 
StacksBlockId, + burn_chain_height: u32, + burn_blocks: HashMap, + stacks_chain_height: u32, + stacks_blocks: HashMap, sortition_lookup: HashMap, consensus_hash_lookup: HashMap, - block_id_lookup: HashMap, - open_chain_tip: StacksBlockId, - current_chain_tip: StacksBlockId, - chain_height: u32, - height_at_chain_tip: HashMap, current_epoch: StacksEpochId, current_epoch_start_height: u32, constants: StacksConstants, - genesis_time: u64, } fn height_to_hashed_bytes(height: u32) -> [u8; 32] { @@ -98,107 +98,117 @@ fn height_to_id(height: u32) -> StacksBlockId { StacksBlockId(height_to_hashed_bytes(height)) } -fn height_to_block(height: u32, genesis_time: Option) -> BlockInfo { - let bytes = height_to_hashed_bytes(height); - let genesis_time = genesis_time.unwrap_or(0); - - let block_header_hash = { - let mut buffer = bytes; - buffer[0] = 1; - BlockHeaderHash(buffer) - }; - let burn_block_header_hash = { - let mut buffer = bytes; - buffer[0] = 2; - BurnchainHeaderHash(buffer) - }; - let consensus_hash = { - let mut buffer = bytes; - buffer[0] = 3; - ConsensusHash::from_bytes(&buffer[0..20]).unwrap() - }; - let vrf_seed = { - let mut buffer = bytes; - buffer[0] = 4; - VRFSeed(buffer) - }; - let time_since_genesis: u64 = (height * 600).into(); - let burn_block_time: u64 = genesis_time + time_since_genesis; - let burn_block_height = height; - let miner = StacksAddress::burn_address(true); - let burnchain_tokens_spent_for_block = 2000; - let get_burnchain_tokens_spent_for_winning_block = 2000; - let tokens_earned_for_block = 5000; - let pox_payout_addrs = (vec![], 0_u128); - - BlockInfo { - block_header_hash, - burn_block_header_hash, - consensus_hash, - vrf_seed, - burn_block_time, - burn_block_height, - miner, - burnchain_tokens_spent_for_block, - get_burnchain_tokens_spent_for_winning_block, - tokens_earned_for_block, - pox_payout_addrs, - } +fn height_to_burn_block_header_hash(height: u32) -> BurnchainHeaderHash { + let mut bytes = height_to_hashed_bytes(height); + bytes[0] = 2; + BurnchainHeaderHash(bytes) } -impl Default for Datastore { +impl Default for ClarityDatastore { fn default() -> Self { Self::new() } } -impl Datastore { +impl ClarityDatastore { pub fn new() -> Self { let id = height_to_id(0); - - let mut store = HashMap::new(); - store.insert(id, HashMap::new()); - - let mut block_id_lookup = HashMap::new(); - block_id_lookup.insert(id, id); - - let mut id_height_map = HashMap::new(); - id_height_map.insert(id, 0); - Self { - store, - block_id_lookup, - metadata: HashMap::new(), open_chain_tip: id, current_chain_tip: id, - chain_height: 0, - height_at_chain_tip: id_height_map, + store: HashMap::from([(id, HashMap::new())]), + metadata: HashMap::new(), + block_id_lookup: HashMap::from([(id, id)]), + height_at_chain_tip: HashMap::from([(id, 0)]), } } - pub fn advance_chain_tip(&mut self, count: u32) -> u32 { - let cur_height = self.chain_height; - let current_lookup_id = *self + pub fn open(_path_str: &str, _miner_tip: Option<&StacksBlockId>) -> Result { + Ok(ClarityDatastore::new()) + } + + pub fn as_analysis_db(&mut self) -> AnalysisDatabase<'_> { + AnalysisDatabase::new(self) + } + + /// begin, commit, rollback a save point identified by key + /// this is used to clean up any data from aborted blocks + /// (NOT aborted transactions that is handled by the clarity vm directly). + /// The block header hash is used for identifying savepoints. 
+ /// this _cannot_ be used to rollback to arbitrary prior block hash, because that + /// blockhash would already have committed and no longer exist in the save point stack. + /// this is a "lower-level" rollback than the roll backs performed in + /// ClarityDatabase or AnalysisDatabase -- this is done at the backing store level. + + pub fn begin(&mut self, _current: &StacksBlockId, _next: &StacksBlockId) { + // self.marf.begin(current, next) + // .expect(&format!("ERROR: Failed to begin new MARF block {} - {})", current, next)); + // self.chain_tip = self.marf.get_open_chain_tip() + // .expect("ERROR: Failed to get open MARF") + // .clone(); + // self.side_store.begin(&self.chain_tip); + } + pub fn rollback(&mut self) { + // self.marf.drop_current(); + // self.side_store.rollback(&self.chain_tip); + // self.chain_tip = StacksBlockId::sentinel(); + } + // This is used by miners + // so that the block validation and processing logic doesn't + // reprocess the same data as if it were already loaded + pub fn commit_mined_block(&mut self, _will_move_to: &StacksBlockId) { + // rollback the side_store + // the side_store shouldn't commit data for blocks that won't be + // included in the processed chainstate (like a block constructed during mining) + // _if_ for some reason, we do want to be able to access that mined chain state in the future, + // we should probably commit the data to a different table which does not have uniqueness constraints. + // self.side_store.rollback(&self.chain_tip); + // self.marf.commit_mined(will_move_to) + // .expect("ERROR: Failed to commit MARF block"); + } + + pub fn commit_to(&mut self, _final_bhh: &StacksBlockId) { + // println!("commit_to({})", final_bhh); + // self.side_store.commit_metadata_to(&self.chain_tip, final_bhh); + // self.side_store.commit(&self.chain_tip); + // self.marf.commit_to(final_bhh) + // .expect("ERROR: Failed to commit MARF block"); + } + + pub fn put(&mut self, key: &str, value: &str) { + let lookup_id = self .block_id_lookup .get(&self.open_chain_tip) - .expect("Open chain tip missing in block id lookup table"); + .expect("Could not find current chain tip in block_id_lookup map"); - for i in 1..=count { - let height = cur_height + i; - let id = height_to_id(height); + // if there isn't a store for the open chain_tip, make one and update the + // entry for the block id in the lookup table + if *lookup_id != self.open_chain_tip { + self.store.insert( + self.open_chain_tip, + self.store + .get(lookup_id) + .unwrap_or_else(|| panic!("Block with ID {:?} does not exist", lookup_id)) + .clone(), + ); - self.block_id_lookup.insert(id, current_lookup_id); - self.height_at_chain_tip.insert(id, height); + self.block_id_lookup + .insert(self.open_chain_tip, self.current_chain_tip); } - self.chain_height += count; - self.open_chain_tip = height_to_id(self.chain_height); - self.current_chain_tip = self.open_chain_tip; - self.chain_height + if let Some(map) = self.store.get_mut(&self.open_chain_tip) { + map.insert(key.to_string(), value.to_string()); + } else { + panic!("Block does not exist for current chain tip"); + } + } + + pub fn make_contract_hash_key(contract: &QualifiedContractIdentifier) -> String { + format!("clarity-contract::{}", contract) } } -impl ClarityBackingStore for Datastore { +impl ClarityBackingStore for ClarityDatastore { fn put_all_data(&mut self, items: Vec<(String, String)>) -> Result<()> { for (key, value) in items { self.put(&key, &value); @@ -243,12 +253,15 @@ impl ClarityBackingStore for Datastore { fn 
get_current_block_height(&mut self) -> u32 { *self .height_at_chain_tip - .get(self.get_chain_tip()) + .get(&self.current_chain_tip) .unwrap_or(&u32::MAX) } fn get_open_chain_tip_height(&mut self) -> u32 { - self.chain_height + self.height_at_chain_tip + .get(&self.open_chain_tip) + .copied() + .unwrap_or(u32::MAX) } fn get_open_chain_tip(&mut self) -> StacksBlockId { @@ -267,8 +280,6 @@ impl ClarityBackingStore for Datastore { key: &str, value: &str, ) -> Result<()> { - // let bhh = self.get_open_chain_tip(); - // self.get_side_store().insert_metadata(&bhh, &contract.to_string(), key, value) self.metadata .insert((contract.to_string(), key.to_string()), value.to_string()); Ok(()) @@ -279,8 +290,6 @@ impl ClarityBackingStore for Datastore { contract: &QualifiedContractIdentifier, key: &str, ) -> Result> { - // let (bhh, _) = self.get_contract_hash(contract)?; - // Ok(self.get_side_store().get_metadata(&bhh, &contract.to_string(), key)) let key = &(contract.to_string(), key.to_string()); match self.metadata.get(key) { @@ -319,58 +328,55 @@ impl ClarityBackingStore for Datastore { } } -impl BurnDatastore { +impl Default for Datastore { + fn default() -> Self { + Self::new(StacksConstants { + burn_start_height: 0, + pox_prepare_length: 50, + pox_reward_cycle_length: 1050, + pox_rejection_fraction: 0, + }) + } +} + +impl Datastore { pub fn new(constants: StacksConstants) -> Self { let bytes = height_to_hashed_bytes(0); let id = StacksBlockId(bytes); let sortition_id = SortitionId(bytes); let genesis_time = chrono::Utc::now().timestamp() as u64; - let genesis_block = BlockInfo { - block_header_hash: BlockHeaderHash([0x00; 32]), - burn_block_header_hash: BurnchainHeaderHash([0x00; 32]), - consensus_hash: ConsensusHash([0x00; 20]), - vrf_seed: VRFSeed([0x00; 32]), + let first_burn_block_header_hash = BurnchainHeaderHash([0x00; 32]); + + let genesis_burn_block = BurnBlockInfo { burn_block_time: genesis_time, burn_block_height: 0, - miner: StacksAddress::burn_address(false), - burnchain_tokens_spent_for_block: 0, - get_burnchain_tokens_spent_for_winning_block: 0, - tokens_earned_for_block: 0, - pox_payout_addrs: (vec![], 0), }; - let mut height_at_chain_tip = HashMap::new(); - height_at_chain_tip.insert(id, 0); - - let mut sortition_lookup = HashMap::new(); - sortition_lookup.insert(sortition_id, id); - - let mut consensus_hash_lookup = HashMap::new(); - consensus_hash_lookup.insert(genesis_block.consensus_hash, sortition_id); - - let mut store = HashMap::new(); - store.insert(id, genesis_block); - - let mut block_id_lookup = HashMap::new(); - block_id_lookup.insert(id, id); - - let mut id_height_map = HashMap::new(); - id_height_map.insert(id, 0); + let genesis_block = StacksBlockInfo { + block_header_hash: BlockHeaderHash([0x00; 32]), + burn_block_header_hash: first_burn_block_header_hash, + consensus_hash: ConsensusHash([0x00; 20]), + vrf_seed: VRFSeed([0x00; 32]), + stacks_block_time: genesis_time + SECONDS_BETWEEN_STACKS_BLOCKS, + }; - BurnDatastore { - store, + let sortition_lookup = HashMap::from([(sortition_id, id)]); + let consensus_hash_lookup = HashMap::from([(genesis_block.consensus_hash, sortition_id)]); + let burn_blocks = HashMap::from([(first_burn_block_header_hash, genesis_burn_block)]); + let stacks_blocks = HashMap::from([(id, genesis_block)]); + + Datastore { + genesis_id: id, + burn_chain_height: 0, + burn_blocks, + stacks_chain_height: 0, + stacks_blocks, sortition_lookup, consensus_hash_lookup, - block_id_lookup, - open_chain_tip: id, - current_chain_tip: id, - chain_height: 
0, - height_at_chain_tip, current_epoch: StacksEpochId::Epoch2_05, current_epoch_start_height: 0, constants, - genesis_time, } } @@ -378,70 +384,172 @@ impl BurnDatastore { self.current_epoch } - pub fn get_current_block_height(&self) -> u32 { - self.chain_height + pub fn get_current_stacks_block_height(&self) -> u32 { + self.stacks_chain_height + } + + pub fn get_current_burn_block_height(&self) -> u32 { + self.burn_chain_height + } + + fn build_next_stacks_block(&self, clarity_datastore: &ClarityDatastore) -> StacksBlockInfo { + let burn_chain_height = self.burn_chain_height; + let stacks_block_height = self.stacks_chain_height; + + let last_stacks_block = self + .stacks_blocks + .get(&clarity_datastore.current_chain_tip) + .expect("current chain tip missing in stacks block table"); + let last_burn_block = self + .burn_blocks + .get(&height_to_burn_block_header_hash(burn_chain_height)) + .expect("burn block missing in burn block table"); + + let last_block_time = std::cmp::max( + last_stacks_block.stacks_block_time, + last_burn_block.burn_block_time, + ); + + let bytes = height_to_hashed_bytes(stacks_block_height); + + let block_header_hash = { + let mut buffer = bytes; + buffer[0] = 1; + BlockHeaderHash(buffer) + }; + let burn_block_header_hash = height_to_burn_block_header_hash(burn_chain_height); + let consensus_hash = { + let mut buffer = bytes; + buffer[0] = 3; + ConsensusHash::from_bytes(&buffer[0..20]).unwrap() + }; + let vrf_seed = { + let mut buffer = bytes; + buffer[0] = 4; + VRFSeed(buffer) + }; + let stacks_block_time: u64 = last_block_time + SECONDS_BETWEEN_STACKS_BLOCKS; + + StacksBlockInfo { + block_header_hash, + burn_block_header_hash, + consensus_hash, + vrf_seed, + stacks_block_time, + } + } + + pub fn advance_burn_chain_tip( + &mut self, + clarity_datastore: &mut ClarityDatastore, + count: u32, + ) -> u32 { + for _ in 1..=count { + let last_stacks_block = self + .stacks_blocks + .get(&clarity_datastore.current_chain_tip) + .unwrap(); + let last_burn_block = self + .burn_blocks + .get(&last_stacks_block.burn_block_header_hash) + .unwrap(); + + let mut next_burn_block_time = + last_burn_block.burn_block_time + SECONDS_BETWEEN_BURN_BLOCKS; + if last_stacks_block.stacks_block_time > next_burn_block_time { + next_burn_block_time = + last_stacks_block.stacks_block_time + SECONDS_BETWEEN_STACKS_BLOCKS; + } + + let height = self.burn_chain_height + 1; + let hash = height_to_burn_block_header_hash(height); + let burn_block_info = BurnBlockInfo { + burn_block_time: next_burn_block_time, + burn_block_height: height, + }; + + self.burn_blocks.insert(hash, burn_block_info); + self.burn_chain_height = height; + self.advance_stacks_chain_tip(clarity_datastore, 1); + } + + self.burn_chain_height } - pub fn advance_chain_tip(&mut self, count: u32) -> u32 { - let cur_height = self.chain_height; - let current_lookup_id = *self + + pub fn advance_stacks_chain_tip( + &mut self, + clarity_datastore: &mut ClarityDatastore, + count: u32, + ) -> u32 { + let current_lookup_id = *clarity_datastore .block_id_lookup - .get(&self.open_chain_tip) + .get(&clarity_datastore.open_chain_tip) .expect("Open chain tip missing in block id lookup table"); - let genesis_time = self.genesis_time; - for i in 1..=count { - let height = cur_height + i; - let bytes = height_to_hashed_bytes(height); + for _ in 1..=count { + self.stacks_chain_height += 1; + + let bytes = height_to_hashed_bytes(self.stacks_chain_height); let id = StacksBlockId(bytes); let sortition_id = SortitionId(bytes); - let block_info = 
height_to_block(height, Some(genesis_time)); - self.block_id_lookup.insert(id, current_lookup_id); - self.height_at_chain_tip.insert(id, height); + let block_info = self.build_next_stacks_block(clarity_datastore); + self.sortition_lookup.insert(sortition_id, id); self.consensus_hash_lookup .insert(block_info.consensus_hash, sortition_id); - self.store.insert(id, block_info); + self.stacks_blocks.insert(id, block_info); + + clarity_datastore + .block_id_lookup + .entry(id) + .or_insert(current_lookup_id); + clarity_datastore + .height_at_chain_tip + .entry(id) + .or_insert(self.stacks_chain_height); + clarity_datastore.open_chain_tip = height_to_id(self.stacks_chain_height); + clarity_datastore.current_chain_tip = clarity_datastore.open_chain_tip; } - self.chain_height += count; - self.open_chain_tip = height_to_id(self.chain_height); - self.current_chain_tip = self.open_chain_tip; - self.chain_height + self.stacks_chain_height } - pub fn set_current_epoch(&mut self, epoch: StacksEpochId) { + pub fn set_current_epoch( + &mut self, + clarity_datastore: &mut ClarityDatastore, + epoch: StacksEpochId, + ) { + if epoch == self.current_epoch { + return; + } self.current_epoch = epoch; - self.current_epoch_start_height = self.chain_height; + self.current_epoch_start_height = self.stacks_chain_height; + if epoch >= StacksEpochId::Epoch30 { + // ideally the burn chain tip should be advanced for each new epoch + // but this would introduce breaking changes to existing 2.x tests + self.advance_burn_chain_tip(clarity_datastore, 1); + } } } -impl HeadersDB for BurnDatastore { - // fn get(&mut self, key: &str) -> Option<String> { - // let lookup_id = self - // .block_id_lookup - // .get(&self.current_chain_tip) - // .expect("Could not find current chain tip in block_id_lookup map"); - - // if let Some(map) = self.store.get(lookup_id) { - // map.get(key).map(|v| v.clone()) - // } else { - // panic!("Block does not exist for current chain tip"); - // } - // } - +impl HeadersDB for Datastore { fn get_stacks_block_header_hash_for_block( &self, id_bhh: &StacksBlockId, _epoch_id: &StacksEpochId, ) -> Option<BlockHeaderHash> { - self.store.get(id_bhh).map(|id| id.block_header_hash) + self.stacks_blocks + .get(id_bhh) + .map(|id| id.block_header_hash) } fn get_burn_header_hash_for_block( &self, id_bhh: &StacksBlockId, ) -> Option<BurnchainHeaderHash> { - self.store.get(id_bhh).map(|id| id.burn_block_header_hash) + self.stacks_blocks + .get(id_bhh) + .map(|block| block.burn_block_header_hash) } fn get_consensus_hash_for_block( @@ -449,63 +557,91 @@ id_bhh: &StacksBlockId, _epoch_id: &StacksEpochId, ) -> Option<ConsensusHash> { - self.store.get(id_bhh).map(|id| id.consensus_hash) + self.stacks_blocks.get(id_bhh).map(|id| id.consensus_hash) } + fn get_vrf_seed_for_block( &self, id_bhh: &StacksBlockId, _epoch_id: &StacksEpochId, ) -> Option<VRFSeed> { - self.store.get(id_bhh).map(|id| id.vrf_seed) + self.stacks_blocks.get(id_bhh).map(|id| id.vrf_seed) } + fn get_stacks_block_time_for_block(&self, id_bhh: &StacksBlockId) -> Option<u64> { - self.store.get(id_bhh).map(|id| id.burn_block_time) + self.stacks_blocks + .get(id_bhh) + .map(|id| id.stacks_block_time) } + fn get_burn_block_time_for_block( &self, id_bhh: &StacksBlockId, _epoch_id: Option<&StacksEpochId>, ) -> Option<u64> { - self.store.get(id_bhh).map(|id| id.burn_block_time) + self.get_burn_header_hash_for_block(id_bhh) .and_then(|hash| self.burn_blocks.get(&hash)) .map(|b| b.burn_block_time) } + fn get_burn_block_height_for_block(&self, id_bhh: &StacksBlockId) -> Option<u32> { - 
self.store.get(id_bhh).map(|id| id.burn_block_height) + self.get_burn_header_hash_for_block(id_bhh) + .and_then(|hash| self.burn_blocks.get(&hash)) + .map(|b| b.burn_block_height) } + fn get_miner_address( &self, id_bhh: &StacksBlockId, _epoch_id: &StacksEpochId, ) -> Option<StacksAddress> { - self.store.get(id_bhh).map(|id| id.miner) + if self.get_burn_block_height_for_block(id_bhh).is_some() { + return StacksAddress::burn_address(id_bhh != &self.genesis_id).into(); + } + None } + fn get_burnchain_tokens_spent_for_block( &self, id_bhh: &StacksBlockId, _epoch_id: &StacksEpochId, ) -> Option<u128> { - self.store - .get(id_bhh) - .map(|id| id.burnchain_tokens_spent_for_block) + if id_bhh == &self.genesis_id { + return Some(0); + }; + if self.get_burn_block_height_for_block(id_bhh).is_some() { + return Some(2000); + }; + None } + fn get_burnchain_tokens_spent_for_winning_block( &self, id_bhh: &StacksBlockId, _epoch_id: &StacksEpochId, ) -> Option<u128> { - self.store - .get(id_bhh) - .map(|id| id.get_burnchain_tokens_spent_for_winning_block) + if id_bhh == &self.genesis_id { + return Some(0); + }; + None } + fn get_tokens_earned_for_block( &self, id_bhh: &StacksBlockId, _epoch_id: &StacksEpochId, ) -> Option<u128> { - self.store.get(id_bhh).map(|id| id.tokens_earned_for_block) + if id_bhh == &self.genesis_id { + return Some(0); + }; + if self.get_burn_block_height_for_block(id_bhh).is_some() { + return Some(5000); + } + None } } -impl BurnStateDB for BurnDatastore { +impl BurnStateDB for Datastore { fn get_v1_unlock_height(&self) -> u32 { 0 } @@ -527,11 +663,11 @@ } fn get_tip_burn_block_height(&self) -> Option<u32> { - Some(self.chain_height) + Some(self.burn_chain_height) } fn get_tip_sortition_id(&self) -> Option<SortitionId> { - let bytes = height_to_hashed_bytes(self.chain_height); + let bytes = height_to_hashed_bytes(self.stacks_chain_height); let sortition_id = SortitionId(bytes); Some(sortition_id) } @@ -540,8 +676,10 @@ fn get_burn_block_height(&self, sortition_id: &SortitionId) -> Option<u32> { self.sortition_lookup .get(sortition_id) - .and_then(|id| self.store.get(id)) - .map(|block_info| block_info.burn_block_height) + .and_then(|id| self.stacks_blocks.get(id)) + .map(|stacks_block_info| stacks_block_info.burn_block_header_hash) + .and_then(|hash| self.burn_blocks.get(&hash)) + .map(|burn_block_info| burn_block_info.burn_block_height) } /// Returns the height of the burnchain when the Stacks chain started running. 
@@ -571,7 +709,7 @@ impl BurnStateDB for BurnDatastore { ) -> Option<BurnchainHeaderHash> { self.sortition_lookup .get(sortition_id) - .and_then(|id| self.store.get(id)) + .and_then(|id| self.stacks_blocks.get(id)) .map(|block_info| block_info.burn_block_header_hash) } @@ -604,109 +742,18 @@ /// Get the PoX payout addresses for a given burnchain block fn get_pox_payout_addrs( &self, - _height: u32, - sortition_id: &SortitionId, + height: u32, + _sortition_id: &SortitionId, ) -> Option<(Vec<TupleData>, u128)> { - self.sortition_lookup - .get(sortition_id) - .and_then(|id| self.store.get(id)) - .map(|block_info| block_info.pox_payout_addrs.clone()) - } - - fn get_ast_rules(&self, _height: u32) -> clarity::vm::ast::ASTRules { - clarity::vm::ast::ASTRules::PrecheckSize - } -} - -impl Datastore { - pub fn open(_path_str: &str, _miner_tip: Option<&StacksBlockId>) -> Result<Datastore> { - Ok(Datastore::new()) - } - - pub fn as_analysis_db(&mut self) -> AnalysisDatabase<'_> { - AnalysisDatabase::new(self) - } - - /// begin, commit, rollback a save point identified by key - /// this is used to clean up any data from aborted blocks - /// (NOT aborted transactions that is handled by the clarity vm directly). - /// The block header hash is used for identifying savepoints. - /// this _cannot_ be used to rollback to arbitrary prior block hash, because that - /// blockhash would already have committed and no longer exist in the save point stack. - /// this is a "lower-level" rollback than the roll backs performed in - /// ClarityDatabase or AnalysisDatabase -- this is done at the backing store level. - - pub fn begin(&mut self, _current: &StacksBlockId, _next: &StacksBlockId) { - // self.marf.begin(current, next) - // .expect(&format!("ERROR: Failed to begin new MARF block {} - {})", current, next)); - // self.chain_tip = self.marf.get_open_chain_tip() - // .expect("ERROR: Failed to get open MARF") - // .clone(); - // self.side_store.begin(&self.chain_tip); - } - pub fn rollback(&mut self) { - // self.marf.drop_current(); - // self.side_store.rollback(&self.chain_tip); - // self.chain_tip = StacksBlockId::sentinel(); - } - // This is used by miners - // so that the block validation and processing logic doesn't - // reprocess the same data as if it were already loaded - pub fn commit_mined_block(&mut self, _will_move_to: &StacksBlockId) { - // rollback the side_store - // the side_store shouldn't commit data for blocks that won't be - // included in the processed chainstate (like a block constructed during mining) - // _if_ for some reason, we do want to be able to access that mined chain state in the future, - // we should probably commit the data to a different table which does not have uniqueness constraints. 
- // self.side_store.rollback(&self.chain_tip); - // self.marf.commit_mined(will_move_to) - // .expect("ERROR: Failed to commit MARF block"); - } - pub fn commit_to(&mut self, _final_bhh: &StacksBlockId) { - // println!("commit_to({})", final_bhh); - // self.side_store.commit_metadata_to(&self.chain_tip, final_bhh); - // self.side_store.commit(&self.chain_tip); - // self.marf.commit_to(final_bhh) - // .expect("ERROR: Failed to commit MARF block"); - } - pub fn get_chain_tip(&self) -> &StacksBlockId { - &self.current_chain_tip - } - - pub fn set_chain_tip(&mut self, bhh: &StacksBlockId) { - self.current_chain_tip = *bhh; - } - - pub fn put(&mut self, key: &str, value: &str) { - let lookup_id = self - .block_id_lookup - .get(&self.open_chain_tip) - .expect("Could not find current chain tip in block_id_lookup map"); - - // if there isn't a store for the open chain_tip, make one and update the - // entry for the block id in the lookup table - if *lookup_id != self.open_chain_tip { - self.store.insert( - self.open_chain_tip, - self.store - .get(lookup_id) - .unwrap_or_else(|| panic!("Block with ID {:?} does not exist", lookup_id)) - .clone(), - ); - - self.block_id_lookup - .insert(self.open_chain_tip, self.current_chain_tip); - } - - if let Some(map) = self.store.get_mut(&self.open_chain_tip) { - map.insert(key.to_string(), value.to_string()); + if height <= self.burn_chain_height { + Some((vec![], 0)) } else { - panic!("Block does not exist for current chain tip"); + None } } - pub fn make_contract_hash_key(contract: &QualifiedContractIdentifier) -> String { - format!("clarity-contract::{}", contract) + fn get_ast_rules(&self, _height: u32) -> clarity::vm::ast::ASTRules { + clarity::vm::ast::ASTRules::PrecheckSize } } @@ -716,97 +763,90 @@ mod tests { use super::*; - fn get_burn_datastore() -> BurnDatastore { - let constants = StacksConstants { - burn_start_height: 0, - pox_prepare_length: 50, - pox_reward_cycle_length: 1050, - pox_rejection_fraction: 0, - }; - BurnDatastore::new(constants) - } - #[test] fn test_advance_chain_tip() { - let mut datastore = get_burn_datastore(); - datastore.advance_chain_tip(5); - assert_eq!(datastore.chain_height, 5); + let mut datastore = Datastore::default(); + let mut clarity_datastore = ClarityDatastore::new(); + datastore.advance_burn_chain_tip(&mut clarity_datastore, 5); + assert_eq!(datastore.stacks_chain_height, 5); } #[test] fn test_set_current_epoch() { - let mut datastore = get_burn_datastore(); + let mut datastore = Datastore::default(); + let mut clarity_datastore = ClarityDatastore::new(); let epoch_id = StacksEpochId::Epoch25; - datastore.set_current_epoch(epoch_id); + datastore.set_current_epoch(&mut clarity_datastore, epoch_id); assert_eq!(datastore.current_epoch, epoch_id); } #[test] fn test_get_v1_unlock_height() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); assert_eq!(datastore.get_v1_unlock_height(), 0); } #[test] fn test_get_v2_unlock_height() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); assert_eq!(datastore.get_v2_unlock_height(), 0); } #[test] fn test_get_v3_unlock_height() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); assert_eq!(datastore.get_v3_unlock_height(), 0); } #[test] fn test_get_pox_3_activation_height() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); assert_eq!(datastore.get_pox_3_activation_height(), 0); } #[test] fn test_get_pox_4_activation_height() { - let datastore = 
get_burn_datastore(); + let datastore = Datastore::default(); assert_eq!(datastore.get_pox_4_activation_height(), 0); } #[test] fn test_get_tip_burn_block_height() { - let mut datastore = get_burn_datastore(); + let mut datastore = Datastore::default(); + let mut clarity_datastore = ClarityDatastore::new(); let chain_height = 10; - datastore.chain_height = chain_height; + datastore.advance_burn_chain_tip(&mut clarity_datastore, 10); let tip_burn_block_height = datastore.get_tip_burn_block_height(); assert_eq!(tip_burn_block_height, Some(chain_height)); } #[test] fn test_get_burn_start_height() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); assert_eq!(datastore.get_burn_start_height(), 0); } #[test] fn test_get_pox_prepare_length() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); assert_eq!(datastore.get_pox_prepare_length(), 50); } #[test] fn test_get_pox_reward_cycle_length() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); assert_eq!(datastore.get_pox_reward_cycle_length(), 1050); } #[test] fn test_get_pox_rejection_fraction() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); assert_eq!(datastore.get_pox_rejection_fraction(), 0); } #[test] fn test_get_stacks_epoch() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); let height = 10; let epoch = datastore.get_stacks_epoch(height); assert_eq!( @@ -823,7 +863,7 @@ mod tests { #[test] fn test_get_stacks_epoch_by_epoch_id() { - let datastore = get_burn_datastore(); + let datastore = Datastore::default(); let epoch_id = StacksEpochId::Epoch2_05; let epoch = datastore.get_stacks_epoch_by_epoch_id(&epoch_id); assert_eq!( diff --git a/components/clarity-repl/src/repl/interpreter.rs b/components/clarity-repl/src/repl/interpreter.rs index 8b081e9f8..22df7047d 100644 --- a/components/clarity-repl/src/repl/interpreter.rs +++ b/components/clarity-repl/src/repl/interpreter.rs @@ -3,7 +3,7 @@ use std::collections::{btree_map::Entry, BTreeMap, BTreeSet}; use crate::analysis::annotation::{Annotation, AnnotationKind}; use crate::analysis::ast_dependency_detector::{ASTDependencyDetector, Dependency}; use crate::analysis::{self}; -use crate::repl::datastore::BurnDatastore; +use crate::repl::datastore::ClarityDatastore; use crate::repl::datastore::Datastore; use crate::repl::Settings; use clarity::consts::CHAIN_ID_TESTNET; @@ -28,7 +28,6 @@ use clarity::vm::{events::*, ClarityVersion}; use clarity::vm::{ContractEvaluationResult, EvalHook}; use clarity::vm::{CostSynthesis, ExecutionResult, ParsedContract}; -use super::datastore::StacksConstants; use super::{ClarityContract, DEFAULT_EPOCH}; pub const BLOCK_LIMIT_MAINNET: ExecutionCost = ExecutionCost { @@ -41,8 +40,8 @@ pub const BLOCK_LIMIT_MAINNET: ExecutionCost = ExecutionCost { #[derive(Clone, Debug)] pub struct ClarityInterpreter { + pub clarity_datastore: ClarityDatastore, pub datastore: Datastore, - pub burn_datastore: BurnDatastore, pub repl_settings: Settings, tx_sender: StandardPrincipalData, accounts: BTreeSet, @@ -54,19 +53,13 @@ pub struct Txid(pub [u8; 32]); impl ClarityInterpreter { pub fn new(tx_sender: StandardPrincipalData, repl_settings: Settings) -> Self { - let constants = StacksConstants { - burn_start_height: 0, - pox_prepare_length: 50, - pox_reward_cycle_length: 1050, - pox_rejection_fraction: 0, - }; Self { tx_sender, repl_settings, - datastore: Datastore::new(), + clarity_datastore: ClarityDatastore::new(), accounts: 
BTreeSet::new(), tokens: BTreeMap::new(), - burn_datastore: BurnDatastore::new(constants), + datastore: Datastore::default(), } } @@ -297,7 +290,7 @@ impl ClarityInterpreter { contract_ast: &ContractAST, annotations: &Vec<Annotation>, ) -> Result<(ContractAnalysis, Vec<Diagnostic>), Diagnostic> { - let mut analysis_db = AnalysisDatabase::new(&mut self.datastore); + let mut analysis_db = AnalysisDatabase::new(&mut self.clarity_datastore); // Run standard clarity analyses let mut contract_analysis = clarity::vm::analysis::run_analysis( @@ -327,9 +320,9 @@ impl ClarityInterpreter { pub fn get_block_time(&mut self) -> u64 { let block_height = self.get_block_height(); let mut conn = ClarityDatabase::new( - &mut self.datastore, - &self.burn_datastore, - &self.burn_datastore, + &mut self.clarity_datastore, + &self.datastore, + &self.datastore, ); conn.get_block_time(block_height) .expect("unable to get block time") @@ -342,7 +335,7 @@ impl ClarityInterpreter { ) -> Option<String> { let key = ClarityDatabase::make_key_for_trip(contract_id, StoreType::Variable, var_name); let value_hex = self - .datastore + .clarity_datastore .get_data(&key) .expect("failed to get key from datastore")?; Some(format!("0x{value_hex}")) @@ -357,7 +350,7 @@ impl ClarityInterpreter { let key = ClarityDatabase::make_key_for_data_map_entry(contract_id, map_name, map_key).unwrap(); let value_hex = self - .datastore + .clarity_datastore .get_data(&key) .expect("failed to get map entry from datastore")?; Some(format!("0x{value_hex}")) @@ -377,9 +370,9 @@ impl ClarityInterpreter { ContractContext::new(contract_id.clone(), contract.clarity_version); let mut conn = ClarityDatabase::new( - &mut self.datastore, - &self.burn_datastore, - &self.burn_datastore, + &mut self.clarity_datastore, + &self.datastore, + &self.datastore, ); let tx_sender: PrincipalData = self.tx_sender.clone().into(); conn.begin(); @@ -581,7 +574,7 @@ impl ClarityInterpreter { } if contract_saved { - let mut analysis_db = AnalysisDatabase::new(&mut self.datastore); + let mut analysis_db = AnalysisDatabase::new(&mut self.clarity_datastore); analysis_db .execute(|db| db.insert_contract(&contract_id, &analysis)) .expect("Unable to save data"); @@ -606,9 +599,9 @@ impl ClarityInterpreter { ContractContext::new(contract_id.clone(), contract.clarity_version); let mut conn = ClarityDatabase::new( - &mut self.datastore, - &self.burn_datastore, - &self.burn_datastore, + &mut self.clarity_datastore, + &self.datastore, + &self.datastore, ); let tx_sender: PrincipalData = self.tx_sender.clone().into(); conn.begin(); @@ -821,7 +814,7 @@ impl ClarityInterpreter { } if contract_saved { - let mut analysis_db = AnalysisDatabase::new(&mut self.datastore); + let mut analysis_db = AnalysisDatabase::new(&mut self.clarity_datastore); analysis_db .execute(|db| db.insert_contract(&contract_id, &analysis)) .expect("Unable to save data"); @@ -842,9 +835,9 @@ impl ClarityInterpreter { eval_hooks: Option<Vec<&mut dyn EvalHook>>, ) -> Result { let mut conn = ClarityDatabase::new( - &mut self.datastore, - &self.burn_datastore, - &self.burn_datastore, + &mut self.clarity_datastore, + &self.datastore, + &self.datastore, ); let tx_sender: PrincipalData = self.tx_sender.clone().into(); conn.begin(); @@ -1055,9 +1048,9 @@ impl ClarityInterpreter { ) -> Result { let final_balance = { let conn = ClarityDatabase::new( - &mut self.datastore, - &self.burn_datastore, - &self.burn_datastore, + &mut self.clarity_datastore, + &self.datastore, + &self.datastore, ); let mut global_context = GlobalContext::new( @@ -1098,39 +1091,36 @@ impl ClarityInterpreter 
{ self.tx_sender.clone() } - pub fn advance_chain_tip(&mut self, count: u32) -> u32 { - let current_epoch = self.burn_datastore.get_current_epoch(); - if current_epoch < StacksEpochId::Epoch30 { - self.advance_burn_chain_tip(count) - } else { - match self.advance_stacks_chain_tip(count) { - Ok(count) => count, - Err(_) => unreachable!("Epoch checked already"), - } - } + pub fn set_current_epoch(&mut self, epoch: StacksEpochId) { + self.datastore + .set_current_epoch(&mut self.clarity_datastore, epoch); } pub fn advance_burn_chain_tip(&mut self, count: u32) -> u32 { - let new_height = self.burn_datastore.advance_chain_tip(count); - let _ = self.datastore.advance_chain_tip(count); + let new_height = self + .datastore + .advance_burn_chain_tip(&mut self.clarity_datastore, count); self.set_tenure_height(); new_height } + pub fn advance_stacks_chain_tip(&mut self, count: u32) -> Result { - let current_epoch = self.burn_datastore.get_current_epoch(); + let current_epoch = self.datastore.get_current_epoch(); if current_epoch < StacksEpochId::Epoch30 { Err("only burn chain height can be advanced in epoch lower than 3.0".to_string()) } else { - Ok(self.datastore.advance_chain_tip(count)) + Ok(self + .datastore + .advance_stacks_chain_tip(&mut self.clarity_datastore, count)) } } pub fn set_tenure_height(&mut self) { let burn_block_height = self.get_burn_block_height(); let mut conn = ClarityDatabase::new( - &mut self.datastore, - &self.burn_datastore, - &self.burn_datastore, + &mut self.clarity_datastore, + &self.datastore, + &self.datastore, ); conn.begin(); conn.put_data("_stx-data::tenure_height", &burn_block_height) @@ -1139,11 +1129,11 @@ impl ClarityInterpreter { } pub fn get_block_height(&mut self) -> u32 { - self.datastore.get_current_block_height() + self.datastore.get_current_stacks_block_height() } pub fn get_burn_block_height(&mut self) -> u32 { - self.burn_datastore.get_current_block_height() + self.datastore.get_current_burn_block_height() } fn credit_token(&mut self, account: String, token: String, value: u128) { @@ -1249,6 +1239,24 @@ mod tests { assert_eq!(result.result, expected_value); } + #[track_caller] + fn run_snippet( + interpreter: &mut ClarityInterpreter, + snippet: &str, + clarity_version: ClarityVersion, + ) -> Value { + let contract = ClarityContractBuilder::new() + .code_source(snippet.to_string()) + .epoch(interpreter.datastore.get_current_epoch()) + .clarity_version(clarity_version) + .build(); + let deploy_result = deploy_contract(interpreter, &contract); + match deploy_result.unwrap().result { + EvaluationResult::Contract(_) => unreachable!(), + EvaluationResult::Snippet(res) => res.result, + } + } + #[test] fn test_get_tx_sender() { let mut interpreter = @@ -1288,15 +1296,14 @@ mod tests { fn test_advance_stacks_chain_tip_pre_epoch_3() { let mut interpreter = ClarityInterpreter::new(StandardPrincipalData::transient(), Settings::default()); - interpreter - .burn_datastore - .set_current_epoch(StacksEpochId::Epoch2_05); + interpreter.set_current_epoch(StacksEpochId::Epoch2_05); let count = 5; let initial_block_height = interpreter.get_burn_block_height(); assert_ne!(interpreter.advance_stacks_chain_tip(count), Ok(count)); assert_eq!(interpreter.get_burn_block_height(), initial_block_height); assert_eq!(interpreter.get_block_height(), initial_block_height); } + #[test] fn test_advance_stacks_chain_tip() { let wasm_settings = Settings { @@ -1306,22 +1313,23 @@ mod tests { }; let mut interpreter = ClarityInterpreter::new(StandardPrincipalData::transient(), 
wasm_settings); - interpreter - .burn_datastore - .set_current_epoch(StacksEpochId::Epoch30); + interpreter.set_current_epoch(StacksEpochId::Epoch30); + interpreter.advance_burn_chain_tip(1); let count = 5; - let initial_block_height = interpreter.get_burn_block_height(); - assert_eq!(interpreter.advance_stacks_chain_tip(count), Ok(count)); + let initial_block_height = interpreter.get_block_height(); + + let result = interpreter.advance_stacks_chain_tip(count); + assert_eq!(result, Ok(initial_block_height + count)); + assert_eq!(interpreter.get_burn_block_height(), initial_block_height); assert_eq!(interpreter.get_block_height(), initial_block_height + count); } + #[test] fn test_advance_chain_tip_pre_epoch3() { let mut interpreter = ClarityInterpreter::new(StandardPrincipalData::transient(), Settings::default()); - interpreter - .burn_datastore - .set_current_epoch(StacksEpochId::Epoch2_05); + interpreter.set_current_epoch(StacksEpochId::Epoch2_05); let count = 5; let initial_block_height = interpreter.get_block_height(); interpreter.advance_burn_chain_tip(count); @@ -1331,13 +1339,12 @@ mod tests { initial_block_height + count ); } + #[test] fn test_advance_chain_tip() { let mut interpreter = ClarityInterpreter::new(StandardPrincipalData::transient(), Settings::default()); - interpreter - .burn_datastore - .set_current_epoch(StacksEpochId::Epoch30); + interpreter.set_current_epoch(StacksEpochId::Epoch30); let count = 5; let initial_block_height = interpreter.get_block_height(); interpreter.advance_burn_chain_tip(count); @@ -1978,6 +1985,154 @@ mod tests { assert!(interpreter.run(&call_contract, None, false, None).is_ok()); } + #[test] + fn burn_block_time_is_realistic_in_epoch_3_0() { + let mut interpreter = + ClarityInterpreter::new(StandardPrincipalData::transient(), Settings::default()); + + interpreter.set_current_epoch(StacksEpochId::Epoch30); + interpreter.advance_burn_chain_tip(3); + + let snippet_1 = run_snippet( + &mut interpreter, + "(get-tenure-info? time u2)", + ClarityVersion::Clarity3, + ); + let time_block_1 = match snippet_1.expect_optional() { + Ok(Some(Value::UInt(time))) => time, + _ => panic!("Unexpected result"), + }; + + let snippet_2 = run_snippet( + &mut interpreter, + "(get-tenure-info? time u3)", + ClarityVersion::Clarity3, + ); + let time_block_2 = match snippet_2.expect_optional() { + Ok(Some(Value::UInt(time))) => time, + _ => panic!("Unexpected result"), + }; + assert_eq!(time_block_2 - time_block_1, 600); + } + + #[test] + fn first_stacks_block_time_in_a_tenure() { + let mut interpreter = + ClarityInterpreter::new(StandardPrincipalData::transient(), Settings::default()); + + interpreter.set_current_epoch(StacksEpochId::Epoch30); + let _ = interpreter.advance_burn_chain_tip(2); + + let snippet_1 = run_snippet( + &mut interpreter, + "(get-tenure-info? time (- stacks-block-height u1))", + ClarityVersion::Clarity3, + ); + let last_tenure_time = match snippet_1.expect_optional() { + Ok(Some(Value::UInt(time))) => time, + _ => panic!("Unexpected result"), + }; + + let snippet_2 = run_snippet( + &mut interpreter, + "(get-stacks-block-info? 
time (- stacks-block-height u1))", + ClarityVersion::Clarity3, + ); + let last_stacks_block_time = match snippet_2.expect_optional() { + Ok(Some(Value::UInt(time))) => time, + _ => panic!("Unexpected result"), + }; + assert_eq!((last_stacks_block_time) - (last_tenure_time), 10); + } + + #[test] + fn stacks_block_time_is_realistic_in_epoch_3_0() { + let mut interpreter = + ClarityInterpreter::new(StandardPrincipalData::transient(), Settings::default()); + + interpreter.set_current_epoch(StacksEpochId::Epoch30); + let _ = interpreter.advance_stacks_chain_tip(3); + + let snippet_1 = run_snippet( + &mut interpreter, + "(get-stacks-block-info? time u2)", + ClarityVersion::Clarity3, + ); + let time_block_1 = match snippet_1.expect_optional() { + Ok(Some(Value::UInt(time))) => time, + _ => panic!("Unexpected result"), + }; + + let snippet_2 = run_snippet( + &mut interpreter, + "(get-stacks-block-info? time u3)", + ClarityVersion::Clarity3, + ); + let time_block_2 = match snippet_2.expect_optional() { + Ok(Some(Value::UInt(time))) => time, + _ => panic!("Unexpected result"), + }; + assert_eq!(time_block_2 - time_block_1, 10); + } + + #[test] + fn burn_block_time_after_many_stacks_blocks_is_realistic_in_epoch_3_0() { + let mut interpreter = + ClarityInterpreter::new(StandardPrincipalData::transient(), Settings::default()); + + interpreter.set_current_epoch(StacksEpochId::Epoch30); + // by advancing stacks_chain_tip by 101, we are getting a tenure of more than 600 seconds + // the next burn block should happen after the last stacks block + let stacks_block_height = interpreter.advance_stacks_chain_tip(101).unwrap(); + assert_eq!(stacks_block_height, 102); + + let snippet_1 = run_snippet( + &mut interpreter, + "(get-stacks-block-info? time u1)", + ClarityVersion::Clarity3, + ); + let stacks_block_time_1 = match snippet_1.expect_optional() { + Ok(Some(Value::UInt(time))) => time, + _ => panic!("Unexpected result"), + }; + + let snippet_2 = run_snippet( + &mut interpreter, + "(get-stacks-block-info? time u101)", + ClarityVersion::Clarity3, + ); + let stacks_block_time_2 = match snippet_2.expect_optional() { + Ok(Some(Value::UInt(time))) => time, + _ => panic!("Unexpected result"), + }; + assert_eq!(stacks_block_time_2 - stacks_block_time_1, 1000); + + let _ = interpreter.advance_burn_chain_tip(1); + let _ = interpreter.advance_stacks_chain_tip(1); + + let snippet_3 = run_snippet( + &mut interpreter, + "(get-tenure-info? time u4)", + ClarityVersion::Clarity3, + ); + let tenure_height_1 = match snippet_3.expect_optional() { + Ok(Some(Value::UInt(time))) => time, + _ => panic!("Unexpected result"), + }; + + let snippet_4 = run_snippet( + &mut interpreter, + "(get-tenure-info? 
time (- stacks-block-height u1))", + ClarityVersion::Clarity3, + ); + let tenure_height_2 = match snippet_4.expect_optional() { + Ok(Some(Value::UInt(time))) => time, + _ => panic!("Unexpected result"), + }; + + assert_eq!(1030, tenure_height_2 - tenure_height_1); + } + #[test] fn can_call_a_public_function() { let mut interpreter = diff --git a/components/clarity-repl/src/repl/session.rs b/components/clarity-repl/src/repl/session.rs index affbafd8e..30c910b71 100644 --- a/components/clarity-repl/src/repl/session.rs +++ b/components/clarity-repl/src/repl/session.rs @@ -392,7 +392,7 @@ impl Session { } } EvaluationResult::Snippet(snippet_result) => { - output.push(format!("{}", snippet_result.result).green().to_string()) + output.push(value_to_string(&snippet_result.result).green().to_string()) } } Ok((output, result)) @@ -826,78 +826,80 @@ impl Session { output.join("\n") } - fn parse_and_advance_stacks_chain_tip(&mut self, command: &str) -> String { - let args: Vec<_> = command.split(' ').collect(); - - if args.len() != 2 { - return format!("{}", "Usage: ::advance_stacks_chain_tip ".red()); - } - - let count = match args[1].parse::() { + fn parse_and_advance_chain_tip(&mut self, command: &str) -> String { + let args: Vec<_> = command.split(' ').skip(1).collect(); + let count = match args.first().unwrap_or(&"1").parse::() { Ok(count) => count, - _ => { - return format!("{}", "Unable to parse count".red()); - } + _ => return format!("{}", "Unable to parse count".red()), }; - match self.advance_stacks_chain_tip(count) { - Ok(new_height) => format!("{} blocks simulated, new height: {}", count, new_height) - .green() - .to_string(), - Err(_) => format!( - "{}", - "advance_stacks_chain_tip can't be called in epoch lower than 3.0".red() - ), - } + let _ = self.advance_chain_tip(count); + format!( + "new burn height: {}\nnew stacks height: {}", + self.interpreter.datastore.get_current_burn_block_height(), + self.interpreter.datastore.get_current_stacks_block_height(), + ) + .green() + .to_string() } - fn parse_and_advance_chain_tip(&mut self, command: &str) -> String { - let args: Vec<_> = command.split(' ').collect(); - - if args.len() != 2 { - return format!("{}", "Usage: ::advance_chain_tip ".red()); - } - - let count = match args[1].parse::() { + fn parse_and_advance_burn_chain_tip(&mut self, command: &str) -> String { + let args: Vec<_> = command.split(' ').skip(1).collect(); + let count = match args.first().unwrap_or(&"1").parse::() { Ok(count) => count, - _ => { - return format!("{}", "Unable to parse count".red()); - } + _ => return format!("{}", "Unable to parse count".red()), }; - let new_height = self.advance_chain_tip(count); - format!("{} blocks simulated, new height: {}", count, new_height) - .green() - .to_string() + let _ = self.advance_burn_chain_tip(count); + format!( + "new burn height: {}\nnew stacks height: {}", + self.interpreter.datastore.get_current_burn_block_height(), + self.interpreter.datastore.get_current_stacks_block_height(), + ) + .green() + .to_string() } - fn parse_and_advance_burn_chain_tip(&mut self, command: &str) -> String { - let args: Vec<_> = command.split(' ').collect(); - - if args.len() != 2 { - return format!("{}", "Usage: ::advance_burn_chain_tip ".red()); - } - let count = match args[1].parse::() { + fn parse_and_advance_stacks_chain_tip(&mut self, command: &str) -> String { + let args: Vec<_> = command.split(' ').skip(1).collect(); + let count = match args.first().unwrap_or(&"1").parse::() { Ok(count) => count, - _ => { - return format!("{}", 
"Unable to parse count".red()); - } + _ => return format!("{}", "Unable to parse count".red()), }; - let new_height = self.advance_burn_chain_tip(count); - format!("{} blocks simulated, new height: {}", count, new_height) + match self.advance_stacks_chain_tip(count) { + Ok(_) => format!( + "new burn height: {}\nnew stacks height: {}", + self.interpreter.datastore.get_current_burn_block_height(), + self.interpreter.datastore.get_current_stacks_block_height(), + ) .green() - .to_string() + .to_string(), + Err(_) => format!( + "{}", + "advance_stacks_chain_tip can't be called in epoch lower than 3.0".red() + ), + } } - pub fn advance_stacks_chain_tip(&mut self, count: u32) -> Result { - self.interpreter.advance_stacks_chain_tip(count) + pub fn advance_chain_tip(&mut self, count: u32) -> u32 { + let current_epoch = self.interpreter.datastore.get_current_epoch(); + if current_epoch < StacksEpochId::Epoch30 { + self.advance_burn_chain_tip(count) + } else { + match self.advance_stacks_chain_tip(count) { + Ok(count) => count, + Err(_) => unreachable!("Epoch checked already"), + } + } } + pub fn advance_burn_chain_tip(&mut self, count: u32) -> u32 { self.interpreter.advance_burn_chain_tip(count) } - pub fn advance_chain_tip(&mut self, count: u32) -> u32 { - self.interpreter.advance_chain_tip(count) + + pub fn advance_stacks_chain_tip(&mut self, count: u32) -> Result { + self.interpreter.advance_stacks_chain_tip(count) } fn parse_and_set_tx_sender(&mut self, command: &str) -> String { @@ -992,7 +994,7 @@ impl Session { pub fn update_epoch(&mut self, epoch: StacksEpochId) { self.current_epoch = epoch; - self.interpreter.burn_datastore.set_current_epoch(epoch); + self.interpreter.set_current_epoch(epoch); if epoch >= StacksEpochId::Epoch30 { self.interpreter.set_tenure_height(); } @@ -1392,7 +1394,7 @@ mod tests { session.handle_command("::set_epoch 3.0"); let _ = session.handle_command("::advance_stacks_chain_tip 1"); let new_height = session.handle_command("::get_stacks_block_height"); - assert_eq!(new_height, "Current height: 1"); + assert_eq!(new_height, "Current height: 2"); } #[test] @@ -1401,7 +1403,7 @@ mod tests { let result = session.handle_command("::advance_burn_chain_tip 1"); assert_eq!( result, - "1 blocks simulated, new height: 1" + "new burn height: 1\nnew stacks height: 1" .to_string() .green() .to_string() @@ -1410,7 +1412,7 @@ mod tests { let result = session.handle_command("::advance_chain_tip 1"); assert_eq!( result, - "1 blocks simulated, new height: 2" + "new burn height: 2\nnew stacks height: 2" .to_string() .green() .to_string() @@ -1424,23 +1426,25 @@ mod tests { let result = session.handle_command("::advance_burn_chain_tip 1"); assert_eq!( result, - "1 blocks simulated, new height: 1" + "new burn height: 2\nnew stacks height: 2" .to_string() .green() .to_string() ); let new_height = session.handle_command("::get_stacks_block_height"); - assert_eq!(new_height, "Current height: 1"); + assert_eq!(new_height, "Current height: 2"); // advance_chain_tip will only affect stacks height in epoch 3 or greater let _ = session.handle_command("::advance_chain_tip 1"); let new_height = session.handle_command("::get_stacks_block_height"); - assert_eq!(new_height, "Current height: 2"); + assert_eq!(new_height, "Current height: 3"); let new_height = session.handle_command("::get_burn_block_height"); - assert_eq!(new_height, "Current height: 1"); + assert_eq!(new_height, "Current height: 2"); } + #[test] fn set_epoch_command() { let mut session = Session::new(SessionSettings::default()); + 
let initial_block_height = session.interpreter.get_block_height(); let initial_epoch = session.handle_command("::get_epoch"); // initial epoch is 2.05 assert_eq!(initial_epoch, "Current epoch: 2.05"); @@ -1456,9 +1460,18 @@ mod tests { let current_epoch = session.handle_command("::get_epoch"); assert_eq!(current_epoch, "Current epoch: 2.4"); + // changing epoch in 2.x does not impact the block height + assert_eq!(session.interpreter.get_block_height(), initial_block_height); + session.handle_command("::set_epoch 3.0"); let current_epoch = session.handle_command("::get_epoch"); assert_eq!(current_epoch, "Current epoch: 3.0"); + + // changing epoch in 3.x increments the block height + assert_eq!( + session.interpreter.get_block_height(), + initial_block_height + 1 + ); } #[test] @@ -1759,31 +1772,6 @@ mod tests { _ => panic!("Unexpected result"), }; - println!("{}", time_block_2 - time_block_1); - assert!(time_block_2 - time_block_1 == 600); - } - - #[test] - fn block_time_is_realistic_in_epoch_3_0() { - let settings = SessionSettings::default(); - let mut session = Session::new(settings); - session.start().expect("session could not start"); - session.update_epoch(StacksEpochId::Epoch30); - - session.advance_burn_chain_tip(4); - - let result = run_session_snippet(&mut session, "(get-tenure-info? time u2)"); - let time_block_1 = match result.expect_optional() { - Ok(Some(Value::UInt(time))) => time, - _ => panic!("Unexpected result"), - }; - - let result = run_session_snippet(&mut session, "(get-tenure-info? time u3)"); - let time_block_2 = match result.expect_optional() { - Ok(Some(Value::UInt(time))) => time, - _ => panic!("Unexpected result"), - }; - assert!(time_block_2 - time_block_1 == 600); } }
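Note on the datastore split: the former `Datastore`/`BurnDatastore` pair becomes `ClarityDatastore` (the Clarity backing store) and `Datastore` (which now implements both `HeadersDB` and `BurnStateDB` and owns the simulated burn and Stacks chains). A minimal sketch of how the two fit together, assuming the constructors shown in this diff and that they are re-exported from `clarity_repl::repl::datastore` (module path assumed):

```rust
use clarity::vm::database::ClarityDatabase;
use clarity_repl::repl::datastore::{ClarityDatastore, Datastore};

fn wire_up() {
    // Key/value backing store consumed by the Clarity VM.
    let mut clarity_datastore = ClarityDatastore::new();
    // Simulated chain metadata: headers, burn blocks, epochs.
    let mut datastore = Datastore::default();

    // Advancing the burn chain also mines one Stacks block per burn block.
    datastore.advance_burn_chain_tip(&mut clarity_datastore, 2);
    assert_eq!(datastore.get_current_burn_block_height(), 2);
    assert_eq!(datastore.get_current_stacks_block_height(), 2);

    // The same `Datastore` value is passed as both HeadersDB and BurnStateDB.
    let _db = ClarityDatabase::new(&mut clarity_datastore, &datastore, &datastore);
}
```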
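Note on block timestamps: `SECONDS_BETWEEN_BURN_BLOCKS` (600) and `SECONDS_BETWEEN_STACKS_BLOCKS` (10) drive the simulated clock. A new Stacks block is stamped at `max(previous Stacks block time, previous burn block time) + 10`, and a new burn block normally lands 600 seconds after the previous one unless a long tenure of Stacks blocks has already passed that point. A small standalone sketch of that rule (helpers written for illustration, not part of the patch):

```rust
const SECONDS_BETWEEN_BURN_BLOCKS: u64 = 600;
const SECONDS_BETWEEN_STACKS_BLOCKS: u64 = 10;

// Mirrors `build_next_stacks_block`: the next Stacks block follows whichever
// of the previous Stacks block or burn block is more recent.
fn next_stacks_block_time(prev_stacks: u64, prev_burn: u64) -> u64 {
    prev_stacks.max(prev_burn) + SECONDS_BETWEEN_STACKS_BLOCKS
}

// Mirrors `advance_burn_chain_tip`: normally +600s, but a tenure longer than
// 600s pushes the next burn block past the last Stacks block.
fn next_burn_block_time(prev_burn: u64, last_stacks: u64) -> u64 {
    let next = prev_burn + SECONDS_BETWEEN_BURN_BLOCKS;
    if last_stacks > next {
        last_stacks + SECONDS_BETWEEN_STACKS_BLOCKS
    } else {
        next
    }
}

fn main() {
    // 101 ten-second Stacks blocks exceed one 600-second tenure, so the next
    // burn block trails the last Stacks block (as exercised by the new test).
    let last_stacks = 1_000 + 101 * SECONDS_BETWEEN_STACKS_BLOCKS;
    assert!(next_burn_block_time(1_000, last_stacks) > 1_000 + 600);
}
```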
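Note on REPL output: snippet results are now rendered through `value_to_string` instead of the `Display` impl, which yields Clarity-style literals such as `(ok u1)`, `(some ...)`, `none`, and `(list ...)`. A rough usage sketch, assuming `value_to_string` is reachable at `clarity_repl::repl::clarity_values` (path assumed) and using the `clarity` crate's `Value` constructors:

```rust
use clarity::vm::Value;
use clarity_repl::repl::clarity_values::value_to_string;

fn render() {
    // Plain values fall through to the Display impl.
    assert_eq!(value_to_string(&Value::UInt(7)), "u7");
    // Optionals and responses are rendered as Clarity literals.
    assert_eq!(value_to_string(&Value::none()), "none");
    assert_eq!(value_to_string(&Value::okay(Value::Int(1)).unwrap()), "(ok 1)");
}
```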