diff --git a/Cargo.lock b/Cargo.lock index 90bd846e30..3fba39200e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1740,10 +1740,13 @@ name = "db_common" version = "0.1.0" dependencies = [ "common", + "crossbeam-channel 0.5.1", + "futures 0.3.28", "hex 0.4.3", "log", "rusqlite", "sql-builder", + "tokio", "uuid 1.2.2", ] diff --git a/mm2src/coins/eth.rs b/mm2src/coins/eth.rs index 53567b87eb..90e2f90ffb 100644 --- a/mm2src/coins/eth.rs +++ b/mm2src/coins/eth.rs @@ -422,7 +422,7 @@ pub struct EthCoinImpl { swap_contract_address: Address, fallback_swap_contract: Option
, contract_supports_watchers: bool, - web3: Web3, + pub(crate) web3: Web3, /// The separate web3 instances kept to get nonce, will replace the web3 completely soon web3_instances: Vec, decimals: u8, @@ -875,7 +875,7 @@ async fn withdraw_impl(coin: EthCoin, req: WithdrawRequest) -> WithdrawResult { /// `withdraw_erc1155` function returns details of `ERC-1155` transaction including tx hex, /// which should be sent to`send_raw_transaction` RPC to broadcast the transaction. pub async fn withdraw_erc1155(ctx: MmArc, withdraw_type: WithdrawErc1155) -> WithdrawNftResult { - let coin = lp_coinfind_or_err(&ctx, &withdraw_type.chain.to_ticker()).await?; + let coin = lp_coinfind_or_err(&ctx, withdraw_type.chain.to_ticker()).await?; let (to_addr, token_addr, eth_coin) = get_valid_nft_add_to_withdraw(coin, &withdraw_type.to, &withdraw_type.token_address)?; let my_address = eth_coin.my_address()?; @@ -977,7 +977,7 @@ pub async fn withdraw_erc1155(ctx: MmArc, withdraw_type: WithdrawErc1155) -> Wit /// `withdraw_erc721` function returns details of `ERC-721` transaction including tx hex, /// which should be sent to`send_raw_transaction` RPC to broadcast the transaction. pub async fn withdraw_erc721(ctx: MmArc, withdraw_type: WithdrawErc721) -> WithdrawNftResult { - let coin = lp_coinfind_or_err(&ctx, &withdraw_type.chain.to_ticker()).await?; + let coin = lp_coinfind_or_err(&ctx, withdraw_type.chain.to_ticker()).await?; let (to_addr, token_addr, eth_coin) = get_valid_nft_add_to_withdraw(coin, &withdraw_type.to, &withdraw_type.token_address)?; let my_address = eth_coin.my_address()?; @@ -4716,7 +4716,7 @@ pub struct EthTxFeeDetails { } impl EthTxFeeDetails { - fn new(gas: U256, gas_price: U256, coin: &str) -> NumConversResult { + pub(crate) fn new(gas: U256, gas_price: U256, coin: &str) -> NumConversResult { let total_fee = gas * gas_price; // Fees are always paid in ETH, can use 18 decimals by default let total_fee = u256_to_big_decimal(total_fee, ETH_DECIMALS)?; diff --git a/mm2src/coins/lp_price.rs b/mm2src/coins/lp_price.rs index fc31095460..6823e90d91 100644 --- a/mm2src/coins/lp_price.rs +++ b/mm2src/coins/lp_price.rs @@ -11,7 +11,7 @@ use std::collections::HashMap; use std::str::Utf8Error; pub const PRICE_ENDPOINTS: [&str; 2] = [ - "https://prices.komodian.info/api/v2/tickers", + "https://prices.komodo.earth/api/v2/tickers", "https://prices.cipig.net:1717/api/v2/tickers", ]; diff --git a/mm2src/coins/my_tx_history_v2.rs b/mm2src/coins/my_tx_history_v2.rs index 1635be9e80..97c5a5ca8f 100644 --- a/mm2src/coins/my_tx_history_v2.rs +++ b/mm2src/coins/my_tx_history_v2.rs @@ -491,7 +491,7 @@ where _ => {}, }; - let confirmations = if details.block_height > current_block { + let confirmations = if details.block_height == 0 || details.block_height > current_block { 0 } else { current_block + 1 - details.block_height diff --git a/mm2src/coins/nft.rs b/mm2src/coins/nft.rs index 0f0cb942e0..40736a6414 100644 --- a/mm2src/coins/nft.rs +++ b/mm2src/coins/nft.rs @@ -8,29 +8,34 @@ pub(crate) mod storage; #[cfg(any(test, target_arch = "wasm32"))] mod nft_tests; -use crate::{coin_conf, get_my_address, MyAddressReq, WithdrawError}; +use crate::{coin_conf, get_my_address, lp_coinfind_or_err, MarketCoinOps, MmCoinEnum, MyAddressReq, WithdrawError}; use nft_errors::{GetNftInfoError, UpdateNftError}; use nft_structs::{Chain, ContractType, ConvertChain, Nft, NftFromMoralis, NftList, NftListReq, NftMetadataReq, NftTransferHistory, NftTransferHistoryFromMoralis, NftTransfersReq, NftsTransferHistoryList, TransactionNftDetails,
UpdateNftReq, WithdrawNftReq}; -use crate::eth::{eth_addr_to_hex, get_eth_address, withdraw_erc1155, withdraw_erc721}; -use crate::nft::nft_errors::{MetaFromUrlError, ProtectFromSpamError, UpdateSpamPhishingError}; +use crate::eth::{eth_addr_to_hex, get_eth_address, withdraw_erc1155, withdraw_erc721, EthCoin, EthCoinType, + EthTxFeeDetails}; +use crate::nft::nft_errors::{MetaFromUrlError, ProtectFromSpamError, TransferConfirmationsError, + UpdateSpamPhishingError}; use crate::nft::nft_structs::{build_nft_with_empty_meta, BuildNftFields, NftCommon, NftCtx, NftTransferCommon, PhishingDomainReq, PhishingDomainRes, RefreshMetadataReq, SpamContractReq, SpamContractRes, TransferMeta, TransferStatus, UriMeta}; -use crate::nft::storage::{NftListStorageOps, NftStorageBuilder, NftTransferHistoryStorageOps}; +use crate::nft::storage::{NftListStorageOps, NftTransferHistoryStorageOps}; use common::parse_rfc3339_to_timestamp; use crypto::StandardHDCoinAddress; -use ethereum_types::Address; +use ethereum_types::{Address, H256}; +use futures::compat::Future01CompatExt; +use futures::future::try_join_all; use mm2_err_handle::map_to_mm::MapToMmResult; use mm2_net::transport::send_post_request_to_uri; -use mm2_number::BigDecimal; +use mm2_number::{BigDecimal, BigUint}; use regex::Regex; use serde_json::Value as Json; use std::cmp::Ordering; -use std::collections::HashSet; +use std::collections::{HashMap, HashSet}; use std::str::FromStr; +use web3::types::TransactionId; #[cfg(not(target_arch = "wasm32"))] use mm2_net::native_http::send_request_to_uri; @@ -74,9 +79,8 @@ pub type WithdrawNftResult = Result MmResult { let nft_ctx = NftCtx::from_ctx(&ctx).map_to_mm(GetNftInfoError::Internal)?; - let _lock = nft_ctx.guard.lock().await; - let storage = NftStorageBuilder::new(&ctx).build()?; + let storage = nft_ctx.lock_db().await?; for chain in req.chains.iter() { if !NftListStorageOps::is_initialized(&storage, chain).await? { NftListStorageOps::init(&storage, chain).await?; @@ -114,9 +118,8 @@ pub async fn get_nft_list(ctx: MmArc, req: NftListReq) -> MmResult MmResult { let nft_ctx = NftCtx::from_ctx(&ctx).map_to_mm(GetNftInfoError::Internal)?; - let _lock = nft_ctx.guard.lock().await; - let storage = NftStorageBuilder::new(&ctx).build()?; + let storage = nft_ctx.lock_db().await?; if !NftListStorageOps::is_initialized(&storage, &req.chain).await? { NftListStorageOps::init(&storage, &req.chain).await?; } @@ -156,26 +159,67 @@ pub async fn get_nft_metadata(ctx: MmArc, req: NftMetadataReq) -> MmResult MmResult { let nft_ctx = NftCtx::from_ctx(&ctx).map_to_mm(GetNftInfoError::Internal)?; - let _lock = nft_ctx.guard.lock().await; - let storage = NftStorageBuilder::new(&ctx).build()?; + let storage = nft_ctx.lock_db().await?; for chain in req.chains.iter() { if !NftTransferHistoryStorageOps::is_initialized(&storage, chain).await? 
{ NftTransferHistoryStorageOps::init(&storage, chain).await?; } } let mut transfer_history_list = storage - .get_transfer_history(req.chains, req.max, req.limit, req.page_number, req.filters) + .get_transfer_history(req.chains.clone(), req.max, req.limit, req.page_number, req.filters) .await?; if req.protect_from_spam { for transfer in &mut transfer_history_list.transfer_history { protect_from_history_spam_links(transfer, true)?; } } + process_transfers_confirmations(&ctx, req.chains, &mut transfer_history_list).await?; drop_mutability!(transfer_history_list); Ok(transfer_history_list) } +async fn process_transfers_confirmations( + ctx: &MmArc, + chains: Vec, + history_list: &mut NftsTransferHistoryList, +) -> MmResult<(), TransferConfirmationsError> { + async fn current_block_impl(coin: Coin) -> MmResult { + coin.current_block() + .compat() + .await + .map_to_mm(TransferConfirmationsError::GetCurrentBlockErr) + } + + let futures = chains.into_iter().map(|chain| async move { + let ticker = chain.to_ticker(); + let coin_enum = lp_coinfind_or_err(ctx, ticker).await?; + match coin_enum { + MmCoinEnum::EthCoin(eth_coin) => { + let current_block = current_block_impl(eth_coin).await?; + Ok((ticker, current_block)) + }, + _ => MmError::err(TransferConfirmationsError::CoinDoesntSupportNft { + coin: coin_enum.ticker().to_owned(), + }), + } + }); + let blocks_map = try_join_all(futures).await?.into_iter().collect::>(); + + for transfer in history_list.transfer_history.iter_mut() { + let current_block = match blocks_map.get(transfer.chain.to_ticker()) { + Some(block) => *block, + None => 0, + }; + transfer.confirmations = if transfer.block_number > current_block { + 0 + } else { + current_block + 1 - transfer.block_number + }; + } + Ok(()) +} + /// Updates NFT transfer history and NFT list in the DB. /// /// This function refreshes the NFT transfer history and NFT list cache based on new @@ -192,9 +236,8 @@ pub async fn get_nft_transfers(ctx: MmArc, req: NftTransfersReq) -> MmResult`: A result indicating success or an error. pub async fn update_nft(ctx: MmArc, req: UpdateNftReq) -> MmResult<(), UpdateNftError> { let nft_ctx = NftCtx::from_ctx(&ctx).map_to_mm(GetNftInfoError::Internal)?; - let _lock = nft_ctx.guard.lock().await; - let storage = NftStorageBuilder::new(&ctx).build()?; + let storage = nft_ctx.lock_db().await?; for chain in req.chains.iter() { let transfer_history_initialized = NftTransferHistoryStorageOps::is_initialized(&storage, chain).await?; @@ -205,7 +248,16 @@ pub async fn update_nft(ctx: MmArc, req: UpdateNftReq) -> MmResult<(), UpdateNft NftTransferHistoryStorageOps::init(&storage, chain).await?; None }; - let nft_transfers = get_moralis_nft_transfers(&ctx, chain, from_block, &req.url).await?; + let coin_enum = lp_coinfind_or_err(&ctx, chain.to_ticker()).await?; + let eth_coin = match coin_enum { + MmCoinEnum::EthCoin(eth_coin) => eth_coin, + _ => { + return MmError::err(UpdateNftError::CoinDoesntSupportNft { + coin: coin_enum.ticker().to_owned(), + }) + }, + }; + let nft_transfers = get_moralis_nft_transfers(&ctx, chain, from_block, &req.url, eth_coin).await?; storage.add_transfers_to_history(*chain, nft_transfers).await?; let nft_block = match NftListStorageOps::get_last_block_number(&storage, chain).await { @@ -377,9 +429,8 @@ fn prepare_uri_for_blocklist_endpoint( /// * `MmResult<(), UpdateNftError>`: A result indicating success or an error. 
pub async fn refresh_nft_metadata(ctx: MmArc, req: RefreshMetadataReq) -> MmResult<(), UpdateNftError> { let nft_ctx = NftCtx::from_ctx(&ctx).map_to_mm(GetNftInfoError::Internal)?; - let _lock = nft_ctx.guard.lock().await; - let storage = NftStorageBuilder::new(&ctx).build()?; + let storage = nft_ctx.lock_db().await?; let token_address_str = eth_addr_to_hex(&req.token_address); let moralis_meta = match get_moralis_metadata( token_address_str.clone(), @@ -532,8 +583,8 @@ async fn get_moralis_nft_list( ) -> MmResult, GetNftInfoError> { let mut res_list = Vec::new(); let ticker = chain.to_ticker(); - let conf = coin_conf(ctx, &ticker); - let my_address = get_eth_address(ctx, &conf, &ticker, &StandardHDCoinAddress::default()).await?; + let conf = coin_conf(ctx, ticker); + let my_address = get_eth_address(ctx, &conf, ticker, &StandardHDCoinAddress::default()).await?; let mut uri_without_cursor = url.clone(); uri_without_cursor.set_path(MORALIS_API_ENDPOINT); @@ -584,11 +635,12 @@ async fn get_moralis_nft_transfers( chain: &Chain, from_block: Option, url: &Url, + eth_coin: EthCoin, ) -> MmResult, GetNftInfoError> { let mut res_list = Vec::new(); let ticker = chain.to_ticker(); - let conf = coin_conf(ctx, &ticker); - let my_address = get_eth_address(ctx, &conf, &ticker, &StandardHDCoinAddress::default()).await?; + let conf = coin_conf(ctx, ticker); + let my_address = get_eth_address(ctx, &conf, ticker, &StandardHDCoinAddress::default()).await?; let mut uri_without_cursor = url.clone(); uri_without_cursor.set_path(MORALIS_API_ENDPOINT); @@ -625,6 +677,7 @@ async fn get_moralis_nft_transfers( let status = get_transfer_status(&wallet_address, &eth_addr_to_hex(&transfer_moralis.common.to_address)); let block_timestamp = parse_rfc3339_to_timestamp(&transfer_moralis.block_timestamp)?; + let fee_details = get_fee_details(&eth_coin, &transfer_moralis.common.transaction_hash).await; let transfer_history = NftTransferHistory { common: NftTransferCommon { block_hash: transfer_moralis.common.block_hash, @@ -634,7 +687,6 @@ async fn get_moralis_nft_transfers( value: transfer_moralis.common.value, transaction_type: transfer_moralis.common.transaction_type, token_address: transfer_moralis.common.token_address, - token_id: transfer_moralis.common.token_id, from_address: transfer_moralis.common.from_address, to_address: transfer_moralis.common.to_address, amount: transfer_moralis.common.amount, @@ -643,6 +695,7 @@ possible_spam: transfer_moralis.common.possible_spam, }, chain: *chain, + token_id: transfer_moralis.token_id.0, block_number: *transfer_moralis.block_number, block_timestamp, contract_type, @@ -654,6 +707,8 @@ token_name: None, status, possible_phishing: false, + fee_details, + confirmations: 0, }; // collect NFTs transfers from the page res_list.push(transfer_history); @@ -672,6 +727,35 @@ Ok(res_list) } +async fn get_fee_details(eth_coin: &EthCoin, transaction_hash: &str) -> Option { + let hash = H256::from_str(transaction_hash).ok()?; + let receipt = eth_coin.web3.eth().transaction_receipt(hash).await.ok()?; + let fee_coin = match eth_coin.coin_type { + EthCoinType::Eth => eth_coin.ticker(), + EthCoinType::Erc20 { ..
} => return None, + }; + + match receipt { + Some(r) => { + let gas_used = r.gas_used.unwrap_or_default(); + match r.effective_gas_price { + Some(gas_price) => EthTxFeeDetails::new(gas_used, gas_price, fee_coin).ok(), + None => { + let web3_tx = eth_coin + .web3 + .eth() + .transaction(TransactionId::Hash(hash)) + .await + .ok()??; + let gas_price = web3_tx.gas_price.unwrap_or_default(); + EthTxFeeDetails::new(gas_used, gas_price, fee_coin).ok() + }, + } + }, + None => None, + } +} + /// Implements request to the Moralis "Get NFT metadata" endpoint. /// /// [Moralis Documentation Link](https://docs.moralis.io/web3-data-api/evm/reference/get-nft-metadata) @@ -683,7 +767,7 @@ async fn get_moralis_nft_transfers( /// **Dont** use this function to get specific info about owner address, amount etc, you will get info not related to my_address. async fn get_moralis_metadata( token_address: String, - token_id: BigDecimal, + token_id: BigUint, chain: &Chain, url: &Url, url_antispam: &Url, @@ -804,7 +888,7 @@ async fn update_nft_list( ) -> MmResult<(), UpdateNftError> { let transfers = storage.get_transfers_from_block(*chain, scan_from_block).await?; let req = MyAddressReq { - coin: chain.to_ticker(), + coin: chain.to_ticker().to_string(), path_to_address: StandardHDCoinAddress::default(), }; let my_address = get_my_address(ctx.clone(), req).await?.wallet_address.to_lowercase(); @@ -843,18 +927,18 @@ async fn handle_send_erc721 .get_nft( chain, eth_addr_to_hex(&transfer.common.token_address), - transfer.common.token_id.clone(), + transfer.token_id.clone(), ) .await? .ok_or_else(|| UpdateNftError::TokenNotFoundInWallet { token_address: eth_addr_to_hex(&transfer.common.token_address), - token_id: transfer.common.token_id.to_string(), + token_id: transfer.token_id.to_string(), })?; storage .remove_nft_from_list( chain, eth_addr_to_hex(&transfer.common.token_address), - transfer.common.token_id, + transfer.token_id, transfer.block_number, ) .await?; @@ -871,7 +955,7 @@ async fn handle_receive_erc721 MmResult<(), UpdateNftError> { let token_address_str = eth_addr_to_hex(&transfer.common.token_address); match storage - .get_nft(chain, token_address_str.clone(), transfer.common.token_id.clone()) + .get_nft(chain, token_address_str.clone(), transfer.token_id.clone()) .await? { Some(mut nft_db) => { @@ -891,7 +975,7 @@ async fn handle_receive_erc721 { let mut nft = match get_moralis_metadata( token_address_str.clone(), - transfer.common.token_id.clone(), + transfer.token_id.clone(), chain, url, url_antispam, @@ -926,21 +1010,16 @@ async fn handle_send_erc1155 MmResult<(), UpdateNftError> { let token_address_str = eth_addr_to_hex(&transfer.common.token_address); let mut nft_db = storage - .get_nft(chain, token_address_str.clone(), transfer.common.token_id.clone()) + .get_nft(chain, token_address_str.clone(), transfer.token_id.clone()) .await? 
.ok_or_else(|| UpdateNftError::TokenNotFoundInWallet { token_address: token_address_str.clone(), - token_id: transfer.common.token_id.to_string(), + token_id: transfer.token_id.to_string(), })?; match nft_db.common.amount.cmp(&transfer.common.amount) { Ordering::Equal => { storage - .remove_nft_from_list( - chain, - token_address_str, - transfer.common.token_id, - transfer.block_number, - ) + .remove_nft_from_list(chain, token_address_str, transfer.token_id, transfer.block_number) .await?; }, Ordering::Greater => { @@ -969,7 +1048,7 @@ async fn handle_receive_erc1155 MmResult<(), UpdateNftError> { let token_address_str = eth_addr_to_hex(&transfer.common.token_address); let mut nft = match storage - .get_nft(chain, token_address_str.clone(), transfer.common.token_id.clone()) + .get_nft(chain, token_address_str.clone(), transfer.token_id.clone()) .await? { Some(mut nft_db) => { @@ -989,7 +1068,7 @@ async fn handle_receive_erc1155 { let nft = match get_moralis_metadata( token_address_str.clone(), - transfer.common.token_id.clone(), + transfer.token_id.clone(), chain, url, url_antispam, @@ -1031,7 +1110,6 @@ async fn create_nft_from_moralis_metadata( let nft = Nft { common: NftCommon { token_address: moralis_meta.common.token_address, - token_id: moralis_meta.common.token_id, amount: transfer.common.amount.clone(), owner_of: Address::from_str(my_address).map_to_mm(|e| UpdateNftError::InvalidHexString(e.to_string()))?, token_hash: moralis_meta.common.token_hash, @@ -1046,6 +1124,7 @@ async fn create_nft_from_moralis_metadata( possible_spam: moralis_meta.common.possible_spam, }, chain: *chain, + token_id: moralis_meta.token_id, block_number_minted: moralis_meta.block_number_minted, block_number: transfer.block_number, contract_type: moralis_meta.contract_type, @@ -1071,7 +1150,7 @@ async fn mark_as_spam_and_build_empty_meta MmResult { - let nft_ctx = NftCtx::from_ctx(ctx).map_err(GetNftInfoError::Internal)?; - let _lock = nft_ctx.guard.lock().await; + let nft_ctx = NftCtx::from_ctx(ctx).map_to_mm(GetNftInfoError::Internal)?; - let storage = NftStorageBuilder::new(ctx).build()?; + let storage = nft_ctx.lock_db().await?; if !NftListStorageOps::is_initialized(&storage, chain).await? 
{ NftListStorageOps::init(&storage, chain).await?; } @@ -1290,7 +1368,6 @@ async fn build_nft_from_moralis( Nft { common: NftCommon { token_address: nft_moralis.common.token_address, - token_id: nft_moralis.common.token_id, amount: nft_moralis.common.amount, owner_of: nft_moralis.common.owner_of, token_hash: nft_moralis.common.token_hash, @@ -1305,6 +1382,7 @@ async fn build_nft_from_moralis( possible_spam: nft_moralis.common.possible_spam, }, chain, + token_id: nft_moralis.token_id.0, block_number_minted: nft_moralis.block_number_minted.map(|v| v.0), block_number: *nft_moralis.block_number, contract_type, diff --git a/mm2src/coins/nft/nft_errors.rs b/mm2src/coins/nft/nft_errors.rs index 941348db67..f5dd5adaba 100644 --- a/mm2src/coins/nft/nft_errors.rs +++ b/mm2src/coins/nft/nft_errors.rs @@ -1,7 +1,11 @@ use crate::eth::GetEthAddressError; -use crate::nft::storage::{CreateNftStorageError, NftStorageError}; -use crate::{GetMyAddressError, WithdrawError}; +#[cfg(target_arch = "wasm32")] +use crate::nft::storage::wasm::WasmNftCacheError; +use crate::nft::storage::NftStorageError; +use crate::{CoinFindError, GetMyAddressError, WithdrawError}; use common::{HttpStatusCode, ParseRfc3339Err}; +#[cfg(not(target_arch = "wasm32"))] +use db_common::sqlite::rusqlite::Error as SqlError; use derive_more::Display; use enum_from::EnumFromStringify; use http::StatusCode; @@ -38,6 +42,7 @@ pub enum GetNftInfoError { #[display(fmt = "The contract type is required and should not be null.")] ContractTypeIsNull, ProtectFromSpamError(ProtectFromSpamError), + TransferConfirmationsError(TransferConfirmationsError), } impl From for WithdrawError { @@ -73,14 +78,6 @@ impl From for GetNftInfoError { fn from(e: GetEthAddressError) -> Self { GetNftInfoError::GetEthAddressError(e) } } -impl From for GetNftInfoError { - fn from(e: CreateNftStorageError) -> Self { - match e { - CreateNftStorageError::Internal(err) => GetNftInfoError::Internal(err), - } - } -} - impl From for GetNftInfoError { fn from(err: T) -> Self { GetNftInfoError::DbError(format!("{:?}", err)) } } @@ -104,6 +101,14 @@ impl From for GetNftInfoError { fn from(e: ProtectFromSpamError) -> Self { GetNftInfoError::ProtectFromSpamError(e) } } +impl From for GetNftInfoError { + fn from(e: LockDBError) -> Self { GetNftInfoError::DbError(e.to_string()) } +} + +impl From for GetNftInfoError { + fn from(e: TransferConfirmationsError) -> Self { GetNftInfoError::TransferConfirmationsError(e) } +} + impl HttpStatusCode for GetNftInfoError { fn status_code(&self) -> StatusCode { match self { @@ -115,7 +120,8 @@ impl HttpStatusCode for GetNftInfoError { | GetNftInfoError::GetEthAddressError(_) | GetNftInfoError::TokenNotFoundInWallet { .. 
} | GetNftInfoError::DbError(_) - | GetNftInfoError::ProtectFromSpamError(_) => StatusCode::INTERNAL_SERVER_ERROR, + | GetNftInfoError::ProtectFromSpamError(_) + | GetNftInfoError::TransferConfirmationsError(_) => StatusCode::INTERNAL_SERVER_ERROR, } } } @@ -184,14 +190,14 @@ pub enum UpdateNftError { #[from_stringify("serde_json::Error")] SerdeError(String), ProtectFromSpamError(ProtectFromSpamError), -} - -impl From for UpdateNftError { - fn from(e: CreateNftStorageError) -> Self { - match e { - CreateNftStorageError::Internal(err) => UpdateNftError::Internal(err), - } - } + #[display(fmt = "No such coin {}", coin)] + NoSuchCoin { + coin: String, + }, + #[display(fmt = "{} coin doesn't support NFT", coin)] + CoinDoesntSupportNft { + coin: String, + }, } impl From for UpdateNftError { @@ -218,6 +224,18 @@ impl From for UpdateNftError { fn from(e: ProtectFromSpamError) -> Self { UpdateNftError::ProtectFromSpamError(e) } } +impl From for UpdateNftError { + fn from(e: LockDBError) -> Self { UpdateNftError::DbError(e.to_string()) } +} + +impl From for UpdateNftError { + fn from(e: CoinFindError) -> Self { + match e { + CoinFindError::NoSuchCoin { coin } => UpdateNftError::NoSuchCoin { coin }, + } + } +} + impl HttpStatusCode for UpdateNftError { fn status_code(&self) -> StatusCode { match self { @@ -234,7 +252,9 @@ impl HttpStatusCode for UpdateNftError { | UpdateNftError::UpdateSpamPhishingError(_) | UpdateNftError::GetInfoFromUriError(_) | UpdateNftError::SerdeError(_) - | UpdateNftError::ProtectFromSpamError(_) => StatusCode::INTERNAL_SERVER_ERROR, + | UpdateNftError::ProtectFromSpamError(_) + | UpdateNftError::NoSuchCoin { .. } + | UpdateNftError::CoinDoesntSupportNft { .. } => StatusCode::INTERNAL_SERVER_ERROR, } } } @@ -310,3 +330,39 @@ pub(crate) enum MetaFromUrlError { impl From for MetaFromUrlError { fn from(e: GetInfoFromUriError) -> Self { MetaFromUrlError::GetInfoFromUriError(e) } } + +#[derive(Debug, Display)] +pub enum LockDBError { + #[cfg(target_arch = "wasm32")] + WasmNftCacheError(WasmNftCacheError), + #[cfg(not(target_arch = "wasm32"))] + SqlError(SqlError), +} + +#[cfg(not(target_arch = "wasm32"))] +impl From for LockDBError { + fn from(e: SqlError) -> Self { LockDBError::SqlError(e) } +} + +#[cfg(target_arch = "wasm32")] +impl From for LockDBError { + fn from(e: WasmNftCacheError) -> Self { LockDBError::WasmNftCacheError(e) } +} + +#[derive(Clone, Debug, Deserialize, Display, PartialEq, Serialize)] +pub enum TransferConfirmationsError { + #[display(fmt = "No such coin {}", coin)] + NoSuchCoin { coin: String }, + #[display(fmt = "{} coin doesn't support NFT", coin)] + CoinDoesntSupportNft { coin: String }, + #[display(fmt = "Get current block error: {}", _0)] + GetCurrentBlockErr(String), +} + +impl From for TransferConfirmationsError { + fn from(e: CoinFindError) -> Self { + match e { + CoinFindError::NoSuchCoin { coin } => TransferConfirmationsError::NoSuchCoin { coin }, + } + } +} diff --git a/mm2src/coins/nft/nft_structs.rs b/mm2src/coins/nft/nft_structs.rs index 165e7cbd93..9173f3bd5b 100644 --- a/mm2src/coins/nft/nft_structs.rs +++ b/mm2src/coins/nft/nft_structs.rs @@ -1,13 +1,11 @@ -use crate::nft::eth_addr_to_hex; -use crate::{TransactionType, TxFeeDetails, WithdrawFee}; use common::ten; use ethereum_types::Address; -use futures::lock::Mutex as AsyncMutex; use mm2_core::mm_ctx::{from_ctx, MmArc}; -use mm2_number::BigDecimal; +use mm2_err_handle::prelude::*; +use mm2_number::{BigDecimal, BigUint}; use rpc::v1::types::Bytes as BytesJson; use serde::de::{self, 
Deserializer}; -use serde::Deserialize; +use serde::{Deserialize, Serializer}; use serde_json::Value as Json; use std::collections::HashMap; use std::fmt; @@ -16,12 +14,22 @@ use std::str::FromStr; use std::sync::Arc; use url::Url; -use crate::nft::nft_errors::ParseChainTypeError; -#[cfg(target_arch = "wasm32")] -use mm2_db::indexed_db::{ConstructibleDb, SharedDb}; +use crate::eth::EthTxFeeDetails; +use crate::nft::eth_addr_to_hex; +use crate::nft::nft_errors::{LockDBError, ParseChainTypeError}; +use crate::nft::storage::{NftListStorageOps, NftTransferHistoryStorageOps}; +use crate::{TransactionType, TxFeeDetails, WithdrawFee}; + +cfg_native! { + use db_common::async_sql_conn::AsyncConnection; + use futures::lock::Mutex as AsyncMutex; +} -#[cfg(target_arch = "wasm32")] -use crate::nft::storage::wasm::nft_idb::NftCacheIDB; +cfg_wasm32! { + use mm2_db::indexed_db::{ConstructibleDb, SharedDb}; + use crate::nft::storage::wasm::WasmNftCacheError; + use crate::nft::storage::wasm::nft_idb::NftCacheIDB; +} /// Represents a request to list NFTs owned by the user across specified chains. /// @@ -68,7 +76,8 @@ pub struct NftListFilters { #[derive(Debug, Deserialize)] pub struct NftMetadataReq { pub(crate) token_address: Address, - pub(crate) token_id: BigDecimal, + #[serde(deserialize_with = "deserialize_token_id")] + pub(crate) token_id: BigUint, pub(crate) chain: Chain, #[serde(default)] pub(crate) protect_from_spam: bool, @@ -85,7 +94,8 @@ pub struct NftMetadataReq { #[derive(Debug, Deserialize)] pub struct RefreshMetadataReq { pub(crate) token_address: Address, - pub(crate) token_id: BigDecimal, + #[serde(deserialize_with = "deserialize_token_id")] + pub(crate) token_id: BigUint, pub(crate) chain: Chain, pub(crate) url: Url, pub(crate) url_antispam: Url, @@ -104,17 +114,17 @@ pub enum Chain { } pub(crate) trait ConvertChain { - fn to_ticker(&self) -> String; + fn to_ticker(&self) -> &'static str; } impl ConvertChain for Chain { - fn to_ticker(&self) -> String { + fn to_ticker(&self) -> &'static str { match self { - Chain::Avalanche => "AVAX".to_owned(), - Chain::Bsc => "BNB".to_owned(), - Chain::Eth => "ETH".to_owned(), - Chain::Fantom => "FTM".to_owned(), - Chain::Polygon => "MATIC".to_owned(), + Chain::Avalanche => "AVAX", + Chain::Bsc => "BNB", + Chain::Eth => "ETH", + Chain::Fantom => "FTM", + Chain::Polygon => "MATIC", } } } @@ -258,7 +268,6 @@ impl UriMeta { #[derive(Clone, Debug, Deserialize, Serialize)] pub struct NftCommon { pub(crate) token_address: Address, - pub(crate) token_id: BigDecimal, pub(crate) amount: BigDecimal, pub(crate) owner_of: Address, pub(crate) token_hash: Option, @@ -283,6 +292,8 @@ pub struct Nft { #[serde(flatten)] pub(crate) common: NftCommon, pub(crate) chain: Chain, + #[serde(serialize_with = "serialize_token_id", deserialize_with = "deserialize_token_id")] + pub(crate) token_id: BigUint, pub(crate) block_number_minted: Option, pub(crate) block_number: u64, pub(crate) contract_type: ContractType, @@ -293,7 +304,7 @@ pub struct Nft { pub(crate) struct BuildNftFields { pub(crate) token_address: Address, - pub(crate) token_id: BigDecimal, + pub(crate) token_id: BigUint, pub(crate) amount: BigDecimal, pub(crate) owner_of: Address, pub(crate) contract_type: ContractType, @@ -306,7 +317,6 @@ pub(crate) fn build_nft_with_empty_meta(nft_fields: BuildNftFields) -> Nft { Nft { common: NftCommon { token_address: nft_fields.token_address, - token_id: nft_fields.token_id, amount: nft_fields.amount, owner_of: nft_fields.owner_of, token_hash: None, @@ -321,6 +331,7 @@ 
pub(crate) fn build_nft_with_empty_meta(nft_fields: BuildNftFields) -> Nft { possible_spam: nft_fields.possible_spam, }, chain: nft_fields.chain, + token_id: nft_fields.token_id, block_number_minted: None, block_number: nft_fields.block_number, contract_type: nft_fields.contract_type, @@ -339,6 +350,7 @@ pub(crate) struct NftFromMoralis { pub(crate) block_number_minted: Option>, pub(crate) block_number: SerdeStringWrap, pub(crate) contract_type: Option, + pub(crate) token_id: SerdeStringWrap, } #[derive(Debug)] @@ -378,7 +390,8 @@ pub struct WithdrawErc1155 { pub(crate) chain: Chain, pub(crate) to: String, pub(crate) token_address: String, - pub(crate) token_id: BigDecimal, + #[serde(deserialize_with = "deserialize_token_id")] + pub(crate) token_id: BigUint, pub(crate) amount: Option, #[serde(default)] pub(crate) max: bool, @@ -390,7 +403,8 @@ pub struct WithdrawErc721 { pub(crate) chain: Chain, pub(crate) to: String, pub(crate) token_address: String, - pub(crate) token_id: BigDecimal, + #[serde(deserialize_with = "deserialize_token_id")] + pub(crate) token_id: BigUint, pub(crate) fee: Option, } @@ -413,7 +427,8 @@ pub struct TransactionNftDetails { pub(crate) to: Vec, pub(crate) contract_type: ContractType, pub(crate) token_address: String, - pub(crate) token_id: BigDecimal, + #[serde(serialize_with = "serialize_token_id")] + pub(crate) token_id: BigUint, pub(crate) amount: BigDecimal, pub(crate) fee_details: Option, /// The coin transaction belongs to @@ -498,7 +513,6 @@ pub struct NftTransferCommon { pub(crate) value: Option, pub(crate) transaction_type: Option, pub(crate) token_address: Address, - pub(crate) token_id: BigDecimal, pub(crate) from_address: Address, pub(crate) to_address: Address, pub(crate) amount: BigDecimal, @@ -519,6 +533,8 @@ pub struct NftTransferHistory { #[serde(flatten)] pub(crate) common: NftTransferCommon, pub(crate) chain: Chain, + #[serde(serialize_with = "serialize_token_id", deserialize_with = "deserialize_token_id")] + pub(crate) token_id: BigUint, pub(crate) block_number: u64, pub(crate) block_timestamp: u64, pub(crate) contract_type: ContractType, @@ -531,6 +547,8 @@ pub struct NftTransferHistory { pub(crate) status: TransferStatus, #[serde(default)] pub(crate) possible_phishing: bool, + pub(crate) fee_details: Option, + pub(crate) confirmations: u64, } /// Represents an NFT transfer structure specifically for deserialization from Moralis's JSON response. 
@@ -543,6 +561,7 @@ pub(crate) struct NftTransferHistoryFromMoralis { pub(crate) block_number: SerdeStringWrap, pub(crate) block_timestamp: String, pub(crate) contract_type: Option, + pub(crate) token_id: SerdeStringWrap, } /// Represents the detailed transfer history of NFTs, including the total number of transfers @@ -589,13 +608,13 @@ pub struct UpdateNftReq { #[derive(Debug, Deserialize, Eq, Hash, PartialEq)] pub struct NftTokenAddrId { pub(crate) token_address: String, - pub(crate) token_id: BigDecimal, + pub(crate) token_id: BigUint, } #[derive(Debug)] pub struct TransferMeta { pub(crate) token_address: String, - pub(crate) token_id: BigDecimal, + pub(crate) token_id: BigUint, pub(crate) token_uri: Option, pub(crate) token_domain: Option, pub(crate) collection_name: Option, @@ -608,7 +627,7 @@ impl From for TransferMeta { fn from(nft_db: Nft) -> Self { TransferMeta { token_address: eth_addr_to_hex(&nft_db.common.token_address), - token_id: nft_db.common.token_id, + token_id: nft_db.token_id, token_uri: nft_db.common.token_uri, token_domain: nft_db.common.token_domain, collection_name: nft_db.common.collection_name, @@ -625,26 +644,56 @@ impl From for TransferMeta { /// required for NFT operations, including guarding against concurrent accesses and /// dealing with platform-specific storage mechanisms. pub(crate) struct NftCtx { - /// An asynchronous mutex to guard against concurrent NFT operations, ensuring data consistency. - pub(crate) guard: Arc>, - #[cfg(target_arch = "wasm32")] /// Platform-specific database for caching NFT data. + #[cfg(target_arch = "wasm32")] pub(crate) nft_cache_db: SharedDb, + #[cfg(not(target_arch = "wasm32"))] + pub(crate) nft_cache_db: Arc>, } impl NftCtx { /// Create a new `NftCtx` from the given MM context. /// /// If an `NftCtx` instance doesn't already exist in the MM context, it gets created and cached for subsequent use. + #[cfg(not(target_arch = "wasm32"))] + pub(crate) fn from_ctx(ctx: &MmArc) -> Result, String> { + Ok(try_s!(from_ctx(&ctx.nft_ctx, move || { + let async_sqlite_connection = ctx + .async_sqlite_connection + .ok_or("async_sqlite_connection is not initialized".to_owned())?; + Ok(NftCtx { + nft_cache_db: async_sqlite_connection.clone(), + }) + }))) + } + + #[cfg(target_arch = "wasm32")] pub(crate) fn from_ctx(ctx: &MmArc) -> Result, String> { Ok(try_s!(from_ctx(&ctx.nft_ctx, move || { Ok(NftCtx { - guard: Arc::new(AsyncMutex::new(())), - #[cfg(target_arch = "wasm32")] nft_cache_db: ConstructibleDb::new(ctx).into_shared(), }) }))) } + + /// Lock database to guard against concurrent NFT operations, ensuring data consistency. 
+ #[cfg(not(target_arch = "wasm32"))] + pub(crate) async fn lock_db( + &self, + ) -> MmResult { + Ok(self.nft_cache_db.lock().await) + } + + #[cfg(target_arch = "wasm32")] + pub(crate) async fn lock_db( + &self, + ) -> MmResult { + self.nft_cache_db + .get_or_initialize() + .await + .mm_err(WasmNftCacheError::from) + .mm_err(LockDBError::from) + } } #[derive(Debug, Serialize)] @@ -667,3 +716,19 @@ pub(crate) struct SpamContractRes { pub(crate) struct PhishingDomainRes { pub(crate) result: HashMap, } + +fn serialize_token_id(token_id: &BigUint, serializer: S) -> Result +where + S: Serializer, +{ + let token_id_str = token_id.to_string(); + serializer.serialize_str(&token_id_str) +} + +fn deserialize_token_id<'de, D>(deserializer: D) -> Result +where + D: Deserializer<'de>, +{ + let s = String::deserialize(deserializer)?; + BigUint::from_str(&s).map_err(serde::de::Error::custom) +} diff --git a/mm2src/coins/nft/nft_tests.rs b/mm2src/coins/nft/nft_tests.rs index ae10513987..99ec3925de 100644 --- a/mm2src/coins/nft/nft_tests.rs +++ b/mm2src/coins/nft/nft_tests.rs @@ -2,19 +2,18 @@ use crate::eth::eth_addr_to_hex; use crate::nft::nft_structs::{Chain, NftFromMoralis, NftListFilters, NftTransferHistoryFilters, NftTransferHistoryFromMoralis, PhishingDomainReq, PhishingDomainRes, SpamContractReq, SpamContractRes, TransferMeta, UriMeta}; -use crate::nft::storage::db_test_helpers::{init_nft_history_storage, init_nft_list_storage, nft, nft_list, - nft_transfer_history}; +use crate::nft::storage::db_test_helpers::{get_nft_ctx, nft, nft_list, nft_transfer_history}; use crate::nft::storage::{NftListStorageOps, NftTransferHistoryStorageOps, RemoveNftResult}; use crate::nft::{check_moralis_ipfs_bafy, get_domain_from_url, process_metadata_for_spam_link, process_text_for_spam_link}; use common::cross_test; use ethereum_types::Address; use mm2_net::transport::send_post_request_to_uri; -use mm2_number::BigDecimal; +use mm2_number::{BigDecimal, BigUint}; use std::num::NonZeroUsize; use std::str::FromStr; -const MORALIS_API_ENDPOINT_TEST: &str = "https://moralis-proxy.komodian.info/api/v2"; +const MORALIS_API_ENDPOINT_TEST: &str = "https://moralis-proxy.komodo.earth/api/v2"; const TEST_WALLET_ADDR_EVM: &str = "0x394d86994f954ed931b86791b62fe64f4c5dac37"; const BLOCKLIST_API_ENDPOINT: &str = "https://nft.antispam.dragonhound.info"; const TOKEN_ADD: &str = "0xfd913a305d70a60aac4faac70c739563738e1f81"; @@ -158,11 +157,13 @@ cross_test!(test_camo, { cross_test!(test_add_get_nfts, { let chain = Chain::Bsc; - let storage = init_nft_list_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftListStorageOps::init(&storage, &chain).await.unwrap(); let nft_list = nft_list(); storage.add_nfts_to_list(chain, nft_list, 28056726).await.unwrap(); - let token_id = BigDecimal::from_str(TOKEN_ID).unwrap(); + let token_id = BigUint::from_str(TOKEN_ID).unwrap(); let nft = storage .get_nft(&chain, TOKEN_ADD.to_string(), token_id) .await @@ -173,7 +174,9 @@ cross_test!(test_last_nft_block, { let chain = Chain::Bsc; - let storage = init_nft_list_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftListStorageOps::init(&storage, &chain).await.unwrap(); let nft_list = nft_list(); storage.add_nfts_to_list(chain, nft_list, 28056726).await.unwrap(); @@ -186,7 +189,9 @@ cross_test!(test_nft_list, { let chain = Chain::Bsc; - let storage =
init_nft_list_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftListStorageOps::init(&storage, &chain).await.unwrap(); let nft_list = nft_list(); storage.add_nfts_to_list(chain, nft_list, 28056726).await.unwrap(); @@ -203,11 +208,13 @@ cross_test!(test_nft_list, { cross_test!(test_remove_nft, { let chain = Chain::Bsc; - let storage = init_nft_list_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftListStorageOps::init(&storage, &chain).await.unwrap(); let nft_list = nft_list(); storage.add_nfts_to_list(chain, nft_list, 28056726).await.unwrap(); - let token_id = BigDecimal::from_str(TOKEN_ID).unwrap(); + let token_id = BigUint::from_str(TOKEN_ID).unwrap(); let remove_rslt = storage .remove_nft_from_list(&chain, TOKEN_ADD.to_string(), token_id, 28056800) .await @@ -226,7 +233,9 @@ cross_test!(test_remove_nft, { cross_test!(test_nft_amount, { let chain = Chain::Bsc; - let storage = init_nft_list_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftListStorageOps::init(&storage, &chain).await.unwrap(); let mut nft = nft(); storage .add_nfts_to_list(chain, vec![nft.clone()], 25919780) @@ -236,11 +245,7 @@ cross_test!(test_nft_amount, { nft.common.amount -= BigDecimal::from(1); storage.update_nft_amount(&chain, nft.clone(), 25919800).await.unwrap(); let amount = storage - .get_nft_amount( - &chain, - eth_addr_to_hex(&nft.common.token_address), - nft.common.token_id.clone(), - ) + .get_nft_amount(&chain, eth_addr_to_hex(&nft.common.token_address), nft.token_id.clone()) .await .unwrap() .unwrap(); @@ -255,7 +260,7 @@ cross_test!(test_nft_amount, { .await .unwrap(); let amount = storage - .get_nft_amount(&chain, eth_addr_to_hex(&nft.common.token_address), nft.common.token_id) + .get_nft_amount(&chain, eth_addr_to_hex(&nft.common.token_address), nft.token_id) .await .unwrap() .unwrap(); @@ -266,7 +271,9 @@ cross_test!(test_nft_amount, { cross_test!(test_refresh_metadata, { let chain = Chain::Bsc; - let storage = init_nft_list_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftListStorageOps::init(&storage, &chain).await.unwrap(); let new_symbol = "NEW_SYMBOL"; let mut nft = nft(); storage @@ -276,7 +283,7 @@ cross_test!(test_refresh_metadata, { nft.common.symbol = Some(new_symbol.to_string()); drop_mutability!(nft); let token_add = eth_addr_to_hex(&nft.common.token_address); - let token_id = nft.common.token_id.clone(); + let token_id = nft.token_id.clone(); storage.refresh_nft_metadata(&chain, nft).await.unwrap(); let nft_upd = storage.get_nft(&chain, token_add, token_id).await.unwrap().unwrap(); assert_eq!(new_symbol.to_string(), nft_upd.common.symbol.unwrap()); @@ -284,7 +291,9 @@ cross_test!(test_refresh_metadata, { cross_test!(test_update_nft_spam_by_token_address, { let chain = Chain::Bsc; - let storage = init_nft_list_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftListStorageOps::init(&storage, &chain).await.unwrap(); let nft_list = nft_list(); storage.add_nfts_to_list(chain, nft_list, 28056726).await.unwrap(); @@ -303,7 +312,9 @@ cross_test!(test_update_nft_spam_by_token_address, { cross_test!(test_exclude_nft_spam, { let chain = Chain::Bsc; - let storage = init_nft_list_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let 
storage = nft_ctx.lock_db().await.unwrap(); + NftListStorageOps::init(&storage, &chain).await.unwrap(); let nft_list = nft_list(); storage.add_nfts_to_list(chain, nft_list, 28056726).await.unwrap(); @@ -320,7 +331,9 @@ cross_test!(test_exclude_nft_spam, { cross_test!(test_get_animation_external_domains, { let chain = Chain::Bsc; - let storage = init_nft_list_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftListStorageOps::init(&storage, &chain).await.unwrap(); let nft_list = nft_list(); storage.add_nfts_to_list(chain, nft_list, 28056726).await.unwrap(); @@ -332,7 +345,9 @@ cross_test!(test_get_animation_external_domains, { cross_test!(test_update_nft_phishing_by_domain, { let chain = Chain::Bsc; - let storage = init_nft_list_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftListStorageOps::init(&storage, &chain).await.unwrap(); let nft_list = nft_list(); storage.add_nfts_to_list(chain, nft_list, 28056726).await.unwrap(); @@ -358,7 +373,9 @@ cross_test!(test_update_nft_phishing_by_domain, { cross_test!(test_exclude_nft_phishing_spam, { let chain = Chain::Bsc; - let storage = init_nft_list_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftListStorageOps::init(&storage, &chain).await.unwrap(); let nft_list = nft_list(); storage.add_nfts_to_list(chain, nft_list, 28056726).await.unwrap(); @@ -380,11 +397,13 @@ cross_test!(test_exclude_nft_phishing_spam, { cross_test!(test_add_get_transfers, { let chain = Chain::Bsc; - let storage = init_nft_history_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftTransferHistoryStorageOps::init(&storage, &chain).await.unwrap(); let transfers = nft_transfer_history(); storage.add_transfers_to_history(chain, transfers).await.unwrap(); - let token_id = BigDecimal::from_str(TOKEN_ID).unwrap(); + let token_id = BigUint::from_str(TOKEN_ID).unwrap(); let transfer1 = storage .get_transfers_by_token_addr_id(chain, TOKEN_ADD.to_string(), token_id) .await @@ -405,7 +424,9 @@ cross_test!(test_add_get_transfers, { cross_test!(test_last_transfer_block, { let chain = Chain::Bsc; - let storage = init_nft_history_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftTransferHistoryStorageOps::init(&storage, &chain).await.unwrap(); let transfers = nft_transfer_history(); storage.add_transfers_to_history(chain, transfers).await.unwrap(); @@ -418,7 +439,9 @@ cross_test!(test_last_transfer_block, { cross_test!(test_transfer_history, { let chain = Chain::Bsc; - let storage = init_nft_history_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftTransferHistoryStorageOps::init(&storage, &chain).await.unwrap(); let transfers = nft_transfer_history(); storage.add_transfers_to_history(chain, transfers).await.unwrap(); @@ -435,7 +458,9 @@ cross_test!(test_transfer_history, { cross_test!(test_transfer_history_filters, { let chain = Chain::Bsc; - let storage = init_nft_history_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftTransferHistoryStorageOps::init(&storage, &chain).await.unwrap(); let transfers = nft_transfer_history(); storage.add_transfers_to_history(chain, transfers).await.unwrap(); @@ -495,7 
+520,9 @@ cross_test!(test_transfer_history_filters, { cross_test!(test_get_update_transfer_meta, { let chain = Chain::Bsc; - let storage = init_nft_history_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftTransferHistoryStorageOps::init(&storage, &chain).await.unwrap(); let transfers = nft_transfer_history(); storage.add_transfers_to_history(chain, transfers).await.unwrap(); @@ -528,7 +555,9 @@ cross_test!(test_get_update_transfer_meta, { cross_test!(test_update_transfer_spam_by_token_address, { let chain = Chain::Bsc; - let storage = init_nft_history_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftTransferHistoryStorageOps::init(&storage, &chain).await.unwrap(); let transfers = nft_transfer_history(); storage.add_transfers_to_history(chain, transfers).await.unwrap(); @@ -547,7 +576,9 @@ cross_test!(test_update_transfer_spam_by_token_address, { cross_test!(test_get_token_addresses, { let chain = Chain::Bsc; - let storage = init_nft_history_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftTransferHistoryStorageOps::init(&storage, &chain).await.unwrap(); let transfers = nft_transfer_history(); storage.add_transfers_to_history(chain, transfers).await.unwrap(); @@ -557,7 +588,9 @@ cross_test!(test_get_token_addresses, { cross_test!(test_exclude_transfer_spam, { let chain = Chain::Bsc; - let storage = init_nft_history_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftTransferHistoryStorageOps::init(&storage, &chain).await.unwrap(); let transfers = nft_transfer_history(); storage.add_transfers_to_history(chain, transfers).await.unwrap(); @@ -578,7 +611,9 @@ cross_test!(test_exclude_transfer_spam, { cross_test!(test_get_domains, { let chain = Chain::Bsc; - let storage = init_nft_history_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftTransferHistoryStorageOps::init(&storage, &chain).await.unwrap(); let transfers = nft_transfer_history(); storage.add_transfers_to_history(chain, transfers).await.unwrap(); @@ -590,7 +625,9 @@ cross_test!(test_get_domains, { cross_test!(test_update_transfer_phishing_by_domain, { let chain = Chain::Bsc; - let storage = init_nft_history_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftTransferHistoryStorageOps::init(&storage, &chain).await.unwrap(); let transfers = nft_transfer_history(); storage.add_transfers_to_history(chain, transfers).await.unwrap(); @@ -616,7 +653,9 @@ cross_test!(test_update_transfer_phishing_by_domain, { cross_test!(test_exclude_transfer_phishing_spam, { let chain = Chain::Bsc; - let storage = init_nft_history_storage(&chain).await; + let nft_ctx = get_nft_ctx(&chain).await; + let storage = nft_ctx.lock_db().await.unwrap(); + NftTransferHistoryStorageOps::init(&storage, &chain).await.unwrap(); let transfers = nft_transfer_history(); storage.add_transfers_to_history(chain, transfers).await.unwrap(); diff --git a/mm2src/coins/nft/storage/db_test_helpers.rs b/mm2src/coins/nft/storage/db_test_helpers.rs index 9d4e8bfe14..d59b845661 100644 --- a/mm2src/coins/nft/storage/db_test_helpers.rs +++ b/mm2src/coins/nft/storage/db_test_helpers.rs @@ -1,16 +1,18 @@ -use crate::nft::nft_structs::{Chain, ContractType, Nft, NftCommon, 
NftTransferCommon, NftTransferHistory, +use crate::nft::nft_structs::{Chain, ContractType, Nft, NftCommon, NftCtx, NftTransferCommon, NftTransferHistory, TransferStatus, UriMeta}; -use crate::nft::storage::{NftListStorageOps, NftStorageBuilder, NftTransferHistoryStorageOps}; use ethereum_types::Address; -use mm2_number::BigDecimal; +use mm2_number::{BigDecimal, BigUint}; +#[cfg(not(target_arch = "wasm32"))] +use mm2_test_helpers::for_tests::mm_ctx_with_custom_async_db; +#[cfg(target_arch = "wasm32")] use mm2_test_helpers::for_tests::mm_ctx_with_custom_db; use std::str::FromStr; +use std::sync::Arc; pub(crate) fn nft() -> Nft { Nft { common: NftCommon { token_address: Address::from_str("0x5c7d6712dfaf0cb079d48981781c8705e8417ca0").unwrap(), - token_id: Default::default(), amount: BigDecimal::from_str("2").unwrap(), owner_of: Address::from_str("0xf622a6c52c94b500542e2ae6bcad24c53bc5b6a2").unwrap(), token_hash: Some("b34ddf294013d20a6d70691027625839".to_string()), @@ -28,6 +30,7 @@ pub(crate) fn nft() -> Nft { possible_spam: true, }, chain: Chain::Bsc, + token_id: Default::default(), block_number_minted: Some(25465916), block_number: 25919780, contract_type: ContractType::Erc1155, @@ -52,7 +55,6 @@ pub(crate) fn nft_list() -> Vec { let nft = Nft { common: NftCommon { token_address: Address::from_str("0x5c7d6712dfaf0cb079d48981781c8705e8417ca0").unwrap(), - token_id: Default::default(), amount: BigDecimal::from_str("2").unwrap(), owner_of: Address::from_str("0xf622a6c52c94b500542e2ae6bcad24c53bc5b6a2").unwrap(), token_hash: Some("b34ddf294013d20a6d70691027625839".to_string()), @@ -67,6 +69,7 @@ pub(crate) fn nft_list() -> Vec { possible_spam: false, }, chain: Chain::Bsc, + token_id: Default::default(), block_number_minted: Some(25465916), block_number: 25919780, contract_type: ContractType::Erc1155, @@ -89,7 +92,6 @@ pub(crate) fn nft_list() -> Vec { let nft1 = Nft { common: NftCommon { token_address: Address::from_str("0xfd913a305d70a60aac4faac70c739563738e1f81").unwrap(), - token_id: BigDecimal::from_str("214300047252").unwrap(), amount: BigDecimal::from_str("1").unwrap(), owner_of: Address::from_str("0xf622a6c52c94b500542e2ae6bcad24c53bc5b6a2").unwrap(), token_hash: Some("c5d1cfd75a0535b0ec750c0156e6ddfe".to_string()), @@ -107,6 +109,7 @@ pub(crate) fn nft_list() -> Vec { possible_spam: true, }, chain: Chain::Bsc, + token_id: BigUint::from_str("214300047252").unwrap(), block_number_minted: Some(25721963), block_number: 28056726, contract_type: ContractType::Erc721, @@ -131,7 +134,6 @@ pub(crate) fn nft_list() -> Vec { let nft2 = Nft { common: NftCommon { token_address: Address::from_str("0xfd913a305d70a60aac4faac70c739563738e1f81").unwrap(), - token_id: BigDecimal::from_str("214300047253").unwrap(), amount: BigDecimal::from_str("1").unwrap(), owner_of: Address::from_str("0xf622a6c52c94b500542e2ae6bcad24c53bc5b6a2").unwrap(), token_hash: Some("c5d1cfd75a0535b0ec750c0156e6ddfe".to_string()), @@ -149,6 +151,7 @@ pub(crate) fn nft_list() -> Vec { possible_spam: false, }, chain: Chain::Bsc, + token_id: BigUint::from_str("214300047253").unwrap(), block_number_minted: Some(25721963), block_number: 28056726, contract_type: ContractType::Erc721, @@ -173,7 +176,6 @@ pub(crate) fn nft_list() -> Vec { let nft3 = Nft { common: NftCommon { token_address: Address::from_str("0xfd913a305d70a60aac4faac70c739563738e1f81").unwrap(), - token_id: BigDecimal::from_str("214300044414").unwrap(), amount: BigDecimal::from_str("1").unwrap(), owner_of: 
Address::from_str("0xf622a6c52c94b500542e2ae6bcad24c53bc5b6a2").unwrap(), token_hash: Some("125f8f4e952e107c257960000b4b250e".to_string()), @@ -191,6 +193,7 @@ pub(crate) fn nft_list() -> Vec { possible_spam: false, }, chain: Chain::Bsc, + token_id: BigUint::from_str("214300044414").unwrap(), block_number_minted: Some(25810308), block_number: 28056721, contract_type: ContractType::Erc721, @@ -224,7 +227,6 @@ pub(crate) fn nft_transfer_history() -> Vec { value: Default::default(), transaction_type: Some("Single".to_string()), token_address: Address::from_str("0x5c7d6712dfaf0cb079d48981781c8705e8417ca0").unwrap(), - token_id: Default::default(), from_address: Address::from_str("0x4ff0bbc9b64d635a4696d1a38554fb2529c103ff").unwrap(), to_address: Address::from_str("0xf622a6c52c94b500542e2ae6bcad24c53bc5b6a2").unwrap(), amount: BigDecimal::from_str("1").unwrap(), @@ -233,6 +235,7 @@ pub(crate) fn nft_transfer_history() -> Vec { possible_spam: false, }, chain: Chain::Bsc, + token_id: Default::default(), block_number: 25919780, block_timestamp: 1677166110, contract_type: ContractType::Erc1155, @@ -244,6 +247,8 @@ pub(crate) fn nft_transfer_history() -> Vec { token_name: None, status: TransferStatus::Receive, possible_phishing: false, + fee_details: None, + confirmations: 0, }; let transfer1 = NftTransferHistory { @@ -255,7 +260,6 @@ pub(crate) fn nft_transfer_history() -> Vec { value: Default::default(), transaction_type: Some("Single".to_string()), token_address: Address::from_str("0xfd913a305d70a60aac4faac70c739563738e1f81").unwrap(), - token_id: BigDecimal::from_str("214300047252").unwrap(), from_address: Address::from_str("0x6fad0ec6bb76914b2a2a800686acc22970645820").unwrap(), to_address: Address::from_str("0xf622a6c52c94b500542e2ae6bcad24c53bc5b6a2").unwrap(), amount: BigDecimal::from_str("1").unwrap(), @@ -264,6 +268,7 @@ pub(crate) fn nft_transfer_history() -> Vec { possible_spam: true, }, chain: Chain::Bsc, + token_id: BigUint::from_str("214300047252").unwrap(), block_number: 28056726, block_timestamp: 1683627432, contract_type: ContractType::Erc721, @@ -275,6 +280,8 @@ pub(crate) fn nft_transfer_history() -> Vec { token_name: None, status: TransferStatus::Receive, possible_phishing: false, + fee_details: None, + confirmations: 0, }; // Same as transfer1 but with different log_index, meaning that transfer1 and transfer2 are part of one batch/multi token transaction @@ -287,7 +294,6 @@ pub(crate) fn nft_transfer_history() -> Vec { value: Default::default(), transaction_type: Some("Single".to_string()), token_address: Address::from_str("0xfd913a305d70a60aac4faac70c739563738e1f81").unwrap(), - token_id: BigDecimal::from_str("214300047253").unwrap(), from_address: Address::from_str("0x6fad0ec6bb76914b2a2a800686acc22970645820").unwrap(), to_address: Address::from_str("0xf622a6c52c94b500542e2ae6bcad24c53bc5b6a2").unwrap(), amount: BigDecimal::from_str("1").unwrap(), @@ -296,6 +302,7 @@ pub(crate) fn nft_transfer_history() -> Vec { possible_spam: false, }, chain: Chain::Bsc, + token_id: BigUint::from_str("214300047253").unwrap(), block_number: 28056726, block_timestamp: 1683627432, contract_type: ContractType::Erc721, @@ -307,6 +314,8 @@ pub(crate) fn nft_transfer_history() -> Vec { token_name: None, status: TransferStatus::Receive, possible_phishing: false, + fee_details: None, + confirmations: 0, }; let transfer3 = NftTransferHistory { @@ -318,7 +327,6 @@ pub(crate) fn nft_transfer_history() -> Vec { value: Default::default(), transaction_type: Some("Single".to_string()), token_address: 
Address::from_str("0xfd913a305d70a60aac4faac70c739563738e1f81").unwrap(), - token_id: BigDecimal::from_str("214300044414").unwrap(), from_address: Address::from_str("0x6fad0ec6bb76914b2a2a800686acc22970645820").unwrap(), to_address: Address::from_str("0xf622a6c52c94b500542e2ae6bcad24c53bc5b6a2").unwrap(), amount: BigDecimal::from_str("1").unwrap(), @@ -327,6 +335,7 @@ pub(crate) fn nft_transfer_history() -> Vec { possible_spam: false, }, chain: Chain::Bsc, + token_id: BigUint::from_str("214300044414").unwrap(), block_number: 28056721, block_timestamp: 1683627417, contract_type: ContractType::Erc721, @@ -338,26 +347,16 @@ pub(crate) fn nft_transfer_history() -> Vec { token_name: Some("Nebula Nodes".to_string()), status: TransferStatus::Receive, possible_phishing: false, + fee_details: None, + confirmations: 0, }; vec![transfer, transfer1, transfer2, transfer3] } -pub(crate) async fn init_nft_list_storage(chain: &Chain) -> impl NftListStorageOps + NftTransferHistoryStorageOps { +pub(crate) async fn get_nft_ctx(_chain: &Chain) -> Arc { + #[cfg(not(target_arch = "wasm32"))] + let ctx = mm_ctx_with_custom_async_db().await; + #[cfg(target_arch = "wasm32")] let ctx = mm_ctx_with_custom_db(); - let storage = NftStorageBuilder::new(&ctx).build().unwrap(); - NftListStorageOps::init(&storage, chain).await.unwrap(); - let is_initialized = NftListStorageOps::is_initialized(&storage, chain).await.unwrap(); - assert!(is_initialized); - storage -} - -pub(crate) async fn init_nft_history_storage(chain: &Chain) -> impl NftListStorageOps + NftTransferHistoryStorageOps { - let ctx = mm_ctx_with_custom_db(); - let storage = NftStorageBuilder::new(&ctx).build().unwrap(); - NftTransferHistoryStorageOps::init(&storage, chain).await.unwrap(); - let is_initialized = NftTransferHistoryStorageOps::is_initialized(&storage, chain) - .await - .unwrap(); - assert!(is_initialized); - storage + NftCtx::from_ctx(&ctx).unwrap() } diff --git a/mm2src/coins/nft/storage/mod.rs b/mm2src/coins/nft/storage/mod.rs index 14cc9243f0..c28c33ea54 100644 --- a/mm2src/coins/nft/storage/mod.rs +++ b/mm2src/coins/nft/storage/mod.rs @@ -1,13 +1,12 @@ +use crate::eth::EthTxFeeDetails; use crate::nft::nft_structs::{Chain, Nft, NftList, NftListFilters, NftTokenAddrId, NftTransferHistory, NftTransferHistoryFilters, NftsTransferHistoryList, TransferMeta}; use crate::WithdrawError; use async_trait::async_trait; -use derive_more::Display; use ethereum_types::Address; -use mm2_core::mm_ctx::MmArc; use mm2_err_handle::mm_error::MmResult; use mm2_err_handle::mm_error::{NotEqual, NotMmError}; -use mm2_number::BigDecimal; +use mm2_number::{BigDecimal, BigUint}; use serde::{Deserialize, Serialize}; use std::collections::HashSet; use std::num::NonZeroUsize; @@ -62,14 +61,14 @@ pub trait NftListStorageOps { &self, chain: &Chain, token_address: String, - token_id: BigDecimal, + token_id: BigUint, ) -> MmResult, Self::Error>; async fn remove_nft_from_list( &self, chain: &Chain, token_address: String, - token_id: BigDecimal, + token_id: BigUint, scanned_block: u64, ) -> MmResult; @@ -77,7 +76,7 @@ pub trait NftListStorageOps { &self, chain: &Chain, token_address: String, - token_id: BigDecimal, + token_id: BigUint, ) -> MmResult, Self::Error>; async fn refresh_nft_metadata(&self, chain: &Chain, nft: Nft) -> MmResult<(), Self::Error>; @@ -154,7 +153,7 @@ pub trait NftTransferHistoryStorageOps { &self, chain: Chain, token_address: String, - token_id: BigDecimal, + token_id: BigUint, ) -> MmResult, Self::Error>; async fn get_transfer_by_tx_hash_and_log_index( 
@@ -204,41 +203,6 @@ pub trait NftTransferHistoryStorageOps { ) -> MmResult<(), Self::Error>; } -/// Represents potential errors that can occur when creating an NFT storage. -#[derive(Debug, Deserialize, Display, Serialize)] -pub enum CreateNftStorageError { - Internal(String), -} - -impl From for WithdrawError { - fn from(e: CreateNftStorageError) -> Self { - match e { - CreateNftStorageError::Internal(err) => WithdrawError::InternalError(err), - } - } -} - -/// `NftStorageBuilder` is used to create an instance that implements the [`NftListStorageOps`] -/// and [`NftTransferHistoryStorageOps`] traits. -pub struct NftStorageBuilder<'a> { - ctx: &'a MmArc, -} - -impl<'a> NftStorageBuilder<'a> { - /// Creates a new `NftStorageBuilder` instance with the provided context. - #[inline] - pub fn new(ctx: &MmArc) -> NftStorageBuilder<'_> { NftStorageBuilder { ctx } } - - /// `build` function is used to build nft storage which implements [`NftListStorageOps`] and [`NftTransferHistoryStorageOps`] traits. - #[inline] - pub fn build(&self) -> MmResult { - #[cfg(target_arch = "wasm32")] - return wasm::wasm_storage::IndexedDbNftStorage::new(self.ctx); - #[cfg(not(target_arch = "wasm32"))] - sql_storage::SqliteNftStorage::new(self.ctx) - } -} - /// `get_offset_limit` function calculates offset and limit for final result if we use pagination. fn get_offset_limit(max: bool, limit: usize, page_number: Option, total_count: usize) -> (usize, usize) { if max { @@ -272,4 +236,5 @@ pub(crate) struct TransferDetailsJson { pub(crate) operator: Option, pub(crate) from_address: Address, pub(crate) to_address: Address, + pub(crate) fee_details: Option, } diff --git a/mm2src/coins/nft/storage/sql_storage.rs b/mm2src/coins/nft/storage/sql_storage.rs index 4e76b93249..86166a4793 100644 --- a/mm2src/coins/nft/storage/sql_storage.rs +++ b/mm2src/coins/nft/storage/sql_storage.rs @@ -2,37 +2,35 @@ use crate::nft::eth_addr_to_hex; use crate::nft::nft_structs::{Chain, ContractType, ConvertChain, Nft, NftCommon, NftList, NftListFilters, NftTokenAddrId, NftTransferCommon, NftTransferHistory, NftTransferHistoryFilters, NftsTransferHistoryList, TransferMeta, UriMeta}; -use crate::nft::storage::{get_offset_limit, CreateNftStorageError, NftDetailsJson, NftListStorageOps, NftStorageError, +use crate::nft::storage::{get_offset_limit, NftDetailsJson, NftListStorageOps, NftStorageError, NftTransferHistoryStorageOps, RemoveNftResult, TransferDetailsJson}; use async_trait::async_trait; -use common::async_blocking; +use db_common::async_sql_conn::{AsyncConnError, AsyncConnection}; use db_common::sql_build::{SqlCondition, SqlQuery}; use db_common::sqlite::rusqlite::types::{FromSqlError, Type}; use db_common::sqlite::rusqlite::{Connection, Error as SqlError, Result as SqlResult, Row, Statement}; use db_common::sqlite::sql_builder::SqlBuilder; use db_common::sqlite::{query_single_row, string_from_row, validate_table_name, CHECK_TABLE_EXISTS_SQL}; use ethereum_types::Address; -use mm2_core::mm_ctx::MmArc; -use mm2_err_handle::map_to_mm::MapToMmResult; -use mm2_err_handle::mm_error::{MmError, MmResult}; -use mm2_number::BigDecimal; +use futures::lock::MutexGuard as AsyncMutexGuard; +use mm2_err_handle::prelude::*; +use mm2_number::{BigDecimal, BigUint}; use serde_json::Value as Json; use serde_json::{self as json}; use std::collections::HashSet; use std::convert::TryInto; use std::num::NonZeroUsize; use std::str::FromStr; -use std::sync::{Arc, Mutex}; impl Chain { fn nft_list_table_name(&self) -> SqlResult { - let name = self.to_ticker() + 
"_nft_list"; + let name = self.to_ticker().to_owned() + "_nft_list"; validate_table_name(&name)?; Ok(name) } fn transfer_history_table_name(&self) -> SqlResult { - let name = self.to_ticker() + "_nft_transfer_history"; + let name = self.to_ticker().to_owned() + "_nft_transfer_history"; validate_table_name(&name)?; Ok(name) } @@ -82,7 +80,7 @@ fn create_nft_list_table_sql(chain: &Chain) -> MmResult { Ok(sql) } -fn create_transfer_history_table_sql(chain: &Chain) -> MmResult { +fn create_transfer_history_table_sql(chain: &Chain) -> Result { let table_name = chain.transfer_history_table_name()?; let sql = format!( "CREATE TABLE IF NOT EXISTS {} ( @@ -112,7 +110,7 @@ fn create_transfer_history_table_sql(chain: &Chain) -> MmResult MmResult { +fn create_scanned_nft_blocks_sql() -> Result { let table_name = scanned_nft_blocks_table_name()?; let sql = format!( "CREATE TABLE IF NOT EXISTS {} ( @@ -124,26 +122,9 @@ fn create_scanned_nft_blocks_sql() -> MmResult { Ok(sql) } -impl NftStorageError for SqlError {} +impl NftStorageError for AsyncConnError {} -#[derive(Clone)] -pub struct SqliteNftStorage(Arc>); - -impl SqliteNftStorage { - pub fn new(ctx: &MmArc) -> MmResult { - let sqlite_connection = ctx - .sqlite_connection - .ok_or(MmError::new(CreateNftStorageError::Internal( - "sqlite_connection is not initialized".to_owned(), - )))?; - Ok(SqliteNftStorage(sqlite_connection.clone())) - } -} - -fn get_nft_list_builder_preimage( - chains: Vec, - filters: Option, -) -> MmResult { +fn get_nft_list_builder_preimage(chains: Vec, filters: Option) -> Result { let union_sql_strings = chains .iter() .map(|chain| { @@ -156,7 +137,7 @@ fn get_nft_list_builder_preimage( .to_string(); Ok(sql_string) }) - .collect::, SqlError>>()?; + .collect::, SqlError>>()?; let union_alias_sql = format!("({}) AS nft_list", union_sql_strings.join(" UNION ALL ")); let mut final_sql_builder = SqlBuilder::select_from(union_alias_sql); final_sql_builder.order_desc("nft_list.block_number"); @@ -181,7 +162,7 @@ fn nft_list_builder_preimage(table_name: &str, filters: Option) fn get_nft_transfer_builder_preimage( chains: Vec, filters: Option, -) -> MmResult { +) -> Result { let union_sql_strings = chains .into_iter() .map(|chain| { @@ -194,7 +175,7 @@ fn get_nft_transfer_builder_preimage( .to_string(); Ok(sql_string) }) - .collect::, SqlError>>()?; + .collect::, SqlError>>()?; let union_alias_sql = format!("({}) AS nft_history", union_sql_strings.join(" UNION ALL ")); let mut final_sql_builder = SqlBuilder::select_from(union_alias_sql); final_sql_builder.order_desc("nft_history.block_timestamp"); @@ -230,7 +211,7 @@ fn nft_history_table_builder_preimage( Ok(sql_builder) } -fn finalize_sql_builder(mut sql_builder: SqlBuilder, offset: usize, limit: usize) -> MmResult { +fn finalize_sql_builder(mut sql_builder: SqlBuilder, offset: usize, limit: usize) -> Result { let sql = sql_builder .field("*") .offset(offset) @@ -247,9 +228,9 @@ fn get_and_parse(row: &Row<'_>, column: &str) -> Result fn nft_from_row(row: &Row<'_>) -> Result { let token_address = get_and_parse(row, "token_address")?; - let token_id = get_and_parse(row, "token_id")?; + let token_id: BigUint = get_and_parse(row, "token_id")?; let chain = get_and_parse(row, "chain")?; - let amount = get_and_parse(row, "amount")?; + let amount: BigDecimal = get_and_parse(row, "amount")?; let block_number: u64 = row.get("block_number")?; let contract_type = get_and_parse(row, "contract_type")?; let possible_spam: i32 = row.get("possible_spam")?; @@ -302,7 +283,6 @@ fn nft_from_row(row: 
&Row<'_>) -> Result { let common = NftCommon { token_address, - token_id, amount, owner_of: nft_details.owner_of, token_hash: nft_details.token_hash, @@ -319,6 +299,7 @@ fn nft_from_row(row: &Row<'_>) -> Result { let nft = Nft { common, chain, + token_id, block_number_minted: nft_details.block_number_minted, block_number, contract_type, @@ -336,7 +317,7 @@ fn transfer_history_from_row(row: &Row<'_>) -> Result = row.get("token_uri")?; @@ -359,7 +340,6 @@ fn transfer_history_from_row(row: &Row<'_>) -> Result) -> Result) -> Result) -> Result { fn token_address_id_from_row(row: &Row<'_>) -> Result { let token_address: String = row.get("token_address")?; let token_id_str: String = row.get("token_id")?; - let token_id = BigDecimal::from_str(&token_id_str).map_err(|_| SqlError::from(FromSqlError::InvalidType))?; + let token_id = BigUint::from_str(&token_id_str).map_err(|_| SqlError::from(FromSqlError::InvalidType))?; Ok(NftTokenAddrId { token_address, token_id, }) } -fn insert_nft_in_list_sql(chain: &Chain) -> MmResult { +fn insert_nft_in_list_sql(chain: &Chain) -> Result { let table_name = chain.nft_list_table_name()?; let sql = format!( "INSERT INTO {} ( @@ -422,7 +405,7 @@ fn insert_nft_in_list_sql(chain: &Chain) -> MmResult { Ok(sql) } -fn insert_transfer_in_history_sql(chain: &Chain) -> MmResult { +fn insert_transfer_in_history_sql(chain: &Chain) -> Result { let table_name = chain.transfer_history_table_name()?; let sql = format!( "INSERT INTO {} ( @@ -437,7 +420,7 @@ fn insert_transfer_in_history_sql(chain: &Chain) -> MmResult { Ok(sql) } -fn upsert_last_scanned_block_sql() -> MmResult { +fn upsert_last_scanned_block_sql() -> Result { let table_name = scanned_nft_blocks_table_name()?; let sql = format!( "INSERT OR REPLACE INTO {} (chain, last_scanned_block) VALUES (?1, ?2);", @@ -446,7 +429,7 @@ fn upsert_last_scanned_block_sql() -> MmResult { Ok(sql) } -fn refresh_nft_metadata_sql(chain: &Chain) -> MmResult { +fn refresh_nft_metadata_sql(chain: &Chain) -> Result { let table_name = chain.nft_list_table_name()?; let sql = format!( "UPDATE {} SET possible_spam = ?1, possible_phishing = ?2, collection_name = ?3, symbol = ?4, token_uri = ?5, token_domain = ?6, metadata = ?7, \ @@ -457,7 +440,7 @@ fn refresh_nft_metadata_sql(chain: &Chain) -> MmResult { Ok(sql) } -fn update_transfers_meta_by_token_addr_id_sql(chain: &Chain) -> MmResult { +fn update_transfers_meta_by_token_addr_id_sql(chain: &Chain) -> Result { let table_name = chain.transfer_history_table_name()?; let sql = format!( "UPDATE {} SET token_uri = ?1, token_domain = ?2, collection_name = ?3, image_url = ?4, image_domain = ?5, \ @@ -467,7 +450,7 @@ fn update_transfers_meta_by_token_addr_id_sql(chain: &Chain) -> MmResult MmResult { +fn update_transfer_spam_by_token_addr_id(chain: &Chain) -> Result { let table_name = chain.transfer_history_table_name()?; let sql = format!( "UPDATE {} SET possible_spam = ?1 WHERE token_address = ?2 AND token_id = ?3;", @@ -476,7 +459,7 @@ fn update_transfer_spam_by_token_addr_id(chain: &Chain) -> MmResult MmResult { +fn select_last_block_number_sql(table_name: String) -> Result { let sql = format!( "SELECT block_number FROM {} ORDER BY block_number DESC LIMIT 1", table_name @@ -490,7 +473,7 @@ fn select_last_scanned_block_sql() -> MmResult { Ok(sql) } -fn delete_nft_sql(table_name: String) -> Result> { +fn delete_nft_sql(table_name: String) -> Result { let sql = format!("DELETE FROM {} WHERE token_address=?1 AND token_id=?2", table_name); Ok(sql) } @@ -499,19 +482,19 @@ fn block_number_from_row(row: 
&Row<'_>) -> Result { row.get::<_, fn nft_amount_from_row(row: &Row<'_>) -> Result { row.get(0) } -fn get_nfts_by_token_address_statement(conn: &Connection, table_name: String) -> MmResult { +fn get_nfts_by_token_address_statement(conn: &Connection, table_name: String) -> Result { let sql_query = format!("SELECT * FROM {} WHERE token_address = ?", table_name); let stmt = conn.prepare(&sql_query)?; Ok(stmt) } -fn get_token_addresses_statement(conn: &Connection, table_name: String) -> MmResult { +fn get_token_addresses_statement(conn: &Connection, table_name: String) -> Result { let sql_query = format!("SELECT DISTINCT token_address FROM {}", table_name); let stmt = conn.prepare(&sql_query)?; Ok(stmt) } -fn get_transfers_from_block_statement<'a>(conn: &'a Connection, chain: &'a Chain) -> MmResult, SqlError> { +fn get_transfers_from_block_statement<'a>(conn: &'a Connection, chain: &'a Chain) -> Result, SqlError> { let table_name = chain.transfer_history_table_name()?; let sql_query = format!( "SELECT * FROM {} WHERE block_number >= ? ORDER BY block_number ASC", @@ -521,17 +504,14 @@ fn get_transfers_from_block_statement<'a>(conn: &'a Connection, chain: &'a Chain Ok(stmt) } -fn get_transfers_by_token_addr_id_statement(conn: &Connection, chain: Chain) -> MmResult { +fn get_transfers_by_token_addr_id_statement(conn: &Connection, chain: Chain) -> Result { let table_name = chain.transfer_history_table_name()?; let sql_query = format!("SELECT * FROM {} WHERE token_address = ? AND token_id = ?", table_name); let stmt = conn.prepare(&sql_query)?; Ok(stmt) } -fn get_transfers_with_empty_meta_builder<'a>( - conn: &'a Connection, - chain: &'a Chain, -) -> MmResult, SqlError> { +fn get_transfers_with_empty_meta_builder<'a>(conn: &'a Connection, chain: &'a Chain) -> Result, SqlError> { let table_name = chain.transfer_history_table_name()?; let mut sql_builder = SqlQuery::select_from(conn, table_name.as_str())?; sql_builder @@ -542,35 +522,33 @@ fn get_transfers_with_empty_meta_builder<'a>( .and_where_is_null("token_uri") .and_where_is_null("collection_name") .and_where_is_null("image_url") - .and_where_is_null("token_name"); + .and_where_is_null("token_name") + .and_where("possible_spam == 0"); drop_mutability!(sql_builder); Ok(sql_builder) } #[async_trait] -impl NftListStorageOps for SqliteNftStorage { - type Error = SqlError; +impl NftListStorageOps for AsyncMutexGuard<'_, AsyncConnection> { + type Error = AsyncConnError; async fn init(&self, chain: &Chain) -> MmResult<(), Self::Error> { - let selfi = self.clone(); let sql_nft_list = create_nft_list_table_sql(chain)?; - async_blocking(move || { - let conn = selfi.0.lock().unwrap(); + self.call(move |conn| { conn.execute(&sql_nft_list, []).map(|_| ())?; conn.execute(&create_scanned_nft_blocks_sql()?, []).map(|_| ())?; Ok(()) }) .await + .map_to_mm(AsyncConnError::from) } async fn is_initialized(&self, chain: &Chain) -> MmResult { let table_name = chain.nft_list_table_name()?; - let selfi = self.clone(); - async_blocking(move || { - let conn = selfi.0.lock().unwrap(); - let nft_list_initialized = query_single_row(&conn, CHECK_TABLE_EXISTS_SQL, [table_name], string_from_row)?; + self.call(move |conn| { + let nft_list_initialized = query_single_row(conn, CHECK_TABLE_EXISTS_SQL, [table_name], string_from_row)?; let scanned_nft_blocks_initialized = query_single_row( - &conn, + conn, CHECK_TABLE_EXISTS_SQL, [scanned_nft_blocks_table_name()?], string_from_row, @@ -578,6 +556,7 @@ impl NftListStorageOps for SqliteNftStorage { Ok(nft_list_initialized.is_some() && 
scanned_nft_blocks_initialized.is_some()) }) .await + .map_to_mm(AsyncConnError::from) } async fn get_nft_list( @@ -588,9 +567,7 @@ impl NftListStorageOps for SqliteNftStorage { page_number: Option, filters: Option, ) -> MmResult { - let selfi = self.clone(); - async_blocking(move || { - let conn = selfi.0.lock().unwrap(); + self.call(move |conn| { let sql_builder = get_nft_list_builder_preimage(chains, filters)?; let total_count_builder_sql = sql_builder .clone() @@ -616,6 +593,7 @@ impl NftListStorageOps for SqliteNftStorage { Ok(result) }) .await + .map_to_mm(AsyncConnError::from) } async fn add_nfts_to_list(&self, chain: Chain, nfts: I, last_scanned_block: u64) -> MmResult<(), Self::Error> @@ -623,9 +601,7 @@ impl NftListStorageOps for SqliteNftStorage { I: IntoIterator + Send + 'static, I::IntoIter: Send, { - let selfi = self.clone(); - async_blocking(move || { - let mut conn = selfi.0.lock().unwrap(); + self.call(move |conn| { let sql_transaction = conn.transaction()?; for nft in nfts { @@ -638,7 +614,7 @@ impl NftListStorageOps for SqliteNftStorage { let details_json = json::to_string(&details_json).expect("serialization should not fail"); let params = [ Some(eth_addr_to_hex(&nft.common.token_address)), - Some(nft.common.token_id.to_string()), + Some(nft.token_id.to_string()), Some(nft.chain.to_string()), Some(nft.common.amount.to_string()), Some(nft.block_number.to_string()), @@ -667,45 +643,44 @@ impl NftListStorageOps for SqliteNftStorage { ]; sql_transaction.execute(&insert_nft_in_list_sql(&chain)?, params)?; } - let scanned_block_params = [chain.to_ticker(), last_scanned_block.to_string()]; + let scanned_block_params = [chain.to_ticker().to_string(), last_scanned_block.to_string()]; sql_transaction.execute(&upsert_last_scanned_block_sql()?, scanned_block_params)?; sql_transaction.commit()?; Ok(()) }) .await + .map_to_mm(AsyncConnError::from) } async fn get_nft( &self, chain: &Chain, token_address: String, - token_id: BigDecimal, + token_id: BigUint, ) -> MmResult, Self::Error> { let table_name = chain.nft_list_table_name()?; - let sql = format!("SELECT * FROM {} WHERE token_address=?1 AND token_id=?2", table_name); - let params = [token_address, token_id.to_string()]; - let selfi = self.clone(); - async_blocking(move || { - let conn = selfi.0.lock().unwrap(); - query_single_row(&conn, &sql, params, nft_from_row).map_to_mm(SqlError::from) + self.call(move |conn| { + let sql = format!("SELECT * FROM {} WHERE token_address=?1 AND token_id=?2", table_name); + let params = [token_address, token_id.to_string()]; + let nft = query_single_row(conn, &sql, params, nft_from_row)?; + Ok(nft) }) .await + .map_to_mm(AsyncConnError::from) } async fn remove_nft_from_list( &self, chain: &Chain, token_address: String, - token_id: BigDecimal, + token_id: BigUint, scanned_block: u64, ) -> MmResult { let table_name = chain.nft_list_table_name()?; let sql = delete_nft_sql(table_name)?; let params = [token_address, token_id.to_string()]; - let scanned_block_params = [chain.to_ticker(), scanned_block.to_string()]; - let selfi = self.clone(); - async_blocking(move || { - let mut conn = selfi.0.lock().unwrap(); + let scanned_block_params = [chain.to_ticker().to_string(), scanned_block.to_string()]; + self.call(move |conn| { let sql_transaction = conn.transaction()?; let rows_num = sql_transaction.execute(&sql, params)?; @@ -719,13 +694,14 @@ impl NftListStorageOps for SqliteNftStorage { Ok(remove_nft_result) }) .await + .map_to_mm(AsyncConnError::from) } async fn get_nft_amount( &self, chain: &Chain, 
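Note: `add_nfts_to_list` above writes all NFT rows of a sync pass plus the last-scanned-block bookmark inside a single SQLite transaction, so a failed sync never leaves partial state behind. A minimal runnable sketch of that pattern with a cut-down, illustrative schema:

```rust
// Illustrative schema only; the real tables carry many more columns.
use rusqlite::{params, Connection, Result as SqlResult};

fn add_rows(conn: &mut Connection, rows: &[(&str, &str)], last_block: u64) -> SqlResult<()> {
    let tx = conn.transaction()?;
    for (token_address, token_id) in rows.iter().copied() {
        tx.execute(
            "INSERT INTO BSC_nft_list (token_address, token_id) VALUES (?1, ?2)",
            params![token_address, token_id],
        )?;
    }
    // The bookmark is updated in the same transaction as the rows it covers.
    tx.execute(
        "INSERT OR REPLACE INTO scanned_nft_blocks (chain, last_scanned_block) VALUES (?1, ?2)",
        params!["BSC", last_block.to_string()],
    )?;
    tx.commit()
}

fn main() -> SqlResult<()> {
    let mut conn = Connection::open_in_memory()?;
    conn.execute("CREATE TABLE BSC_nft_list (token_address TEXT, token_id TEXT)", [])?;
    conn.execute("CREATE TABLE scanned_nft_blocks (chain TEXT PRIMARY KEY, last_scanned_block TEXT)", [])?;
    add_rows(&mut conn, &[("0xfd913a305d70a60aac4faac70c739563738e1f81", "214300044414")], 28056721)
}
```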
token_address: String, - token_id: BigDecimal, + token_id: BigUint, ) -> MmResult, Self::Error> { let table_name = chain.nft_list_table_name()?; let sql = format!( @@ -733,19 +709,17 @@ impl NftListStorageOps for SqliteNftStorage { table_name ); let params = [token_address, token_id.to_string()]; - let selfi = self.clone(); - async_blocking(move || { - let conn = selfi.0.lock().unwrap(); - query_single_row(&conn, &sql, params, nft_amount_from_row).map_to_mm(SqlError::from) + self.call(move |conn| { + let amount = query_single_row(conn, &sql, params, nft_amount_from_row)?; + Ok(amount) }) .await + .map_to_mm(AsyncConnError::from) } async fn refresh_nft_metadata(&self, chain: &Chain, nft: Nft) -> MmResult<(), Self::Error> { let sql = refresh_nft_metadata_sql(chain)?; - let selfi = self.clone(); - async_blocking(move || { - let mut conn = selfi.0.lock().unwrap(); + self.call(move |conn| { let sql_transaction = conn.transaction()?; let params = [ Some(i32::from(nft.common.possible_spam).to_string()), @@ -769,41 +743,40 @@ impl NftListStorageOps for SqliteNftStorage { nft.uri_meta.external_domain, nft.uri_meta.image_details.map(|v| v.to_string()), Some(eth_addr_to_hex(&nft.common.token_address)), - Some(nft.common.token_id.to_string()), + Some(nft.token_id.to_string()), ]; sql_transaction.execute(&sql, params)?; sql_transaction.commit()?; Ok(()) }) .await + .map_to_mm(AsyncConnError::from) } async fn get_last_block_number(&self, chain: &Chain) -> MmResult, Self::Error> { let table_name = chain.nft_list_table_name()?; let sql = select_last_block_number_sql(table_name)?; - let selfi = self.clone(); - async_blocking(move || { - let conn = selfi.0.lock().unwrap(); - query_single_row(&conn, &sql, [], block_number_from_row).map_to_mm(SqlError::from) + self.call(move |conn| { + let block_number = query_single_row(conn, &sql, [], block_number_from_row)?; + Ok(block_number) }) .await? .map(|b| b.try_into()) .transpose() - .map_to_mm(|e| SqlError::FromSqlConversionFailure(2, Type::Integer, Box::new(e))) + .map_to_mm(|e| AsyncConnError::Rusqlite(SqlError::FromSqlConversionFailure(2, Type::Integer, Box::new(e)))) } async fn get_last_scanned_block(&self, chain: &Chain) -> MmResult, Self::Error> { let sql = select_last_scanned_block_sql()?; let params = [chain.to_ticker()]; - let selfi = self.clone(); - async_blocking(move || { - let conn = selfi.0.lock().unwrap(); - query_single_row(&conn, &sql, params, block_number_from_row).map_to_mm(SqlError::from) + self.call(move |conn| { + let block_number = query_single_row(conn, &sql, params, block_number_from_row)?; + Ok(block_number) }) .await? 
.map(|b| b.try_into()) .transpose() - .map_to_mm(|e| SqlError::FromSqlConversionFailure(2, Type::Integer, Box::new(e))) + .map_to_mm(|e| AsyncConnError::Rusqlite(SqlError::FromSqlConversionFailure(2, Type::Integer, Box::new(e)))) } async fn update_nft_amount(&self, chain: &Chain, nft: Nft, scanned_block: u64) -> MmResult<(), Self::Error> { @@ -812,15 +785,13 @@ impl NftListStorageOps for SqliteNftStorage { "UPDATE {} SET amount = ?1 WHERE token_address = ?2 AND token_id = ?3;", table_name ); - let scanned_block_params = [chain.to_ticker(), scanned_block.to_string()]; - let selfi = self.clone(); - async_blocking(move || { - let mut conn = selfi.0.lock().unwrap(); + let scanned_block_params = [chain.to_ticker().to_string(), scanned_block.to_string()]; + self.call(move |conn| { let sql_transaction = conn.transaction()?; let params = [ Some(nft.common.amount.to_string()), Some(eth_addr_to_hex(&nft.common.token_address)), - Some(nft.common.token_id.to_string()), + Some(nft.token_id.to_string()), ]; sql_transaction.execute(&sql, params)?; sql_transaction.execute(&upsert_last_scanned_block_sql()?, scanned_block_params)?; @@ -828,6 +799,7 @@ impl NftListStorageOps for SqliteNftStorage { Ok(()) }) .await + .map_to_mm(AsyncConnError::from) } async fn update_nft_amount_and_block_number(&self, chain: &Chain, nft: Nft) -> MmResult<(), Self::Error> { @@ -836,16 +808,14 @@ impl NftListStorageOps for SqliteNftStorage { "UPDATE {} SET amount = ?1, block_number = ?2 WHERE token_address = ?3 AND token_id = ?4;", table_name ); - let scanned_block_params = [chain.to_ticker(), nft.block_number.to_string()]; - let selfi = self.clone(); - async_blocking(move || { - let mut conn = selfi.0.lock().unwrap(); + let scanned_block_params = [chain.to_ticker().to_string(), nft.block_number.to_string()]; + self.call(move |conn| { let sql_transaction = conn.transaction()?; let params = [ Some(nft.common.amount.to_string()), Some(nft.block_number.to_string()), Some(eth_addr_to_hex(&nft.common.token_address)), - Some(nft.common.token_id.to_string()), + Some(nft.token_id.to_string()), ]; sql_transaction.execute(&sql, params)?; sql_transaction.execute(&upsert_last_scanned_block_sql()?, scanned_block_params)?; @@ -853,20 +823,20 @@ impl NftListStorageOps for SqliteNftStorage { Ok(()) }) .await + .map_to_mm(AsyncConnError::from) } async fn get_nfts_by_token_address(&self, chain: Chain, token_address: String) -> MmResult, Self::Error> { - let selfi = self.clone(); - async_blocking(move || { - let conn = selfi.0.lock().unwrap(); + self.call(move |conn| { let table_name = chain.nft_list_table_name()?; - let mut stmt = get_nfts_by_token_address_statement(&conn, table_name)?; + let mut stmt = get_nfts_by_token_address_statement(conn, table_name)?; let nfts = stmt .query_map([token_address], nft_from_row)? 
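Note: `get_last_block_number` / `get_last_scanned_block` above read the stored value as a signed integer and convert it to `u64`, wrapping failures in the same `FromSqlConversionFailure` that is then mapped into `AsyncConnError::Rusqlite`. A standalone sketch of just the conversion step:

```rust
// Sketch of the i64 -> u64 conversion used for block numbers.
use rusqlite::types::Type;
use rusqlite::Error as SqlError;

fn to_block_number(raw: Option<i64>) -> Result<Option<u64>, SqlError> {
    raw.map(|b| b.try_into())
        .transpose()
        .map_err(|e| SqlError::FromSqlConversionFailure(2, Type::Integer, Box::new(e)))
}

fn main() {
    assert_eq!(to_block_number(Some(28056721)).unwrap(), Some(28056721));
    assert!(to_block_number(Some(-1)).is_err()); // negative values cannot be block numbers
}
```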
.collect::, _>>()?; Ok(nfts) }) .await + .map_to_mm(AsyncConnError::from) } async fn update_nft_spam_by_token_address( @@ -875,11 +845,9 @@ impl NftListStorageOps for SqliteNftStorage { token_address: String, possible_spam: bool, ) -> MmResult<(), Self::Error> { - let selfi = self.clone(); let table_name = chain.nft_list_table_name()?; let sql = format!("UPDATE {} SET possible_spam = ?1 WHERE token_address = ?2;", table_name); - async_blocking(move || { - let mut conn = selfi.0.lock().unwrap(); + self.call(move |conn| { let sql_transaction = conn.transaction()?; let params = [Some(i32::from(possible_spam).to_string()), Some(token_address.clone())]; sql_transaction.execute(&sql, params)?; @@ -887,17 +855,14 @@ impl NftListStorageOps for SqliteNftStorage { Ok(()) }) .await + .map_to_mm(AsyncConnError::from) } async fn get_animation_external_domains(&self, chain: &Chain) -> MmResult, Self::Error> { - let selfi = self.clone(); let table_name = chain.nft_list_table_name()?; - async_blocking(move || { - let conn = selfi.0.lock().unwrap(); + self.call(move |conn| { let sql_query = format!( - "SELECT DISTINCT animation_domain FROM {} - UNION - SELECT DISTINCT external_domain FROM {}", + "SELECT DISTINCT animation_domain FROM {} UNION SELECT DISTINCT external_domain FROM {}", table_name, table_name ); let mut stmt = conn.prepare(&sql_query)?; @@ -908,6 +873,7 @@ impl NftListStorageOps for SqliteNftStorage { Ok(domains) }) .await + .map_to_mm(AsyncConnError::from) } async fn update_nft_phishing_by_domain( @@ -916,17 +882,13 @@ impl NftListStorageOps for SqliteNftStorage { domain: String, possible_phishing: bool, ) -> MmResult<(), Self::Error> { - let selfi = self.clone(); - let table_name = chain.nft_list_table_name()?; let sql = format!( "UPDATE {} SET possible_phishing = ?1 WHERE token_domain = ?2 OR image_domain = ?2 OR animation_domain = ?2 OR external_domain = ?2;", table_name ); - - async_blocking(move || { - let mut conn = selfi.0.lock().unwrap(); + self.call(move |conn| { let sql_transaction = conn.transaction()?; let params = [Some(i32::from(possible_phishing).to_string()), Some(domain)]; sql_transaction.execute(&sql, params)?; @@ -934,33 +896,32 @@ impl NftListStorageOps for SqliteNftStorage { Ok(()) }) .await + .map_to_mm(AsyncConnError::from) } } #[async_trait] -impl NftTransferHistoryStorageOps for SqliteNftStorage { - type Error = SqlError; +impl NftTransferHistoryStorageOps for AsyncMutexGuard<'_, AsyncConnection> { + type Error = AsyncConnError; async fn init(&self, chain: &Chain) -> MmResult<(), Self::Error> { - let selfi = self.clone(); let sql_transfer_history = create_transfer_history_table_sql(chain)?; - async_blocking(move || { - let conn = selfi.0.lock().unwrap(); + self.call(move |conn| { conn.execute(&sql_transfer_history, []).map(|_| ())?; Ok(()) }) .await + .map_to_mm(AsyncConnError::from) } async fn is_initialized(&self, chain: &Chain) -> MmResult { let table_name = chain.transfer_history_table_name()?; - let selfi = self.clone(); - async_blocking(move || { - let conn = selfi.0.lock().unwrap(); - let nft_list_initialized = query_single_row(&conn, CHECK_TABLE_EXISTS_SQL, [table_name], string_from_row)?; + self.call(move |conn| { + let nft_list_initialized = query_single_row(conn, CHECK_TABLE_EXISTS_SQL, [table_name], string_from_row)?; Ok(nft_list_initialized.is_some()) }) .await + .map_to_mm(AsyncConnError::from) } async fn get_transfer_history( @@ -971,9 +932,7 @@ impl NftTransferHistoryStorageOps for SqliteNftStorage { page_number: Option, filters: Option, ) -> MmResult 
{ - let selfi = self.clone(); - async_blocking(move || { - let conn = selfi.0.lock().unwrap(); + self.call(move |conn| { let sql_builder = get_nft_transfer_builder_preimage(chains, filters)?; let total_count_builder_sql = sql_builder .clone() @@ -999,6 +958,7 @@ impl NftTransferHistoryStorageOps for SqliteNftStorage { Ok(result) }) .await + .map_to_mm(AsyncConnError::from) } async fn add_transfers_to_history(&self, chain: Chain, transfers: I) -> MmResult<(), Self::Error> @@ -1006,11 +966,8 @@ impl NftTransferHistoryStorageOps for SqliteNftStorage { I: IntoIterator + Send + 'static, I::IntoIter: Send, { - let selfi = self.clone(); - async_blocking(move || { - let mut conn = selfi.0.lock().unwrap(); + self.call(move |conn| { let sql_transaction = conn.transaction()?; - for transfer in transfers { let details_json = TransferDetailsJson { block_hash: transfer.common.block_hash, @@ -1021,6 +978,7 @@ impl NftTransferHistoryStorageOps for SqliteNftStorage { operator: transfer.common.operator, from_address: transfer.common.from_address, to_address: transfer.common.from_address, + fee_details: transfer.fee_details, }; let transfer_json = json::to_string(&details_json).expect("serialization should not fail"); let params = [ @@ -1031,7 +989,7 @@ impl NftTransferHistoryStorageOps for SqliteNftStorage { Some(transfer.block_timestamp.to_string()), Some(transfer.contract_type.to_string()), Some(eth_addr_to_hex(&transfer.common.token_address)), - Some(transfer.common.token_id.to_string()), + Some(transfer.token_id.to_string()), Some(transfer.status.to_string()), Some(transfer.common.amount.to_string()), transfer.token_uri, @@ -1050,20 +1008,20 @@ impl NftTransferHistoryStorageOps for SqliteNftStorage { Ok(()) }) .await + .map_to_mm(AsyncConnError::from) } async fn get_last_block_number(&self, chain: &Chain) -> MmResult, Self::Error> { let table_name = chain.transfer_history_table_name()?; let sql = select_last_block_number_sql(table_name)?; - let selfi = self.clone(); - async_blocking(move || { - let conn = selfi.0.lock().unwrap(); - query_single_row(&conn, &sql, [], block_number_from_row).map_to_mm(SqlError::from) + self.call(move |conn| { + let block_number = query_single_row(conn, &sql, [], block_number_from_row)?; + Ok(block_number) }) .await? .map(|b| b.try_into()) .transpose() - .map_to_mm(|e| SqlError::FromSqlConversionFailure(2, Type::Integer, Box::new(e))) + .map_to_mm(|e| AsyncConnError::Rusqlite(SqlError::FromSqlConversionFailure(2, Type::Integer, Box::new(e)))) } async fn get_transfers_from_block( @@ -1071,34 +1029,32 @@ impl NftTransferHistoryStorageOps for SqliteNftStorage { chain: Chain, from_block: u64, ) -> MmResult, Self::Error> { - let selfi = self.clone(); - async_blocking(move || { - let conn = selfi.0.lock().unwrap(); - let mut stmt = get_transfers_from_block_statement(&conn, &chain)?; + self.call(move |conn| { + let mut stmt = get_transfers_from_block_statement(conn, &chain)?; let transfers = stmt .query_map([from_block], transfer_history_from_row)? 
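Note: `add_transfers_to_history` above now persists `fee_details` inside the per-transfer JSON details blob. A standalone serde sketch showing why the field is optional (rows written before the migration simply lack it); the struct and the `FeeDetails` fields are illustrative, not the real `TransferDetailsJson`/`EthTxFeeDetails`:

```rust
// Illustrative stand-ins for the stored details JSON; requires serde/serde_json.
use serde::{Deserialize, Serialize};

#[derive(Debug, Deserialize, Serialize)]
struct FeeDetails {
    coin: String,
    gas: u64,
    gas_price: String,
    total_fee: String,
}

#[derive(Debug, Deserialize, Serialize)]
struct TransferDetails {
    from_address: String,
    to_address: String,
    // Newly persisted alongside the transfer; older rows have no such key.
    fee_details: Option<FeeDetails>,
}

fn main() {
    let old_row = r#"{"from_address":"0x6fad","to_address":"0xf622"}"#;
    let details: TransferDetails = serde_json::from_str(old_row).unwrap();
    assert!(details.fee_details.is_none()); // rows written before the migration still deserialize
    println!("{}", serde_json::to_string(&details).unwrap());
}
```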
.collect::, _>>()?; Ok(transfers) }) .await + .map_to_mm(AsyncConnError::from) } async fn get_transfers_by_token_addr_id( &self, chain: Chain, token_address: String, - token_id: BigDecimal, + token_id: BigUint, ) -> MmResult, Self::Error> { - let selfi = self.clone(); - async_blocking(move || { - let conn = selfi.0.lock().unwrap(); - let mut stmt = get_transfers_by_token_addr_id_statement(&conn, chain)?; + self.call(move |conn| { + let mut stmt = get_transfers_by_token_addr_id_statement(conn, chain)?; let transfers = stmt .query_map([token_address, token_id.to_string()], transfer_history_from_row)? .collect::, _>>()?; Ok(transfers) }) .await + .map_to_mm(AsyncConnError::from) } async fn get_transfer_by_tx_hash_and_log_index( @@ -1112,18 +1068,17 @@ impl NftTransferHistoryStorageOps for SqliteNftStorage { "SELECT * FROM {} WHERE transaction_hash=?1 AND log_index = ?2", table_name ); - let selfi = self.clone(); - async_blocking(move || { - let conn = selfi.0.lock().unwrap(); - query_single_row( - &conn, + self.call(move |conn| { + let transfer = query_single_row( + conn, &sql, [transaction_hash, log_index.to_string()], transfer_history_from_row, - ) - .map_to_mm(SqlError::from) + )?; + Ok(transfer) }) .await + .map_to_mm(AsyncConnError::from) } async fn update_transfers_meta_by_token_addr_id( @@ -1149,9 +1104,7 @@ impl NftTransferHistoryStorageOps for SqliteNftStorage { Some(transfer_meta.token_address), Some(transfer_meta.token_id.to_string()), ]; - let selfi = self.clone(); - async_blocking(move || { - let mut conn = selfi.0.lock().unwrap(); + self.call(move |conn| { let sql_transaction = conn.transaction()?; sql_transaction.execute(&sql, params)?; if set_spam { @@ -1161,17 +1114,17 @@ impl NftTransferHistoryStorageOps for SqliteNftStorage { Ok(()) }) .await + .map_to_mm(AsyncConnError::from) } async fn get_transfers_with_empty_meta(&self, chain: Chain) -> MmResult, Self::Error> { - let selfi = self.clone(); - async_blocking(move || { - let conn = selfi.0.lock().unwrap(); - let sql_builder = get_transfers_with_empty_meta_builder(&conn, &chain)?; + self.call(move |conn| { + let sql_builder = get_transfers_with_empty_meta_builder(conn, &chain)?; let token_addr_id_pair = sql_builder.query(token_address_id_from_row)?; Ok(token_addr_id_pair) }) .await + .map_to_mm(AsyncConnError::from) } async fn get_transfers_by_token_address( @@ -1179,17 +1132,16 @@ impl NftTransferHistoryStorageOps for SqliteNftStorage { chain: Chain, token_address: String, ) -> MmResult, Self::Error> { - let selfi = self.clone(); - async_blocking(move || { - let conn = selfi.0.lock().unwrap(); + self.call(move |conn| { let table_name = chain.transfer_history_table_name()?; - let mut stmt = get_nfts_by_token_address_statement(&conn, table_name)?; - let nfts = stmt + let mut stmt = get_nfts_by_token_address_statement(conn, table_name)?; + let transfers = stmt .query_map([token_address], transfer_history_from_row)? 
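Note: several methods above, such as `get_transfer_by_tx_hash_and_log_index`, fetch at most one row through db_common's `query_single_row`. A stand-in sketch of what such a helper does (the real signature may differ slightly):

```rust
// Stand-in for db_common's query_single_row: prepare, map the first row if
// there is one, otherwise return None instead of an error.
use rusqlite::{Connection, Params, Result as SqlResult, Row};

fn query_single_row<T, P, F>(conn: &Connection, sql: &str, params: P, map: F) -> SqlResult<Option<T>>
where
    P: Params,
    F: FnOnce(&Row<'_>) -> SqlResult<T>,
{
    let mut stmt = conn.prepare(sql)?;
    let mut rows = stmt.query(params)?;
    match rows.next()? {
        Some(row) => map(row).map(Some),
        None => Ok(None),
    }
}

fn main() -> SqlResult<()> {
    let conn = Connection::open_in_memory()?;
    conn.execute("CREATE TABLE t (block_number INTEGER)", [])?;
    // No rows yet: the helper reports None rather than failing.
    let last: Option<i64> =
        query_single_row(&conn, "SELECT block_number FROM t ORDER BY block_number DESC LIMIT 1", [], |row| row.get(0))?;
    assert!(last.is_none());
    Ok(())
}
```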
.collect::, _>>()?; - Ok(nfts) + Ok(transfers) }) .await + .map_to_mm(AsyncConnError::from) } async fn update_transfer_spam_by_token_address( @@ -1198,13 +1150,9 @@ impl NftTransferHistoryStorageOps for SqliteNftStorage { token_address: String, possible_spam: bool, ) -> MmResult<(), Self::Error> { - let selfi = self.clone(); - let table_name = chain.transfer_history_table_name()?; let sql = format!("UPDATE {} SET possible_spam = ?1 WHERE token_address = ?2;", table_name); - - async_blocking(move || { - let mut conn = selfi.0.lock().unwrap(); + self.call(move |conn| { let sql_transaction = conn.transaction()?; let params = [Some(i32::from(possible_spam).to_string()), Some(token_address.clone())]; sql_transaction.execute(&sql, params)?; @@ -1212,31 +1160,27 @@ impl NftTransferHistoryStorageOps for SqliteNftStorage { Ok(()) }) .await + .map_to_mm(AsyncConnError::from) } async fn get_token_addresses(&self, chain: Chain) -> MmResult, Self::Error> { - let selfi = self.clone(); - async_blocking(move || { - let conn = selfi.0.lock().unwrap(); + self.call(move |conn| { let table_name = chain.transfer_history_table_name()?; - let mut stmt = get_token_addresses_statement(&conn, table_name)?; + let mut stmt = get_token_addresses_statement(conn, table_name)?; let addresses = stmt .query_map([], address_from_row)? .collect::, _>>()?; Ok(addresses) }) .await + .map_to_mm(AsyncConnError::from) } async fn get_domains(&self, chain: &Chain) -> MmResult, Self::Error> { - let selfi = self.clone(); let table_name = chain.transfer_history_table_name()?; - async_blocking(move || { - let conn = selfi.0.lock().unwrap(); + self.call(move |conn| { let sql_query = format!( - "SELECT DISTINCT token_domain FROM {} - UNION - SELECT DISTINCT image_domain FROM {}", + "SELECT DISTINCT token_domain FROM {} UNION SELECT DISTINCT image_domain FROM {}", table_name, table_name ); let mut stmt = conn.prepare(&sql_query)?; @@ -1247,6 +1191,7 @@ impl NftTransferHistoryStorageOps for SqliteNftStorage { Ok(domains) }) .await + .map_to_mm(AsyncConnError::from) } async fn update_transfer_phishing_by_domain( @@ -1255,16 +1200,12 @@ impl NftTransferHistoryStorageOps for SqliteNftStorage { domain: String, possible_phishing: bool, ) -> MmResult<(), Self::Error> { - let selfi = self.clone(); - let table_name = chain.transfer_history_table_name()?; let sql = format!( "UPDATE {} SET possible_phishing = ?1 WHERE token_domain = ?2 OR image_domain = ?2;", table_name ); - - async_blocking(move || { - let mut conn = selfi.0.lock().unwrap(); + self.call(move |conn| { let sql_transaction = conn.transaction()?; let params = [Some(i32::from(possible_phishing).to_string()), Some(domain)]; sql_transaction.execute(&sql, params)?; @@ -1272,5 +1213,6 @@ impl NftTransferHistoryStorageOps for SqliteNftStorage { Ok(()) }) .await + .map_to_mm(AsyncConnError::from) } } diff --git a/mm2src/coins/nft/storage/wasm/mod.rs b/mm2src/coins/nft/storage/wasm/mod.rs index 6bbc8738c4..ab8f69af68 100644 --- a/mm2src/coins/nft/storage/wasm/mod.rs +++ b/mm2src/coins/nft/storage/wasm/mod.rs @@ -1,7 +1,6 @@ use crate::nft::storage::NftStorageError; use mm2_db::indexed_db::{DbTransactionError, InitDbError}; use mm2_err_handle::prelude::*; -use mm2_number::bigdecimal::ParseBigDecimalError; pub(crate) mod nft_idb; pub(crate) mod wasm_storage; @@ -20,7 +19,6 @@ pub enum WasmNftCacheError { NotSupported(String), InternalError(String), GetLastNftBlockError(String), - ParseBigDecimalError(ParseBigDecimalError), } impl From for WasmNftCacheError { diff --git 
a/mm2src/coins/nft/storage/wasm/wasm_storage.rs b/mm2src/coins/nft/storage/wasm/wasm_storage.rs index 789ec069da..faf79f663a 100644 --- a/mm2src/coins/nft/storage/wasm/wasm_storage.rs +++ b/mm2src/coins/nft/storage/wasm/wasm_storage.rs @@ -1,24 +1,21 @@ use crate::eth::eth_addr_to_hex; -use crate::nft::nft_structs::{Chain, ContractType, Nft, NftCtx, NftList, NftListFilters, NftTransferHistory, +use crate::nft::nft_structs::{Chain, ContractType, Nft, NftList, NftListFilters, NftTransferHistory, NftsTransferHistoryList, TransferMeta, TransferStatus}; -use crate::nft::storage::wasm::nft_idb::{NftCacheIDB, NftCacheIDBLocked}; +use crate::nft::storage::wasm::nft_idb::NftCacheIDBLocked; use crate::nft::storage::wasm::{WasmNftCacheError, WasmNftCacheResult}; -use crate::nft::storage::{get_offset_limit, CreateNftStorageError, NftListStorageOps, NftTokenAddrId, - NftTransferHistoryFilters, NftTransferHistoryStorageOps, RemoveNftResult}; +use crate::nft::storage::{get_offset_limit, NftListStorageOps, NftTokenAddrId, NftTransferHistoryFilters, + NftTransferHistoryStorageOps, RemoveNftResult}; use async_trait::async_trait; use common::is_initial_upgrade; use ethereum_types::Address; -use mm2_core::mm_ctx::MmArc; -use mm2_db::indexed_db::{BeBigUint, DbTable, DbUpgrader, MultiIndex, OnUpgradeResult, SharedDb, TableSignature}; -use mm2_err_handle::map_mm_error::MapMmError; +use mm2_db::indexed_db::{BeBigUint, DbTable, DbUpgrader, MultiIndex, OnUpgradeResult, TableSignature}; use mm2_err_handle::map_to_mm::MapToMmResult; use mm2_err_handle::prelude::MmResult; -use mm2_number::BigDecimal; +use mm2_number::BigUint; use num_traits::ToPrimitive; use serde_json::{self as json, Value as Json}; use std::collections::HashSet; use std::num::NonZeroUsize; -use std::str::FromStr; const CHAIN_TOKEN_ADD_TOKEN_ID_INDEX: &str = "chain_token_add_token_id_index"; const CHAIN_BLOCK_NUMBER_INDEX: &str = "chain_block_number_index"; @@ -26,94 +23,69 @@ const CHAIN_TOKEN_ADD_INDEX: &str = "chain_token_add_index"; const CHAIN_TOKEN_DOMAIN_INDEX: &str = "chain_token_domain_index"; const CHAIN_IMAGE_DOMAIN_INDEX: &str = "chain_image_domain_index"; -/// Provides methods for interacting with the IndexedDB storage specifically designed for NFT data. -/// -/// This struct abstracts the intricacies of fetching and storing NFT data in the IndexedDB, -/// ensuring optimal performance and data integrity. -#[derive(Clone)] -pub struct IndexedDbNftStorage { - /// The underlying shared database instance for caching NFT data. - db: SharedDb, +fn take_nft_according_to_paging_opts( + mut nfts: Vec, + max: bool, + limit: usize, + page_number: Option, +) -> WasmNftCacheResult { + let total_count = nfts.len(); + nfts.sort_by(|a, b| b.block_number.cmp(&a.block_number)); + let (offset, limit) = get_offset_limit(max, limit, page_number, total_count); + Ok(NftList { + nfts: nfts.into_iter().skip(offset).take(limit).collect(), + skipped: offset, + total: total_count, + }) } -impl IndexedDbNftStorage { - /// Construct a new `IndexedDbNftStorage` using the given MM context. - /// - /// This method ensures that a proper NFT context (`NftCtx`) exists within the MM context - /// and initializes the underlying storage as required. - pub fn new(ctx: &MmArc) -> MmResult { - let nft_ctx = NftCtx::from_ctx(ctx).map_to_mm(CreateNftStorageError::Internal)?; - Ok(IndexedDbNftStorage { - db: nft_ctx.nft_cache_db.clone(), - }) - } - - /// Lock the underlying database to ensure exclusive access, maintaining data consistency during operations. 
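Note: `take_nft_according_to_paging_opts` above (now a free function) sorts newest-first and then applies a skip/take window computed by `get_offset_limit`. A standalone sketch of that windowing; the real helper also takes a `max` flag and a `NonZeroUsize` page number, which are omitted here:

```rust
// Illustrative paging helper: sort descending (like sorting by block_number),
// then turn a 1-based page number into a row offset.
fn page<T: Ord>(mut items: Vec<T>, limit: usize, page_number: usize) -> (Vec<T>, usize, usize) {
    let total = items.len();
    items.sort_by(|a, b| b.cmp(a));
    let offset = (page_number.max(1) - 1) * limit;
    let page: Vec<T> = items.into_iter().skip(offset).take(limit).collect();
    (page, offset, total) // (items, skipped, total)
}

fn main() {
    let (items, skipped, total) = page(vec![1, 5, 3, 9, 7], 2, 2);
    assert_eq!((items, skipped, total), (vec![5, 3], 2, 5)); // page 2 of 2-per-page
}
```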
- async fn lock_db(&self) -> WasmNftCacheResult> { - self.db.get_or_initialize().await.mm_err(WasmNftCacheError::from) - } - - fn take_nft_according_to_paging_opts( - mut nfts: Vec, - max: bool, - limit: usize, - page_number: Option, - ) -> WasmNftCacheResult { - let total_count = nfts.len(); - nfts.sort_by(|a, b| b.block_number.cmp(&a.block_number)); - let (offset, limit) = get_offset_limit(max, limit, page_number, total_count); - Ok(NftList { - nfts: nfts.into_iter().skip(offset).take(limit).collect(), - skipped: offset, - total: total_count, - }) - } - - fn filter_nfts(nfts: I, filters: Option) -> WasmNftCacheResult> - where - I: Iterator, - { - let mut filtered_nfts = Vec::new(); - for nft_table in nfts { - let nft = nft_details_from_item(nft_table)?; - if let Some(filters) = &filters { +fn filter_nfts(nfts: I, filters: Option) -> WasmNftCacheResult> +where + I: Iterator, +{ + let mut filtered_nfts = Vec::new(); + for nft_table in nfts { + let nft = nft_details_from_item(nft_table)?; + match filters { + Some(filters) => { if filters.passes_spam_filter(&nft) && filters.passes_phishing_filter(&nft) { filtered_nfts.push(nft); } - } else { - filtered_nfts.push(nft); - } + }, + None => filtered_nfts.push(nft), } - Ok(filtered_nfts) } + Ok(filtered_nfts) +} - fn take_transfers_according_to_paging_opts( - mut transfers: Vec, - max: bool, - limit: usize, - page_number: Option, - ) -> WasmNftCacheResult { - let total_count = transfers.len(); - transfers.sort_by(|a, b| b.block_timestamp.cmp(&a.block_timestamp)); - let (offset, limit) = get_offset_limit(max, limit, page_number, total_count); - Ok(NftsTransferHistoryList { - transfer_history: transfers.into_iter().skip(offset).take(limit).collect(), - skipped: offset, - total: total_count, - }) - } +fn take_transfers_according_to_paging_opts( + mut transfers: Vec, + max: bool, + limit: usize, + page_number: Option, +) -> WasmNftCacheResult { + let total_count = transfers.len(); + transfers.sort_by(|a, b| b.block_timestamp.cmp(&a.block_timestamp)); + let (offset, limit) = get_offset_limit(max, limit, page_number, total_count); + Ok(NftsTransferHistoryList { + transfer_history: transfers.into_iter().skip(offset).take(limit).collect(), + skipped: offset, + total: total_count, + }) +} - fn filter_transfers( - transfers: I, - filters: Option, - ) -> WasmNftCacheResult> - where - I: Iterator, - { - let mut filtered_transfers = Vec::new(); - for transfers_table in transfers { - let transfer = transfer_details_from_item(transfers_table)?; - if let Some(filters) = &filters { +fn filter_transfers( + transfers: I, + filters: Option, +) -> WasmNftCacheResult> +where + I: Iterator, +{ + let mut filtered_transfers = Vec::new(); + for transfers_table in transfers { + let transfer = transfer_details_from_item(transfers_table)?; + match filters { + Some(filters) => { if filters.is_status_match(&transfer) && filters.is_date_match(&transfer) && filters.passes_spam_filter(&transfer) @@ -121,12 +93,11 @@ impl IndexedDbNftStorage { { filtered_transfers.push(transfer); } - } else { - filtered_transfers.push(transfer); - } + }, + None => filtered_transfers.push(transfer), } - Ok(filtered_transfers) } + Ok(filtered_transfers) } impl NftListFilters { @@ -157,7 +128,7 @@ impl NftTransferHistoryFilters { } #[async_trait] -impl NftListStorageOps for IndexedDbNftStorage { +impl NftListStorageOps for NftCacheIDBLocked<'_> { type Error = WasmNftCacheError; async fn init(&self, _chain: &Chain) -> MmResult<(), Self::Error> { Ok(()) } @@ -172,8 +143,7 @@ impl NftListStorageOps for 
IndexedDbNftStorage { page_number: Option, filters: Option, ) -> MmResult { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; let mut nfts = Vec::new(); for chain in chains { @@ -182,10 +152,10 @@ impl NftListStorageOps for IndexedDbNftStorage { .await? .into_iter() .map(|(_item_id, nft)| nft); - let filtered = Self::filter_nfts(nft_tables, filters)?; + let filtered = filter_nfts(nft_tables, filters)?; nfts.extend(filtered); } - Self::take_nft_according_to_paging_opts(nfts, max, limit, page_number) + take_nft_according_to_paging_opts(nfts, max, limit, page_number) } async fn add_nfts_to_list(&self, chain: Chain, nfts: I, last_scanned_block: u64) -> MmResult<(), Self::Error> @@ -193,8 +163,7 @@ impl NftListStorageOps for IndexedDbNftStorage { I: IntoIterator + Send + 'static, I::IntoIter: Send, { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let nft_table = db_transaction.table::().await?; let last_scanned_block_table = db_transaction.table::().await?; for nft in nfts { @@ -215,15 +184,14 @@ impl NftListStorageOps for IndexedDbNftStorage { &self, chain: &Chain, token_address: String, - token_id: BigDecimal, + token_id: BigUint, ) -> MmResult, Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; let index_keys = MultiIndex::new(CHAIN_TOKEN_ADD_TOKEN_ID_INDEX) .with_value(chain.to_string())? .with_value(&token_address)? - .with_value(token_id.to_string())?; + .with_value(BeBigUint::from(token_id))?; if let Some((_item_id, item)) = table.get_item_by_unique_multi_index(index_keys).await? { Ok(Some(nft_details_from_item(item)?)) @@ -236,18 +204,17 @@ impl NftListStorageOps for IndexedDbNftStorage { &self, chain: &Chain, token_address: String, - token_id: BigDecimal, + token_id: BigUint, scanned_block: u64, ) -> MmResult { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let nft_table = db_transaction.table::().await?; let last_scanned_block_table = db_transaction.table::().await?; let index_keys = MultiIndex::new(CHAIN_TOKEN_ADD_TOKEN_ID_INDEX) .with_value(chain.to_string())? .with_value(&token_address)? - .with_value(token_id.to_string())?; + .with_value(BeBigUint::from(token_id))?; let last_scanned_block = LastScannedBlockTable { chain: chain.to_string(), @@ -269,15 +236,14 @@ impl NftListStorageOps for IndexedDbNftStorage { &self, chain: &Chain, token_address: String, - token_id: BigDecimal, + token_id: BigUint, ) -> MmResult, Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; let index_keys = MultiIndex::new(CHAIN_TOKEN_ADD_TOKEN_ID_INDEX) .with_value(chain.to_string())? .with_value(&token_address)? - .with_value(token_id.to_string())?; + .with_value(BeBigUint::from(token_id))?; if let Some((_item_id, item)) = table.get_item_by_unique_multi_index(index_keys).await? 
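Note: the wasm storage above now feeds `BeBigUint::from(token_id)` into the composite IndexedDB index instead of `token_id.to_string()`, presumably so that index keys compare numerically rather than lexicographically. A standalone illustration of the difference, with plain fixed-width big-endian bytes standing in for `BeBigUint` (which lives in `mm2_db::indexed_db`):

```rust
// Illustration only; not the actual BeBigUint encoding.
fn be_bytes(n: u128) -> Vec<u8> {
    n.to_be_bytes().to_vec()
}

fn main() {
    // Lexicographic string order is wrong for numeric ids...
    assert!("9" > "10");
    // ...while equal-width big-endian encodings order the same way the numbers do.
    assert!(be_bytes(9) < be_bytes(10));
    assert!(be_bytes(214300044414) < be_bytes(214300047252));
}
```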
{ Ok(Some(nft_details_from_item(item)?.common.amount.to_string())) @@ -287,13 +253,12 @@ impl NftListStorageOps for IndexedDbNftStorage { } async fn refresh_nft_metadata(&self, chain: &Chain, nft: Nft) -> MmResult<(), Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; let index_keys = MultiIndex::new(CHAIN_TOKEN_ADD_TOKEN_ID_INDEX) .with_value(chain.to_string())? .with_value(eth_addr_to_hex(&nft.common.token_address))? - .with_value(nft.common.token_id.to_string())?; + .with_value(BeBigUint::from(nft.token_id.clone()))?; let nft_item = NftListTable::from_nft(&nft)?; table.replace_item_by_unique_multi_index(index_keys, &nft_item).await?; @@ -301,15 +266,13 @@ impl NftListStorageOps for IndexedDbNftStorage { } async fn get_last_block_number(&self, chain: &Chain) -> MmResult, Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; get_last_block_from_table(chain, table, CHAIN_BLOCK_NUMBER_INDEX).await } async fn get_last_scanned_block(&self, chain: &Chain) -> MmResult, Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; if let Some((_item_id, item)) = table.get_item_by_unique_index("chain", chain.to_string()).await? { let last_scanned_block = item @@ -323,15 +286,14 @@ impl NftListStorageOps for IndexedDbNftStorage { } async fn update_nft_amount(&self, chain: &Chain, nft: Nft, scanned_block: u64) -> MmResult<(), Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let nft_table = db_transaction.table::().await?; let last_scanned_block_table = db_transaction.table::().await?; let index_keys = MultiIndex::new(CHAIN_TOKEN_ADD_TOKEN_ID_INDEX) .with_value(chain.to_string())? .with_value(eth_addr_to_hex(&nft.common.token_address))? - .with_value(nft.common.token_id.to_string())?; + .with_value(BeBigUint::from(nft.token_id.clone()))?; let nft_item = NftListTable::from_nft(&nft)?; nft_table @@ -348,15 +310,14 @@ impl NftListStorageOps for IndexedDbNftStorage { } async fn update_nft_amount_and_block_number(&self, chain: &Chain, nft: Nft) -> MmResult<(), Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let nft_table = db_transaction.table::().await?; let last_scanned_block_table = db_transaction.table::().await?; let index_keys = MultiIndex::new(CHAIN_TOKEN_ADD_TOKEN_ID_INDEX) .with_value(chain.to_string())? .with_value(eth_addr_to_hex(&nft.common.token_address))? 
- .with_value(nft.common.token_id.to_string())?; + .with_value(BeBigUint::from(nft.token_id.clone()))?; let nft_item = NftListTable::from_nft(&nft)?; nft_table @@ -373,8 +334,7 @@ impl NftListStorageOps for IndexedDbNftStorage { } async fn get_nfts_by_token_address(&self, chain: Chain, token_address: String) -> MmResult, Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; let index_keys = MultiIndex::new(CHAIN_TOKEN_ADD_INDEX) @@ -395,8 +355,7 @@ impl NftListStorageOps for IndexedDbNftStorage { token_address: String, possible_spam: bool, ) -> MmResult<(), Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; let chain_str = chain.to_string(); @@ -419,7 +378,7 @@ impl NftListStorageOps for IndexedDbNftStorage { let index_keys = MultiIndex::new(CHAIN_TOKEN_ADD_TOKEN_ID_INDEX) .with_value(&chain_str)? .with_value(eth_addr_to_hex(&nft.common.token_address))? - .with_value(nft.common.token_id.to_string())?; + .with_value(BeBigUint::from(nft.token_id.clone()))?; let item = NftListTable::from_nft(&nft)?; table.replace_item_by_unique_multi_index(index_keys, &item).await?; @@ -428,8 +387,7 @@ impl NftListStorageOps for IndexedDbNftStorage { } async fn get_animation_external_domains(&self, chain: &Chain) -> MmResult, Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; let mut domains = HashSet::new(); @@ -451,8 +409,7 @@ impl NftListStorageOps for IndexedDbNftStorage { domain: String, possible_phishing: bool, ) -> MmResult<(), Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; let chain_str = chain.to_string(); @@ -467,7 +424,7 @@ impl NftListStorageOps for IndexedDbNftStorage { } #[async_trait] -impl NftTransferHistoryStorageOps for IndexedDbNftStorage { +impl NftTransferHistoryStorageOps for NftCacheIDBLocked<'_> { type Error = WasmNftCacheError; async fn init(&self, _chain: &Chain) -> MmResult<(), Self::Error> { Ok(()) } @@ -482,8 +439,7 @@ impl NftTransferHistoryStorageOps for IndexedDbNftStorage { page_number: Option, filters: Option, ) -> MmResult { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; let mut transfers = Vec::new(); for chain in chains { @@ -492,10 +448,10 @@ impl NftTransferHistoryStorageOps for IndexedDbNftStorage { .await? 
.into_iter() .map(|(_item_id, transfer)| transfer); - let filtered = Self::filter_transfers(transfer_tables, filters)?; + let filtered = filter_transfers(transfer_tables, filters)?; transfers.extend(filtered); } - Self::take_transfers_according_to_paging_opts(transfers, max, limit, page_number) + take_transfers_according_to_paging_opts(transfers, max, limit, page_number) } async fn add_transfers_to_history(&self, _chain: Chain, transfers: I) -> MmResult<(), Self::Error> @@ -503,8 +459,7 @@ impl NftTransferHistoryStorageOps for IndexedDbNftStorage { I: IntoIterator + Send + 'static, I::IntoIter: Send, { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; for transfer in transfers { let transfer_item = NftTransferHistoryTable::from_transfer_history(&transfer)?; @@ -514,8 +469,7 @@ impl NftTransferHistoryStorageOps for IndexedDbNftStorage { } async fn get_last_block_number(&self, chain: &Chain) -> MmResult, Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; get_last_block_from_table(chain, table, CHAIN_BLOCK_NUMBER_INDEX).await } @@ -525,8 +479,7 @@ impl NftTransferHistoryStorageOps for IndexedDbNftStorage { chain: Chain, from_block: u64, ) -> MmResult, Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; let items = table .cursor_builder() @@ -552,16 +505,15 @@ impl NftTransferHistoryStorageOps for IndexedDbNftStorage { &self, chain: Chain, token_address: String, - token_id: BigDecimal, + token_id: BigUint, ) -> MmResult, Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; let index_keys = MultiIndex::new(CHAIN_TOKEN_ADD_TOKEN_ID_INDEX) .with_value(chain.to_string())? .with_value(&token_address)? - .with_value(token_id.to_string())?; + .with_value(BeBigUint::from(token_id))?; table .get_items_by_multi_index(index_keys) @@ -577,8 +529,7 @@ impl NftTransferHistoryStorageOps for IndexedDbNftStorage { transaction_hash: String, log_index: u32, ) -> MmResult, Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; let index_keys = MultiIndex::new(NftTransferHistoryTable::CHAIN_TX_HASH_LOG_INDEX_INDEX) .with_value(chain.to_string())? @@ -598,15 +549,14 @@ impl NftTransferHistoryStorageOps for IndexedDbNftStorage { transfer_meta: TransferMeta, set_spam: bool, ) -> MmResult<(), Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; let chain_str = chain.to_string(); let index_keys = MultiIndex::new(CHAIN_TOKEN_ADD_TOKEN_ID_INDEX) .with_value(&chain_str)? .with_value(&transfer_meta.token_address)? 
- .with_value(transfer_meta.token_id.to_string())?; + .with_value(BeBigUint::from(transfer_meta.token_id))?; let transfers: Result, _> = table .get_items_by_multi_index(index_keys) @@ -640,8 +590,7 @@ impl NftTransferHistoryStorageOps for IndexedDbNftStorage { } async fn get_transfers_with_empty_meta(&self, chain: Chain) -> MmResult, Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; let items = table .cursor_builder() @@ -660,10 +609,11 @@ impl NftTransferHistoryStorageOps for IndexedDbNftStorage { && item.collection_name.is_none() && item.image_url.is_none() && item.token_name.is_none() + && !item.possible_spam { res.insert(NftTokenAddrId { token_address: item.token_address, - token_id: BigDecimal::from_str(&item.token_id).map_err(WasmNftCacheError::ParseBigDecimalError)?, + token_id: BigUint::from(item.token_id), }); } } @@ -675,8 +625,7 @@ impl NftTransferHistoryStorageOps for IndexedDbNftStorage { chain: Chain, token_address: String, ) -> MmResult, Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; let index_keys = MultiIndex::new(CHAIN_TOKEN_ADD_INDEX) @@ -697,8 +646,7 @@ impl NftTransferHistoryStorageOps for IndexedDbNftStorage { token_address: String, possible_spam: bool, ) -> MmResult<(), Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; let chain_str = chain.to_string(); @@ -730,8 +678,7 @@ impl NftTransferHistoryStorageOps for IndexedDbNftStorage { } async fn get_token_addresses(&self, chain: Chain) -> MmResult, Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; let items = table.get_items("chain", chain.to_string()).await?; @@ -744,8 +691,7 @@ impl NftTransferHistoryStorageOps for IndexedDbNftStorage { } async fn get_domains(&self, chain: &Chain) -> MmResult, Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; let mut domains = HashSet::new(); @@ -767,10 +713,8 @@ impl NftTransferHistoryStorageOps for IndexedDbNftStorage { domain: String, possible_phishing: bool, ) -> MmResult<(), Self::Error> { - let locked_db = self.lock_db().await?; - let db_transaction = locked_db.get_inner().transaction().await?; + let db_transaction = self.get_inner().transaction().await?; let table = db_transaction.table::().await?; - let chain_str = chain.to_string(); update_transfer_phishing_for_index(&table, &chain_str, CHAIN_TOKEN_DOMAIN_INDEX, &domain, possible_phishing) .await?; @@ -822,7 +766,7 @@ async fn update_nft_phishing_for_index( let index_keys = MultiIndex::new(CHAIN_TOKEN_ADD_TOKEN_ID_INDEX) .with_value(chain)? .with_value(eth_addr_to_hex(&nft.common.token_address))? 
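Note: `get_transfers_with_empty_meta` above (both the SQL builder's `possible_spam == 0` clause and the wasm `!item.possible_spam` check) now skips transfers flagged as possible spam, so metadata is never requested for them. A trimmed, standalone sketch of that filter:

```rust
// Trimmed illustration; the real check also inspects collection_name and image_url.
struct TransferRow {
    token_address: String,
    token_id: u64,
    token_uri: Option<String>,
    token_name: Option<String>,
    possible_spam: bool,
}

fn needs_meta(rows: &[TransferRow]) -> Vec<(String, u64)> {
    rows.iter()
        .filter(|r| r.token_uri.is_none() && r.token_name.is_none() && !r.possible_spam)
        .map(|r| (r.token_address.clone(), r.token_id))
        .collect()
}

fn main() {
    let rows = vec![
        TransferRow { token_address: "0xfd91".into(), token_id: 1, token_uri: None, token_name: None, possible_spam: false },
        TransferRow { token_address: "0x5c7d".into(), token_id: 2, token_uri: None, token_name: None, possible_spam: true },
    ];
    assert_eq!(needs_meta(&rows), vec![("0xfd91".to_string(), 1)]); // the spam row is dropped
}
```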
- .with_value(nft.common.token_id.to_string())?; + .with_value(BeBigUint::from(nft.token_id))?; table.replace_item_by_unique_multi_index(index_keys, &nft_item).await?; } Ok(()) @@ -877,7 +821,7 @@ impl BlockNumberTable for NftTransferHistoryTable { #[derive(Clone, Debug, Deserialize, Serialize)] pub(crate) struct NftListTable { token_address: String, - token_id: String, + token_id: BeBigUint, chain: String, amount: String, block_number: BeBigUint, @@ -899,7 +843,7 @@ impl NftListTable { let details_json = json::to_value(nft).map_to_mm(|e| WasmNftCacheError::ErrorSerializing(e.to_string()))?; Ok(NftListTable { token_address: eth_addr_to_hex(&nft.common.token_address), - token_id: nft.common.token_id.to_string(), + token_id: BeBigUint::from(nft.token_id.clone()), chain: nft.chain.to_string(), amount: nft.common.amount.to_string(), block_number: BeBigUint::from(nft.block_number), @@ -952,7 +896,7 @@ pub(crate) struct NftTransferHistoryTable { block_timestamp: BeBigUint, contract_type: ContractType, token_address: String, - token_id: String, + token_id: BeBigUint, status: TransferStatus, amount: String, token_uri: Option, @@ -980,7 +924,7 @@ impl NftTransferHistoryTable { block_timestamp: BeBigUint::from(transfer.block_timestamp), contract_type: transfer.contract_type, token_address: eth_addr_to_hex(&transfer.common.token_address), - token_id: transfer.common.token_id.to_string(), + token_id: BeBigUint::from(transfer.token_id.clone()), status: transfer.status, amount: transfer.common.amount.to_string(), token_uri: transfer.token_uri.clone(), diff --git a/mm2src/db_common/Cargo.toml b/mm2src/db_common/Cargo.toml index 7a469bca71..e161be857e 100644 --- a/mm2src/db_common/Cargo.toml +++ b/mm2src/db_common/Cargo.toml @@ -13,5 +13,8 @@ log = "0.4.17" uuid = { version = "1.2.2", features = ["fast-rng", "serde", "v4"] } [target.'cfg(not(target_arch = "wasm32"))'.dependencies] +crossbeam-channel = "0.5.1" +futures = "0.3.1" rusqlite = { version = "0.28", features = ["bundled"] } sql-builder = "3.1.1" +tokio = { version = "1.20", default-features = false, features = ["macros"] } diff --git a/mm2src/db_common/src/async_conn_tests.rs b/mm2src/db_common/src/async_conn_tests.rs new file mode 100644 index 0000000000..4002b4ac3c --- /dev/null +++ b/mm2src/db_common/src/async_conn_tests.rs @@ -0,0 +1,253 @@ +use crate::async_sql_conn::{AsyncConnError, AsyncConnection, InternalError, Result as AsyncConnResult}; +use rusqlite::{ffi, ErrorCode}; +use std::fmt::Display; + +#[tokio::test] +async fn open_in_memory_test() -> AsyncConnResult<()> { + let conn = AsyncConnection::open_in_memory().await; + assert!(conn.is_ok()); + Ok(()) +} + +#[tokio::test] +async fn call_success_test() -> AsyncConnResult<()> { + let conn = AsyncConnection::open_in_memory().await?; + + let result = conn + .call(|conn| { + conn.execute( + "CREATE TABLE person(id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT NOT NULL);", + [], + ) + .map_err(|e| e.into()) + }) + .await; + + assert_eq!(0, result.unwrap()); + + Ok(()) +} + +#[tokio::test] +async fn call_unwrap_success_test() -> AsyncConnResult<()> { + let conn = AsyncConnection::open_in_memory().await?; + + let result = conn + .call_unwrap(|conn| { + conn.execute( + "CREATE TABLE person(id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT NOT NULL);", + [], + ) + .unwrap() + }) + .await; + + assert_eq!(0, result); + + Ok(()) +} + +#[tokio::test] +async fn call_failure_test() -> AsyncConnResult<()> { + let conn = AsyncConnection::open_in_memory().await?; + + let result = conn + .call(|conn| 
conn.execute("Invalid sql", []).map_err(|e| e.into())) + .await; + + assert!(match result.unwrap_err() { + AsyncConnError::Rusqlite(e) => { + e == rusqlite::Error::SqlInputError { + error: ffi::Error { + code: ErrorCode::Unknown, + extended_code: 1, + }, + msg: "near \"Invalid\": syntax error".to_string(), + sql: "Invalid sql".to_string(), + offset: 0, + } + }, + _ => false, + }); + + Ok(()) +} + +#[tokio::test] +async fn close_success_test() -> AsyncConnResult<()> { + let mut conn = AsyncConnection::open_in_memory().await?; + + assert!(conn.close().await.is_ok()); + + Ok(()) +} + +#[tokio::test] +async fn double_close_test() -> AsyncConnResult<()> { + let mut conn = AsyncConnection::open_in_memory().await?; + + let mut conn2 = conn.clone(); + + assert!(conn.close().await.is_ok()); + assert!(conn2.close().await.is_ok()); + + Ok(()) +} + +#[tokio::test] +async fn close_call_test() -> AsyncConnResult<()> { + let mut conn = AsyncConnection::open_in_memory().await?; + + let conn2 = conn.clone(); + + assert!(conn.close().await.is_ok()); + + let result = conn2 + .call(|conn| conn.execute("SELECT 1;", []).map_err(|e| e.into())) + .await; + + assert!(matches!(result.unwrap_err(), AsyncConnError::ConnectionClosed)); + + Ok(()) +} + +#[tokio::test] +#[should_panic] +async fn close_call_unwrap_test() { + let mut conn = AsyncConnection::open_in_memory().await.unwrap(); + + let conn2 = conn.clone(); + + assert!(conn.close().await.is_ok()); + + conn2.call_unwrap(|conn| conn.execute("SELECT 1;", [])).await.unwrap(); +} + +#[tokio::test] +async fn close_failure_test() -> AsyncConnResult<()> { + let mut conn = AsyncConnection::open_in_memory().await?; + + conn.call(|conn| { + conn.execute( + "CREATE TABLE person(id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT NOT NULL);", + [], + ) + .map_err(|e| e.into()) + }) + .await?; + + conn.call(|conn| { + // Leak a prepared statement to make the database uncloseable + // See https://www.sqlite.org/c3ref/close.html for details regarding this behaviour + let stmt = Box::new(conn.prepare("INSERT INTO person VALUES (1, ?1);").unwrap()); + Box::leak(stmt); + Ok(()) + }) + .await?; + + assert!(match conn.close().await.unwrap_err() { + AsyncConnError::Close((_, e)) => { + e == rusqlite::Error::SqliteFailure( + ffi::Error { + code: ErrorCode::DatabaseBusy, + extended_code: 5, + }, + Some("unable to close due to unfinalized statements or unfinished backups".to_string()), + ) + }, + _ => false, + }); + + Ok(()) +} + +#[tokio::test] +async fn debug_format_test() -> AsyncConnResult<()> { + let conn = AsyncConnection::open_in_memory().await?; + + assert_eq!("AsyncConnection".to_string(), format!("{conn:?}")); + + Ok(()) +} + +#[tokio::test] +async fn test_error_display() -> AsyncConnResult<()> { + let conn = AsyncConnection::open_in_memory().await?; + + let error = AsyncConnError::Close((conn, rusqlite::Error::InvalidQuery)); + assert_eq!( + "Close((AsyncConnection, \"Query is not read-only\"))", + format!("{error}") + ); + + let error = AsyncConnError::ConnectionClosed; + assert_eq!("ConnectionClosed", format!("{error}")); + + let error = AsyncConnError::Rusqlite(rusqlite::Error::InvalidQuery); + assert_eq!("Rusqlite(\"Query is not read-only\")", format!("{error}")); + + Ok(()) +} + +#[tokio::test] +async fn test_error_source() -> AsyncConnResult<()> { + let conn = AsyncConnection::open_in_memory().await?; + + let error = AsyncConnError::Close((conn, rusqlite::Error::InvalidQuery)); + assert_eq!( + std::error::Error::source(&error) + .and_then(|e| e.downcast_ref::()) + 
.unwrap(), + &rusqlite::Error::InvalidQuery, + ); + + let error = AsyncConnError::ConnectionClosed; + assert_eq!( + std::error::Error::source(&error).and_then(|e| e.downcast_ref::()), + None, + ); + + let error = AsyncConnError::Rusqlite(rusqlite::Error::InvalidQuery); + assert_eq!( + std::error::Error::source(&error) + .and_then(|e| e.downcast_ref::()) + .unwrap(), + &rusqlite::Error::InvalidQuery, + ); + + Ok(()) +} + +fn failable_func(_: &rusqlite::Connection) -> std::result::Result<(), MyError> { Err(MyError::MySpecificError) } + +#[tokio::test] +async fn test_ergonomic_errors() -> AsyncConnResult<()> { + let conn = AsyncConnection::open_in_memory().await?; + + let res = conn + .call(|conn| failable_func(conn).map_err(|e| AsyncConnError::Internal(InternalError(e.to_string())))) + .await + .unwrap_err(); + + let err = std::error::Error::source(&res) + .and_then(|e| e.downcast_ref::()) + .unwrap() + .to_string(); + + assert_eq!(err, MyError::MySpecificError.to_string()); + + Ok(()) +} + +#[derive(Debug)] +enum MyError { + MySpecificError, +} + +impl Display for MyError { + fn fmt(&self, _: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { Ok(()) } +} + +impl std::error::Error for MyError { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { None } +} diff --git a/mm2src/db_common/src/async_sql_conn.rs b/mm2src/db_common/src/async_sql_conn.rs new file mode 100644 index 0000000000..78357405a1 --- /dev/null +++ b/mm2src/db_common/src/async_sql_conn.rs @@ -0,0 +1,292 @@ +use crate::sqlite::rusqlite::Error as SqlError; +use crossbeam_channel::Sender; +use futures::channel::oneshot::{self}; +use rusqlite::OpenFlags; +use std::fmt::{self, Debug, Display}; +use std::path::Path; +use std::thread; + +/// Represents the errors specific for AsyncConnection. +#[derive(Debug)] +pub enum AsyncConnError { + /// The connection to the SQLite has been closed and cannot be queried anymore. + ConnectionClosed, + /// An error occurred while closing the SQLite connection. + /// This `Error` variant contains the [`AsyncConnection`], which can be used to retry the close operation + /// and the underlying [`SqlError`] that made it impossible to close the database. + Close((AsyncConnection, SqlError)), + /// A `Rusqlite` error occurred. + Rusqlite(SqlError), + /// An application-specific error occurred. + Internal(InternalError), +} + +impl Display for AsyncConnError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + AsyncConnError::ConnectionClosed => write!(f, "ConnectionClosed"), + AsyncConnError::Close((_, e)) => write!(f, "Close((AsyncConnection, \"{e}\"))"), + AsyncConnError::Rusqlite(e) => write!(f, "Rusqlite(\"{e}\")"), + AsyncConnError::Internal(e) => write!(f, "Internal(\"{e}\")"), + } + } +} + +impl std::error::Error for AsyncConnError { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + match self { + AsyncConnError::ConnectionClosed => None, + AsyncConnError::Close((_, e)) => Some(e), + AsyncConnError::Rusqlite(e) => Some(e), + AsyncConnError::Internal(e) => Some(e), + } + } +} + +#[derive(Debug)] +pub struct InternalError(pub String); + +impl fmt::Display for InternalError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.0) } +} + +impl std::error::Error for InternalError {} + +impl From for AsyncConnError { + fn from(value: SqlError) -> Self { AsyncConnError::Rusqlite(value) } +} + +/// The result returned on method calls in this crate. 
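Because the `Close` variant above hands the still-valid connection back together with the underlying SQLite error, a caller can retry the shutdown instead of losing the handle. A sketch of that pattern, assuming the module's `Result<T>` alias and the `close` method defined further down in this file; `close_with_one_retry` is a hypothetical helper, not part of this patch:

```rust
use db_common::async_sql_conn::{AsyncConnError, AsyncConnection, Result};

// Hypothetical helper: retry `close` once using the connection returned inside the
// error, e.g. after whatever kept the database busy has been resolved elsewhere.
async fn close_with_one_retry(mut conn: AsyncConnection) -> Result<()> {
    match conn.close().await {
        Err(AsyncConnError::Close((mut returned, _first_err))) => returned.close().await,
        other => other,
    }
}
```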
+pub type Result = std::result::Result; + +type CallFn = Box; + +enum Message { + Execute(CallFn), + Close(oneshot::Sender>), +} + +/// A handle to call functions in background thread. +#[derive(Clone)] +pub struct AsyncConnection { + sender: Sender, +} + +impl AsyncConnection { + /// Open a new connection to a SQLite database. + /// + /// `AsyncConnection::open(path)` is equivalent to + /// `AsyncConnection::open_with_flags(path, OpenFlags::SQLITE_OPEN_READ_WRITE | + /// OpenFlags::SQLITE_OPEN_CREATE)`. + /// + /// # Failure + /// + /// Will return `Err` if `path` cannot be converted to a C-compatible + /// string or if the underlying SQLite open call fails. + pub async fn open>(path: P) -> Result { + let path = path.as_ref().to_owned(); + start(move || rusqlite::Connection::open(path)).await + } + + /// Open a new AsyncConnection to an in-memory SQLite database. + /// + /// # Failure + /// + /// Will return `Err` if the underlying SQLite open call fails. + pub async fn open_in_memory() -> Result { start(rusqlite::Connection::open_in_memory).await } + + /// Open a new AsyncConnection to a SQLite database. + /// + /// [Database Connection](http://www.sqlite.org/c3ref/open.html) for a + /// description of valid flag combinations. + /// + /// # Failure + /// + /// Will return `Err` if `path` cannot be converted to a C-compatible + /// string or if the underlying SQLite open call fails. + pub async fn open_with_flags>(path: P, flags: OpenFlags) -> Result { + let path = path.as_ref().to_owned(); + start(move || rusqlite::Connection::open_with_flags(path, flags)).await + } + + /// Open a new AsyncConnection to a SQLite database using the specific flags + /// and vfs name. + /// + /// [Database Connection](http://www.sqlite.org/c3ref/open.html) for a + /// description of valid flag combinations. + /// + /// # Failure + /// + /// Will return `Err` if either `path` or `vfs` cannot be converted to a + /// C-compatible string or if the underlying SQLite open call fails. + pub async fn open_with_flags_and_vfs>(path: P, flags: OpenFlags, vfs: &str) -> Result { + let path = path.as_ref().to_owned(); + let vfs = vfs.to_owned(); + start(move || rusqlite::Connection::open_with_flags_and_vfs(path, flags, &vfs)).await + } + + /// Open a new AsyncConnection to an in-memory SQLite database. + /// + /// [Database Connection](http://www.sqlite.org/c3ref/open.html) for a + /// description of valid flag combinations. + /// + /// # Failure + /// + /// Will return `Err` if the underlying SQLite open call fails. + pub async fn open_in_memory_with_flags(flags: OpenFlags) -> Result { + start(move || rusqlite::Connection::open_in_memory_with_flags(flags)).await + } + + /// Open a new connection to an in-memory SQLite database using the + /// specific flags and vfs name. + /// + /// [Database Connection](http://www.sqlite.org/c3ref/open.html) for a + /// description of valid flag combinations. + /// + /// # Failure + /// + /// Will return `Err` if `vfs` cannot be converted to a C-compatible + /// string or if the underlying SQLite open call fails. + pub async fn open_in_memory_with_flags_and_vfs(flags: OpenFlags, vfs: &str) -> Result { + let vfs = vfs.to_owned(); + start(move || rusqlite::Connection::open_in_memory_with_flags_and_vfs(flags, &vfs)).await + } + + /// Call a function in background thread and get the result asynchronously. + /// + /// # Failure + /// + /// Will return `Err` if the database connection has been closed. 
+ pub async fn call(&self, function: F) -> Result + where + F: FnOnce(&mut rusqlite::Connection) -> Result + 'static + Send, + R: Send + 'static, + { + let (sender, receiver) = oneshot::channel::>(); + + self.sender + .send(Message::Execute(Box::new(move |conn| { + let value = function(conn); + let _ = sender.send(value); + }))) + .map_err(|_| AsyncConnError::ConnectionClosed)?; + + receiver.await.map_err(|_| AsyncConnError::ConnectionClosed)? + } + + /// Call a function in background thread and get the result asynchronously. + /// + /// This method can cause a `panic` if the underlying database connection is closed. + /// it is a more user-friendly alternative to the [`AsyncConnection::call`] method. + /// It should be safe if the connection is never explicitly closed (using the [`AsyncConnection::close`] call). + /// + /// Calling this on a closed connection will cause a `panic`. + pub async fn call_unwrap(&self, function: F) -> R + where + F: FnOnce(&mut rusqlite::Connection) -> R + Send + 'static, + R: Send + 'static, + { + let (sender, receiver) = oneshot::channel::(); + + self.sender + .send(Message::Execute(Box::new(move |conn| { + let value = function(conn); + let _ = sender.send(value); + }))) + .expect("database connection should be open"); + + receiver.await.expect("Bug occurred, please report") + } + + /// Close the database AsyncConnection. + /// + /// This is functionally equivalent to the `Drop` implementation for + /// `AsyncConnection`. It consumes the `AsyncConnection`, but on error returns it + /// to the caller for retry purposes. + /// + /// If successful, any following `close` operations performed + /// on `AsyncConnection` copies will succeed immediately. + /// + /// On the other hand, any calls to [`AsyncConnection::call`] will return a [`AsyncConnError::ConnectionClosed`], + /// and any calls to [`AsyncConnection::call_unwrap`] will cause a `panic`. + /// + /// # Failure + /// + /// Will return `Err` if the underlying SQLite close call fails. 
+ pub async fn close(&mut self) -> Result<()> { + let (sender, receiver) = oneshot::channel::>(); + + if let Err(crossbeam_channel::SendError(_)) = self.sender.send(Message::Close(sender)) { + // If the channel is closed on the other side, it means the connection closed successfully + // This is a safeguard against calling close on a `Copy` of the connection + return Ok(()); + } + + let result = receiver.await; + + if result.is_err() { + // If we get a RecvError at this point, it also means the channel closed in the meantime + // we can assume the connection is closed + return Ok(()); + } + + result.unwrap().map_err(|e| AsyncConnError::Close((self.clone(), e))) + } +} + +impl Debug for AsyncConnection { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("AsyncConnection").finish() } +} + +async fn start(open: F) -> Result +where + F: FnOnce() -> rusqlite::Result + Send + 'static, +{ + let (sender, receiver) = crossbeam_channel::unbounded::(); + let (result_sender, result_receiver) = oneshot::channel(); + + thread::spawn(move || { + let mut conn = match open() { + Ok(c) => c, + Err(e) => { + let _ = result_sender.send(Err(e)); + return; + }, + }; + + if let Err(_e) = result_sender.send(Ok(())) { + return; + } + + while let Ok(message) = receiver.recv() { + match message { + Message::Execute(f) => f(&mut conn), + Message::Close(s) => { + let result = conn.close(); + + match result { + Ok(v) => { + if s.send(Ok(v)).is_err() { + // terminate the thread + return; + } + break; + }, + Err((c, e)) => { + conn = c; + if s.send(Err(e)).is_err() { + // terminate the thread + return; + } + }, + } + }, + } + } + }); + + result_receiver + .await + .map_err(|e| AsyncConnError::Internal(InternalError(e.to_string()))) + .map(|_| AsyncConnection { sender }) +} diff --git a/mm2src/db_common/src/lib.rs b/mm2src/db_common/src/lib.rs index bd34839ae7..c1806e3b97 100644 --- a/mm2src/db_common/src/lib.rs +++ b/mm2src/db_common/src/lib.rs @@ -1,3 +1,6 @@ +#[cfg(all(test, not(target_arch = "wasm32")))] +mod async_conn_tests; +#[cfg(not(target_arch = "wasm32"))] pub mod async_sql_conn; #[cfg(not(target_arch = "wasm32"))] mod sql_condition; #[cfg(not(target_arch = "wasm32"))] mod sql_constraint; #[cfg(not(target_arch = "wasm32"))] mod sql_create; diff --git a/mm2src/mm2_core/src/mm_ctx.rs b/mm2src/mm2_core/src/mm_ctx.rs index d2537425c1..91931a4b7b 100644 --- a/mm2src/mm2_core/src/mm_ctx.rs +++ b/mm2src/mm2_core/src/mm_ctx.rs @@ -26,7 +26,9 @@ cfg_wasm32! { } cfg_native! { + use db_common::async_sql_conn::AsyncConnection; use db_common::sqlite::rusqlite::Connection; + use futures::lock::Mutex as AsyncMutex; use futures_rustls::webpki::DNSNameRef; use mm2_metrics::prometheus; use mm2_metrics::MmMetricsError; @@ -110,8 +112,10 @@ pub struct MmCtx { /// The RPC sender forwarding requests to writing part of underlying stream. #[cfg(target_arch = "wasm32")] pub wasm_rpc: Constructible, + /// Deprecated, please use `async_sqlite_connection` for new implementations. #[cfg(not(target_arch = "wasm32"))] pub sqlite_connection: Constructible>>, + /// Deprecated, please create `shared_async_sqlite_conn` for new implementations and call db `KOMODEFI-shared.db`. #[cfg(not(target_arch = "wasm32"))] pub shared_sqlite_conn: Constructible>>, pub mm_version: String, @@ -128,6 +132,9 @@ pub struct MmCtx { pub db_namespace: DbNamespaceId, /// The context belonging to the `nft` mod: `NftCtx`. pub nft_ctx: Mutex>>, + /// asynchronous handle for rusqlite connection. 
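The `async_sqlite_connection` field declared just below stores this handle on `MmCtx`; every closure passed to `AsyncConnection::call` runs on the dedicated background thread that owns the `rusqlite::Connection`, so awaiting the result never blocks the async executor. A minimal usage sketch of the API introduced in `async_sql_conn.rs` (the `swaps` table and its statements are illustrative placeholders, not a schema from this patch):

```rust
use db_common::async_sql_conn::{AsyncConnection, Result};

// Sketch of typical usage; the `swaps` table is a placeholder, not part of this PR.
async fn demo() -> Result<()> {
    let mut conn = AsyncConnection::open_in_memory().await?;

    // Each closure executes on the background thread owning the rusqlite::Connection.
    conn.call(|conn| {
        conn.execute("CREATE TABLE swaps (uuid TEXT PRIMARY KEY);", [])
            .map_err(|e| e.into())
    })
    .await?;

    let count: i64 = conn
        .call(|conn| {
            conn.query_row("SELECT COUNT(*) FROM swaps;", [], |row| row.get(0))
                .map_err(|e| e.into())
        })
        .await?;
    assert_eq!(count, 0);

    conn.close().await
}
```

For closures that cannot fail, `call_unwrap` is the panicking shorthand documented above.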
+ #[cfg(not(target_arch = "wasm32"))] + pub async_sqlite_connection: Constructible>>, } impl MmCtx { @@ -172,6 +179,8 @@ impl MmCtx { #[cfg(target_arch = "wasm32")] db_namespace: DbNamespaceId::Main, nft_ctx: Mutex::new(None), + #[cfg(not(target_arch = "wasm32"))] + async_sqlite_connection: Constructible::default(), } } @@ -309,7 +318,7 @@ impl MmCtx { #[cfg(not(target_arch = "wasm32"))] pub fn init_sqlite_connection(&self) -> Result<(), String> { let sqlite_file_path = self.dbdir().join("MM2.db"); - log::debug!("Trying to open SQLite database file {}", sqlite_file_path.display()); + log_sqlite_file_open_attempt(&sqlite_file_path); let connection = try_s!(Connection::open(sqlite_file_path)); try_s!(self.sqlite_connection.pin(Arc::new(Mutex::new(connection)))); Ok(()) @@ -318,12 +327,21 @@ impl MmCtx { #[cfg(not(target_arch = "wasm32"))] pub fn init_shared_sqlite_conn(&self) -> Result<(), String> { let sqlite_file_path = self.shared_dbdir().join("MM2-shared.db"); - log::debug!("Trying to open SQLite database file {}", sqlite_file_path.display()); + log_sqlite_file_open_attempt(&sqlite_file_path); let connection = try_s!(Connection::open(sqlite_file_path)); try_s!(self.shared_sqlite_conn.pin(Arc::new(Mutex::new(connection)))); Ok(()) } + #[cfg(not(target_arch = "wasm32"))] + pub async fn init_async_sqlite_connection(&self) -> Result<(), String> { + let sqlite_file_path = self.dbdir().join("KOMODEFI.db"); + log_sqlite_file_open_attempt(&sqlite_file_path); + let async_conn = try_s!(AsyncConnection::open(sqlite_file_path).await); + try_s!(self.async_sqlite_connection.pin(Arc::new(AsyncMutex::new(async_conn)))); + Ok(()) + } + #[cfg(not(target_arch = "wasm32"))] pub fn sqlite_conn_opt(&self) -> Option> { self.sqlite_connection.as_option().map(|conn| conn.lock().unwrap()) @@ -705,3 +723,15 @@ impl MmCtxBuilder { MmArc::new(ctx) } } + +#[cfg(not(target_arch = "wasm32"))] +fn log_sqlite_file_open_attempt(sqlite_file_path: &Path) { + match sqlite_file_path.canonicalize() { + Ok(absolute_path) => { + log::debug!("Trying to open SQLite database file {}", absolute_path.display()); + }, + Err(_) => { + log::debug!("Trying to open SQLite database file {}", sqlite_file_path.display()); + }, + } +} diff --git a/mm2src/mm2_main/src/lp_native_dex.rs b/mm2src/mm2_main/src/lp_native_dex.rs index a8d4019677..a107961646 100644 --- a/mm2src/mm2_main/src/lp_native_dex.rs +++ b/mm2src/mm2_main/src/lp_native_dex.rs @@ -30,8 +30,8 @@ use mm2_err_handle::common_errors::InternalError; use mm2_err_handle::prelude::*; use mm2_event_stream::behaviour::{EventBehaviour, EventInitStatus}; use mm2_libp2p::behaviours::atomicdex::DEPRECATED_NETID_LIST; -use mm2_libp2p::{spawn_gossipsub, AdexBehaviourError, NodeType, RelayAddress, RelayAddressError, SwarmRuntime, - WssCerts}; +use mm2_libp2p::{spawn_gossipsub, AdexBehaviourError, NodeType, RelayAddress, RelayAddressError, SeedNodeInfo, + SwarmRuntime, WssCerts}; use mm2_metrics::mm_gauge; use mm2_net::network_event::NetworkEvent; use mm2_net::p2p::P2PContext; @@ -69,10 +69,47 @@ cfg_wasm32! 
{ pub mod init_metamask; } -const DEFAULT_NETID_SEEDNODES: [&str; 3] = [ - "streamseed1.komodo.earth", - "streamseed2.komodo.earth", - "streamseed3.komodo.earth", +const DEFAULT_NETID_SEEDNODES: &[SeedNodeInfo] = &[ + SeedNodeInfo::new( + "12D3KooWHKkHiNhZtKceQehHhPqwU5W1jXpoVBgS1qst899GjvTm", + "168.119.236.251", + "viserion.dragon-seed.com", + ), + SeedNodeInfo::new( + "12D3KooWAToxtunEBWCoAHjefSv74Nsmxranw8juy3eKEdrQyGRF", + "168.119.236.240", + "rhaegal.dragon-seed.com", + ), + SeedNodeInfo::new( + "12D3KooWSmEi8ypaVzFA1AGde2RjxNW5Pvxw3qa2fVe48PjNs63R", + "168.119.236.239", + "drogon.dragon-seed.com", + ), + SeedNodeInfo::new( + "12D3KooWMrjLmrv8hNgAoVf1RfumfjyPStzd4nv5XL47zN4ZKisb", + "168.119.237.8", + "falkor.dragon-seed.com", + ), + SeedNodeInfo::new( + "12D3KooWEWzbYcosK2JK9XpFXzumfgsWJW1F7BZS15yLTrhfjX2Z", + "65.21.51.47", + "smaug.dragon-seed.com", + ), + SeedNodeInfo::new( + "12D3KooWJWBnkVsVNjiqUEPjLyHpiSmQVAJ5t6qt1Txv5ctJi9Xd", + "135.181.34.220", + "balerion.dragon-seed.com", + ), + SeedNodeInfo::new( + "12D3KooWPR2RoPi19vQtLugjCdvVmCcGLP2iXAzbDfP3tp81ZL4d", + "168.119.237.13", + "kalessin.dragon-seed.com", + ), + SeedNodeInfo::new( + "12D3KooWEaZpH61H4yuQkaNG5AsyGdpBhKRppaLdAY52a774ab5u", + "46.4.78.11", + "fr1.cipig.net", + ), ]; pub type P2PResult = Result>; @@ -270,7 +307,7 @@ fn default_seednodes(netid: u16) -> Vec { if netid == 8762 { DEFAULT_NETID_SEEDNODES .iter() - .map(|seed| RelayAddress::Dns(seed.to_string())) + .map(|SeedNodeInfo { domain, .. }| RelayAddress::Dns(domain.to_string())) .collect() } else { Vec::new() @@ -283,7 +320,7 @@ fn default_seednodes(netid: u16) -> Vec { if netid == 8762 { DEFAULT_NETID_SEEDNODES .iter() - .filter_map(|seed| addr_to_ipv4_string(seed).ok()) + .filter_map(|SeedNodeInfo { domain, .. }| addr_to_ipv4_string(domain).ok()) .map(RelayAddress::IPv4) .collect() } else { @@ -424,6 +461,9 @@ pub async fn lp_init_continue(ctx: MmArc) -> MmInitResult<()> { .map_to_mm(MmInitError::ErrorSqliteInitializing)?; ctx.init_shared_sqlite_conn() .map_to_mm(MmInitError::ErrorSqliteInitializing)?; + ctx.init_async_sqlite_connection() + .await + .map_to_mm(MmInitError::ErrorSqliteInitializing)?; init_and_migrate_db(&ctx).await?; migrate_db(&ctx)?; } diff --git a/mm2src/mm2_main/src/rpc/lp_commands/lp_commands_legacy.rs b/mm2src/mm2_main/src/rpc/lp_commands/lp_commands_legacy.rs index 1ac3bb619d..16c65a2c01 100644 --- a/mm2src/mm2_main/src/rpc/lp_commands/lp_commands_legacy.rs +++ b/mm2src/mm2_main/src/rpc/lp_commands/lp_commands_legacy.rs @@ -246,12 +246,26 @@ pub async fn my_balance(ctx: MmArc, req: Json) -> Result>, Stri Ok(try_s!(Response::builder().body(res))) } +#[cfg(not(target_arch = "wasm32"))] +async fn close_async_connection(ctx: &MmArc) { + if let Some(async_conn) = ctx.async_sqlite_connection.as_option() { + let mut conn = async_conn.lock().await; + if let Err(e) = conn.close().await { + error!("Error stopping AsyncConnection: {}", e); + } + } +} + pub async fn stop(ctx: MmArc) -> Result>, String> { dispatch_lp_event(ctx.clone(), StopCtxEvent.into()).await; // Should delay the shutdown a bit in order not to trip the "stop" RPC call in unit tests. // Stopping immediately leads to the "stop" RPC call failing with the "errno 10054" sometimes. 
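`close_async_connection` above also illustrates the intended access pattern for the new handle: read it from the `Constructible`, lock the async mutex, then issue `call`s. A hypothetical read helper built the same way (the `user_version` query and the error mapping are illustrative, not code from this patch):

```rust
use mm2_core::mm_ctx::MmArc;

// Hypothetical helper: borrows ctx.async_sqlite_connection the same way
// close_async_connection does, then runs a simple query through `call`.
#[cfg(not(target_arch = "wasm32"))]
async fn user_version(ctx: &MmArc) -> Result<i64, String> {
    let conn = ctx
        .async_sqlite_connection
        .as_option()
        .ok_or_else(|| "async_sqlite_connection is not initialized".to_string())?;
    let conn = conn.lock().await;
    conn.call(|conn| {
        conn.query_row("PRAGMA user_version;", [], |row| row.get(0))
            .map_err(|e| e.into())
    })
    .await
    .map_err(|e| e.to_string())
}
```

Per the new doc comments on `MmCtx`, new native code should prefer this handle over the deprecated synchronous `sqlite_connection`.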
let fut = async move { Timer::sleep(0.05).await; + + #[cfg(not(target_arch = "wasm32"))] + close_async_connection(&ctx).await; + if let Err(e) = ctx.stop() { error!("Error stopping MmCtx: {}", e); } diff --git a/mm2src/mm2_p2p/src/behaviours/atomicdex.rs b/mm2src/mm2_p2p/src/behaviours/atomicdex.rs index 63e98e21e7..91f423c4e2 100644 --- a/mm2src/mm2_p2p/src/behaviours/atomicdex.rs +++ b/mm2src/mm2_p2p/src/behaviours/atomicdex.rs @@ -651,7 +651,7 @@ fn start_gossipsub( if !network_info.in_memory() { // Please note WASM nodes don't support `PeersExchange` currently, // so `get_all_network_seednodes` returns an empty list. - for (peer_id, addr) in get_all_network_seednodes(netid) { + for (peer_id, addr, _domain) in get_all_network_seednodes(netid) { let multiaddr = addr.try_to_multiaddr(network_info)?; peers_exchange.add_peer_addresses_to_known_peers(&peer_id, iter::once(multiaddr).collect()); if peer_id != local_peer_id { diff --git a/mm2src/mm2_p2p/src/behaviours/mod.rs b/mm2src/mm2_p2p/src/behaviours/mod.rs index dd78d7ed65..0af8fd71c0 100644 --- a/mm2src/mm2_p2p/src/behaviours/mod.rs +++ b/mm2src/mm2_p2p/src/behaviours/mod.rs @@ -420,18 +420,18 @@ mod tests { assert!(!behaviour.validate_get_known_peers_response(&response)); let address: Multiaddr = - "/ip4/168.119.236.241/tcp/3000/p2p/12D3KooWEsuiKcQaBaKEzuMtT6uFjs89P1E8MK3wGRZbeuCbCw6P" + "/ip4/168.119.236.251/tcp/3000/p2p/12D3KooWHKkHiNhZtKceQehHhPqwU5W1jXpoVBgS1qst899GjvTm" .parse() .unwrap(); let response = HashMap::from_iter(vec![(PeerIdSerde(PeerId::random()), HashSet::from_iter(vec![address]))]); assert!(behaviour.validate_get_known_peers_response(&response)); let address1: Multiaddr = - "/ip4/168.119.236.241/tcp/3000/p2p/12D3KooWEsuiKcQaBaKEzuMtT6uFjs89P1E8MK3wGRZbeuCbCw6P" + "/ip4/168.119.236.251/tcp/3000/p2p/12D3KooWHKkHiNhZtKceQehHhPqwU5W1jXpoVBgS1qst899GjvTm" .parse() .unwrap(); - let address2: Multiaddr = "/ip4/168.119.236.241/tcp/3000".parse().unwrap(); + let address2: Multiaddr = "/ip4/168.119.236.251/tcp/3000".parse().unwrap(); let response = HashMap::from_iter(vec![( PeerIdSerde(PeerId::random()), HashSet::from_iter(vec![address1, address2]), @@ -448,7 +448,7 @@ mod tests { let result = behaviour.get_random_known_peers(1); assert!(result.is_empty()); - let address: Multiaddr = "/ip4/168.119.236.241/tcp/3000".parse().unwrap(); + let address: Multiaddr = "/ip4/168.119.236.251/tcp/3000".parse().unwrap(); behaviour.request_response.add_address(&peer_id, address.clone()); let result = behaviour.get_random_known_peers(1); diff --git a/mm2src/mm2_p2p/src/lib.rs b/mm2src/mm2_p2p/src/lib.rs index fd13446f6e..7aca2eebd1 100644 --- a/mm2src/mm2_p2p/src/lib.rs +++ b/mm2src/mm2_p2p/src/lib.rs @@ -32,6 +32,7 @@ pub use libp2p::identity::{secp256k1::PublicKey as Libp2pSecpPublic, PublicKey a pub use libp2p::{Multiaddr, PeerId}; // relay-address related re-exports +pub use network::SeedNodeInfo; pub use relay_address::RelayAddress; pub use relay_address::RelayAddressError; diff --git a/mm2src/mm2_p2p/src/network.rs b/mm2src/mm2_p2p/src/network.rs index 88794d6329..70978a3301 100644 --- a/mm2src/mm2_p2p/src/network.rs +++ b/mm2src/mm2_p2p/src/network.rs @@ -3,54 +3,82 @@ use libp2p::PeerId; pub const DEFAULT_NETID: u16 = 8762; +pub struct SeedNodeInfo { + pub id: &'static str, + pub ip: &'static str, + pub domain: &'static str, +} + +impl SeedNodeInfo { + pub const fn new(id: &'static str, ip: &'static str, domain: &'static str) -> Self { Self { id, ip, domain } } +} + #[cfg_attr(target_arch = "wasm32", allow(dead_code))] -const 
ALL_DEFAULT_NETID_SEEDNODES: &[(&str, &str)] = &[ - ( +const ALL_DEFAULT_NETID_SEEDNODES: &[SeedNodeInfo] = &[ + SeedNodeInfo::new( "12D3KooWHKkHiNhZtKceQehHhPqwU5W1jXpoVBgS1qst899GjvTm", "168.119.236.251", + "viserion.dragon-seed.com", ), - ( + SeedNodeInfo::new( "12D3KooWAToxtunEBWCoAHjefSv74Nsmxranw8juy3eKEdrQyGRF", "168.119.236.240", + "rhaegal.dragon-seed.com", ), - ( + SeedNodeInfo::new( "12D3KooWSmEi8ypaVzFA1AGde2RjxNW5Pvxw3qa2fVe48PjNs63R", "168.119.236.239", + "drogon.dragon-seed.com", ), - ("12D3KooWJWBnkVsVNjiqUEPjLyHpiSmQVAJ5t6qt1Txv5ctJi9Xd", "135.181.34.220"), - ( - "12D3KooWEsuiKcQaBaKEzuMtT6uFjs89P1E8MK3wGRZbeuCbCw6P", - "168.119.236.241", + SeedNodeInfo::new( + "12D3KooWMrjLmrv8hNgAoVf1RfumfjyPStzd4nv5XL47zN4ZKisb", + "168.119.237.8", + "falkor.dragon-seed.com", ), - ( - "12D3KooWHBeCnJdzNk51G4mLnao9cDsjuqiMTEo5wMFXrd25bd1F", - "168.119.236.243", + SeedNodeInfo::new( + "12D3KooWEWzbYcosK2JK9XpFXzumfgsWJW1F7BZS15yLTrhfjX2Z", + "65.21.51.47", + "smaug.dragon-seed.com", ), - // TODO: Uncomment this once re-enabled on the main network. - // ( - // "12D3KooWKxavLCJVrQ5Gk1kd9m6cohctGQBmiKPS9XQFoXEoyGmS", - // "168.119.236.249", - // ), - ( - "12D3KooW9soGyPfX6kcyh3uVXNHq1y2dPmQNt2veKgdLXkBiCVKq", - "168.119.236.246", + SeedNodeInfo::new( + "12D3KooWJWBnkVsVNjiqUEPjLyHpiSmQVAJ5t6qt1Txv5ctJi9Xd", + "135.181.34.220", + "balerion.dragon-seed.com", ), - ( - "12D3KooWL6yrrNACb7t7RPyTEPxKmq8jtrcbkcNd6H5G2hK7bXaL", - "168.119.236.233", + SeedNodeInfo::new( + "12D3KooWPR2RoPi19vQtLugjCdvVmCcGLP2iXAzbDfP3tp81ZL4d", + "168.119.237.13", + "kalessin.dragon-seed.com", + ), + SeedNodeInfo::new( + "12D3KooWJDoV9vJdy6PnzwVETZ3fWGMhV41VhSbocR1h2geFqq9Y", + "65.108.90.210", + "icefyre.dragon-seed.com", + ), + SeedNodeInfo::new( + "12D3KooWEaZpH61H4yuQkaNG5AsyGdpBhKRppaLdAY52a774ab5u", + "46.4.78.11", + "fr1.cipig.net", ), - ("12D3KooWMrjLmrv8hNgAoVf1RfumfjyPStzd4nv5XL47zN4ZKisb", "168.119.237.8"), - ("12D3KooWPR2RoPi19vQtLugjCdvVmCcGLP2iXAzbDfP3tp81ZL4d", "168.119.237.13"), - ("12D3KooWJDoV9vJdy6PnzwVETZ3fWGMhV41VhSbocR1h2geFqq9Y", "65.108.90.210"), - ("12D3KooWEaZpH61H4yuQkaNG5AsyGdpBhKRppaLdAY52a774ab5u", "46.4.78.11"), - ("12D3KooWAd5gPXwX7eDvKWwkr2FZGfoJceKDCA53SHmTFFVkrN7Q", "46.4.87.18"), ]; +// TODO: Uncomment these once re-enabled on the main network. +// Operated by Dragonhound, still on NetID 7777. Domains will update after netid migration. 
+// SeedNodeInfo::new("12D3KooWEsuiKcQaBaKEzuMtT6uFjs89P1E8MK3wGRZbeuCbCw6P", "168.119.236.241", "seed1.komodo.earth"), // tintaglia.dragon-seed.com +// SeedNodeInfo::new("12D3KooWHBeCnJdzNk51G4mLnao9cDsjuqiMTEo5wMFXrd25bd1F", "168.119.236.243", "seed2.komodo.earth"), // mercor.dragon-seed.com +// SeedNodeInfo::new("12D3KooWKxavLCJVrQ5Gk1kd9m6cohctGQBmiKPS9XQFoXEoyGmS", "168.119.236.249", "seed3.komodo.earth"), // karrigvestrit.dragon-seed.com +// SeedNodeInfo::new("12D3KooWGrUpCAbkxhPRioNs64sbUmPmpEcou6hYfrqQvxfWDEuf", "135.181.35.77", "seed4.komodo.earth"), // sintara.dragon-seed.com +// SeedNodeInfo::new("12D3KooWKu8pMTgteWacwFjN7zRWWHb3bctyTvHU3xx5x4x6qDYY", "65.21.56.210", "seed6.komodo.earth"), // heeby.dragon-seed.com +// SeedNodeInfo::new("12D3KooW9soGyPfX6kcyh3uVXNHq1y2dPmQNt2veKgdLXkBiCVKq", "168.119.236.246", "seed7.komodo.earth"), // kalo.dragon-seed.com +// SeedNodeInfo::new("12D3KooWL6yrrNACb7t7RPyTEPxKmq8jtrcbkcNd6H5G2hK7bXaL", "168.119.236.233", "seed8.komodo.earth"), // relpda.dragon-seed.com +// Operated by Cipi, still on NetID 7777 +// SeedNodeInfo::new("12D3KooWAd5gPXwX7eDvKWwkr2FZGfoJceKDCA53SHmTFFVkrN7Q", "46.4.87.18", "fr2.cipig.net"), + #[cfg(target_arch = "wasm32")] -pub fn get_all_network_seednodes(_netid: u16) -> Vec<(PeerId, RelayAddress)> { Vec::new() } +pub fn get_all_network_seednodes(_netid: u16) -> Vec<(PeerId, RelayAddress, String)> { Vec::new() } #[cfg(not(target_arch = "wasm32"))] -pub fn get_all_network_seednodes(netid: u16) -> Vec<(PeerId, RelayAddress)> { +pub fn get_all_network_seednodes(netid: u16) -> Vec<(PeerId, RelayAddress, String)> { use std::str::FromStr; if netid != DEFAULT_NETID { @@ -58,10 +86,11 @@ pub fn get_all_network_seednodes(netid: u16) -> Vec<(PeerId, RelayAddress)> { } ALL_DEFAULT_NETID_SEEDNODES .iter() - .map(|(peer_id, ipv4)| { - let peer_id = PeerId::from_str(peer_id).expect("valid peer id"); - let address = RelayAddress::IPv4(ipv4.to_string()); - (peer_id, address) + .map(|SeedNodeInfo { id, ip, domain }| { + let peer_id = PeerId::from_str(id).expect("valid peer id"); + let address = RelayAddress::IPv4(ip.to_string()); + let domain = domain.to_string(); + (peer_id, address, domain) }) .collect() } diff --git a/mm2src/mm2_test_helpers/src/for_tests.rs b/mm2src/mm2_test_helpers/src/for_tests.rs index 0f33a332e2..2d917886b1 100644 --- a/mm2src/mm2_test_helpers/src/for_tests.rs +++ b/mm2src/mm2_test_helpers/src/for_tests.rs @@ -907,6 +907,20 @@ pub fn mm_ctx_with_custom_db() -> MmArc { ctx } +#[cfg(not(target_arch = "wasm32"))] +pub async fn mm_ctx_with_custom_async_db() -> MmArc { + use db_common::async_sql_conn::AsyncConnection; + use futures::lock::Mutex as AsyncMutex; + use std::sync::Arc; + + let ctx = MmCtxBuilder::new().into_mm_arc(); + + let connection = AsyncConnection::open_in_memory().await.unwrap(); + let _ = ctx.async_sqlite_connection.pin(Arc::new(AsyncMutex::new(connection))); + + ctx +} + /// Automatically kill a wrapped process. pub struct RaiiKill { pub handle: Child,