diff --git a/Cargo.lock b/Cargo.lock index aa42cf9..cd5272b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3387,7 +3387,6 @@ dependencies = [ "pallet-transaction-payment-rpc", "pallet-utxo-rpc", "pallet-utxo-rpc-runtime-api", - "pallet-utxo-tokens", "sc-authority-discovery", "sc-basic-authorship", "sc-cli", @@ -3664,7 +3663,6 @@ dependencies = [ "pallet-transaction-payment-rpc-runtime-api", "pallet-utxo", "pallet-utxo-rpc-runtime-api", - "pallet-utxo-tokens", "parity-scale-codec", "sp-api", "sp-block-builder", @@ -4138,7 +4136,6 @@ dependencies = [ "log", "pallet-aura", "pallet-timestamp", - "pallet-utxo-tokens", "parity-scale-codec", "pp-api", "serde", @@ -4160,11 +4157,11 @@ dependencies = [ "jsonrpc-core-client", "jsonrpc-derive", "pallet-utxo-rpc-runtime-api", - "pallet-utxo-tokens", "parity-scale-codec", "serde", "sp-api", "sp-blockchain", + "sp-core", "sp-runtime", ] @@ -4173,25 +4170,14 @@ name = "pallet-utxo-rpc-runtime-api" version = "0.1.0" dependencies = [ "frame-support", - "pallet-utxo-tokens", "parity-scale-codec", "serde", "serde_json", "sp-api", + "sp-core", "sp-runtime", ] -[[package]] -name = "pallet-utxo-tokens" -version = "0.1.0" -dependencies = [ - "frame-support", - "hex-literal 0.2.1", - "log", - "parity-scale-codec", - "serde", -] - [[package]] name = "parity-db" version = "0.3.1" diff --git a/Lets talk about verification.md b/Lets talk about verification.md new file mode 100644 index 0000000..5c17641 --- /dev/null +++ b/Lets talk about verification.md @@ -0,0 +1,101 @@ +**This description is still approximate and not accurate, we need to define an approach and agree on checks.** + +## Draft TransactionVerifier + +I suggest adding a structure that will contain: + +```rust +pub struct TransactionVerifier<'a, T: frame_system::Config> { + // Pointer to a tx that we have to check + tx: &'a TransactionFor, + // All inputs, to avoid repeated search in the loop + all_inputs_map: BTreeMap>, + // All outputs, to avoid repeated search in the loop + 
all_outputs_map: BTreeMap>, + // Using TokenId, you can get the entire amount of this token in all inputs + total_value_of_input_tokens: BTreeMap>, + // Using TokenId, you can get the entire amount of this token in all outputs + total_value_of_output_tokens: BTreeMap>, + // A set of transaction verification functions, this approach will allow you to remove unnecessary cycles, which will speed up the function + set_of_checks: Vec<&'a mut FnMut(...)>, + // ... + // I may add a priority field to the set of checks. I'm still thinking here. +} +``` + +This struct we will use this way in the pallet utxo: + +```rust + pub fn validate_transaction( + tx: &TransactionFor, + ) -> Result { + TransactionVerifier::<'_, T>::new(tx) + .checking_inputs() + .checking_outputs() + .checking_utxos_exists() + .checking_signatures() + .checking_tokens_transferring() + .checking_tokens_issued() + .checking_nft_mint() + .checking_assets_burn() + .calculating_reward() + .collect_result()? + } + +``` + +When creating a new instance of this structure, we must initialize the fields. + +Each subsequent check adds a new instance of the function to `set_of_checks`, which will be called in` collect_result`. + +At the moment we can split the verification function for these parts: + +* `checking_inputs` + * Checks that inputs exist in a transaction + * Checking that the number of inputs is not more than the maximum allowed number, now in the code I see that it is `u32::MAX` + * Ensure each input is used only a single time + +* `checking_outputs` + * Checks that outputs exist in a transaction + * Checking that the number of outputs is not more than the maximum allowed number, now in the code I see that it is `u32::MAX` + * Ensure each output is unique + * Output value must be nonzero + * An output can't exist already in the UtxoStore + +* `checking_utxos_exists` + * Resolve the transaction inputs by looking up UTXOs being spent by them. 
+
+* `checking_signatures`
+  * if all spent UTXOs are available, check the math and signatures
+
+* `checking_tokens_transferring`
+  * We have to check that the total sum of input tokens is less or equal to output tokens. (Or just equal?)
+  * All inputs with such data code must be correctly mapped to outputs
+  * If NFT is sent we must not burn or lose data
+
+* `checking_tokens_issued`
+  * We must check the correctness of the issued tokens
+  * We have to check the length of `metadata_uri` and `ticker`
+  * We must check the correctness of `value` and `decimal`
+
+* `checking_nft_mint`
+  * We have to check the uniqueness of digital data, only one NFT can refer to one object
+  * We have to check the length of `metadata_uri`
+
+* `checking_assets_burn`
+  * Is the burn amount more than is possible?
+  * Does the token_id exist for the burn?
+
+* `calculating_reward`
+  * Just collecting MLT for a transaction reward.
+
+* `collect_result`
+  * Call all of these functions in one loop.
+
+## Questions
+* Do we need other checks?
+* What do we need for checking Bitcoin Script?
+* What do we need for checking contracts?
+* If we can check an output address here, and add a possibility to find in the UtxoStore by any address format, then we can remove `fn pick_utxo` and `fn send_to_address`. Isn't that so?
+
+I'm glad to see any suggestions or criticism.
\ No newline at end of file diff --git a/node/Cargo.toml b/node/Cargo.toml index 4a3ca5d..6f3a026 100644 --- a/node/Cargo.toml +++ b/node/Cargo.toml @@ -24,7 +24,6 @@ structopt = '0.3.8' node-template-runtime = {version = '3.0.0', path = '../runtime'} pallet-utxo-rpc = { path = "../pallets/utxo/rpc" } pallet-utxo-rpc-runtime-api = { path = "../pallets/utxo/rpc/runtime-api" } -pallet-utxo-tokens = { path = "../pallets/utxo/tokens" } log = "0.4.8" ureq = "2.2.0" diff --git a/node/src/chain_spec.rs b/node/src/chain_spec.rs index cec7851..c35de75 100644 --- a/node/src/chain_spec.rs +++ b/node/src/chain_spec.rs @@ -164,7 +164,7 @@ fn testnet_genesis( .map(|x| { // may need to create a const variable to represent 1_000 and 100_000_000 pallet_utxo::TransactionOutput::new_pubkey( - 1_000 * 100_000_000 * 400_000_000 as pallet_utxo::Value, + 1_000 * 100_000_000 * 400_000_000 as pallet_utxo::tokens::Value, H256::from_slice(x.as_slice()), ) }) diff --git a/pallets/utxo/Cargo.toml b/pallets/utxo/Cargo.toml index d0d6fb5..490a2c0 100644 --- a/pallets/utxo/Cargo.toml +++ b/pallets/utxo/Cargo.toml @@ -21,7 +21,6 @@ std = [ hex-literal = "0.2.1" log = "0.4.8" serde = '1.0.119' -pallet-utxo-tokens = { path = "./tokens" } variant_count = '1.1' [dependencies.bech32] @@ -36,7 +35,7 @@ version = '0.1.0' [dependencies.codec] default-features = false -features = ['derive'] +features = ["derive", "chain-error"] package = 'parity-scale-codec' version = '2.0.0' diff --git a/pallets/utxo/rpc/Cargo.toml b/pallets/utxo/rpc/Cargo.toml index d609b1e..21bc854 100644 --- a/pallets/utxo/rpc/Cargo.toml +++ b/pallets/utxo/rpc/Cargo.toml @@ -6,7 +6,6 @@ edition = "2018" [dependencies] pallet-utxo-rpc-runtime-api = { path = "./runtime-api" } -pallet-utxo-tokens = {path = "../tokens"} jsonrpc-core = "18.0.0" jsonrpc-core-client = "18.0.0" jsonrpc-derive = "18.0.0" @@ -44,3 +43,9 @@ default-features = false git = 'https://github.com/paritytech/substrate.git' version = '4.0.0-dev' branch = "master" + 
+[dependencies.sp-core] +default-features = false +git = 'https://github.com/paritytech/substrate.git' +version = '4.0.0-dev' +branch = "master" \ No newline at end of file diff --git a/pallets/utxo/rpc/runtime-api/Cargo.toml b/pallets/utxo/rpc/runtime-api/Cargo.toml index b7c739e..02587b7 100644 --- a/pallets/utxo/rpc/runtime-api/Cargo.toml +++ b/pallets/utxo/rpc/runtime-api/Cargo.toml @@ -4,9 +4,6 @@ version = "0.1.0" authors = ["RBB Lab"] edition = "2018" -[dependencies] -pallet-utxo-tokens = { path = "../../tokens" } - [dependencies.serde] version = "1.0.104" optional = true @@ -36,6 +33,12 @@ git = 'https://github.com/paritytech/substrate.git' version = '4.0.0-dev' branch = "master" +[dependencies.sp-core] +default-features = false +git = 'https://github.com/paritytech/substrate.git' +version = '4.0.0-dev' +branch = "master" + [dev-dependencies] serde_json = "1.0.48" diff --git a/pallets/utxo/rpc/runtime-api/src/lib.rs b/pallets/utxo/rpc/runtime-api/src/lib.rs index ebaa307..40b77ab 100644 --- a/pallets/utxo/rpc/runtime-api/src/lib.rs +++ b/pallets/utxo/rpc/runtime-api/src/lib.rs @@ -16,15 +16,8 @@ // Author(s): A. Altonen, Anton Sinitsyn #![cfg_attr(not(feature = "std"), no_std)] -use frame_support::inherent::Vec; - sp_api::decl_runtime_apis! { pub trait UtxoApi { fn send() -> u32; - // What means Vec<(u64, Vec)> ? - // At the moment we have some problems with use serde in RPC, we can serialize and deserialize - // only simple types. This approach allow us to return Vec<(TokenId, TokenName)> instead of - // pallet_utxo_tokens::TokenListData - fn tokens_list() -> Vec<(u64, Vec)>; } } diff --git a/pallets/utxo/rpc/src/lib.rs b/pallets/utxo/rpc/src/lib.rs index 6945bad..5bb25cb 100644 --- a/pallets/utxo/rpc/src/lib.rs +++ b/pallets/utxo/rpc/src/lib.rs @@ -27,10 +27,6 @@ use std::sync::Arc; pub trait UtxoApi { #[rpc(name = "utxo_send")] fn send(&self, at: Option) -> Result; - - // What means Vec<(u64, Vec)> ? 
Have a look at utxo/rpc/runtime-api/src/lib.rs - #[rpc(name = "tokens_list")] - fn tokens_list(&self, at: Option) -> Result)>>; } /// A struct that implements the [`UtxoApi`]. @@ -79,18 +75,4 @@ where data: Some(format!("{:?}", e).into()), }) } - - fn tokens_list(&self, at: Option<::Hash>) -> Result)>> { - let api = self.client.runtime_api(); - let at = BlockId::hash(at.unwrap_or_else(|| - // If the block hash is not supplied assume the best block. - self.client.info().best_hash)); - - let runtime_api_result = api.tokens_list(&at); - runtime_api_result.map_err(|e| RpcError { - code: ErrorCode::ServerError(Error::StorageError as i64), - message: "Something wrong".into(), - data: Some(format!("{:?}", e).into()), - }) - } } diff --git a/pallets/utxo/src/base58_nostd.rs b/pallets/utxo/src/base58_nostd.rs new file mode 100644 index 0000000..fd2a58a --- /dev/null +++ b/pallets/utxo/src/base58_nostd.rs @@ -0,0 +1,229 @@ +//! Base58-to-text encoding +//! +//! Based on https://github.com/trezor/trezor-crypto/blob/master/base58.c +//! commit hash: c6e7d37 +//! works only up to 128 bytes + +const ALPHABET: &'static [u8] = b"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"; + +const B58_DIGITS_MAP: &'static [i8] = &[ + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, -1, -1, -1, -1, -1, -1, -1, 9, 10, 11, 12, 13, 14, 15, 16, -1, + 17, 18, 19, 20, 21, -1, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, -1, -1, -1, -1, -1, -1, 33, + 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, -1, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, + 57, -1, -1, -1, -1, -1, +]; + +/// Errors that can occur when decoding base58 encoded string. +#[derive(Debug, PartialEq)] +pub enum FromBase58Error { + /// The input contained a character which is not a part of the base58 format. 
+ InvalidBase58Character(char, usize), + /// The input had invalid length. + InvalidBase58Length, +} + +/// A trait for converting a value to base58 encoded string. +pub trait ToBase58 { + /// Converts a value of `self` to a base58 value, returning the owned string. + fn to_base58(&self) -> Vec; +} + +/// A trait for converting base58 encoded values. +pub trait FromBase58 { + /// Convert a value of `self`, interpreted as base58 encoded data, into an owned vector of bytes, returning a vector. + fn from_base58(&self) -> Result, FromBase58Error>; +} + +impl ToBase58 for [u8] { + fn to_base58(&self) -> Vec { + let zcount = self.iter().take_while(|x| **x == 0).count(); + let size = (self.len() - zcount) * 138 / 100 + 1; + let mut buffer = vec![0u8; size]; + + let mut i = zcount; + let mut high = size - 1; + + while i < self.len() { + let mut carry = self[i] as u32; + let mut j = size - 1; + + while j > high || carry != 0 { + carry += 256 * buffer[j] as u32; + buffer[j] = (carry % 58) as u8; + carry /= 58; + + // in original trezor implementation it was underflowing + if j > 0 { + j -= 1; + } + } + + i += 1; + high = j; + } + + let mut j = buffer.iter().take_while(|x| **x == 0).count(); + + let mut result = Vec::new(); + for _ in 0..zcount { + result.push(b'1'); + } + + while j < size { + result.push(ALPHABET[buffer[j] as usize]); + j += 1; + } + + result + } +} + +impl FromBase58 for str { + fn from_base58(&self) -> Result, FromBase58Error> { + let mut bin = [0u8; 132]; + let mut out = [0u32; (132 + 3) / 4]; + let bytesleft = (bin.len() % 4) as u8; + let zeromask = match bytesleft { + 0 => 0u32, + _ => 0xffffffff << (bytesleft * 8), + }; + + let zcount = self.chars().take_while(|x| *x == '1').count(); + let mut i = zcount; + let b58: Vec = self.bytes().collect(); + + while i < self.len() { + if (b58[i] & 0x80) != 0 { + // High-bit set on invalid digit + return Err(FromBase58Error::InvalidBase58Character(b58[i] as char, i)); + } + + if B58_DIGITS_MAP[b58[i] as usize] == 
-1 { + // // Invalid base58 digit + return Err(FromBase58Error::InvalidBase58Character(b58[i] as char, i)); + } + + let mut c = B58_DIGITS_MAP[b58[i] as usize] as u64; + let mut j = out.len(); + while j != 0 { + j -= 1; + let t = out[j] as u64 * 58 + c; + c = (t & 0x3f00000000) >> 32; + out[j] = (t & 0xffffffff) as u32; + } + + if c != 0 { + // Output number too big (carry to the next int32) + return Err(FromBase58Error::InvalidBase58Length); + } + + if (out[0] & zeromask) != 0 { + // Output number too big (last int32 filled too far) + return Err(FromBase58Error::InvalidBase58Length); + } + + i += 1; + } + + let mut i = 1; + let mut j = 0; + + bin[0] = match bytesleft { + 3 => ((out[0] & 0xff0000) >> 16) as u8, + 2 => ((out[0] & 0xff00) >> 8) as u8, + 1 => { + j = 1; + (out[0] & 0xff) as u8 + } + _ => { + i = 0; + bin[0] + } + }; + + while j < out.len() { + bin[i] = ((out[j] >> 0x18) & 0xff) as u8; + bin[i + 1] = ((out[j] >> 0x10) & 0xff) as u8; + bin[i + 2] = ((out[j] >> 8) & 0xff) as u8; + bin[i + 3] = ((out[j] >> 0) & 0xff) as u8; + i += 4; + j += 1; + } + + let leading_zeros = bin.iter().take_while(|x| **x == 0).count(); + Ok(bin[leading_zeros - zcount..].to_vec()) + } +} + +#[cfg(test)] +mod tests { + use super::{FromBase58, ToBase58}; + + #[test] + fn test_from_base58_basic() { + assert_eq!("".from_base58().unwrap(), b""); + assert_eq!("Z".from_base58().unwrap(), &[32]); + assert_eq!("n".from_base58().unwrap(), &[45]); + assert_eq!("q".from_base58().unwrap(), &[48]); + assert_eq!("r".from_base58().unwrap(), &[49]); + assert_eq!("z".from_base58().unwrap(), &[57]); + assert_eq!("4SU".from_base58().unwrap(), &[45, 49]); + assert_eq!("4k8".from_base58().unwrap(), &[49, 49]); + assert_eq!("ZiCa".from_base58().unwrap(), &[97, 98, 99]); + assert_eq!("3mJr7AoUXx2Wqd".from_base58().unwrap(), b"1234598760"); + assert_eq!( + "3yxU3u1igY8WkgtjK92fbJQCd4BZiiT1v25f".from_base58().unwrap(), + b"abcdefghijklmnopqrstuvwxyz" + ); + } + + #[test] + fn 
test_from_base58_invalid_char() { + assert!("0".from_base58().is_err()); + assert!("O".from_base58().is_err()); + assert!("I".from_base58().is_err()); + assert!("l".from_base58().is_err()); + assert!("3mJr0".from_base58().is_err()); + assert!("O3yxU".from_base58().is_err()); + assert!("3sNI".from_base58().is_err()); + assert!("4kl8".from_base58().is_err()); + assert!("s!5<".from_base58().is_err()); + assert!("t$@mX<*".from_base58().is_err()); + } + + #[test] + fn test_from_base58_initial_zeros() { + assert_eq!("1ZiCa".from_base58().unwrap(), b"\0abc"); + assert_eq!("11ZiCa".from_base58().unwrap(), b"\0\0abc"); + assert_eq!("111ZiCa".from_base58().unwrap(), b"\0\0\0abc"); + assert_eq!("1111ZiCa".from_base58().unwrap(), b"\0\0\0\0abc"); + } + + #[test] + fn test_to_base58_basic() { + assert_eq!(b"".to_base58(), ""); + assert_eq!(&[32].to_base58(), "Z"); + assert_eq!(&[45].to_base58(), "n"); + assert_eq!(&[48].to_base58(), "q"); + assert_eq!(&[49].to_base58(), "r"); + assert_eq!(&[57].to_base58(), "z"); + assert_eq!(&[45, 49].to_base58(), "4SU"); + assert_eq!(&[49, 49].to_base58(), "4k8"); + assert_eq!(b"abc".to_base58(), "ZiCa"); + assert_eq!(b"1234598760".to_base58(), "3mJr7AoUXx2Wqd"); + assert_eq!( + b"abcdefghijklmnopqrstuvwxyz".to_base58(), + "3yxU3u1igY8WkgtjK92fbJQCd4BZiiT1v25f" + ); + } + + #[test] + fn test_to_base58_initial_zeros() { + assert_eq!(b"\0abc".to_base58(), "1ZiCa"); + assert_eq!(b"\0\0abc".to_base58(), "11ZiCa"); + assert_eq!(b"\0\0\0abc".to_base58(), "111ZiCa"); + assert_eq!(b"\0\0\0\0abc".to_base58(), "1111ZiCa"); + } +} diff --git a/pallets/utxo/src/header.rs b/pallets/utxo/src/header.rs deleted file mode 100644 index 6a71482..0000000 --- a/pallets/utxo/src/header.rs +++ /dev/null @@ -1,317 +0,0 @@ -// Copyright (c) 2021 RBB S.r.l -// opensource@mintlayer.org -// SPDX-License-Identifier: MIT -// Licensed under the MIT License; -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://spdx.org/licenses/MIT -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// Author(s): C. Yap, Anton Sinitsyn - -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; -use sp_core::sp_std::convert::TryFrom; - -use codec::{Decode, Encode}; - -pub type TXOutputHeader = u128; -pub type TokenID = u64; - -// Check one bit in a number -#[inline(always)] -fn check_bit(number: u128, pos: u32) -> bool { - (number & (1u128.overflowing_shl(pos).0)) != 0 -} - -#[inline(always)] -fn set_bit(number: u128, pos: u32) -> u128 { - number | (1u128.overflowing_shl(pos).0) -} - -// Copy number to bits field -fn fit_in_bits(number: u128, pos: u32, length: u32) -> u128 { - let mut result = 0u128; - for i in pos..pos + length { - if check_bit(number, i) { - result = set_bit(result, i - pos); - } - } - result -} - -fn move_bits(from: u128, f_offset: u32, f_length: u32, to_offset: u32) -> u128 { - let mut result = 0u128; - for i in f_offset..f_offset + f_length { - if check_bit(from, i) { - result = set_bit(result, i - f_offset + to_offset); - } - } - result -} - -#[derive(Debug)] -struct BitsField { - length: u32, - offset: u32, - pub data: u128, -} - -// Size of bit fields, total 72 bits -const TOKEN_TYPE_SIZE: u32 = 3; -const TOKEN_ID_SIZE: u32 = 64; -const VERSION_SIZE: u32 = 5; - -#[derive(Debug)] -pub struct OutputHeaderData { - token_type: BitsField, - token_id: BitsField, - version: BitsField, - reserve: BitsField, -} - -impl OutputHeaderData { - pub fn new(header: u128) -> OutputHeaderData { - let mut offset = 0; - - // Signature method - let token_type = BitsField { - length: TOKEN_TYPE_SIZE, - offset, - data: fit_in_bits(header, 
offset, TOKEN_TYPE_SIZE), - }; - offset += TOKEN_TYPE_SIZE; - - // Token ID - let token_id = BitsField { - length: TOKEN_ID_SIZE, - offset, - data: fit_in_bits(header, offset, TOKEN_ID_SIZE), - }; - offset += TOKEN_ID_SIZE; - - // Version number - let version = BitsField { - length: VERSION_SIZE, - offset, - data: fit_in_bits(header, offset, VERSION_SIZE), - }; - offset += VERSION_SIZE; - - // You can add another field here. Just do not forget to add offset - OutputHeaderData { - token_type, - token_id, - version, - reserve: BitsField { - length: u128::BITS - offset, - offset, - data: fit_in_bits(header, offset, u128::BITS - offset), - }, - } - } - - pub fn as_u128(&self) -> u128 { - // Easy one because these bits have a concrete place - let mut result = 0u128; - let mut offset = 0; - result += move_bits(self.token_type.data, 0, TOKEN_TYPE_SIZE, offset); - offset += TOKEN_TYPE_SIZE; - result += move_bits(self.token_id.data, 0, TOKEN_ID_SIZE, offset); - offset += TOKEN_ID_SIZE; - result += move_bits(self.version.data, 0, VERSION_SIZE, offset); - - result - } - - pub fn token_type(&self) -> Option { - TryFrom::try_from(self.token_type.data).ok() - } - - pub fn set_token_type(&mut self, token_id: TokenType) { - self.token_type.data = token_id as u128; - } - - pub fn token_id(&self) -> TokenID { - self.token_id.data as u64 - } - - pub fn set_token_id(&mut self, token_id: TokenID) { - self.token_id.data = token_id as u128; - } - - pub fn version(&self) -> u128 { - self.version.data - } - - pub fn set_version(&mut self, version: u64) { - self.version.data = version as u128; - } - - pub fn validate(&self) -> bool { - self.token_type().is_some() - } -} - -pub trait OutputHeaderHelper { - fn as_tx_output_header(&self) -> OutputHeaderData; -} - -impl OutputHeaderHelper for TXOutputHeader { - fn as_tx_output_header(&self) -> OutputHeaderData { - OutputHeaderData::new(*self) - } -} - -// https://stackoverflow.com/posts/57578431/revisions from Shepmaster -// whenever a new 
type/variant is supported, we don't have to code a lot of 'matches' boilerplate. -macro_rules! u128_to_enum { - ($(#[$meta:meta])* $vis:vis enum $name:ident { - $($(#[$vmeta:meta])* $vname:ident $(= $val:expr)?,)* - }) => { - $(#[$meta])* - $vis enum $name { - $($(#[$vmeta])* $vname $(= $val)?,)* - } - - impl TryFrom for $name { - type Error = &'static str; - - fn try_from(v: u128) -> Result { - match v { - $(x if x == $name::$vname as u128 => Ok($name::$vname),)* - _ => { - Err(stringify!(unsupported $name)) - }, - } - } - } - } -} - -u128_to_enum! { -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, Hash, Debug)] -pub enum TokenType { - MLT = 0, - Normal = 1, - CT = 2, - NFT = 3, - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn validate() { - // improper sig meth - assert_eq!(OutputHeaderData::new(0b11111_111u128).validate(), false); - // improper token type - assert_eq!(OutputHeaderData::new(0b11000_100u128).validate(), false); - - // Proper header - assert!(OutputHeaderData::new( - 0b10_0000000000000000000000000000000000000000000000000000000000000000_010u128 - ) - .validate()); - assert!(OutputHeaderData::new( - 0b01_0000000000000000000000000000000000000000000000000000000000000001_000u128 - ) - .validate()); - assert!(OutputHeaderData::new(0u128).validate()); - } - - #[test] - fn token_types() { - let x = 0b11011_000u128; // last 3 bits are 000, so token_type should be 0 or MLT. 
- let header = OutputHeaderData::new(x); - assert!(header.token_type().is_some()); - assert_eq!(header.token_type().unwrap(), TokenType::MLT); - - let x = 0b0000100_001; // last 3 bits are 001, so token_type should be Normal - assert_eq!( - OutputHeaderData::new(x).token_type().unwrap(), - TokenType::Normal - ); - - let x = 0b111110_010; // last 3 bits are 010, so token_type should be CT - assert_eq!( - OutputHeaderData::new(x).token_type().unwrap(), - TokenType::CT - ); - - let x = 0b111110_011; // last 3 bits are 011, so token_type should be NFT - assert_eq!( - OutputHeaderData::new(x).token_type().unwrap(), - TokenType::NFT - ); - - let x = 0b10_111; // last 3 bits is are, and it's not yet supported. - assert_eq!(OutputHeaderData::new(x).token_type(), None); - - // last 3 bits are 001. Convert to 000 for MLT. - let mut header = OutputHeaderData::new(185u128); - header.set_token_type(TokenType::MLT); - assert_eq!(header.as_u128(), 184); - - // last 3 bits of header are 000. Convert to 010 for CT. - header.set_token_type(TokenType::CT); - assert_eq!(header.as_u128(), 186); - } - - #[allow(dead_code)] - fn print_bits(number: u128) { - let mut space = 0; - for i in 0..128 { - if check_bit(number, 127 - i) { - print!("1"); - } else { - print!("0"); - } - space += 1; - if space == 4 { - space = 0; - print!("_"); - } - } - println!(""); - } - - #[test] - fn token_ids() { - const TOKENID_TEST_0: u64 = 0; - const TOKENID_TEST_1: u64 = 1; - const TOKENID_TEST_2: u64 = 2; - - // the middle 64 bits are 000000, so type is TOKENID_TEST_0. - let header = OutputHeaderData::new( - 0b1010_0000000000000000000000000000000000000000000000000000000000000000_110, - ); - assert_eq!(header.token_id(), TOKENID_TEST_0); - - // the middle 64 bits are 000001, so type is TOKENID_TEST_1. 
- let header = OutputHeaderData::new( - 0b1010_0000000000000000000000000000000000000000000000000000000000000001_110, - ); - assert_eq!(header.token_id(), TOKENID_TEST_1); - - // the first 64 bits are 000010, so type is TOKENID_TEST_1. - assert_eq!( - OutputHeaderData::new(0b000001_101).token_id(), - TOKENID_TEST_1 - ); - assert_eq!(OutputHeaderData::new(3u128).token_id(), TOKENID_TEST_0); - - let mut improper_header = OutputHeaderData::new(u128::MAX); - improper_header.set_token_id(TOKENID_TEST_2); - assert_eq!(improper_header.token_id(), TOKENID_TEST_2); - } -} diff --git a/pallets/utxo/src/lib.rs b/pallets/utxo/src/lib.rs index 38ed7ac..4cb5c6a 100644 --- a/pallets/utxo/src/lib.rs +++ b/pallets/utxo/src/lib.rs @@ -17,32 +17,31 @@ #![cfg_attr(not(feature = "std"), no_std)] -pub use header::*; pub use pallet::*; -#[cfg(test)] -mod mock; - -#[cfg(test)] -mod tests; - +mod base58_nostd; #[cfg(feature = "runtime-benchmarks")] mod benchmarking; - -mod header; +#[cfg(test)] +mod mock; mod script; mod sign; +#[cfg(test)] +mod tests; +pub mod tokens; +pub mod verifier; pub mod weights; #[frame_support::pallet] pub mod pallet { - use crate::sign::{self, Scheme}; - use crate::{OutputHeaderData, OutputHeaderHelper, TXOutputHeader, TokenID, TokenType}; + // use crate::sign::{self, Scheme}; + use crate::tokens::{/*Mlt,*/ OutputData, TokenId, Value}; + use crate::verifier::TransactionVerifier; use bech32; use chainscript::Script; use codec::{Decode, Encode}; use core::marker::PhantomData; - use frame_support::weights::PostDispatchInfo; + // use frame_support::weights::PostDispatchInfo; use frame_support::{ dispatch::{DispatchResultWithPostInfo, Vec}, pallet_prelude::*, @@ -52,31 +51,19 @@ pub mod pallet { }; use frame_system::pallet_prelude::*; use hex_literal::hex; - use pallet_utxo_tokens::TokenListData; use pp_api::ProgrammablePoolApi; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use sp_core::{ - sp_std::collections::btree_map::BTreeMap, + // 
sp_std::collections::btree_map::BTreeMap, sp_std::{convert::TryInto, str, vec}, sr25519, testing::SR25519, - H256, H512, - }; - use sp_runtime::traits::{ - AtLeast32Bit, Zero, /*, StaticLookup , AtLeast32BitUnsigned, Member, One */ + H256, + H512, }; - use sp_runtime::DispatchErrorWithPostInfo; - - pub type Value = u128; - pub type String = Vec; - - pub struct Mlt(Value); - impl Mlt { - pub fn to_munit(&self) -> Value { - self.0 * 1_000 * 100_000_000 - } - } + use sp_runtime::traits::AtLeast32Bit; + // use sp_runtime::DispatchErrorWithPostInfo; #[pallet::error] pub enum Error { @@ -104,6 +91,8 @@ pub mod pallet { Unapproved, /// The source account would not survive the transfer and it needs to stay alive. WouldDie, + // Thrown when there is an attempt to mint a duplicate collection. + NftCollectionExists, } #[pallet::pallet] @@ -233,8 +222,8 @@ pub mod pallet { #[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] pub struct TransactionOutput { pub(crate) value: Value, - pub(crate) header: TXOutputHeader, pub(crate) destination: Destination, + pub(crate) data: Option, } impl TransactionOutput { @@ -246,8 +235,8 @@ pub mod pallet { let pubkey = sp_core::sr25519::Public::from_h256(pubkey); Self { value, - header: 0, destination: Destination::Pubkey(pubkey.into()), + data: None, } } @@ -255,8 +244,8 @@ pub mod pallet { pub fn new_create_pp(value: Value, code: Vec, data: Vec) -> Self { Self { value, - header: 0, destination: Destination::CreatePP(code, data), + data: None, } } @@ -264,20 +253,8 @@ pub mod pallet { pub fn new_call_pp(value: Value, dest_account: AccountId, input: Vec) -> Self { Self { value, - header: 0, destination: Destination::CallPP(dest_account, input), - } - } - - pub fn new_token(token_id: TokenID, value: Value, pub_key: H256) -> Self { - let pub_key = sp_core::sr25519::Public::from_h256(pub_key); - let mut header = OutputHeaderData::new(0); - header.set_token_id(token_id); - let header = header.as_u128(); - Self { - 
value, - header, - destination: Destination::Pubkey(pub_key.into()), + data: None, } } @@ -285,23 +262,12 @@ pub mod pallet { pub fn new_script_hash(value: Value, hash: H256) -> Self { Self { value, - header: 0, destination: Destination::ScriptHash(hash), + data: None, } } } - impl TransactionOutput { - fn validate_header(&self) -> Result<(), &'static str> { - // Check signature and token id - self.header - .as_tx_output_header() - .validate() - .then(|| ()) - .ok_or("Incorrect header") - } - } - #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default)] pub struct Transaction { @@ -343,14 +309,6 @@ pub mod pallet { #[allow(type_alias_bounds)] pub type TransactionFor = Transaction; - #[pallet::storage] - #[pallet::getter(fn token_list)] - pub(super) type TokenList = StorageValue<_, TokenListData, ValueQuery>; - - #[pallet::storage] - #[pallet::getter(fn tokens_higher_id)] - pub(super) type TokensHigherID = StorageValue<_, TokenID, ValueQuery>; - #[pallet::storage] #[pallet::getter(fn reward_total)] pub(super) type RewardTotal = StorageValue<_, Value, ValueQuery>; @@ -360,11 +318,17 @@ pub mod pallet { pub(super) type UtxoStore = StorageMap<_, Identity, H256, Option>, ValueQuery>; + #[pallet::storage] + #[pallet::getter(fn pointer_to_issue_token)] + pub(super) type PointerToIssueToken = + StorageMap<_, Identity, TokenId, /* UTXO */ H256, OptionQuery>; + #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] #[pallet::metadata(T::AccountId = "AccountId")] pub enum Event { - TokenCreated(u64, T::AccountId), + TokenCreated(H256, T::AccountId), + Minted(H256, T::AccountId, Vec), TransactionSuccess(TransactionFor), } @@ -375,6 +339,13 @@ pub mod pallet { } } + pub(crate) fn get_utxo_by_token_id( + token_id: TokenId, + ) -> Option> { + let utxo_id = PointerToIssueToken::::get(token_id)?; + UtxoStore::::get(utxo_id) + } + // Strips a transaction of its Signature fields by replacing 
value with ZERO-initialized fixed hash. pub fn get_simple_transaction( tx: &Transaction, @@ -406,7 +377,6 @@ pub mod pallet { for authority in auths { // TODO: where do we get the header info? - // TODO: are the rewards always of MLT token type? let utxo = TransactionOutput::new_pubkey(share_value, *authority); let hash = { @@ -441,227 +411,260 @@ pub mod pallet { pub fn validate_transaction( tx: &TransactionFor, ) -> Result { - //ensure rather than assert to avoid panic - //both inputs and outputs should contain at least 1 and at most u32::MAX - 1 entries - ensure!(!tx.inputs.is_empty(), "no inputs"); - ensure!(!tx.outputs.is_empty(), "no outputs"); - ensure!(tx.inputs.len() < (u32::MAX as usize), "too many inputs"); - ensure!(tx.outputs.len() < (u32::MAX as usize), "too many outputs"); - - //ensure each input is used only a single time - //maps each input into btree - //if map.len() > num of inputs then fail - //https://doc.rust-lang.org/std/collections/struct.BTreeMap.html - //WARNING workshop code has a bug here - //https://github.com/substrate-developer-hub/utxo-workshop/blob/workshop/runtime/src/utxo.rs - //input_map.len() > transaction.inputs.len() //THIS IS WRONG - { - let input_map: BTreeMap<_, ()> = - tx.inputs.iter().map(|input| (input.outpoint, ())).collect(); - //we want map size and input size to be equal to ensure each is used only once - ensure!( - input_map.len() == tx.inputs.len(), - "each input should be used only once" - ); - } - //ensure each output is unique - //map each output to btree to count unique elements - //WARNING example code has a bug here - //out_map.len() != transaction.outputs.len() //THIS IS WRONG - { - let out_map: BTreeMap<_, ()> = tx.outputs.iter().map(|output| (output, ())).collect(); - //check each output is defined only once - ensure!( - out_map.len() == tx.outputs.len(), - "each output should be used once" - ); - } - - // In order to avoid race condition in network we maintain a list of required utxos for a tx - // 
Example of race condition: - // Assume both alice and bob have 10 coins each and bob owes charlie 20 coins - // In order to pay charlie alice must first send 10 coins to bob which creates a new utxo - // If bob uses the new utxo to try and send the coins to charlie before charlie receives the alice to bob 10 coins utxo - // then the tx from bob to charlie is invalid. By maintaining a list of required utxos we can ensure the tx can happen as and - // when the utxo is available. We use max longevity at the moment. That should be fixed. - - // Resolve the transaction inputs by looking up UTXOs being spent by them. - // - // This will cointain one of the following: - // * Ok(utxos): a vector of UTXOs each input spends. - // * Err(missing): a vector of outputs missing from the store - let input_utxos = { - let mut missing = Vec::new(); - let mut resolved: Vec> = Vec::new(); - - for input in &tx.inputs { - if let Some(input_utxo) = >::get(&input.outpoint) { - let lock_commitment = input_utxo.destination.lock_commitment(); - ensure!( - input.lock_hash() == *lock_commitment, - "Lock hash does not match" - ); - resolved.push(input_utxo); - } else { - missing.push(input.outpoint.clone().as_fixed_bytes().to_vec()); - } - } - - missing.is_empty().then(|| resolved).ok_or(missing) - }; - - let full_inputs: Vec<(crate::TokenID, TransactionOutputFor)> = tx - .inputs - .iter() - .filter_map(|input| >::get(&input.outpoint)) - .map(|output| (OutputHeaderData::new(output.header).token_id(), output)) - .collect(); - - let input_vec: Vec<(crate::TokenID, Value)> = - full_inputs.iter().map(|output| (output.0, output.1.value)).collect(); - - let out_vec: Vec<(crate::TokenID, Value)> = tx - .outputs - .iter() - .map(|output| { - ( - OutputHeaderData::new(output.header).token_id(), - output.value, - ) - }) - .collect(); - - // Check for token creation - let tokens_list = >::get(); - for output in tx.outputs.iter() { - let tid = OutputHeaderData::new(output.header).token_id(); - // If we 
have input and output for the same token it's not a problem - if full_inputs.iter().find(|&x| (x.0 == tid) && (x.1 != *output)).is_some() { - continue; - } else { - // But when we don't have an input for token but token id exist in TokenList - ensure!( - tokens_list.iter().find(|&x| x.id == tid).is_none(), - "no inputs for the token id" - ); - } - } - - let mut new_utxos = Vec::new(); - let mut reward = 0; - - // Check that outputs are valid - for (output_index, output) in tx.outputs.iter().enumerate() { - // Check the header is valid - let res = output.validate_header(); - if let Err(e) = res { - log::error!("Header error: {}", e); - } - ensure!(res.is_ok(), "header error. Please check the logs."); - - match output.destination { - Destination::Pubkey(_) | Destination::ScriptHash(_) => { - ensure!(output.value > 0, "output value must be nonzero"); - let hash = tx.outpoint(output_index as u64); - ensure!(!>::contains_key(hash), "output already exists"); - new_utxos.push(hash.as_fixed_bytes().to_vec()); - } - Destination::CreatePP(_, _) => { - log::info!("TODO validate OP_CREATE"); - } - Destination::CallPP(_, _) => { - log::info!("TODO validate OP_CALL"); - } - } - } - - // if all spent UTXOs are available, check the math and signatures - if let Ok(input_utxos) = &input_utxos { - // We have to check sum of input tokens is less or equal to output tokens. 
- let mut inputs_sum: BTreeMap = BTreeMap::new(); - let mut outputs_sum: BTreeMap = BTreeMap::new(); - - for x in input_vec { - let value = - x.1.checked_add(*inputs_sum.get(&x.0).unwrap_or(&0)) - .ok_or("input value overflow")?; - inputs_sum.insert(x.0, value); - } - for x in out_vec { - let value = - x.1.checked_add(*outputs_sum.get(&x.0).unwrap_or(&0)) - .ok_or("output value overflow")?; - outputs_sum.insert(x.0, value); - } - - let mut new_token_exist = false; - for output_token in &outputs_sum { - match inputs_sum.get(&output_token.0) { - Some(input_value) => ensure!( - input_value >= &output_token.1, - "output value must not exceed input value" - ), - None => { - // If the transaction has one an output with a new token ID - if new_token_exist { - frame_support::fail!("input for the token not found") - } else { - new_token_exist = true; - } - } - } - } - - for (index, (input, input_utxo)) in tx.inputs.iter().zip(input_utxos).enumerate() { - match &input_utxo.destination { - Destination::Pubkey(pubkey) => { - let msg = sign::TransactionSigMsg::construct( - sign::SigHash::default(), - &tx, - &input_utxos, - index as u64, - u32::MAX, - ); - let ok = pubkey - .parse_sig(&input.witness[..]) - .ok_or("bad signature format")? 
- .verify(&msg); - ensure!(ok, "signature must be valid"); - } - Destination::CreatePP(_, _) => { - log::info!("TODO validate spending of OP_CREATE"); - } - Destination::CallPP(_, _) => { - log::info!("TODO validate spending of OP_CALL"); - } - Destination::ScriptHash(_hash) => { - let witness = input.witness.clone(); - let lock = input.lock.clone(); - crate::script::verify(&tx, &input_utxos, index as u64, witness, lock) - .map_err(|_| "script verification failed")?; - } - } - } - - // Reward at the moment only in MLT - reward = if inputs_sum.contains_key(&(TokenType::MLT as TokenID)) - && outputs_sum.contains_key(&(TokenType::MLT as TokenID)) - { - inputs_sum[&(TokenType::MLT as TokenID)] - .checked_sub(outputs_sum[&(TokenType::MLT as TokenID)]) - .ok_or("reward underflow")? - } else { - *inputs_sum.get(&(TokenType::MLT as TokenID)).ok_or("fee doesn't exist")? - } - } - - Ok(ValidTransaction { - priority: reward as u64, - requires: input_utxos.map_or_else(|x| x, |_| Vec::new()), - provides: new_utxos, - longevity: TransactionLongevity::MAX, - propagate: true, - }) + TransactionVerifier::<'_, T>::new(tx) + .checking_inputs()? + .checking_outputs()? + .checking_signatures()? + .checking_utxos_exists()? + .checking_tokens_transferring()? + .checking_tokens_issued()? + .checking_nft_mint()? + .checking_assets_burn()? + .calculating_reward()? 
+ .collect_result() + + /* + //ensure rather than assert to avoid panic + //both inputs and outputs should contain at least 1 and at most u32::MAX - 1 entries + ensure!(!tx.inputs.is_empty(), "no inputs"); + ensure!(!tx.outputs.is_empty(), "no outputs"); + ensure!(tx.inputs.len() < (u32::MAX as usize), "too many inputs"); + ensure!(tx.outputs.len() < (u32::MAX as usize), "too many outputs"); + + //ensure each input is used only a single time + //maps each input into btree + //if map.len() > num of inputs then fail + //https://doc.rust-lang.org/std/collections/struct.BTreeMap.html + //WARNING workshop code has a bug here + //https://github.com/substrate-developer-hub/utxo-workshop/blob/workshop/runtime/src/utxo.rs + //input_map.len() > transaction.inputs.len() //THIS IS WRONG + { + let input_map: BTreeMap<_, ()> = + tx.inputs.iter().map(|input| (input.outpoint, ())).collect(); + //we want map size and input size to be equal to ensure each is used only once + ensure!( + input_map.len() == tx.inputs.len(), + "each input should be used only once" + ); + } + //ensure each output is unique + //map each output to btree to count unique elements + //WARNING example code has a bug here + //out_map.len() != transaction.outputs.len() //THIS IS WRONG + { + let out_map: BTreeMap<_, ()> = tx.outputs.iter().map(|output| (output, ())).collect(); + //check each output is defined only once + ensure!( + out_map.len() == tx.outputs.len(), + "each output should be used once" + ); + } + let simple_tx = get_simple_transaction(tx); + let mut reward = 0; + // Resolve the transaction inputs by looking up UTXOs being spent by them. + // + // This will cointain one of the following: + // * Ok(utxos): a vector of UTXOs each input spends. 
+ // * Err(missing): a vector of outputs missing from the store + let input_utxos = { + let mut missing = Vec::new(); + let mut resolved: Vec> = Vec::new(); + + for input in &tx.inputs { + if let Some(input_utxo) = >::get(&input.outpoint) { + let lock_commitment = input_utxo.destination.lock_commitment(); + ensure!( + input.lock_hash() == *lock_commitment, + "Lock hash does not match" + ); + resolved.push(input_utxo); + } else { + missing.push(input.outpoint.clone().as_fixed_bytes().to_vec()); + } + } + + missing.is_empty().then(|| resolved).ok_or(missing) + }; + + let full_inputs: Vec<(TokenId, TransactionOutputFor)> = tx + .inputs + .iter() + .filter_map(|input| >::get(&input.outpoint)) + .filter_map(|output| match output.data { + Some(ref data) => match data { + OutputData::TokenTransferV1 { token_id, amount } => Some((*token_id, output)), + OutputData::TokenIssuanceV1 { + token_id, + token_ticker, + amount_to_issue, + number_of_decimals, + metadata_URI, + } => Some((*token_id, output)), + OutputData::TokenBurnV1 { .. } => { + // frame_support::fail!("Token gone forever, we can't use it anymore").ok(); + None + } + OutputData::NftMintV1 { + token_id, + data_hash, + metadata_URI, + } => Some((*token_id, output)), + }, + None => Some((H256::zero(), output)), + }) + .collect(); + + let input_vec: Vec<(TokenId, Value)> = + full_inputs.iter().map(|output| (output.0, output.1.value)).collect(); + + let out_vec: Vec<(TokenId, Value)> = tx + .outputs + .iter() + .filter_map(|output| { + match output.data { + Some(OutputData::TokenTransferV1 { token_id, amount }) => { + Some((token_id, amount)) + } + Some(OutputData::TokenIssuanceV1 { + token_id, + amount_to_issue, + .. + }) => Some((token_id, amount_to_issue)), + Some(OutputData::NftMintV1 { token_id, .. }) => Some((token_id, 1)), + // Token gone forever, we can't use it anymore + Some(OutputData::TokenBurnV1 { .. 
}) => None, + None => Some((H256::zero(), output.value)), + } + }) + .collect(); + + // Check for token creation + for output in tx.outputs.iter() { + let tid = match output.data { + Some(OutputData::TokenTransferV1 { token_id, .. }) => token_id, + Some(OutputData::TokenIssuanceV1 { token_id, .. }) => token_id, + _ => continue, + }; + // If we have input and output for the same token it's not a problem + if full_inputs.iter().find(|&x| (x.0 == tid) && (x.1 != *output)).is_some() { + continue; + } else { + // But when we don't have an input for token but token id exist in TokenList + ensure!( + !>::contains_key(tid), + "no inputs for the token id" + ); + } + } + + let mut new_utxos = Vec::new(); + // Check that outputs are valid + for (output_index, output) in tx.outputs.iter().enumerate() { + match output.destination { + Destination::Pubkey(_) | Destination::ScriptHash(_) => { + ensure!(output.value > 0, "output value must be nonzero"); + let hash = tx.outpoint(output_index as u64); + ensure!(!>::contains_key(hash), "output already exists"); + new_utxos.push(hash.as_fixed_bytes().to_vec()); + } + Destination::CreatePP(_, _) => { + log::info!("TODO validate OP_CREATE"); + } + Destination::CallPP(_, _) => { + log::info!("TODO validate OP_CALL"); + } + } + } + + // if all spent UTXOs are available, check the math and signatures + if let Ok(input_utxos) = &input_utxos { + // We have to check sum of input tokens is less or equal to output tokens. 
+ let mut inputs_sum: BTreeMap = BTreeMap::new(); + let mut outputs_sum: BTreeMap = BTreeMap::new(); + + for x in input_vec { + let value = + x.1.checked_add(*inputs_sum.get(&x.0).unwrap_or(&0)) + .ok_or("input value overflow")?; + inputs_sum.insert(x.0, value); + } + for x in out_vec { + let value = + x.1.checked_add(*outputs_sum.get(&x.0).unwrap_or(&0)) + .ok_or("output value overflow")?; + outputs_sum.insert(x.0, value); + } + + let mut new_token_exist = false; + for output_token in &outputs_sum { + match inputs_sum.get(&output_token.0) { + Some(input_value) => ensure!( + input_value >= &output_token.1, + "output value must not exceed input value" + ), + None => { + // If the transaction has one an output with a new token ID + if new_token_exist { + frame_support::fail!("input for the token not found") + } else { + new_token_exist = true; + } + } + } + } + + for (index, (input, input_utxo)) in tx.inputs.iter().zip(input_utxos).enumerate() { + match &input_utxo.destination { + Destination::Pubkey(pubkey) => { + let msg = sign::TransactionSigMsg::construct( + sign::SigHash::default(), + &tx, + &input_utxos, + index as u64, + u32::MAX, + ); + let ok = pubkey + .parse_sig(&input.witness[..]) + .ok_or("bad signature format")? 
+ .verify(&msg); + ensure!(ok, "signature must be valid"); + } + Destination::CreatePP(_, _) => { + log::info!("TODO validate spending of OP_CREATE"); + } + Destination::CallPP(_, _) => { + log::info!("TODO validate spending of OP_CALL"); + } + Destination::ScriptHash(_hash) => { + let witness = input.witness.clone(); + let lock = input.lock.clone(); + crate::script::verify(&tx, &input_utxos, index as u64, witness, lock) + .map_err(|_| "script verification failed")?; + } + } + } + + // Reward at the moment only in MLT + reward = if inputs_sum.contains_key(&(H256::zero() as TokenId)) + && outputs_sum.contains_key(&(H256::zero() as TokenId)) + { + inputs_sum[&(H256::default() as TokenId)] + .checked_sub(outputs_sum[&(H256::zero() as TokenId)]) + .ok_or("reward underflow")? + } else { + *inputs_sum.get(&(H256::zero() as TokenId)).ok_or("fee doesn't exist")? + }; + } + + Ok(ValidTransaction { + priority: reward as u64, + requires: input_utxos.map_or_else(|x| x, |_| Vec::new()), + provides: new_utxos, + longevity: TransactionLongevity::MAX, + propagate: true, + }) + + */ } /// Update storage to reflect changes made by transaction @@ -712,62 +715,129 @@ pub mod pallet { Ok(().into()) } - pub fn token_create( + // pub fn token_create( + // caller: &T::AccountId, + // public: H256, + // input_for_fee: TransactionInput, + // token_name: Vec, + // token_ticker: Vec, + // supply: Value, + // ) -> Result> { + // ensure!(token_name.len() <= 25, Error::::Unapproved); + // ensure!(token_ticker.len() <= 5, Error::::Unapproved); + // ensure!(!supply.is_zero(), Error::::MinBalanceZero); + // + // // Input with MLT FEE + // let fee = UtxoStore::::get(input_for_fee.outpoint).ok_or(Error::::Unapproved)?.value; + // ensure!(fee >= Mlt(100).to_munit(), Error::::Unapproved); + // + // // Save in UTXO + // let instance = crate::TokenInstance::new_normal( + // BlakeTwo256::hash_of(&(&token_name, &token_ticker)), + // token_name, + // token_ticker, + // supply, + // ); + // let token_id = 
*instance.id(); + // + // ensure!( + // !>::contains_key(instance.id()), + // Error::::InUse + // ); + // + // let mut tx = Transaction { + // inputs: crate::vec![ + // // Fee an input equal 100 MLT + // input_for_fee, + // ], + // outputs: crate::vec![ + // // Output a new tokens + // TransactionOutput::new_token(*instance.id(), supply, public), + // ], + // }; + // + // // We shall make an output to return odd funds + // if fee > Mlt(100).to_munit() { + // tx.outputs.push(TransactionOutput::new_pubkey( + // fee - Mlt(100).to_munit(), + // public, + // )); + // } + // + // let sig = crypto::sr25519_sign( + // SR25519, + // &sp_core::sr25519::Public::from_h256(public), + // &tx.encode(), + // ) + // .ok_or(DispatchError::Token(sp_runtime::TokenError::CannotCreate))?; + // for i in 0..tx.inputs.len() { + // tx.inputs[i].witness = sig.0.to_vec(); + // } + // // Success + // spend::(caller, &tx)?; + // + // // Save in Store + // >::insert(token_id, Some(instance)); + // Ok(token_id) + // } + + /* + fn mint( caller: &T::AccountId, - public: H256, - input_for_fee: TransactionInput, - token_name: String, - token_ticker: String, - supply: Value, - ) -> Result> { - ensure!(token_name.len() <= 25, Error::::Unapproved); - ensure!(token_ticker.len() <= 5, Error::::Unapproved); - ensure!(!supply.is_zero(), Error::::MinBalanceZero); - - // Take a free TokenID - let token_id = - >::get().checked_add(1).ok_or("All tokens IDs has taken")?; - - // Input with MLT FEE - let fee = UtxoStore::::get(input_for_fee.outpoint).ok_or(Error::::Unapproved)?.value; - ensure!(fee >= Mlt(100).to_munit(), Error::::Unapproved); - - // Save in UTXO - let instance = crate::TokenInstance::new(token_id, token_name, token_ticker, supply); - let mut tx = Transaction { - inputs: crate::vec![ - // Fee an input equal 100 MLT - input_for_fee, - ], - outputs: crate::vec![ - // Output a new tokens - TransactionOutput::new_token(token_id, supply, public), - ], - }; - - // We shall make an output to return odd 
funds - if fee > Mlt(100).to_munit() { - tx.outputs.push(TransactionOutput::new_pubkey( - fee - Mlt(100).to_munit(), - public, - )); - } - - // Save in Store - >::mutate(|x| { - if x.iter().find(|&x| x.id == token_id).is_none() { - x.push(instance.clone()) - } else { - panic!("the token has already existed with the same id") - } - }); - - // Success - spend::(caller, &tx)?; - Ok(token_id) + creator_pubkey: sp_core::sr25519::Public, + data_url: Vec, + data: Vec, + ) -> Result> { + let (fee, inputs_hashes) = pick_utxo::(caller, Mlt(100).to_munit()); + ensure!(fee >= Mlt(100).to_munit(), Error::::Unapproved); + ensure!(data_url.len() <= 50, Error::::Unapproved); + + let instance = TokenInstance::new_nft( + BlakeTwo256::hash_of(&data), + data.clone(), + data_url.clone(), + creator_pubkey.to_vec(), + ); + + let inputs_for_fee = inputs_hashes + .iter() + .filter_map(|x| >::get(&x)) + .map(|output| TransactionInput::new_empty(BlakeTwo256::hash_of(&(&output, 0 as u64)))) + .collect(); + + ensure!( + !TokenList::::contains_key(instance.id()), + Error::::NftCollectionExists + ); + + let mut tx = Transaction { + inputs: inputs_for_fee, + outputs: crate::vec![ + // Output a new tokens + TransactionOutput::new_nft( + *instance.id(), + data, + data_url, + H256::from(creator_pubkey) + ), + ], + }; + + let sig = crypto::sr25519_sign(SR25519, &creator_pubkey, &tx.encode()) + .ok_or(DispatchError::Token(sp_runtime::TokenError::CannotCreate))?; + for i in 0..tx.inputs.len() { + tx.inputs[i].witness = sig.0.to_vec(); + } + // Success + spend::(caller, &tx)?; + + // Save in Store + TokenList::::insert(instance.id(), Some(instance.clone())); + Ok(*instance.id()) } + */ - /// Pick the UTXOs of `caller` from UtxoStore that satify request `value` + /// Pick the UTXOs of `caller` from UtxoStore that satisfy request `value` /// /// Return a list of UTXOs that satisfy the request /// Return empty vector if caller doesn't have enough UTXO @@ -817,28 +887,6 @@ pub mod pallet { Ok(().into()) } - 
#[pallet::weight(T::WeightInfo::token_create(768_usize.saturating_add(token_name.len()) as u32))] - pub fn token_create( - origin: OriginFor, - public: H256, - input_for_fee: TransactionInput, - token_name: String, - token_ticker: String, - supply: Value, - ) -> DispatchResultWithPostInfo { - let caller = &ensure_signed(origin)?; - let token_id = token_create::( - caller, - public, - input_for_fee, - token_name, - token_ticker, - supply, - )?; - Self::deposit_event(Event::::TokenCreated(token_id, caller.clone())); - Ok(().into()) - } - #[pallet::weight(T::WeightInfo::send_to_address(16_u32.saturating_add(address.len() as u32)))] pub fn send_to_address( origin: OriginFor, @@ -889,7 +937,8 @@ pub mod pallet { TransactionOutput { value, destination: dest, - header: Default::default(), + // todo: We need to check what kind of token over here + data: None, }, TransactionOutput::new_pubkey(total - value, H256::from(pubkey_raw)), ], @@ -931,26 +980,38 @@ pub mod pallet { } } -use pallet_utxo_tokens::{TokenInstance, TokenListData}; +use frame_support::inherent::Vec; +use frame_support::pallet_prelude::DispatchResultWithPostInfo; +use sp_core::{ + crypto::UncheckedFrom, + Encode, {H256, H512}, +}; +use sp_runtime::sp_std::vec; +use utxo_api::UtxoApi; impl crate::Pallet { pub fn send() -> u32 { 1337 } - pub fn tokens_list() -> TokenListData { - >::get() + pub fn nft_read( + nft_id: &core::primitive::str, + ) -> Option<(/* Data url */ Vec, /* Data hash */ Vec)> { + match crate::pallet::get_utxo_by_token_id::( + crate::tokens::TokenId::from_string(&nft_id).ok()?, + )? + .data + { + Some(crate::tokens::OutputData::NftMintV1 { + data_hash, + metadata_uri, + .. 
+ }) => Some((metadata_uri, data_hash.encode())), + _ => None, + } } } -use frame_support::pallet_prelude::DispatchResultWithPostInfo; -use sp_core::{ - crypto::UncheckedFrom, - {H256, H512}, -}; -use sp_runtime::sp_std::vec; -use utxo_api::UtxoApi; - impl UtxoApi for Pallet where T::AccountId: UncheckedFrom + AsRef<[u8]>, diff --git a/pallets/utxo/src/tests.rs b/pallets/utxo/src/tests.rs index 7b6bbc9..c9a72bf 100644 --- a/pallets/utxo/src/tests.rs +++ b/pallets/utxo/src/tests.rs @@ -16,8 +16,8 @@ // Author(s): C. Yap use crate::{ - mock::*, Destination, RewardTotal, TokenList, Transaction, TransactionInput, TransactionOutput, - UtxoStore, Value, + mock::*, tokens::Value, Destination, RewardTotal, Transaction, TransactionInput, + TransactionOutput, UtxoStore, }; use chainscript::{opcodes::all as opc, Builder}; use codec::Encode; @@ -26,7 +26,7 @@ use frame_support::{ sp_io::crypto, sp_runtime::traits::{BlakeTwo256, Hash}, }; -use pallet_utxo_tokens::TokenInstance; + use sp_core::{sp_std::vec, sr25519::Public, testing::SR25519, H256, H512}; fn tx_input_gen_no_signature() -> (TransactionOutput, TransactionInput) { @@ -350,61 +350,56 @@ fn test_script() { #[test] fn test_tokens() { - use crate::TokensHigherID; - - let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); - test_ext.execute_with(|| { - // Let's create a new test token - let token_id = >::get() - .checked_add(1) - .ok_or("All tokens IDs has taken") - .unwrap(); - // Let's make a tx for a new token: - // * We need at least one input for the fee and one output for a new token. - // * TokenID for a new token has to be unique. 
- let instance = - TokenInstance::new(token_id, b"New token test".to_vec(), b"NTT".to_vec(), 1000); - let (utxo0, input0) = tx_input_gen_no_signature(); - let first_tx = Transaction { - // 100 MLT - inputs: vec![input0], - outputs: vec![ - // 100 a new tokens - TransactionOutput::new_token(token_id, instance.supply, H256::from(alice_pub_key)), - // 20 MLT to be paid as a fee, 80 MLT returning - TransactionOutput::new_pubkey(80, H256::from(alice_pub_key)), - ], - } - .sign_unchecked(&[utxo0], 0, &alice_pub_key); - assert_ok!(Utxo::spend(Origin::signed(H256::zero()), first_tx.clone())); - - // Store a new TokenInstance to the Storage - >::mutate(|x| { - if x.iter().find(|&x| x.id == token_id).is_none() { - x.push(instance.clone()) - } else { - panic!("the token has already existed with the same id") - } - }); - dbg!(&>::get()); - - // alice sends 1000 tokens to karl and the rest back to herself 10 tokens - let utxo_hash_mlt = first_tx.outpoint(1); - let utxo_hash_token = first_tx.outpoint(0); - let prev_utxos = [first_tx.outputs[1].clone(), first_tx.outputs[0].clone()]; - - let tx = Transaction { - inputs: vec![ - TransactionInput::new_empty(utxo_hash_mlt), - TransactionInput::new_empty(utxo_hash_token), - ], - outputs: vec![TransactionOutput::new_token(token_id, 10, H256::from(karl_pub_key))], - } - .sign_unchecked(&prev_utxos, 0, &alice_pub_key) - .sign_unchecked(&prev_utxos, 1, &alice_pub_key); - - assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); - }); + // let (mut test_ext, alice_pub_key, karl_pub_key) = new_test_ext_and_keys(); + // test_ext.execute_with(|| { + // // Let's create a new test token + // let token_id = BlakeTwo256::hash_of(&b"TEST"); + // let supply = 1000; + // // Let's make a tx for a new token: + // // * We need at least one input for the fee and one output for a new token. + // // * TokenID for a new token has to be unique. 
+ // let instance = TokenInstance::new_normal( + // token_id, + // b"New token test".to_vec(), + // b"NTT".to_vec(), + // supply, + // ); + // let mut first_tx = Transaction { + // inputs: vec![ + // // 100 MLT + // tx_input_gen_no_signature(), + // ], + // outputs: vec![ + // // 100 a new tokens + // TransactionOutput::new_token(token_id, supply, H256::from(alice_pub_key)), + // // 20 MLT to be paid as a fee, 80 MLT returning + // TransactionOutput::new_pubkey(80, H256::from(alice_pub_key)), + // ], + // } + // .sign_unchecked(&[utxo0], 0, &alice_pub_key); + // assert_ok!(Utxo::spend(Origin::signed(H256::zero()), first_tx.clone())); + // + // // Store a new TokenInstance to the Storage + // >::insert(token_id, Some(instance.clone())); + // dbg!(&>::get(token_id)); + // + // // alice sends 1000 tokens to karl and the rest back to herself 10 tokens + // let utxo_hash_mlt = first_tx.outpoint(1); + // let utxo_hash_token = first_tx.outpoint(0); + // let prev_utxos = [first_tx.outputs[1].clone(), first_tx.outputs[0].clone()]; + // + // let tx = Transaction { + // inputs: vec![ + // TransactionInput::new_empty(utxo_hash_mlt), + // TransactionInput::new_empty(utxo_hash_token), + // ], + // outputs: vec![TransactionOutput::new_token(token_id, 10, H256::from(karl_pub_key))], + // } + // .sign_unchecked(&prev_utxos, 0, &alice_pub_key) + // .sign_unchecked(&prev_utxos, 1, &alice_pub_key); + // + // assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + // }); } #[test] @@ -536,3 +531,49 @@ fn test_send_to_address() { ); }) } + +#[test] +fn nft_test() { + // execute_with_alice(|alice_pub_key| { + // // Let's create a new test nft + // let nft_id = BlakeTwo256::hash_of(&b"TEST"); + // let instance = TokenInstance::new_nft( + // nft_id, + // (*b"01010101010101010101010101010101").to_vec(), + // b"http://facebook.com".to_vec(), + // alice_pub_key.to_vec(), + // ); + // + // if let TokenInstance::Nft { + // id, + // data, + // data_url, + // creator_pubkey, + // .. 
+ // } = instance + // { + // let mut tx = Transaction { + // inputs: vec![ + // // 100 MLT + // tx_input_gen_no_signature(), + // ], + // outputs: vec![TransactionOutput::new_nft( + // id, + // data.to_vec(), + // data_url, + // H256::from_slice(creator_pubkey.as_slice()), + // )], + // }; + // let alice_sig = crypto::sr25519_sign(SR25519, &alice_pub_key, &tx.encode()).unwrap(); + // tx.inputs[0].witness = alice_sig.0.to_vec(); + // assert_ok!(Utxo::spend(Origin::signed(H256::zero()), tx.clone())); + // } + // + // // it should allow to write and read ? + // // let rsp = await dataToken.readData(firstTokenId); + // // assert.equal(rsp, empty); + // // await dataToken.writeData(firstTokenId, data); + // // rsp = await dataToken.readData(firstTokenId); + // // assert.equal(rsp, data); + // }); +} diff --git a/pallets/utxo/src/tokens.rs b/pallets/utxo/src/tokens.rs new file mode 100644 index 0000000..3331b67 --- /dev/null +++ b/pallets/utxo/src/tokens.rs @@ -0,0 +1,156 @@ +#![cfg_attr(not(feature = "std"), no_std)] + +// use crate::ss58_nostd::*; +// use crate::TransactionOutputFor; +use crate::base58_nostd::{FromBase58, FromBase58Error, ToBase58}; +use codec::{Decode, Encode}; +// use frame_support::sp_runtime::traits::{BlakeTwo256, Hash}; +use frame_support::ensure; +use frame_support::{dispatch::Vec, RuntimeDebug}; +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; +#[cfg(feature = "std")] +use sp_core::crypto::Ss58Codec; +use sp_core::{H160, H256}; + +const LENGTH_BYTES_TO_REPRESENT_ID: usize = 20; + +pub type Value = u128; + +pub struct Mlt(Value); +impl Mlt { + pub fn to_munit(&self) -> Value { + self.0 * 1_000 * 100_000_000 + } +} + +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] +enum TokenIdInner { + // todo: Need to check this + MLT, + Asset(H160), +} + +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[derive(Clone, Encode, Decode, Eq, 
PartialEq, PartialOrd, Ord, RuntimeDebug)] +pub struct TokenId { + inner: TokenIdInner, +} + +impl TokenId { + pub fn mlt() -> TokenId { + TokenId { + inner: TokenIdInner::MLT, + } + } + + pub fn new_asset(first_input_hash: H256) -> TokenId { + TokenId { + // We are loosing the first bytes of H256 over here + inner: TokenIdInner::Asset(H160::from(first_input_hash)), + } + } + + pub fn to_string(&self) -> Vec { + match self.inner { + TokenIdInner::MLT => vec![], + TokenIdInner::Asset(hash) => hash.as_bytes().to_base58().to_vec(), + } + } + + fn hash160_from_bytes(bytes: &[u8]) -> Result { + ensure!( + bytes.len() == LENGTH_BYTES_TO_REPRESENT_ID, + "Unexpected length of the asset ID" + ); + let mut buffer = [0u8; 20]; + buffer.copy_from_slice(bytes); + Ok(H160::from(buffer)) + } + + pub fn from_string(data: &str) -> Result { + let data = data.from_base58().map_err(|x| match x { + FromBase58Error::InvalidBase58Character { .. } => "Invalid Base58 character", + FromBase58Error::InvalidBase58Length => "Invalid Base58 length", + })?; + + let hash = TokenId::hash160_from_bytes(data.as_slice())?; + + Ok(TokenId { + inner: TokenIdInner::Asset(hash), + }) + } +} + +// We should implement it for Ss58Codec +impl AsMut<[u8]> for TokenId { + fn as_mut(&mut self) -> &mut [u8] { + match self.inner { + TokenIdInner::MLT => &mut [], + TokenIdInner::Asset(ref mut hash) => hash.as_bytes_mut(), + } + } +} + +// We should implement it for Ss58Codec +impl AsRef<[u8]> for TokenId { + fn as_ref(&self) -> &[u8] { + match self.inner { + TokenIdInner::MLT => &[], + TokenIdInner::Asset(ref hash) => hash.as_ref(), + } + } +} + +// We should implement it for Ss58Codec +impl Default for TokenId { + fn default() -> Self { + TokenId::mlt() + } +} + +#[cfg(feature = "std")] +// Unfortunately, the default codec can't be used with std +impl Ss58Codec for TokenId {} + +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, 
RuntimeDebug)] +pub enum OutputData { + // TokenTransfer data to another user. If it is a token, then the token data must also be transferred to the recipient. + #[codec(index = 1)] + TokenTransferV1 { token_id: TokenId, amount: u128 }, + // A new token creation + #[codec(index = 2)] + TokenIssuanceV1 { + token_id: TokenId, + token_ticker: Vec, + amount_to_issue: u128, + // Should be not more than 18 numbers + number_of_decimals: u8, + metadata_uri: Vec, + }, + // Burning a token or NFT + #[codec(index = 3)] + TokenBurnV1 { + token_id: TokenId, + amount_to_burn: u128, + }, + // A new NFT creation + #[codec(index = 4)] + NftMintV1 { + token_id: TokenId, + data_hash: NftDataHash, + metadata_uri: Vec, + }, +} + +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[derive(Clone, Encode, Decode, Eq, PartialEq, PartialOrd, Ord, RuntimeDebug)] +pub enum NftDataHash { + #[codec(index = 1)] + Hash32([u8; 32]), + #[codec(index = 2)] + Raw(Vec), + // Or any type that you want to implement +} diff --git a/pallets/utxo/src/verifier.rs b/pallets/utxo/src/verifier.rs new file mode 100644 index 0000000..2aee0ed --- /dev/null +++ b/pallets/utxo/src/verifier.rs @@ -0,0 +1,143 @@ +use crate::tokens::{OutputData, TokenId}; +use crate::{/*Transaction,*/ TransactionFor, TransactionOutputFor}; +use frame_support::ensure; +use frame_support::pallet_prelude::ValidTransaction; +use sp_core::sp_std::collections::btree_map::BTreeMap; +use sp_core::H256; + +pub struct TransactionVerifier<'a, T: frame_system::Config> { + tx: &'a TransactionFor, + input_map: Option>>, + output_map: Option>>, +} + +impl TransactionVerifier<'_, T> { + pub fn new(tx: &TransactionFor) -> TransactionVerifier { + TransactionVerifier { + tx, + input_map: None, + output_map: None, + } + } + + fn get_token_id_from_input(_outpoint: H256) -> TokenId { + unimplemented!() + } + + fn get_token_id_from_output(output: &TransactionOutputFor) -> TokenId { + match output.data { + Some(OutputData::TokenTransferV1 { ref 
token_id, .. }) + | Some(OutputData::TokenIssuanceV1 { ref token_id, .. }) + | Some(OutputData::NftMintV1 { ref token_id, .. }) => token_id.clone(), + Some(OutputData::TokenBurnV1 { .. }) => unreachable!(), + _ => TokenId::mlt(), + } + } + + fn get_output_by_outpoint(_outpoint: H256) -> TransactionOutputFor { + unimplemented!() + } + + pub fn checking_inputs(&mut self) -> Result, &'static str> { + //ensure rather than assert to avoid panic + //both inputs and outputs should contain at least 1 and at most u32::MAX - 1 entries + ensure!(!self.tx.inputs.is_empty(), "no inputs"); + ensure!( + self.tx.inputs.len() < (u32::MAX as usize), + "too many inputs" + ); + + //ensure each input is used only a single time + //maps each input into btree + //if map.len() > num of inputs then fail + //https://doc.rust-lang.org/std/collections/struct.BTreeMap.html + //WARNING workshop code has a bug here + //https://github.com/substrate-developer-hub/utxo-workshop/blob/workshop/runtime/src/utxo.rs + //input_map.len() > transaction.inputs.len() //THIS IS WRONG + + let input_map: BTreeMap> = self + .tx + .inputs + .iter() + .map(|input| { + ( + TransactionVerifier::<'_, T>::get_token_id_from_input(input.outpoint), + TransactionVerifier::<'_, T>::get_output_by_outpoint(input.outpoint), + ) + }) + .collect(); + //we want map size and input size to be equal to ensure each is used only once + ensure!( + input_map.len() == self.tx.inputs.len(), + "each input should be used only once" + ); + self.input_map = Some(input_map); + unimplemented!() + } + + pub fn checking_outputs(&mut self) -> Result, &'static str> { + //ensure rather than assert to avoid panic + //both inputs and outputs should contain at least 1 and at most u32::MAX - 1 entries + ensure!(!self.tx.outputs.is_empty(), "no outputs"); + ensure!( + self.tx.outputs.len() < (u32::MAX as usize), + "too many outputs" + ); + + //ensure each output is unique + //map each output to btree to count unique elements + //WARNING example code has 
a bug here + //out_map.len() != transaction.outputs.len() //THIS IS WRONG + + let output_map: BTreeMap> = self + .tx + .outputs + .iter() + .map(|output| { + ( + TransactionVerifier::<'_, T>::get_token_id_from_output(&output), + output.clone(), + ) + }) + .collect(); + //check each output is defined only once + ensure!( + output_map.len() == self.tx.outputs.len(), + "each output should be used once" + ); + self.output_map = Some(output_map); + unimplemented!() + } + + pub fn checking_signatures(&self) -> Result, &'static str> { + unimplemented!() + } + + pub fn checking_utxos_exists(&self) -> Result, &'static str> { + unimplemented!() + } + + pub fn checking_tokens_transferring(&self) -> Result, &'static str> { + unimplemented!() + } + + pub fn checking_tokens_issued(&self) -> Result, &'static str> { + unimplemented!() + } + + pub fn checking_nft_mint(&self) -> Result, &'static str> { + unimplemented!() + } + + pub fn checking_assets_burn(&self) -> Result, &'static str> { + unimplemented!() + } + + pub fn calculating_reward(&self) -> Result, &'static str> { + unimplemented!() + } + + pub fn collect_result(&self) -> Result { + unimplemented!() + } +} diff --git a/pallets/utxo/tokens/Cargo.toml b/pallets/utxo/tokens/Cargo.toml deleted file mode 100644 index f78e558..0000000 --- a/pallets/utxo/tokens/Cargo.toml +++ /dev/null @@ -1,25 +0,0 @@ -[package] -name = "pallet-utxo-tokens" -version = "0.1.0" -authors = ["RBB Lab"] -edition = "2018" - -[dependencies] -hex-literal = "0.2.1" -log = "0.4.8" - -[dependencies.frame-support] -default-features = false -git = 'https://github.com/paritytech/substrate.git' -version = '4.0.0-dev' -branch = "master" - -[dependencies.serde] -version = "1.0.104" -features = ["derive"] - -[dependencies.codec] -package = "parity-scale-codec" -version = "2.0.0" -default-features = false -features = ["derive"] diff --git a/pallets/utxo/tokens/Readme.md b/pallets/utxo/tokens/Readme.md deleted file mode 100644 index 412d360..0000000 --- 
a/pallets/utxo/tokens/Readme.md +++ /dev/null @@ -1,24 +0,0 @@ -# Token creation - -Call the extrinsic: -```bash -* Creator - Alice -* Pubkey - 0x2e1e60ac02d5a716b300e83b04bb4ddd48360ea119f5024f0ea7b2b1c1578a52 -* Input - we will take Fee over here -* Token name - any value -* Token ticker - any value -* Supply - any value -``` - -# Request the tokens list - -Call the RPC: - -```bash -curl http://localhost:9933 -H "Content-Type:application/json;charset=utf-8" -d '{ - "jsonrpc":"2.0", - "id":1, - "method":"tokens_list", - "params": [] -}' -``` \ No newline at end of file diff --git a/pallets/utxo/tokens/src/lib.rs b/pallets/utxo/tokens/src/lib.rs deleted file mode 100644 index 8c08575..0000000 --- a/pallets/utxo/tokens/src/lib.rs +++ /dev/null @@ -1,44 +0,0 @@ -#![cfg_attr(not(feature = "std"), no_std)] - -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; - -use codec::{Decode, Encode}; -use frame_support::{dispatch::Vec, RuntimeDebug}; - -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Hash)] -pub struct TokenInstance { - pub id: u64, - pub name: Vec, - pub ticker: Vec, - pub supply: u128, - // We can add another fields like: - // pub number_format: NumberFormat, - // pub image: UUID, - // pub transaction: XXX, -} - -impl Default for TokenInstance { - fn default() -> Self { - Self { - id: 0, - name: Vec::new(), - ticker: Vec::new(), - supply: 0, - } - } -} - -impl TokenInstance { - pub fn new(id: u64, name: Vec, ticker: Vec, supply: u128) -> Self { - Self { - id, - name, - ticker, - supply, - } - } -} - -pub type TokenListData = Vec; diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index 6ab2f14..5e7a413 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -15,9 +15,6 @@ git = 'https://github.com/paritytech/substrate.git' version = '5.0.0-dev' branch = "master" -[dependencies] -pallet-utxo-tokens = { path = "../pallets/utxo/tokens" } - [dependencies.codec] 
default-features = false features = ['derive'] diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index da049cb..514e662 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -571,12 +571,6 @@ impl_runtime_apis! { fn send() -> u32 { Utxo::send() } - - // What means Vec<(u64, Vec)> ? Have a look at utxo/rpc/runtime-api/src/lib.rs - fn tokens_list() -> Vec<(u64, Vec)> { - let list = Utxo::tokens_list(); - list.into_iter().map(|x| (x.id, x.name)).collect() - } } impl pallet_contracts_rpc_runtime_api::ContractsApi<