From ef56750219c30fd9f87a8f8ed5f644af6f89f248 Mon Sep 17 00:00:00 2001 From: Roman Date: Thu, 17 Oct 2024 14:22:35 -0700 Subject: [PATCH 01/27] create the copy of `bittensor/core/subtensor.py` with async suffix. --- bittensor/core/subtensor.py | 2 +- bittensor/core/subtensor_async.py | 1890 +++++++++++++++++++++++++++++ 2 files changed, 1891 insertions(+), 1 deletion(-) create mode 100644 bittensor/core/subtensor_async.py diff --git a/bittensor/core/subtensor.py b/bittensor/core/subtensor.py index b57b3d85bd..f0a1dde55a 100644 --- a/bittensor/core/subtensor.py +++ b/bittensor/core/subtensor.py @@ -16,7 +16,7 @@ # DEALINGS IN THE SOFTWARE. """ -The ``bittensor.core.subtensor`` module in Bittensor serves as a crucial interface for interacting with the Bittensor +The ``bittensor.core.subtensor.Subtensor`` module in Bittensor serves as a crucial interface for interacting with the Bittensor blockchain, facilitating a range of operations essential for the decentralized machine learning network. """ diff --git a/bittensor/core/subtensor_async.py b/bittensor/core/subtensor_async.py new file mode 100644 index 0000000000..31ff5216b5 --- /dev/null +++ b/bittensor/core/subtensor_async.py @@ -0,0 +1,1890 @@ +# The MIT License (MIT) +# Copyright © 2024 Opentensor Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated +# documentation files (the “Software”), to deal in the Software without restriction, including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, +# and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all copies or substantial portions of +# the Software. 
+# +# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO +# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +# DEALINGS IN THE SOFTWARE. + +""" +Async version of `bittensor.core.subtensor.Subtensor` module. +""" + +import argparse +import copy +import socket +from typing import Union, Optional, TypedDict, Any + +import numpy as np +import scalecodec +from bittensor_wallet import Wallet +from numpy.typing import NDArray +from retry import retry +from scalecodec.base import RuntimeConfiguration +from scalecodec.exceptions import RemainingScaleBytesNotEmptyException +from scalecodec.type_registry import load_type_registry_preset +from scalecodec.types import ScaleType +from substrateinterface.base import QueryMapResult, SubstrateInterface + +from bittensor.core import settings +from bittensor.core.axon import Axon +from bittensor.core.chain_data import ( + NeuronInfo, + PrometheusInfo, + SubnetHyperparameters, + NeuronInfoLite, + custom_rpc_type_registry, +) +from bittensor.core.config import Config +from bittensor.core.extrinsics.commit_weights import ( + commit_weights_extrinsic, + reveal_weights_extrinsic, +) +from bittensor.core.extrinsics.prometheus import ( + do_serve_prometheus, + prometheus_extrinsic, +) +from bittensor.core.extrinsics.registration import ( + burned_register_extrinsic, + register_extrinsic, +) +from bittensor.core.extrinsics.serving import ( + do_serve_axon, + serve_axon_extrinsic, + publish_metadata, + get_metadata, +) +from bittensor.core.extrinsics.set_weights import set_weights_extrinsic +from bittensor.core.extrinsics.transfer import ( + transfer_extrinsic, +) +from bittensor.core.metagraph import 
Metagraph +from bittensor.utils import torch +from bittensor.utils import u16_normalized_float, networking +from bittensor.utils.balance import Balance +from bittensor.utils.btlogging import logging +from bittensor.utils.weight_utils import generate_weight_hash + +KEY_NONCE: dict[str, int] = {} + + +class ParamWithTypes(TypedDict): + name: str # Name of the parameter. + type: str # ScaleType string of the parameter. + + +class Subtensor: + """ + The Subtensor class in Bittensor serves as a crucial interface for interacting with the Bittensor blockchain, + facilitating a range of operations essential for the decentralized machine learning network. + + This class enables neurons (network participants) to engage in activities such as registering on the network, + managing staked weights, setting inter-neuronal weights, and participating in consensus mechanisms. + + The Bittensor network operates on a digital ledger where each neuron holds stakes (S) and learns a set + of inter-peer weights (W). These weights, set by the neurons themselves, play a critical role in determining + the ranking and incentive mechanisms within the network. Higher-ranked neurons, as determined by their + contributions and trust within the network, receive more incentives. + + The Subtensor class connects to various Bittensor networks like the main ``finney`` network or local test + networks, providing a gateway to the blockchain layer of Bittensor. It leverages a staked weighted trust + system and consensus to ensure fair and distributed incentive mechanisms, where incentives (I) are + primarily allocated to neurons that are trusted by the majority of the network. + + Additionally, Bittensor introduces a speculation-based reward mechanism in the form of bonds (B), allowing + neurons to accumulate bonds in other neurons, speculating on their future value. 
This mechanism aligns + with market-based speculation, incentivizing neurons to make judicious decisions in their inter-neuronal + investments. + + Example Usage:: + + from bittensor.core.subtensor import Subtensor + + # Connect to the main Bittensor network (Finney). + finney_subtensor = Subtensor(network='finney') + + # Close websocket connection with the Bittensor network. + finney_subtensor.close() + + # Register a new neuron on the network. + wallet = bittensor_wallet.Wallet(...) # Assuming a wallet instance is created. + netuid = 1 + success = finney_subtensor.register(wallet=wallet, netuid=netuid) + + # Set inter-neuronal weights for collaborative learning. + success = finney_subtensor.set_weights(wallet=wallet, netuid=netuid, uids=[...], weights=[...]) + + # Get the metagraph for a specific subnet using given subtensor connection + metagraph = finney_subtensor.metagraph(netuid=netuid) + + By facilitating these operations, the Subtensor class is instrumental in maintaining the decentralized + intelligence and dynamic learning environment of the Bittensor network, as envisioned in its foundational + principles and mechanisms described in the `NeurIPS paper + `_. paper. + """ + + def __init__( + self, + network: Optional[str] = None, + config: Optional["Config"] = None, + _mock: bool = False, + log_verbose: bool = False, + connection_timeout: int = 600, + ) -> None: + """ + Initializes a Subtensor interface for interacting with the Bittensor blockchain. + + NOTE: + Currently subtensor defaults to the ``finney`` network. This will change in a future release. + + We strongly encourage users to run their own local subtensor node whenever possible. This increases decentralization and resilience of the network. In a future release, local subtensor will become the default and the fallback to ``finney`` removed. Please plan ahead for this change. We will provide detailed instructions on how to run a local subtensor node in the documentation in a subsequent release. 
+ + Args: + network (Optional[str]): The network name to connect to (e.g., ``finney``, ``local``). This can also be the chain endpoint (e.g., ``wss://entrypoint-finney.opentensor.ai:443``) and will be correctly parsed into the network and chain endpoint. If not specified, defaults to the main Bittensor network. + config (Optional[bittensor.core.config.Config]): Configuration object for the subtensor. If not provided, a default configuration is used. + _mock (bool): If set to ``True``, uses a mocked connection for testing purposes. Default is ``False``. + log_verbose (bool): Whether to enable verbose logging. If set to ``True``, detailed log information about the connection and network operations will be provided. Default is ``True``. + connection_timeout (int): The maximum time in seconds to keep the connection alive. Default is ``600``. + + This initialization sets up the connection to the specified Bittensor network, allowing for various blockchain operations such as neuron registration, stake management, and setting weights. + """ + # Determine config.subtensor.chain_endpoint and config.subtensor.network config. + # If chain_endpoint is set, we override the network flag, otherwise, the chain_endpoint is assigned by the + # network. + # Argument importance: network > chain_endpoint > config.subtensor.chain_endpoint > config.subtensor.network + + if config is None: + config = Subtensor.config() + self._config = copy.deepcopy(config) + + # Setup config.subtensor.network and config.subtensor.chain_endpoint + self.chain_endpoint, self.network = Subtensor.setup_config( + network, self._config + ) + + if ( + self.network == "finney" + or self.chain_endpoint == settings.FINNEY_ENTRYPOINT + ) and log_verbose: + logging.info( + f"You are connecting to {self.network} network with endpoint {self.chain_endpoint}." + ) + logging.debug( + "We strongly encourage running a local subtensor node whenever possible. 
" + "This increases decentralization and resilience of the network." + ) + logging.debug( + "In a future release, local subtensor will become the default endpoint. " + "To get ahead of this change, please run a local subtensor node and point to it." + ) + + self.log_verbose = log_verbose + self._connection_timeout = connection_timeout + self.substrate: "SubstrateInterface" = None + self._get_substrate() + + def __str__(self) -> str: + if self.network == self.chain_endpoint: + # Connecting to chain endpoint without network known. + return f"subtensor({self.chain_endpoint})" + else: + # Connecting to network with endpoint known. + return f"subtensor({self.network}, {self.chain_endpoint})" + + def __repr__(self) -> str: + return self.__str__() + + def close(self): + """Cleans up resources for this subtensor instance like active websocket connection and active extensions.""" + if self.substrate: + self.substrate.close() + + def _get_substrate(self): + """Establishes a connection to the Substrate node using configured parameters.""" + try: + # Set up params. + self.substrate = SubstrateInterface( + ss58_format=settings.SS58_FORMAT, + use_remote_preset=True, + url=self.chain_endpoint, + type_registry=settings.TYPE_REGISTRY, + ) + if self.log_verbose: + logging.debug( + f"Connected to {self.network} network and {self.chain_endpoint}." + ) + + try: + self.substrate.websocket.settimeout(self._connection_timeout) + except (AttributeError, TypeError, socket.error, OSError) as e: + logging.warning(f"Error setting timeout: {e}") + + except ConnectionRefusedError as error: + logging.error( + f"Could not connect to {self.network} network with {self.chain_endpoint} chain endpoint.", + ) + logging.info( + "You can check if you have connectivity by running this command: nc -vz localhost " + f"{self.chain_endpoint}" + ) + raise ConnectionRefusedError(error.args) + + @staticmethod + def config() -> "Config": + """ + Creates and returns a Bittensor configuration object. 
+ + Returns: + config (bittensor.core.config.Config): A Bittensor configuration object configured with arguments added by the `subtensor.add_args` method. + """ + parser = argparse.ArgumentParser() + Subtensor.add_args(parser) + return Config(parser, args=[]) + + @staticmethod + def setup_config(network: Optional[str], config: "Config"): + """ + Sets up and returns the configuration for the Subtensor network and endpoint. + + This method determines the appropriate network and chain endpoint based on the provided network string or + configuration object. It evaluates the network and endpoint in the following order of precedence: + 1. Provided network string. + 2. Configured chain endpoint in the `config` object. + 3. Configured network in the `config` object. + 4. Default chain endpoint. + 5. Default network. + + Args: + network (Optional[str]): The name of the Subtensor network. If None, the network and endpoint will be determined from the `config` object. + config (bittensor.core.config.Config): The configuration object containing the network and chain endpoint settings. + + Returns: + tuple: A tuple containing the formatted WebSocket endpoint URL and the evaluated network name. 
+ """ + if network is not None: + ( + evaluated_network, + evaluated_endpoint, + ) = Subtensor.determine_chain_endpoint_and_network(network) + else: + if config.is_set("subtensor.chain_endpoint"): + ( + evaluated_network, + evaluated_endpoint, + ) = Subtensor.determine_chain_endpoint_and_network( + config.subtensor.chain_endpoint + ) + + elif config.is_set("subtensor.network"): + ( + evaluated_network, + evaluated_endpoint, + ) = Subtensor.determine_chain_endpoint_and_network( + config.subtensor.network + ) + + elif config.subtensor.get("chain_endpoint"): + ( + evaluated_network, + evaluated_endpoint, + ) = Subtensor.determine_chain_endpoint_and_network( + config.subtensor.chain_endpoint + ) + + elif config.subtensor.get("network"): + ( + evaluated_network, + evaluated_endpoint, + ) = Subtensor.determine_chain_endpoint_and_network( + config.subtensor.network + ) + + else: + ( + evaluated_network, + evaluated_endpoint, + ) = Subtensor.determine_chain_endpoint_and_network( + settings.DEFAULTS.subtensor.network + ) + + return ( + networking.get_formatted_ws_endpoint_url(evaluated_endpoint), + evaluated_network, + ) + + @classmethod + def help(cls): + """Print help to stdout.""" + parser = argparse.ArgumentParser() + cls.add_args(parser) + print(cls.__new__.__doc__) + parser.print_help() + + @classmethod + def add_args(cls, parser: "argparse.ArgumentParser", prefix: Optional[str] = None): + """ + Adds command-line arguments to the provided ArgumentParser for configuring the Subtensor settings. + + Args: + parser (argparse.ArgumentParser): The ArgumentParser object to which the Subtensor arguments will be added. + prefix (Optional[str]): An optional prefix for the argument names. If provided, the prefix is prepended to each argument name. + + Arguments added: + --subtensor.network: The Subtensor network flag. Possible values are 'finney', 'test', 'archive', and 'local'. Overrides the chain endpoint if set. + --subtensor.chain_endpoint: The Subtensor chain endpoint flag. 
If set, it overrides the network flag. + --subtensor._mock: If true, uses a mocked connection to the chain. + + Example: + parser = argparse.ArgumentParser() + Subtensor.add_args(parser) + """ + prefix_str = "" if prefix is None else f"{prefix}." + try: + default_network = settings.DEFAULT_NETWORK + default_chain_endpoint = settings.FINNEY_ENTRYPOINT + + parser.add_argument( + f"--{prefix_str}subtensor.network", + default=default_network, + type=str, + help="""The subtensor network flag. The likely choices are: + -- finney (main network) + -- test (test network) + -- archive (archive network +300 blocks) + -- local (local running network) + If this option is set it overloads subtensor.chain_endpoint with + an entry point node from that network. + """, + ) + parser.add_argument( + f"--{prefix_str}subtensor.chain_endpoint", + default=default_chain_endpoint, + type=str, + help="""The subtensor endpoint flag. If set, overrides the --network flag.""", + ) + parser.add_argument( + f"--{prefix_str}subtensor._mock", + default=False, + type=bool, + help="""If true, uses a mocked connection to the chain.""", + ) + + except argparse.ArgumentError: + # re-parsing arguments. 
+ pass + + # Inner private functions + @networking.ensure_connected + def _encode_params( + self, + call_definition: list["ParamWithTypes"], + params: Union[list[Any], dict[str, Any]], + ) -> str: + """Returns a hex encoded string of the params using their types.""" + param_data = scalecodec.ScaleBytes(b"") + + for i, param in enumerate(call_definition["params"]): # type: ignore + scale_obj = self.substrate.create_scale_object(param["type"]) + if type(params) is list: + param_data += scale_obj.encode(params[i]) + else: + if param["name"] not in params: + raise ValueError(f"Missing param {param['name']} in params dict.") + + param_data += scale_obj.encode(params[param["name"]]) + + return param_data.to_hex() + + def _get_hyperparameter( + self, param_name: str, netuid: int, block: Optional[int] = None + ) -> Optional[Any]: + """ + Retrieves a specified hyperparameter for a specific subnet. + + Args: + param_name (str): The name of the hyperparameter to retrieve. + netuid (int): The unique identifier of the subnet. + block (Optional[int]): The blockchain block number for the query. + + Returns: + Optional[Union[int, float]]: The value of the specified hyperparameter if the subnet exists, ``None`` otherwise. + """ + if not self.subnet_exists(netuid, block): + return None + + result = self.query_subtensor(param_name, block, [netuid]) + if result is None or not hasattr(result, "value"): + return None + + return result.value + + # Calls methods + @networking.ensure_connected + def query_subtensor( + self, name: str, block: Optional[int] = None, params: Optional[list] = None + ) -> "ScaleType": + """ + Queries named storage from the Subtensor module on the Bittensor blockchain. This function is used to retrieve specific data or parameters from the blockchain, such as stake, rank, or other neuron-specific attributes. + + Args: + name (str): The name of the storage function to query. + block (Optional[int]): The blockchain block number at which to perform the query. 
+ params (Optional[list[object]]): A list of parameters to pass to the query function. + + Returns: + query_response (scalecodec.ScaleType): An object containing the requested data. + + This query function is essential for accessing detailed information about the network and its neurons, providing valuable insights into the state and dynamics of the Bittensor ecosystem. + """ + + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=logging) + def make_substrate_call_with_retry() -> "ScaleType": + return self.substrate.query( + module="SubtensorModule", + storage_function=name, + params=params, + block_hash=( + None if block is None else self.substrate.get_block_hash(block) + ), + ) + + return make_substrate_call_with_retry() + + @networking.ensure_connected + def query_map_subtensor( + self, name: str, block: Optional[int] = None, params: Optional[list] = None + ) -> "QueryMapResult": + """ + Queries map storage from the Subtensor module on the Bittensor blockchain. This function is designed to retrieve a map-like data structure, which can include various neuron-specific details or network-wide attributes. + + Args: + name (str): The name of the map storage function to query. + block (Optional[int]): The blockchain block number at which to perform the query. + params (Optional[list[object]]): A list of parameters to pass to the query function. + + Returns: + QueryMapResult (substrateinterface.base.QueryMapResult): An object containing the map-like data structure, or ``None`` if not found. + + This function is particularly useful for analyzing and understanding complex network structures and relationships within the Bittensor ecosystem, such as inter-neuronal connections and stake distributions. 
+ """ + + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=logging) + def make_substrate_call_with_retry(): + return self.substrate.query_map( + module="SubtensorModule", + storage_function=name, + params=params, + block_hash=( + None if block is None else self.substrate.get_block_hash(block) + ), + ) + + return make_substrate_call_with_retry() + + def query_runtime_api( + self, + runtime_api: str, + method: str, + params: Optional[Union[list[int], dict[str, int]]], + block: Optional[int] = None, + ) -> Optional[str]: + """ + Queries the runtime API of the Bittensor blockchain, providing a way to interact with the underlying runtime and retrieve data encoded in Scale Bytes format. This function is essential for advanced users who need to interact with specific runtime methods and decode complex data types. + + Args: + runtime_api (str): The name of the runtime API to query. + method (str): The specific method within the runtime API to call. + params (Optional[list[ParamWithTypes]]): The parameters to pass to the method call. + block (Optional[int]): The blockchain block number at which to perform the query. + + Returns: + Optional[str]: The Scale Bytes encoded result from the runtime API call, or ``None`` if the call fails. + + This function enables access to the deeper layers of the Bittensor blockchain, allowing for detailed and specific interactions with the network's runtime environment. 
+ """ + call_definition = settings.TYPE_REGISTRY["runtime_api"][runtime_api]["methods"][ + method + ] + + json_result = self.state_call( + method=f"{runtime_api}_{method}", + data=( + "0x" + if params is None + else self._encode_params(call_definition=call_definition, params=params) + ), + block=block, + ) + + if json_result is None: + return None + + return_type = call_definition["type"] + + as_scale_bytes = scalecodec.ScaleBytes(json_result["result"]) + + rpc_runtime_config = RuntimeConfiguration() + rpc_runtime_config.update_type_registry(load_type_registry_preset("legacy")) + rpc_runtime_config.update_type_registry(custom_rpc_type_registry) + + obj = rpc_runtime_config.create_scale_object(return_type, as_scale_bytes) + if obj.data.to_hex() == "0x0400": # RPC returned None result + return None + + return obj.decode() + + @networking.ensure_connected + def state_call( + self, method: str, data: str, block: Optional[int] = None + ) -> dict[Any, Any]: + """ + Makes a state call to the Bittensor blockchain, allowing for direct queries of the blockchain's state. This function is typically used for advanced queries that require specific method calls and data inputs. + + Args: + method (str): The method name for the state call. + data (str): The data to be passed to the method. + block (Optional[int]): The blockchain block number at which to perform the state call. + + Returns: + result (dict[Any, Any]): The result of the rpc call. + + The state call function provides a more direct and flexible way of querying blockchain data, useful for specific use cases where standard queries are insufficient. 
+ """ + + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=logging) + def make_substrate_call_with_retry() -> dict[Any, Any]: + block_hash = None if block is None else self.substrate.get_block_hash(block) + return self.substrate.rpc_request( + method="state_call", + params=[method, data, block_hash] if block_hash else [method, data], + ) + + return make_substrate_call_with_retry() + + @networking.ensure_connected + def query_map( + self, + module: str, + name: str, + block: Optional[int] = None, + params: Optional[list] = None, + ) -> "QueryMapResult": + """ + Queries map storage from any module on the Bittensor blockchain. This function retrieves data structures that represent key-value mappings, essential for accessing complex and structured data within the blockchain modules. + + Args: + module (str): The name of the module from which to query the map storage. + name (str): The specific storage function within the module to query. + block (Optional[int]): The blockchain block number at which to perform the query. + params (Optional[list[object]]): Parameters to be passed to the query. + + Returns: + result (substrateinterface.base.QueryMapResult): A data structure representing the map storage if found, ``None`` otherwise. + + This function is particularly useful for retrieving detailed and structured data from various blockchain modules, offering insights into the network's state and the relationships between its different components. 
+ """ + + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=logging) + def make_substrate_call_with_retry() -> "QueryMapResult": + return self.substrate.query_map( + module=module, + storage_function=name, + params=params, + block_hash=( + None if block is None else self.substrate.get_block_hash(block) + ), + ) + + return make_substrate_call_with_retry() + + @networking.ensure_connected + def query_constant( + self, module_name: str, constant_name: str, block: Optional[int] = None + ) -> Optional["ScaleType"]: + """ + Retrieves a constant from the specified module on the Bittensor blockchain. This function is used to access fixed parameters or values defined within the blockchain's modules, which are essential for understanding the network's configuration and rules. + + Args: + module_name (str): The name of the module containing the constant. + constant_name (str): The name of the constant to retrieve. + block (Optional[int]): The blockchain block number at which to query the constant. + + Returns: + Optional[scalecodec.ScaleType]: The value of the constant if found, ``None`` otherwise. + + Constants queried through this function can include critical network parameters such as inflation rates, consensus rules, or validation thresholds, providing a deeper understanding of the Bittensor network's operational parameters. + """ + + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=logging) + def make_substrate_call_with_retry(): + return self.substrate.get_constant( + module_name=module_name, + constant_name=constant_name, + block_hash=( + None if block is None else self.substrate.get_block_hash(block) + ), + ) + + return make_substrate_call_with_retry() + + @networking.ensure_connected + def query_module( + self, + module: str, + name: str, + block: Optional[int] = None, + params: Optional[list] = None, + ) -> "ScaleType": + """ + Queries any module storage on the Bittensor blockchain with the specified parameters and block number. 
This function is a generic query interface that allows for flexible and diverse data retrieval from various blockchain modules. + + Args: + module (str): The name of the module from which to query data. + name (str): The name of the storage function within the module. + block (Optional[int]): The blockchain block number at which to perform the query. + params (Optional[list[object]]): A list of parameters to pass to the query function. + + Returns: + Optional[scalecodec.ScaleType]: An object containing the requested data if found, ``None`` otherwise. + + This versatile query function is key to accessing a wide range of data and insights from different parts of the Bittensor blockchain, enhancing the understanding and analysis of the network's state and dynamics. + """ + + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=logging) + def make_substrate_call_with_retry() -> "ScaleType": + return self.substrate.query( + module=module, + storage_function=name, + params=params, + block_hash=( + None if block is None else self.substrate.get_block_hash(block) + ), + ) + + return make_substrate_call_with_retry() + + # Common subtensor methods + def metagraph( + self, netuid: int, lite: bool = True, block: Optional[int] = None + ) -> "Metagraph": # type: ignore + """ + Returns a synced metagraph for a specified subnet within the Bittensor network. The metagraph represents the network's structure, including neuron connections and interactions. + + Args: + netuid (int): The network UID of the subnet to query. + lite (bool): If true, returns a metagraph using a lightweight sync (no weights, no bonds). Default is ``True``. + block (Optional[int]): Block number for synchronization, or ``None`` for the latest block. + + Returns: + bittensor.core.metagraph.Metagraph: The metagraph representing the subnet's structure and neuron relationships. 
+ + The metagraph is an essential tool for understanding the topology and dynamics of the Bittensor network's decentralized architecture, particularly in relation to neuron interconnectivity and consensus processes. + """ + metagraph = Metagraph( + network=self.network, netuid=netuid, lite=lite, sync=False + ) + metagraph.sync(block=block, lite=lite, subtensor=self) + + return metagraph + + @staticmethod + def determine_chain_endpoint_and_network( + network: str, + ) -> tuple[Optional[str], Optional[str]]: + """Determines the chain endpoint and network from the passed network or chain_endpoint. + + Args: + network (str): The network flag. The choices are: ``finney`` (main network), ``archive`` (archive network +300 blocks), ``local`` (local running network), ``test`` (test network). + + Returns: + tuple[Optional[str], Optional[str]]: The network and chain endpoint flag. If passed, overrides the ``network`` argument. + """ + + if network is None: + return None, None + if network in ["finney", "local", "test", "archive"]: + if network == "finney": + # Kiru Finney staging network. 
            return network, settings.FINNEY_ENTRYPOINT
        elif network == "local":
            return network, settings.LOCAL_ENTRYPOINT
        elif network == "test":
            return network, settings.FINNEY_TEST_ENTRYPOINT
        elif network == "archive":
            return network, settings.ARCHIVE_ENTRYPOINT
        else:
            # The caller passed an endpoint URL (or arbitrary string) rather than a
            # known network name: map recognised endpoints back to their names.
            if (
                network == settings.FINNEY_ENTRYPOINT
                or "entrypoint-finney.opentensor.ai" in network
            ):
                return "finney", settings.FINNEY_ENTRYPOINT
            elif (
                network == settings.FINNEY_TEST_ENTRYPOINT
                or "test.finney.opentensor.ai" in network
            ):
                return "test", settings.FINNEY_TEST_ENTRYPOINT
            elif (
                network == settings.ARCHIVE_ENTRYPOINT
                or "archive.chain.opentensor.ai" in network
            ):
                return "archive", settings.ARCHIVE_ENTRYPOINT
            elif "127.0.0.1" in network or "localhost" in network:
                return "local", network
            else:
                return "unknown", network
        return None, None

    def get_netuids_for_hotkey(
        self, hotkey_ss58: str, block: Optional[int] = None
    ) -> list[int]:
        """Return the list of subnet UIDs (netuids) on which a hotkey is registered.

        Args:
            hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            list[int]: Netuids where the hotkey is a network member; empty list if none.
        """
        result = self.query_map_subtensor("IsNetworkMember", block, [hotkey_ss58])
        # NOTE(review): the guard checks `hasattr(result, "records")` but the
        # comprehension iterates `result` directly — presumably QueryMapResult is
        # itself iterable over (key, value) records; confirm against
        # substrateinterface.QueryMapResult.
        return (
            [record[0].value for record in result if record[1]]
            if result and hasattr(result, "records")
            else []
        )

    @networking.ensure_connected
    def get_current_block(self) -> int:
        """Return the current block number of the chain.

        Returns:
            int: The latest chain block number.
        """

        # Retries transient RPC failures: 3 tries, exponential backoff 1s -> 2s (capped at 4s).
        @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=logging)
        def make_substrate_call_with_retry():
            return self.substrate.get_block_number(None)  # type: ignore

        return make_substrate_call_with_retry()

    def is_hotkey_registered_any(
        self, hotkey_ss58: str, block: Optional[int] = None
    ) -> bool:
        """Check whether a hotkey is registered on *any* subnet.

        Args:
            hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            bool: ``True`` if registered on at least one subnet, ``False`` otherwise.
        """
        return len(self.get_netuids_for_hotkey(hotkey_ss58, block)) > 0

    def is_hotkey_registered_on_subnet(
        self, hotkey_ss58: str, netuid: int, block: Optional[int] = None
    ) -> bool:
        """Check whether a hotkey is registered on a specific subnet.

        Args:
            hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey.
            netuid (int): The unique identifier of the subnet.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            bool: ``True`` if the hotkey has a UID on the given subnet.
        """
        return self.get_uid_for_hotkey_on_subnet(hotkey_ss58, netuid, block) is not None

    def is_hotkey_registered(
        self,
        hotkey_ss58: str,
        netuid: Optional[int] = None,
        block: Optional[int] = None,
    ) -> bool:
        """Check hotkey registration, globally or on one subnet.

        Args:
            hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey.
            netuid (Optional[int]): Subnet to check; if ``None``, checks all subnets.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            bool: ``True`` if registered in the requested scope, ``False`` otherwise.
        """
        if netuid is None:
            return self.is_hotkey_registered_any(hotkey_ss58, block)
        else:
            return self.is_hotkey_registered_on_subnet(hotkey_ss58, netuid, block)

    # Not used in Bittensor, but is actively used by the community in almost all subnets
    def set_weights(
        self,
        wallet: "Wallet",
        netuid: int,
        uids: Union[NDArray[np.int64], "torch.LongTensor", list],
        weights: Union[NDArray[np.float32], "torch.FloatTensor", list],
        version_key: int = settings.version_as_int,
        wait_for_inclusion: bool = False,
        wait_for_finalization: bool = False,
        prompt: bool = False,
        max_retries: int = 5,
    ) -> tuple[bool, str]:
        """Set this neuron's inter-neuronal weights on a subnet, retrying up to ``max_retries``.

        Args:
            wallet (bittensor_wallet.Wallet): Wallet of the neuron setting weights.
            netuid (int): The unique identifier of the subnet.
            uids (Union[NDArray[np.int64], torch.LongTensor, list]): UIDs the weights apply to.
            weights (Union[NDArray[np.float32], torch.FloatTensor, list]): Weight per UID.
            version_key (int): Version key for network compatibility.
            wait_for_inclusion (bool): Wait for block inclusion. Default ``False``.
            wait_for_finalization (bool): Wait for finalization. Default ``False``.
            prompt (bool): Prompt for confirmation before proceeding. Default ``False``.
            max_retries (int): Maximum attempts. Default ``5``.

        Returns:
            tuple[bool, str]: Success flag and a message describing the outcome.
        """
        uid = self.get_uid_for_hotkey_on_subnet(wallet.hotkey.ss58_address, netuid)
        retries = 0
        success = False
        message = "No attempt made. Perhaps it is too soon to set weights!"
        # NOTE(review): `uid` is None when the hotkey is not registered on `netuid`,
        # and both sides of this comparison can return None, which raises TypeError
        # on Python 3 — verify upstream guarantees before relying on this loop guard.
        while (
            self.blocks_since_last_update(netuid, uid) > self.weights_rate_limit(netuid)  # type: ignore
            and retries < max_retries
        ):
            try:
                logging.info(
                    f"Setting weights for subnet #{netuid}. Attempt {retries + 1} of {max_retries}."
                )
                success, message = set_weights_extrinsic(
                    subtensor=self,
                    wallet=wallet,
                    netuid=netuid,
                    uids=uids,
                    weights=weights,
                    version_key=version_key,
                    wait_for_inclusion=wait_for_inclusion,
                    wait_for_finalization=wait_for_finalization,
                    prompt=prompt,
                )
            except Exception as e:
                # Best-effort: log and fall through to the next retry.
                logging.error(f"Error setting weights: {e}")
            finally:
                retries += 1

        return success, message

    def register(
        self,
        wallet: "Wallet",
        netuid: int,
        wait_for_inclusion: bool = False,
        wait_for_finalization: bool = True,
        prompt: bool = False,
        max_allowed_attempts: int = 3,
        output_in_place: bool = True,
        cuda: bool = False,
        dev_id: Union[list[int], int] = 0,
        tpb: int = 256,
        num_processes: Optional[int] = None,
        update_interval: Optional[int] = None,
        log_verbose: bool = False,
    ) -> bool:
        """Register a neuron on a subnet via proof-of-work.

        Args:
            wallet (bittensor_wallet.Wallet): Wallet of the neuron to register.
            netuid (int): The unique identifier of the subnet.
            wait_for_inclusion (bool): Wait for block inclusion. Default ``False``.
            wait_for_finalization (bool): Wait for finalization. Default ``True``.
            prompt (bool): Prompt for confirmation before proceeding.
            max_allowed_attempts (int): Maximum registration attempts.
            output_in_place (bool): Print PoW progress in-place on the console.
            cuda (bool): Solve the PoW on CUDA device(s). Default ``False``.
            dev_id (Union[list[int], int]): CUDA device id(s). Default ``0``.
            tpb (int): CUDA threads per block. Default ``256``.
            num_processes (Optional[int]): CPU processes for solving. Default ``None``.
            update_interval (Optional[int]): Nonces to solve between updates.
            log_verbose (bool): Verbose registration logging. Default ``False``.

        Returns:
            bool: ``True`` if registration succeeded, ``False`` otherwise.
        """
        return register_extrinsic(
            subtensor=self,
            wallet=wallet,
            netuid=netuid,
            wait_for_inclusion=wait_for_inclusion,
            wait_for_finalization=wait_for_finalization,
            prompt=prompt,
            max_allowed_attempts=max_allowed_attempts,
            output_in_place=output_in_place,
            cuda=cuda,
            dev_id=dev_id,
            tpb=tpb,
            num_processes=num_processes,
            update_interval=update_interval,
            log_verbose=log_verbose,
        )

    def burned_register(
        self,
        wallet: "Wallet",
        netuid: int,
        wait_for_inclusion: bool = False,
        wait_for_finalization: bool = True,
        prompt: bool = False,
    ) -> bool:
        """Register a neuron on a subnet by recycling (burning) TAO.

        Args:
            wallet (bittensor_wallet.Wallet): Wallet of the neuron to register.
            netuid (int): The unique identifier of the subnet.
            wait_for_inclusion (bool): Wait for block inclusion. Default ``False``.
            wait_for_finalization (bool): Wait for finalization. Default ``True``.
            prompt (bool): Prompt for confirmation before proceeding. Default ``False``.

        Returns:
            bool: ``True`` if registration succeeded, ``False`` otherwise.
        """
        return burned_register_extrinsic(
            subtensor=self,
            wallet=wallet,
            netuid=netuid,
            wait_for_inclusion=wait_for_inclusion,
            wait_for_finalization=wait_for_finalization,
            prompt=prompt,
        )

    def serve_axon(
        self,
        netuid: int,
        axon: "Axon",
        wait_for_inclusion: bool = False,
        wait_for_finalization: bool = True,
    ) -> bool:
        """Register an ``Axon`` serving endpoint on the chain for a specific neuron.

        Args:
            netuid (int): The unique identifier of the subnetwork.
            axon (bittensor.core.axon.Axon): The Axon instance to register.
            wait_for_inclusion (bool): Wait for block inclusion. Default ``False``.
            wait_for_finalization (bool): Wait for finalization. Default ``True``.

        Returns:
            bool: ``True`` if the serve registration succeeded, ``False`` otherwise.
        """
        return serve_axon_extrinsic(
            self, netuid, axon, wait_for_inclusion, wait_for_finalization
        )

    # metagraph
    @property
    def block(self) -> int:
        """Current chain block number (alias for :meth:`get_current_block`)."""
        return self.get_current_block()

    def blocks_since_last_update(self, netuid: int, uid: int) -> Optional[int]:
        """Return blocks elapsed since the given UID last updated on the subnet.

        Args:
            netuid (int): The unique identifier of the subnetwork.
            uid (int): The unique identifier of the neuron.

        Returns:
            Optional[int]: Blocks since last update, or ``None`` if the subnet
                (or its ``LastUpdate`` hyperparameter) does not exist.
        """
        call = self._get_hyperparameter(param_name="LastUpdate", netuid=netuid)
        # NOTE(review): `call[uid]` raises if `uid` is None or out of range — the
        # None-check only covers a missing subnet, not a missing/invalid uid.
        return None if call is None else self.get_current_block() - int(call[uid])

    @networking.ensure_connected
    def get_block_hash(self, block_id: int) -> str:
        """Return the cryptographic hash of the given block.

        Args:
            block_id (int): The block number to look up.

        Returns:
            str: The block hash.
        """
        return self.substrate.get_block_hash(block_id=block_id)

    def weights_rate_limit(self, netuid: int) -> Optional[int]:
        """Return the ``WeightsSetRateLimit`` hyperparameter of a subnet.

        Args:
            netuid (int): The unique identifier of the subnetwork.

        Returns:
            Optional[int]: The rate limit, or ``None`` if the subnet does not exist.
        """
        call = self._get_hyperparameter(param_name="WeightsSetRateLimit", netuid=netuid)
        return None if call is None else int(call)

    # Keep backwards compatibility for community usage.
    # Make some commitment on-chain about arbitrary data.
    def commit(self, wallet, netuid: int, data: str):
        """Commit arbitrary data on-chain by publishing metadata.

        Args:
            wallet (bittensor_wallet.Wallet): Wallet committing the data.
            netuid (int): The unique identifier of the subnetwork.
            data (str): The data to commit; stored as ``Raw<len>`` encoded bytes.
        """
        publish_metadata(self, wallet, netuid, f"Raw{len(data)}", data.encode())

    # Keep backwards compatibility for community usage.
    def subnetwork_n(self, netuid: int, block: Optional[int] = None) -> Optional[int]:
        """Return the ``SubnetworkN`` hyperparameter (neuron count) of a subnet.

        Args:
            netuid (int): The unique identifier of the subnetwork.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            Optional[int]: The value, or ``None`` if the subnet does not exist.
        """
        call = self._get_hyperparameter(
            param_name="SubnetworkN", netuid=netuid, block=block
        )
        return None if call is None else int(call)

    # Community uses this method
    def transfer(
        self,
        wallet: "Wallet",
        dest: str,
        amount: Union["Balance", float],
        wait_for_inclusion: bool = True,
        wait_for_finalization: bool = False,
        prompt: bool = False,
    ) -> bool:
        """Transfer TAO from a wallet to a destination address.

        Args:
            wallet (bittensor_wallet.Wallet): Source wallet for the transfer.
            dest (str): Destination public key address.
            amount (Union[bittensor.utils.balance.Balance, float]): Amount of TAO.
            wait_for_inclusion (bool): Wait for block inclusion. Default ``True``.
            wait_for_finalization (bool): Wait for finalization. Default ``False``.
            prompt (bool): Prompt for confirmation before proceeding. Default ``False``.

        Returns:
            bool: ``True`` if the transfer succeeded, ``False`` otherwise.
        """
        return transfer_extrinsic(
            subtensor=self,
            wallet=wallet,
            dest=dest,
            amount=amount,
            wait_for_inclusion=wait_for_inclusion,
            wait_for_finalization=wait_for_finalization,
            prompt=prompt,
        )

    # Community uses this method via `bittensor.api.extrinsics.prometheus.prometheus_extrinsic`
    def get_neuron_for_pubkey_and_subnet(
        self, hotkey_ss58: str, netuid: int, block: Optional[int] = None
    ) -> Optional["NeuronInfo"]:
        """Return neuron info for a hotkey on a given subnet.

        Args:
            hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey.
            netuid (int): The unique identifier of the subnet.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            Optional[bittensor.core.chain_data.neuron_info.NeuronInfo]: The neuron,
                or a null neuron if the hotkey has no UID on the subnet
                (``neuron_for_uid`` handles the ``None`` uid case).
        """
        return self.neuron_for_uid(
            self.get_uid_for_hotkey_on_subnet(hotkey_ss58, netuid, block=block),
            netuid,
            block=block,
        )

    @networking.ensure_connected
    def neuron_for_uid(
        self, uid: Optional[int], netuid: int, block: Optional[int] = None
    ) -> "NeuronInfo":
        """Return detailed info for the neuron with the given UID on a subnet.

        Args:
            uid (Optional[int]): The neuron's UID; ``None`` yields a null neuron.
            netuid (int): The unique identifier of the subnet.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            bittensor.core.chain_data.neuron_info.NeuronInfo: The neuron, or a
                null neuron when ``uid`` is ``None`` or the RPC returns no result.
        """
        if uid is None:
            return NeuronInfo.get_null_neuron()

        # Retries transient RPC failures: 3 tries, exponential backoff capped at 4s.
        @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=logging)
        def make_substrate_call_with_retry():
            block_hash = None if block is None else self.substrate.get_block_hash(block)
            params = [netuid, uid]
            if block_hash:
                params = params + [block_hash]
            return self.substrate.rpc_request(
                method="neuronInfo_getNeuron",
                params=params,  # custom rpc method
            )

        json_body = make_substrate_call_with_retry()

        if not (result := json_body.get("result", None)):
            return NeuronInfo.get_null_neuron()

        return NeuronInfo.from_vec_u8(result)

    # Community uses this method
    def serve_prometheus(
        self,
        wallet: "Wallet",
        port: int,
        netuid: int,
        wait_for_inclusion: bool = False,
        wait_for_finalization: bool = True,
    ) -> bool:
        """Announce a Prometheus metrics endpoint on-chain via an extrinsic.

        Args:
            wallet (bittensor_wallet.Wallet): Wallet used to submit the extrinsic.
            port (int): Port on which Prometheus metrics are served.
            netuid (int): The unique identifier of the subnetwork.
            wait_for_inclusion (bool): Wait for block inclusion. Default ``False``.
            wait_for_finalization (bool): Wait for finalization. Default ``True``.

        Returns:
            bool: ``True`` if the extrinsic was processed successfully.
        """
        return prometheus_extrinsic(
            self,
            wallet=wallet,
            port=port,
            netuid=netuid,
            wait_for_inclusion=wait_for_inclusion,
            wait_for_finalization=wait_for_finalization,
        )

    # Community uses this method
    def get_subnet_hyperparameters(
        self, netuid: int, block: Optional[int] = None
    ) -> Optional[Union[list, "SubnetHyperparameters"]]:
        """Return the hyperparameters of a subnet.

        Args:
            netuid (int): The network UID of the subnet to query.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            Optional[Union[list, SubnetHyperparameters]]: The decoded
                hyperparameters, or an empty list if the runtime API returned
                nothing.

        NOTE(review): returns ``[]`` (not ``None``) on a missing result, which is
        why the annotation carries the odd ``Union[list, ...]`` — confirm callers
        expect this before tightening the type.
        """
        hex_bytes_result = self.query_runtime_api(
            runtime_api="SubnetInfoRuntimeApi",
            method="get_subnet_hyperparams",
            params=[netuid],
            block=block,
        )

        if hex_bytes_result is None:
            return []

        # The runtime API returns SCALE-encoded bytes as a hex string, with or
        # without a "0x" prefix.
        if hex_bytes_result.startswith("0x"):
            bytes_result = bytes.fromhex(hex_bytes_result[2:])
        else:
            bytes_result = bytes.fromhex(hex_bytes_result)

        return SubnetHyperparameters.from_vec_u8(bytes_result)  # type: ignore

    # Community uses this method
    # Returns network ImmunityPeriod hyper parameter.
    def immunity_period(
        self, netuid: int, block: Optional[int] = None
    ) -> Optional[int]:
        """Return the ``ImmunityPeriod`` hyperparameter of a subnet.

        Args:
            netuid (int): The unique identifier of the subnet.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            Optional[int]: The value, or ``None`` if the subnet does not exist.
        """
        call = self._get_hyperparameter(
            param_name="ImmunityPeriod", netuid=netuid, block=block
        )
        return None if call is None else int(call)

    # Community uses this method
    def get_uid_for_hotkey_on_subnet(
        self, hotkey_ss58: str, netuid: int, block: Optional[int] = None
    ) -> Optional[int]:
        """Return the UID of a hotkey on a specific subnet.

        Args:
            hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey.
            netuid (int): The unique identifier of the subnet.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            Optional[int]: The UID, or ``None`` if the hotkey is not registered.
        """
        _result = self.query_subtensor("Uids", block, [netuid, hotkey_ss58])
        return getattr(_result, "value", None)

    # Community uses this method
    def tempo(self, netuid: int, block: Optional[int] = None) -> Optional[int]:
        """Return the ``Tempo`` hyperparameter of a subnet.

        Args:
            netuid (int): The unique identifier of the subnetwork.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            Optional[int]: The value, or ``None`` if the subnet does not exist.
        """
        call = self._get_hyperparameter(param_name="Tempo", netuid=netuid, block=block)
        return None if call is None else int(call)

    # Community uses this method
    def get_commitment(self, netuid: int, uid: int, block: Optional[int] = None) -> str:
        """Return the on-chain commitment string for a neuron.

        Args:
            netuid (int): The unique identifier of the subnetwork.
            uid (int): The unique identifier of the neuron.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            str: The decoded commitment data.
        """
        metagraph = self.metagraph(netuid)
        hotkey = metagraph.hotkeys[uid]  # type: ignore

        metadata = get_metadata(self, netuid, hotkey, block)
        # The commitment is stored as {"Raw<len>": "0x<hex>"}; strip the "0x"
        # prefix and decode the hex payload.
        commitment = metadata["info"]["fields"][0]  # type: ignore
        hex_data = commitment[list(commitment.keys())[0]][2:]  # type: ignore

        return bytes.fromhex(hex_data).decode()

    # Community uses this via `bittensor.utils.weight_utils.process_weights_for_netuid` function.
    def min_allowed_weights(
        self, netuid: int, block: Optional[int] = None
    ) -> Optional[int]:
        """Return the ``MinAllowedWeights`` hyperparameter of a subnet.

        Args:
            netuid (int): The unique identifier of the subnetwork.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            Optional[int]: The value, or ``None`` if the subnet does not exist.
        """
        call = self._get_hyperparameter(
            param_name="MinAllowedWeights", block=block, netuid=netuid
        )
        return None if call is None else int(call)

    # Community uses this via `bittensor.utils.weight_utils.process_weights_for_netuid` function.
    def max_weight_limit(
        self, netuid: int, block: Optional[int] = None
    ) -> Optional[float]:
        """Return the ``MaxWeightsLimit`` hyperparameter of a subnet as a float.

        Args:
            netuid (int): The unique identifier of the subnetwork.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            Optional[float]: The u16 value normalized to [0, 1], or ``None`` if
                the subnet does not exist.
        """
        call = self._get_hyperparameter(
            param_name="MaxWeightsLimit", block=block, netuid=netuid
        )
        return None if call is None else u16_normalized_float(int(call))

    # Community uses this method. It is used in subtensor in neuron_info, and serving.
    def get_prometheus_info(
        self, netuid: int, hotkey_ss58: str, block: Optional[int] = None
    ) -> Optional["PrometheusInfo"]:
        """Return the Prometheus serving info registered for a hotkey.

        Args:
            netuid (int): The unique identifier of the subnetwork.
            hotkey_ss58 (str): The ``SS58`` address of the hotkey.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            Optional[bittensor.core.chain_data.prometheus_info.PrometheusInfo]:
                The info, or ``None`` if not found.
        """
        result = self.query_subtensor("Prometheus", block, [netuid, hotkey_ss58])
        if result is not None and hasattr(result, "value"):
            return PrometheusInfo(
                ip=networking.int_to_ip(result.value["ip"]),
                ip_type=result.value["ip_type"],
                port=result.value["port"],
                version=result.value["version"],
                block=result.value["block"],
            )
        return None

    # Community uses this method
    def subnet_exists(self, netuid: int, block: Optional[int] = None) -> bool:
        """Check whether a subnet with the given netuid exists.

        Args:
            netuid (int): The unique identifier of the subnet.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            bool: ``True`` if the subnet exists, ``False`` otherwise.
        """
        _result = self.query_subtensor("NetworksAdded", block, [netuid])
        return getattr(_result, "value", False)

    # Metagraph uses this method
    def bonds(
        self, netuid: int, block: Optional[int] = None
    ) -> list[tuple[int, list[tuple[int, int]]]]:
        """Return the bond distribution of a subnet.

        Args:
            netuid (int): The network UID of the subnet to query.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            list[tuple[int, list[tuple[int, int]]]]: Each neuron's UID mapped to
                its (uid, bond) pairs with other neurons.
        """
        b_map = []
        b_map_encoded = self.query_map_subtensor(
            name="Bonds", block=block, params=[netuid]
        )
        # NOTE(review): unlike `get_subnets`, `.records` is accessed without a
        # hasattr/None guard — confirm `query_map_subtensor` never returns None here.
        if b_map_encoded.records:
            for uid, b in b_map_encoded:
                b_map.append((uid.serialize(), b.serialize()))

        return b_map

    def get_subnet_burn_cost(self, block: Optional[int] = None) -> Optional[str]:
        """Return the TAO burn cost for registering a new subnet.

        Args:
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            Optional[str]: The registration lock cost as returned by the runtime
                API, or ``None`` if unavailable.

        NOTE(review): the annotation says ``Optional[str]`` while the original
        docstring claimed ``int`` — the actual type depends on what
        ``query_runtime_api`` decodes; verify before tightening either.
        """
        lock_cost = self.query_runtime_api(
            runtime_api="SubnetRegistrationRuntimeApi",
            method="get_network_registration_cost",
            params=[],
            block=block,
        )

        if lock_cost is None:
            return None

        return lock_cost

    # Metagraph uses this method
    def neurons(self, netuid: int, block: Optional[int] = None) -> list["NeuronInfo"]:
        """Return all neurons of a subnet, with weights and bonds attached.

        Args:
            netuid (int): The unique identifier of the subnet.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            list[bittensor.core.chain_data.neuron_info.NeuronInfo]: One entry per
                neuron, assembled from the lite neuron list plus the subnet's
                weight and bond maps.
        """
        neurons_lite = self.neurons_lite(netuid=netuid, block=block)
        weights = self.weights(block=block, netuid=netuid)
        bonds = self.bonds(block=block, netuid=netuid)

        weights_as_dict = {uid: w for uid, w in weights}
        bonds_as_dict = {uid: b for uid, b in bonds}

        neurons = [
            NeuronInfo.from_weights_bonds_and_neuron_lite(
                neuron_lite, weights_as_dict, bonds_as_dict
            )
            for neuron_lite in neurons_lite
        ]

        return neurons

    # Metagraph uses this method
    def get_total_subnets(self, block: Optional[int] = None) -> Optional[int]:
        """Return the total number of subnets on the network.

        Args:
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            Optional[int]: The subnet count, or ``None`` if unavailable.
        """
        _result = self.query_subtensor("TotalNetworks", block)
        return getattr(_result, "value", None)

    # Metagraph uses this method
    def get_subnets(self, block: Optional[int] = None) -> list[int]:
        """Return the netuids of all active subnets.

        Args:
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            list[int]: Netuids of all subnets added to the network.
        """
        result = self.query_map_subtensor("NetworksAdded", block)
        return (
            [network[0].value for network in result.records if network[1]]
            if result and hasattr(result, "records")
            else []
        )

    # Metagraph uses this method
    def neurons_lite(
        self, netuid: int, block: Optional[int] = None
    ) -> list["NeuronInfoLite"]:
        """Return the neurons of a subnet in 'lite' form (no weights/bonds).

        Args:
            netuid (int): The unique identifier of the subnet.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            list[bittensor.core.chain_data.neuron_info_lite.NeuronInfoLite]:
                Simplified neuron records; empty list if the runtime API
                returned nothing.
        """
        hex_bytes_result = self.query_runtime_api(
            runtime_api="NeuronInfoRuntimeApi",
            method="get_neurons_lite",
            params=[netuid],
            block=block,
        )

        if hex_bytes_result is None:
            return []

        # The runtime API returns SCALE-encoded bytes as a hex string, with or
        # without a "0x" prefix.
        if hex_bytes_result.startswith("0x"):
            bytes_result = bytes.fromhex(hex_bytes_result[2:])
        else:
            bytes_result = bytes.fromhex(hex_bytes_result)

        return NeuronInfoLite.list_from_vec_u8(bytes_result)  # type: ignore

    # Used in the `neurons` method which is used in metagraph.py
    def weights(
        self, netuid: int, block: Optional[int] = None
    ) -> list[tuple[int, list[tuple[int, int]]]]:
        """Return the weight distribution of a subnet.

        Args:
            netuid (int): The network UID of the subnet to query.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            list[tuple[int, list[tuple[int, int]]]]: Each neuron's UID mapped to
                its (uid, weight) pairs for other neurons.
        """
        w_map = []
        w_map_encoded = self.query_map_subtensor(
            name="Weights", block=block, params=[netuid]
        )
        # NOTE(review): `.records` accessed without a guard, same as `bonds` —
        # confirm `query_map_subtensor` never returns None here.
        if w_map_encoded.records:
            for uid, w in w_map_encoded:
                w_map.append((uid.serialize(), w.serialize()))

        return w_map

    # Used by community via `transfer_extrinsic`
    @networking.ensure_connected
    def get_balance(self, address: str, block: Optional[int] = None) -> "Balance":
        """Return the free token balance of an account.

        Args:
            address (str): The Substrate address in ``ss58`` format.
            block (Optional[int]): Block number to query at; latest if ``None``.

        Returns:
            bittensor.utils.balance.Balance: The account's free balance.

        NOTE(review): on a SCALE decode failure this returns ``Balance(1000)``
        (1000 rao) instead of raising — callers cannot distinguish this sentinel
        from a real balance; flagged for reconsideration.
        """
        try:

            # Retries transient RPC failures: 3 tries, exponential backoff capped at 4s.
            @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=logging)
            def make_substrate_call_with_retry():
                return self.substrate.query(
                    module="System",
                    storage_function="Account",
                    params=[address],
                    block_hash=(
                        None if block is None else self.substrate.get_block_hash(block)
                    ),
                )

            result = make_substrate_call_with_retry()

        except RemainingScaleBytesNotEmptyException:
            logging.error(
                "Received a corrupted message. This likely points to an error with the network or subnet."
            )
            return Balance(1000)
        return Balance(result.value["data"]["free"])

    # Used in community via `bittensor.core.subtensor.Subtensor.transfer`
    @networking.ensure_connected
    def get_transfer_fee(
        self, wallet: "Wallet", dest: str, value: Union["Balance", float, int]
    ) -> "Balance":
        """
        Calculates the transaction fee for transferring tokens from a wallet to a specified destination address. This function simulates the transfer to estimate the associated cost, taking into account the current network conditions and transaction complexity.

        Args:
            wallet (bittensor_wallet.Wallet): The wallet from which the transfer is initiated.
            dest (str): The ``SS58`` address of the destination account.
            value (Union[bittensor.utils.balance.Balance, float, int]): The amount of tokens to be transferred, specified as a Balance object, or in Tao (float) or Rao (int) units.

        Returns:
            bittensor.utils.balance.Balance: The estimated transaction fee for the transfer, represented as a Balance object.

        Estimating the transfer fee is essential for planning and executing token transactions, ensuring that the wallet has sufficient funds to cover both the transfer amount and the associated costs. This function provides a crucial tool for managing financial operations within the Bittensor network.
+ """ + if isinstance(value, float): + value = Balance.from_tao(value) + elif isinstance(value, int): + value = Balance.from_rao(value) + + if isinstance(value, Balance): + call = self.substrate.compose_call( + call_module="Balances", + call_function="transfer_allow_death", + call_params={"dest": dest, "value": value.rao}, + ) + + try: + payment_info = self.substrate.get_payment_info( + call=call, keypair=wallet.coldkeypub + ) + except Exception as e: + settings.bt_console.print( + f":cross_mark: [red]Failed to get payment info[/red]:[bold white]\n {e}[/bold white]" + ) + payment_info = {"partialFee": int(2e7)} # assume 0.02 Tao + + fee = Balance.from_rao(payment_info["partialFee"]) + return fee + else: + fee = Balance.from_rao(int(2e7)) + logging.error( + "To calculate the transaction fee, the value must be Balance, float, or int. Received type: %s. Fee " + "is %s", + type(value), + 2e7, + ) + return fee + + # Used in community via `bittensor.core.subtensor.Subtensor.transfer` + def get_existential_deposit( + self, block: Optional[int] = None + ) -> Optional["Balance"]: + """ + Retrieves the existential deposit amount for the Bittensor blockchain. The existential deposit is the minimum amount of TAO required for an account to exist on the blockchain. Accounts with balances below this threshold can be reaped to conserve network resources. + + Args: + block (Optional[int]): Block number at which to query the deposit amount. If ``None``, the current block is used. + + Returns: + Optional[bittensor.utils.balance.Balance]: The existential deposit amount, or ``None`` if the query fails. + + The existential deposit is a fundamental economic parameter in the Bittensor network, ensuring efficient use of storage and preventing the proliferation of dust accounts. 
+ """ + result = self.query_constant( + module_name="Balances", constant_name="ExistentialDeposit", block=block + ) + if result is None or not hasattr(result, "value"): + return None + return Balance.from_rao(result.value) + + # Community uses this method + def commit_weights( + self, + wallet: "Wallet", + netuid: int, + salt: list[int], + uids: Union[NDArray[np.int64], list], + weights: Union[NDArray[np.int64], list], + version_key: int = settings.version_as_int, + wait_for_inclusion: bool = False, + wait_for_finalization: bool = False, + prompt: bool = False, + max_retries: int = 5, + ) -> tuple[bool, str]: + """ + Commits a hash of the neuron's weights to the Bittensor blockchain using the provided wallet. + This action serves as a commitment or snapshot of the neuron's current weight distribution. + + Args: + wallet (bittensor_wallet.Wallet): The wallet associated with the neuron committing the weights. + netuid (int): The unique identifier of the subnet. + salt (list[int]): list of randomly generated integers as salt to generated weighted hash. + uids (np.ndarray): NumPy array of neuron UIDs for which weights are being committed. + weights (np.ndarray): NumPy array of weight values corresponding to each UID. + version_key (int): Version key for compatibility with the network. Default is ``int representation of Bittensor version.``. + wait_for_inclusion (bool): Waits for the transaction to be included in a block. Default is ``False``. + wait_for_finalization (bool): Waits for the transaction to be finalized on the blockchain. Default is ``False``. + prompt (bool): If ``True``, prompts for user confirmation before proceeding. Default is ``False``. + max_retries (int): The number of maximum attempts to commit weights. Default is ``5``. + + Returns: + tuple[bool, str]: ``True`` if the weight commitment is successful, False otherwise. And `msg`, a string + value describing the success or potential error. 
+ + This function allows neurons to create a tamper-proof record of their weight distribution at a specific point in time, + enhancing transparency and accountability within the Bittensor network. + """ + retries = 0 + success = False + message = "No attempt made. Perhaps it is too soon to commit weights!" + + logging.info( + f"Committing weights with params: netuid={netuid}, uids={uids}, weights={weights}, version_key={version_key}" + ) + + # Generate the hash of the weights + commit_hash = generate_weight_hash( + address=wallet.hotkey.ss58_address, + netuid=netuid, + uids=list(uids), + values=list(weights), + salt=salt, + version_key=version_key, + ) + + logging.info(f"Commit Hash: {commit_hash}") + + while retries < max_retries: + try: + success, message = commit_weights_extrinsic( + subtensor=self, + wallet=wallet, + netuid=netuid, + commit_hash=commit_hash, + wait_for_inclusion=wait_for_inclusion, + wait_for_finalization=wait_for_finalization, + prompt=prompt, + ) + if success: + break + except Exception as e: + logging.error(f"Error committing weights: {e}") + finally: + retries += 1 + + return success, message + + # Community uses this method + def reveal_weights( + self, + wallet: "Wallet", + netuid: int, + uids: Union[NDArray[np.int64], list], + weights: Union[NDArray[np.int64], list], + salt: Union[NDArray[np.int64], list], + version_key: int = settings.version_as_int, + wait_for_inclusion: bool = False, + wait_for_finalization: bool = False, + prompt: bool = False, + max_retries: int = 5, + ) -> tuple[bool, str]: + """ + Reveals the weights for a specific subnet on the Bittensor blockchain using the provided wallet. + This action serves as a revelation of the neuron's previously committed weight distribution. + + Args: + wallet (bittensor_wallet.Wallet): The wallet associated with the neuron revealing the weights. + netuid (int): The unique identifier of the subnet. + uids (np.ndarray): NumPy array of neuron UIDs for which weights are being revealed. 
+ weights (np.ndarray): NumPy array of weight values corresponding to each UID. + salt (np.ndarray): NumPy array of salt values corresponding to the hash function. + version_key (int): Version key for compatibility with the network. Default is ``int representation of Bittensor version``. + wait_for_inclusion (bool): Waits for the transaction to be included in a block. Default is ``False``. + wait_for_finalization (bool): Waits for the transaction to be finalized on the blockchain. Default is ``False``. + prompt (bool): If ``True``, prompts for user confirmation before proceeding. Default is ``False``. + max_retries (int): The number of maximum attempts to reveal weights. Default is ``5``. + + Returns: + tuple[bool, str]: ``True`` if the weight revelation is successful, False otherwise. And `msg`, a string + value describing the success or potential error. + + This function allows neurons to reveal their previously committed weight distribution, ensuring transparency + and accountability within the Bittensor network. + """ + + retries = 0 + success = False + message = "No attempt made. Perhaps it is too soon to reveal weights!" + + while retries < max_retries: + try: + success, message = reveal_weights_extrinsic( + subtensor=self, + wallet=wallet, + netuid=netuid, + uids=list(uids), + weights=list(weights), + salt=list(salt), + version_key=version_key, + wait_for_inclusion=wait_for_inclusion, + wait_for_finalization=wait_for_finalization, + prompt=prompt, + ) + if success: + break + except Exception as e: + logging.error(f"Error revealing weights: {e}") + finally: + retries += 1 + + return success, message + + def difficulty(self, netuid: int, block: Optional[int] = None) -> Optional[int]: + """ + Retrieves the 'Difficulty' hyperparameter for a specified subnet in the Bittensor network. + + This parameter is instrumental in determining the computational challenge required for neurons to participate in consensus and validation processes. 
+ + Args: + netuid (int): The unique identifier of the subnet. + block (Optional[int]): The blockchain block number for the query. + + Returns: + Optional[int]: The value of the 'Difficulty' hyperparameter if the subnet exists, ``None`` otherwise. + + The 'Difficulty' parameter directly impacts the network's security and integrity by setting the computational effort required for validating transactions and participating in the network's consensus mechanism. + """ + call = self._get_hyperparameter( + param_name="Difficulty", netuid=netuid, block=block + ) + if call is None: + return None + return int(call) + + def recycle(self, netuid: int, block: Optional[int] = None) -> Optional["Balance"]: + """ + Retrieves the 'Burn' hyperparameter for a specified subnet. The 'Burn' parameter represents the amount of Tao that is effectively recycled within the Bittensor network. + + Args: + netuid (int): The unique identifier of the subnet. + block (Optional[int]): The blockchain block number for the query. + + Returns: + Optional[Balance]: The value of the 'Burn' hyperparameter if the subnet exists, None otherwise. + + Understanding the 'Burn' rate is essential for analyzing the network registration usage, particularly how it is correlated with user activity and the overall cost of participation in a given subnet. 
+ """ + call = self._get_hyperparameter(param_name="Burn", netuid=netuid, block=block) + return None if call is None else Balance.from_rao(int(call)) + + # Subnet 27 uses this method + _do_serve_prometheus = do_serve_prometheus + # Subnet 27 uses this method name + _do_serve_axon = do_serve_axon From 8886cc8ba89b96201986b29d1b128a06ee9ad48c Mon Sep 17 00:00:00 2001 From: Roman Date: Mon, 28 Oct 2024 15:48:18 -0700 Subject: [PATCH 02/27] add async_substrate_interface.py --- bittensor/utils/async_substrate_interface.py | 2742 ++++++++++++++++++ 1 file changed, 2742 insertions(+) create mode 100644 bittensor/utils/async_substrate_interface.py diff --git a/bittensor/utils/async_substrate_interface.py b/bittensor/utils/async_substrate_interface.py new file mode 100644 index 0000000000..60ec9dce9c --- /dev/null +++ b/bittensor/utils/async_substrate_interface.py @@ -0,0 +1,2742 @@ +import asyncio +import json +import random +from collections import defaultdict +from dataclasses import dataclass +from hashlib import blake2b +from typing import Optional, Any, Union, Callable, Awaitable, cast + +from bt_decode import PortableRegistry, decode as decode_by_type_string, MetadataV15 +from async_property import async_property +from scalecodec import GenericExtrinsic +from scalecodec.base import ScaleBytes, ScaleType, RuntimeConfigurationObject +from scalecodec.type_registry import load_type_registry_preset +from scalecodec.types import GenericCall +from bittensor_wallet import Keypair +from substrateinterface.exceptions import ( + SubstrateRequestException, + ExtrinsicNotFound, + BlockNotFound, +) +from substrateinterface.storage import StorageKey +import websockets + +ResultHandler = Callable[[dict, Any], Awaitable[tuple[dict, bool]]] + + +class TimeoutException(Exception): + pass + + +def timeout_handler(signum, frame): + raise TimeoutException("Operation timed out") + + +class ExtrinsicReceipt: + """ + Object containing information of submitted extrinsic. 
Block hash where extrinsic is included is required + when retrieving triggered events or determine if extrinsic was successful + """ + + def __init__( + self, + substrate: "AsyncSubstrateInterface", + extrinsic_hash: Optional[str] = None, + block_hash: Optional[str] = None, + block_number: Optional[int] = None, + extrinsic_idx: Optional[int] = None, + finalized=None, + ): + """ + Object containing information of submitted extrinsic. Block hash where extrinsic is included is required + when retrieving triggered events or determine if extrinsic was successful + + Parameters + ---------- + substrate + extrinsic_hash + block_hash + finalized + """ + self.substrate = substrate + self.extrinsic_hash = extrinsic_hash + self.block_hash = block_hash + self.block_number = block_number + self.finalized = finalized + + self.__extrinsic_idx = extrinsic_idx + self.__extrinsic = None + + self.__triggered_events: Optional[list] = None + self.__is_success: Optional[bool] = None + self.__error_message = None + self.__weight = None + self.__total_fee_amount = None + + async def get_extrinsic_identifier(self) -> str: + """ + Returns the on-chain identifier for this extrinsic in format "[block_number]-[extrinsic_idx]" e.g. 
134324-2 + Returns + ------- + str + """ + if self.block_number is None: + if self.block_hash is None: + raise ValueError( + "Cannot create extrinsic identifier: block_hash is not set" + ) + + self.block_number = await self.substrate.get_block_number(self.block_hash) + + if self.block_number is None: + raise ValueError( + "Cannot create extrinsic identifier: unknown block_hash" + ) + + return f"{self.block_number}-{await self.extrinsic_idx}" + + async def retrieve_extrinsic(self): + if not self.block_hash: + raise ValueError( + "ExtrinsicReceipt can't retrieve events because it's unknown which block_hash it is " + "included, manually set block_hash or use `wait_for_inclusion` when sending extrinsic" + ) + # Determine extrinsic idx + + block = await self.substrate.get_block(block_hash=self.block_hash) + + extrinsics = block["extrinsics"] + + if len(extrinsics) > 0: + if self.__extrinsic_idx is None: + self.__extrinsic_idx = self.__get_extrinsic_index( + block_extrinsics=extrinsics, extrinsic_hash=self.extrinsic_hash + ) + + if self.__extrinsic_idx >= len(extrinsics): + raise ExtrinsicNotFound() + + self.__extrinsic = extrinsics[self.__extrinsic_idx] + + @async_property + async def extrinsic_idx(self) -> int: + """ + Retrieves the index of this extrinsic in containing block + + Returns + ------- + int + """ + if self.__extrinsic_idx is None: + await self.retrieve_extrinsic() + return self.__extrinsic_idx + + @async_property + async def triggered_events(self) -> list: + """ + Gets triggered events for submitted extrinsic. 
block_hash where extrinsic is included is required, manually
+ set block_hash or use `wait_for_inclusion` when submitting extrinsic
+
+ Returns
+ -------
+ list
+ """
+ if self.__triggered_events is None:
+ if not self.block_hash:
+ raise ValueError(
+ "ExtrinsicReceipt can't retrieve events because it's unknown which block_hash it is "
+ "included, manually set block_hash or use `wait_for_inclusion` when sending extrinsic"
+ )
+
+ # NOTE(review): the `extrinsic_idx` async property already calls
+ # retrieve_extrinsic() when the index is unset, so this branch looks like it can
+ # never be taken — presumably defensive; confirm.
+ if await self.extrinsic_idx is None:
+ await self.retrieve_extrinsic()
+
+ self.__triggered_events = []
+
+ for event in await self.substrate.get_events(block_hash=self.block_hash):
+ if event["extrinsic_idx"] == await self.extrinsic_idx:
+ self.__triggered_events.append(event)
+
+ return cast(list, self.__triggered_events)
+
+ # Scans the extrinsic's triggered events to derive success/failure, dispatch weight,
+ # a structured error message, and the total fee paid.
+ async def process_events(self):
+ if await self.triggered_events:
+ self.__total_fee_amount = 0
+
+ # Process fees
+ has_transaction_fee_paid_event = False
+
+ for event in await self.triggered_events:
+ if (
+ event["event"]["module_id"] == "TransactionPayment"
+ and event["event"]["event_id"] == "TransactionFeePaid"
+ ):
+ self.__total_fee_amount = event["event"]["attributes"]["actual_fee"]
+ has_transaction_fee_paid_event = True
+
+ # Process other events
+ for event in await self.triggered_events:
+ # Check events
+ if (
+ event["event"]["module_id"] == "System"
+ and event["event"]["event_id"] == "ExtrinsicSuccess"
+ ):
+ self.__is_success = True
+ self.__error_message = None
+
+ if "dispatch_info" in event["event"]["attributes"]:
+ self.__weight = event["event"]["attributes"]["dispatch_info"][
+ "weight"
+ ]
+ else:
+ # Backwards compatibility
+ self.__weight = event["event"]["attributes"]["weight"]
+
+ elif (
+ event["event"]["module_id"] == "System"
+ and event["event"]["event_id"] == "ExtrinsicFailed"
+ ):
+ self.__is_success = False
+
+ dispatch_info = event["event"]["attributes"]["dispatch_info"]
+ dispatch_error = event["event"]["attributes"]["dispatch_error"]
+
+ self.__weight = dispatch_info["weight"]
+
+ if "Module" in dispatch_error:
+ module_index = dispatch_error["Module"][0]["index"]
+ error_index = int.from_bytes(
+ bytes(dispatch_error["Module"][0]["error"]),
+ byteorder="little",
+ signed=False,
+ )
+
+ # NOTE(review): error_index comes from int.from_bytes() and is
+ # therefore always an int here, so this str branch appears
+ # unreachable — likely a leftover from an older error format.
+ if isinstance(error_index, str):
+ # Actual error index is first u8 in new [u8; 4] format
+ error_index = int(error_index[2:4], 16)
+ module_error = self.substrate.metadata.get_module_error(
+ module_index=module_index, error_index=error_index
+ )
+ self.__error_message = {
+ "type": "Module",
+ "name": module_error.name,
+ "docs": module_error.docs,
+ }
+ elif "BadOrigin" in dispatch_error:
+ self.__error_message = {
+ "type": "System",
+ "name": "BadOrigin",
+ "docs": "Bad origin",
+ }
+ elif "CannotLookup" in dispatch_error:
+ self.__error_message = {
+ "type": "System",
+ "name": "CannotLookup",
+ "docs": "Cannot lookup",
+ }
+ elif "Other" in dispatch_error:
+ self.__error_message = {
+ "type": "System",
+ "name": "Other",
+ "docs": "Unspecified error occurred",
+ }
+
+ elif not has_transaction_fee_paid_event:
+ if (
+ event["event"]["module_id"] == "Treasury"
+ and event["event"]["event_id"] == "Deposit"
+ ):
+ self.__total_fee_amount += event["event"]["attributes"]["value"]
+ elif (
+ event["event"]["module_id"] == "Balances"
+ and event["event"]["event_id"] == "Deposit"
+ ):
+ # NOTE(review): uses event.value[...] while every other branch
+ # indexes event["event"][...] — verify both access styles are valid
+ # for these event objects.
+ self.__total_fee_amount += event.value["attributes"]["amount"]
+
+ @async_property
+ async def is_success(self) -> bool:
+ """
+ Returns `True` if `ExtrinsicSuccess` event is triggered, `False` in case of `ExtrinsicFailed`
+ In case of False `error_message` will contain more details about the error
+
+
+ Returns
+ -------
+ bool
+ """
+ if self.__is_success is None:
+ await self.process_events()
+
+ return cast(bool, self.__is_success)
+
+ @async_property
+ async def error_message(self) -> Optional[dict]:
+ """
+ Returns the error message if the extrinsic failed in format e.g.:
+
+ `{'type': 'System', 'name': 'BadOrigin', 'docs': 'Bad origin'}`
+
+
Returns + ------- + dict + """ + if self.__error_message is None: + if await self.is_success: + return None + await self.process_events() + return self.__error_message + + @async_property + async def weight(self) -> Union[int, dict]: + """ + Contains the actual weight when executing this extrinsic + + Returns + ------- + int (WeightV1) or dict (WeightV2) + """ + if self.__weight is None: + await self.process_events() + return self.__weight + + @async_property + async def total_fee_amount(self) -> int: + """ + Contains the total fee costs deducted when executing this extrinsic. This includes fee for the validator ( + (`Balances.Deposit` event) and the fee deposited for the treasury (`Treasury.Deposit` event) + + Returns + ------- + int + """ + if self.__total_fee_amount is None: + await self.process_events() + return cast(int, self.__total_fee_amount) + + # Helper functions + @staticmethod + def __get_extrinsic_index(block_extrinsics: list, extrinsic_hash: str) -> int: + """ + Returns the index of a provided extrinsic + """ + for idx, extrinsic in enumerate(block_extrinsics): + if ( + extrinsic.extrinsic_hash + and f"0x{extrinsic.extrinsic_hash.hex()}" == extrinsic_hash + ): + return idx + raise ExtrinsicNotFound() + + # Backwards compatibility methods + def __getitem__(self, item): + return getattr(self, item) + + def __iter__(self): + for item in self.__dict__.items(): + yield item + + def get(self, name): + return self[name] + + +class QueryMapResult: + def __init__( + self, + records: list, + page_size: int, + substrate: "AsyncSubstrateInterface", + module: Optional[str] = None, + storage_function: Optional[str] = None, + params: Optional[list] = None, + block_hash: Optional[str] = None, + last_key: Optional[str] = None, + max_results: Optional[int] = None, + ignore_decoding_errors: bool = False, + ): + self.records = records + self.page_size = page_size + self.module = module + self.storage_function = storage_function + self.block_hash = block_hash + 
self.substrate = substrate + self.last_key = last_key + self.max_results = max_results + self.params = params + self.ignore_decoding_errors = ignore_decoding_errors + self.loading_complete = False + self._buffer = iter(self.records) # Initialize the buffer with initial records + + async def retrieve_next_page(self, start_key) -> list: + result = await self.substrate.query_map( + module=self.module, + storage_function=self.storage_function, + params=self.params, + page_size=self.page_size, + block_hash=self.block_hash, + start_key=start_key, + max_results=self.max_results, + ignore_decoding_errors=self.ignore_decoding_errors, + ) + + # Update last key from new result set to use as offset for next page + self.last_key = result.last_key + return result.records + + def __aiter__(self): + return self + + async def __anext__(self): + try: + # Try to get the next record from the buffer + return next(self._buffer) + except StopIteration: + # If no more records in the buffer, try to fetch the next page + if self.loading_complete: + raise StopAsyncIteration + + next_page = await self.retrieve_next_page(self.last_key) + if not next_page: + self.loading_complete = True + raise StopAsyncIteration + + # Update the buffer with the newly fetched records + self._buffer = iter(next_page) + return next(self._buffer) + + def __getitem__(self, item): + return self.records[item] + + +@dataclass +class Preprocessed: + queryable: str + method: str + params: list + value_scale_type: str + storage_item: ScaleType + + +class RuntimeCache: + blocks: dict[int, "Runtime"] + block_hashes: dict[str, "Runtime"] + + def __init__(self): + self.blocks = {} + self.block_hashes = {} + + def add_item( + self, block: Optional[int], block_hash: Optional[str], runtime: "Runtime" + ): + if block is not None: + self.blocks[block] = runtime + if block_hash is not None: + self.block_hashes[block_hash] = runtime + + def retrieve( + self, block: Optional[int], block_hash: Optional[str] + ) -> 
Optional["Runtime"]: + if block is not None: + return self.blocks.get(block) + elif block_hash is not None: + return self.block_hashes.get(block_hash) + else: + return None + + +class Runtime: + block_hash: str + block_id: int + runtime_version = None + transaction_version = None + cache_region = None + metadata = None + type_registry_preset = None + + def __init__(self, chain, runtime_config, metadata, type_registry): + self.runtime_config = RuntimeConfigurationObject() + self.config = {} + self.chain = chain + self.type_registry = type_registry + self.runtime_config = runtime_config + self.metadata = metadata + + @property + def implements_scaleinfo(self) -> bool: + """ + Returns True if current runtime implementation a `PortableRegistry` (`MetadataV14` and higher) + """ + if self.metadata: + return self.metadata.portable_registry is not None + else: + return False + + def reload_type_registry( + self, use_remote_preset: bool = True, auto_discover: bool = True + ): + """ + Reload type registry and preset used to instantiate the SubstrateInterface object. 
Useful to periodically apply + changes in type definitions when a runtime upgrade occurred + + Parameters + ---------- + use_remote_preset: When True preset is downloaded from Github master, otherwise use files from local installed + scalecodec package + auto_discover + + Returns + ------- + + """ + self.runtime_config.clear_type_registry() + + self.runtime_config.implements_scale_info = self.implements_scaleinfo + + # Load metadata types in runtime configuration + self.runtime_config.update_type_registry(load_type_registry_preset(name="core")) + self.apply_type_registry_presets( + use_remote_preset=use_remote_preset, auto_discover=auto_discover + ) + + def apply_type_registry_presets( + self, + use_remote_preset: bool = True, + auto_discover: bool = True, + ): + """ + Applies type registry presets to the runtime + :param use_remote_preset: bool, whether to use presets from remote + :param auto_discover: bool, whether to use presets from local installed scalecodec package + """ + if self.type_registry_preset is not None: + # Load type registry according to preset + type_registry_preset_dict = load_type_registry_preset( + name=self.type_registry_preset, use_remote_preset=use_remote_preset + ) + + if not type_registry_preset_dict: + raise ValueError( + f"Type registry preset '{self.type_registry_preset}' not found" + ) + + elif auto_discover: + # Try to auto discover type registry preset by chain name + type_registry_name = self.chain.lower().replace(" ", "-") + try: + type_registry_preset_dict = load_type_registry_preset( + type_registry_name + ) + self.type_registry_preset = type_registry_name + except ValueError: + type_registry_preset_dict = None + + else: + type_registry_preset_dict = None + + if type_registry_preset_dict: + # Load type registries in runtime configuration + if self.implements_scaleinfo is False: + # Only runtime with no embedded types in metadata need the default set of explicit defined types + self.runtime_config.update_type_registry( + 
load_type_registry_preset( + "legacy", use_remote_preset=use_remote_preset + ) + ) + + if self.type_registry_preset != "legacy": + self.runtime_config.update_type_registry(type_registry_preset_dict) + + if self.type_registry: + # Load type registries in runtime configuration + self.runtime_config.update_type_registry(self.type_registry) + + +class RequestManager: + RequestResults = dict[Union[str, int], list[Union[ScaleType, dict]]] + + def __init__(self, payloads): + self.response_map = {} + self.responses = defaultdict(lambda: {"complete": False, "results": []}) + self.payloads_count = len(payloads) + + def add_request(self, item_id: int, request_id: Any): + """ + Adds an outgoing request to the responses map for later retrieval + """ + self.response_map[item_id] = request_id + + def overwrite_request(self, item_id: int, request_id: Any): + """ + Overwrites an existing request in the responses map with a new request_id. This is used + for multipart responses that generate a subscription id we need to watch, rather than the initial + request_id. + """ + self.response_map[request_id] = self.response_map.pop(item_id) + return request_id + + def add_response(self, item_id: int, response: dict, complete: bool): + """ + Maps a response to the request for later retrieval + """ + request_id = self.response_map[item_id] + self.responses[request_id]["results"].append(response) + self.responses[request_id]["complete"] = complete + + @property + def is_complete(self) -> bool: + """ + Returns whether all requests in the manager have completed + """ + return ( + all(info["complete"] for info in self.responses.values()) + and len(self.responses) == self.payloads_count + ) + + def get_results(self) -> RequestResults: + """ + Generates a dictionary mapping the requests initiated to the responses received. 
+ """ + return { + request_id: info["results"] for request_id, info in self.responses.items() + } + + +class Websocket: + def __init__( + self, + ws_url: str, + max_subscriptions=1024, + max_connections=100, + shutdown_timer=5, + options: Optional[dict] = None, + ): + """ + Websocket manager object. Allows for the use of a single websocket connection by multiple + calls. + + :param ws_url: Websocket URL to connect to + :param max_subscriptions: Maximum number of subscriptions per websocket connection + :param max_connections: Maximum number of connections total + :param shutdown_timer: Number of seconds to shut down websocket connection after last use + """ + # TODO allow setting max concurrent connections and rpc subscriptions per connection + # TODO reconnection logic + self.ws_url = ws_url + self.ws: Optional[websockets.WebSocketClientProtocol] = None + self.id = 0 + self.max_subscriptions = max_subscriptions + self.max_connections = max_connections + self.shutdown_timer = shutdown_timer + self._received = {} + self._in_use = 0 + self._receiving_task = None + self._attempts = 0 + self._initialized = False + self._lock = asyncio.Lock() + self._exit_task = None + self._open_subscriptions = 0 + self._options = options if options else {} + + async def __aenter__(self): + async with self._lock: + self._in_use += 1 + if self._exit_task: + self._exit_task.cancel() + if not self._initialized: + self._initialized = True + await self._connect() + self._receiving_task = asyncio.create_task(self._start_receiving()) + return self + + async def _connect(self): + self.ws = await asyncio.wait_for( + websockets.connect(self.ws_url, **self._options), timeout=10 + ) + + async def __aexit__(self, exc_type, exc_val, exc_tb): + async with self._lock: + self._in_use -= 1 + if self._exit_task is not None: + self._exit_task.cancel() + try: + await self._exit_task + except asyncio.CancelledError: + pass + if self._in_use == 0 and self.ws is not None: + self.id = 0 + 
self._open_subscriptions = 0 + self._exit_task = asyncio.create_task(self._exit_with_timer()) + + async def _exit_with_timer(self): + """ + Allows for graceful shutdown of websocket connection after specified number of seconds, allowing + for reuse of the websocket connection. + """ + try: + await asyncio.sleep(self.shutdown_timer) + await self.shutdown() + except asyncio.CancelledError: + pass + + async def shutdown(self): + async with self._lock: + try: + self._receiving_task.cancel() + await self._receiving_task + await self.ws.close() + except (AttributeError, asyncio.CancelledError): + pass + self.ws = None + self._initialized = False + self._receiving_task = None + self.id = 0 + + async def _recv(self) -> None: + try: + response = json.loads( + await cast(websockets.WebSocketClientProtocol, self.ws).recv() + ) + async with self._lock: + self._open_subscriptions -= 1 + if "id" in response: + self._received[response["id"]] = response + elif "params" in response: + self._received[response["params"]["subscription"]] = response + else: + raise KeyError(response) + except websockets.ConnectionClosed: + raise + except KeyError as e: + raise e + + async def _start_receiving(self): + try: + while True: + await self._recv() + except asyncio.CancelledError: + pass + except websockets.ConnectionClosed: + # TODO try reconnect, but only if it's needed + raise + + async def send(self, payload: dict) -> int: + """ + Sends a payload to the websocket connection. 
+ + :param payload: payload, generate a payload with the AsyncSubstrateInterface.make_payload method + """ + async with self._lock: + original_id = self.id + self.id += 1 + self._open_subscriptions += 1 + try: + await self.ws.send(json.dumps({**payload, **{"id": original_id}})) + return original_id + except websockets.ConnectionClosed: + raise + + async def retrieve(self, item_id: int) -> Optional[dict]: + """ + Retrieves a single item from received responses dict queue + + :param item_id: id of the item to retrieve + + :return: retrieved item + """ + while True: + async with self._lock: + if item_id in self._received: + return self._received.pop(item_id) + await asyncio.sleep(0.1) + + +class AsyncSubstrateInterface: + runtime = None + registry: Optional[PortableRegistry] = None + + def __init__( + self, + chain_endpoint: str, + use_remote_preset=False, + auto_discover=True, + auto_reconnect=True, + ss58_format=None, + type_registry=None, + chain_name=None, + ): + """ + The asyncio-compatible version of the subtensor interface commands we use in bittensor + """ + self.chain_endpoint = chain_endpoint + self.__chain = chain_name + self.ws = Websocket( + chain_endpoint, + options={ + "max_size": 2**32, + "read_limit": 2**16, + "write_limit": 2**16, + }, + ) + self._lock = asyncio.Lock() + self.last_block_hash: Optional[str] = None + self.config = { + "use_remote_preset": use_remote_preset, + "auto_discover": auto_discover, + "auto_reconnect": auto_reconnect, + "rpc_methods": None, + "strict_scale_decode": True, + } + self.initialized = False + self._forgettable_task = None + self.ss58_format = ss58_format + self.type_registry = type_registry + self.runtime_cache = RuntimeCache() + self.block_id: Optional[int] = None + self.runtime_version = None + self.runtime_config = RuntimeConfigurationObject() + self.__metadata_cache = {} + self.type_registry_preset = None + self.transaction_version = None + self.metadata = None + self.metadata_version_hex = "0x0f000000" # v15 + + 
async def __aenter__(self): + await self.initialize() + + async def initialize(self): + """ + Initialize the connection to the chain. + """ + async with self._lock: + if not self.initialized: + if not self.__chain: + chain = await self.rpc_request("system_chain", []) + self.__chain = chain.get("result") + self.reload_type_registry() + await asyncio.gather(self.load_registry(), self.init_runtime(None)) + self.initialized = True + + async def __aexit__(self, exc_type, exc_val, exc_tb): + pass + + @property + def chain(self): + """ + Returns the substrate chain currently associated with object + """ + return self.__chain + + async def get_storage_item(self, module: str, storage_function: str): + if not self.metadata: + await self.init_runtime() + metadata_pallet = self.metadata.get_metadata_pallet(module) + storage_item = metadata_pallet.get_storage_function(storage_function) + return storage_item + + async def _get_current_block_hash( + self, block_hash: Optional[str], reuse: bool + ) -> Optional[str]: + if block_hash: + self.last_block_hash = block_hash + return block_hash + elif reuse: + if self.last_block_hash: + return self.last_block_hash + return block_hash + + async def load_registry(self): + metadata_rpc_result = await self.rpc_request( + "state_call", + ["Metadata_metadata_at_version", self.metadata_version_hex], + ) + metadata_option_hex_str = metadata_rpc_result["result"] + metadata_option_bytes = bytes.fromhex(metadata_option_hex_str[2:]) + metadata_v15 = MetadataV15.decode_from_metadata_option(metadata_option_bytes) + self.registry = PortableRegistry.from_metadata_v15(metadata_v15) + + async def decode_scale( + self, type_string, scale_bytes: bytes, return_scale_obj=False + ): + """ + Helper function to decode arbitrary SCALE-bytes (e.g. 0x02000000) according to given RUST type_string + (e.g. BlockNumber). 
The relevant versioning information of the type (if defined) will be applied if block_hash + is set + + Parameters + ---------- + type_string + scale_bytes + block_hash + return_scale_obj: if True the SCALE object itself is returned, otherwise the serialized dict value of the object + + Returns + ------- + + """ + if scale_bytes == b"\x00": + obj = None + else: + obj = decode_by_type_string(type_string, self.registry, scale_bytes) + return obj + + async def init_runtime( + self, block_hash: Optional[str] = None, block_id: Optional[int] = None + ) -> Runtime: + """ + This method is used by all other methods that deals with metadata and types defined in the type registry. + It optionally retrieves the block_hash when block_id is given and sets the applicable metadata for that + block_hash. Also, it applies all the versioned types at the time of the block_hash. + + Because parsing of metadata and type registry is quite heavy, the result will be cached per runtime id. + In the future there could be support for caching backends like Redis to make this cache more persistent. 
+ + :param block_hash: optional block hash, should not be specified if block_id is + :param block_id: optional block id, should not be specified if block_hash is + + :returns: Runtime object + """ + + async def get_runtime(block_hash, block_id) -> Runtime: + # Check if runtime state already set to current block + if (block_hash and block_hash == self.last_block_hash) or ( + block_id and block_id == self.block_id + ): + return Runtime( + self.chain, + self.runtime_config, + self.metadata, + self.type_registry, + ) + + if block_id is not None: + block_hash = await self.get_block_hash(block_id) + + if not block_hash: + block_hash = await self.get_chain_head() + + self.last_block_hash = block_hash + self.block_id = block_id + + # In fact calls and storage functions are decoded against runtime of previous block, therefor retrieve + # metadata and apply type registry of runtime of parent block + block_header = await self.rpc_request( + "chain_getHeader", [self.last_block_hash] + ) + + if block_header["result"] is None: + raise SubstrateRequestException( + f'Block not found for "{self.last_block_hash}"' + ) + + parent_block_hash: str = block_header["result"]["parentHash"] + + if ( + parent_block_hash + == "0x0000000000000000000000000000000000000000000000000000000000000000" + ): + runtime_block_hash = self.last_block_hash + else: + runtime_block_hash = parent_block_hash + + runtime_info = await self.get_block_runtime_version( + block_hash=runtime_block_hash + ) + + if runtime_info is None: + raise SubstrateRequestException( + f"No runtime information for block '{block_hash}'" + ) + + # Check if runtime state already set to current block + if runtime_info.get("specVersion") == self.runtime_version: + return Runtime( + self.chain, + self.runtime_config, + self.metadata, + self.type_registry, + ) + + self.runtime_version = runtime_info.get("specVersion") + self.transaction_version = runtime_info.get("transactionVersion") + + if not self.metadata: + if self.runtime_version in 
self.__metadata_cache: + # Get metadata from cache + # self.debug_message('Retrieved metadata for {} from memory'.format(self.runtime_version)) + self.metadata = self.__metadata_cache[self.runtime_version] + else: + self.metadata = await self.get_block_metadata( + block_hash=runtime_block_hash, decode=True + ) + # self.debug_message('Retrieved metadata for {} from Substrate node'.format(self.runtime_version)) + + # Update metadata cache + self.__metadata_cache[self.runtime_version] = self.metadata + + # Update type registry + self.reload_type_registry(use_remote_preset=False, auto_discover=True) + + if self.implements_scaleinfo: + # self.debug_message('Add PortableRegistry from metadata to type registry') + self.runtime_config.add_portable_registry(self.metadata) + + # Set active runtime version + self.runtime_config.set_active_spec_version_id(self.runtime_version) + + # Check and apply runtime constants + ss58_prefix_constant = await self.get_constant( + "System", "SS58Prefix", block_hash=block_hash + ) + + if ss58_prefix_constant: + self.ss58_format = ss58_prefix_constant + + # Set runtime compatibility flags + try: + _ = self.runtime_config.create_scale_object( + "sp_weights::weight_v2::Weight" + ) + self.config["is_weight_v2"] = True + self.runtime_config.update_type_registry_types( + {"Weight": "sp_weights::weight_v2::Weight"} + ) + except NotImplementedError: + self.config["is_weight_v2"] = False + self.runtime_config.update_type_registry_types({"Weight": "WeightV1"}) + return Runtime( + self.chain, + self.runtime_config, + self.metadata, + self.type_registry, + ) + + if block_id and block_hash: + raise ValueError("Cannot provide block_hash and block_id at the same time") + + if not (runtime := self.runtime_cache.retrieve(block_id, block_hash)): + runtime = await get_runtime(block_hash, block_id) + self.runtime_cache.add_item(block_id, block_hash, runtime) + return runtime + + def reload_type_registry( + self, use_remote_preset: bool = True, auto_discover: 
bool = True + ): + """ + Reload type registry and preset used to instantiate the SubtrateInterface object. Useful to periodically apply + changes in type definitions when a runtime upgrade occurred + + Parameters + ---------- + use_remote_preset: When True preset is downloaded from Github master, otherwise use files from local installed scalecodec package + auto_discover + + Returns + ------- + + """ + self.runtime_config.clear_type_registry() + + self.runtime_config.implements_scale_info = self.implements_scaleinfo + + # Load metadata types in runtime configuration + self.runtime_config.update_type_registry(load_type_registry_preset(name="core")) + self.apply_type_registry_presets( + use_remote_preset=use_remote_preset, auto_discover=auto_discover + ) + + def apply_type_registry_presets( + self, use_remote_preset: bool = True, auto_discover: bool = True + ): + if self.type_registry_preset is not None: + # Load type registry according to preset + type_registry_preset_dict = load_type_registry_preset( + name=self.type_registry_preset, use_remote_preset=use_remote_preset + ) + + if not type_registry_preset_dict: + raise ValueError( + f"Type registry preset '{self.type_registry_preset}' not found" + ) + + elif auto_discover: + # Try to auto discover type registry preset by chain name + type_registry_name = self.chain.lower().replace(" ", "-") + try: + type_registry_preset_dict = load_type_registry_preset( + type_registry_name + ) + # self.debug_message(f"Auto set type_registry_preset to {type_registry_name} ...") + self.type_registry_preset = type_registry_name + except ValueError: + type_registry_preset_dict = None + + else: + type_registry_preset_dict = None + + if type_registry_preset_dict: + # Load type registries in runtime configuration + if self.implements_scaleinfo is False: + # Only runtime with no embedded types in metadata need the default set of explicit defined types + self.runtime_config.update_type_registry( + load_type_registry_preset( + "legacy", 
use_remote_preset=use_remote_preset + ) + ) + + if self.type_registry_preset != "legacy": + self.runtime_config.update_type_registry(type_registry_preset_dict) + + if self.type_registry: + # Load type registries in runtime configuration + self.runtime_config.update_type_registry(self.type_registry) + + @property + def implements_scaleinfo(self) -> Optional[bool]: + """ + Returns True if current runtime implementation a `PortableRegistry` (`MetadataV14` and higher) + + Returns + ------- + bool + """ + if self.metadata: + return self.metadata.portable_registry is not None + else: + return None + + async def create_storage_key( + self, + pallet: str, + storage_function: str, + params: Optional[list] = None, + block_hash: str = None, + ) -> StorageKey: + """ + Create a `StorageKey` instance providing storage function details. See `subscribe_storage()`. + + Parameters + ---------- + pallet: name of pallet + storage_function: name of storage function + params: Optional list of parameters in case of a Mapped storage function + + Returns + ------- + StorageKey + """ + await self.init_runtime(block_hash=block_hash) + + return StorageKey.create_from_storage_function( + pallet, + storage_function, + params, + runtime_config=self.runtime_config, + metadata=self.metadata, + ) + + async def _get_block_handler( + self, + block_hash: str, + ignore_decoding_errors: bool = False, + include_author: bool = False, + header_only: bool = False, + finalized_only: bool = False, + subscription_handler: Optional[Callable] = None, + ): + try: + await self.init_runtime(block_hash=block_hash) + except BlockNotFound: + return None + + async def decode_block(block_data, block_data_hash=None): + if block_data: + if block_data_hash: + block_data["header"]["hash"] = block_data_hash + + if type(block_data["header"]["number"]) is str: + # Convert block number from hex (backwards compatibility) + block_data["header"]["number"] = int( + block_data["header"]["number"], 16 + ) + + extrinsic_cls = 
self.runtime_config.get_decoder_class("Extrinsic") + + if "extrinsics" in block_data: + for idx, extrinsic_data in enumerate(block_data["extrinsics"]): + extrinsic_decoder = extrinsic_cls( + data=ScaleBytes(extrinsic_data), + metadata=self.metadata, + runtime_config=self.runtime_config, + ) + try: + extrinsic_decoder.decode(check_remaining=True) + block_data["extrinsics"][idx] = extrinsic_decoder + + except Exception as e: + if not ignore_decoding_errors: + raise + block_data["extrinsics"][idx] = None + + for idx, log_data in enumerate(block_data["header"]["digest"]["logs"]): + if type(log_data) is str: + # Convert digest log from hex (backwards compatibility) + try: + log_digest_cls = self.runtime_config.get_decoder_class( + "sp_runtime::generic::digest::DigestItem" + ) + + if log_digest_cls is None: + raise NotImplementedError( + "No decoding class found for 'DigestItem'" + ) + + log_digest = log_digest_cls(data=ScaleBytes(log_data)) + log_digest.decode( + check_remaining=self.config.get("strict_scale_decode") + ) + + block_data["header"]["digest"]["logs"][idx] = log_digest + + if include_author and "PreRuntime" in log_digest.value: + if self.implements_scaleinfo: + engine = bytes(log_digest[1][0]) + # Retrieve validator set + parent_hash = block_data["header"]["parentHash"] + validator_set = await self.query( + "Session", "Validators", block_hash=parent_hash + ) + + if engine == b"BABE": + babe_predigest = ( + self.runtime_config.create_scale_object( + type_string="RawBabePreDigest", + data=ScaleBytes( + bytes(log_digest[1][1]) + ), + ) + ) + + babe_predigest.decode( + check_remaining=self.config.get( + "strict_scale_decode" + ) + ) + + rank_validator = babe_predigest[1].value[ + "authority_index" + ] + + block_author = validator_set[rank_validator] + block_data["author"] = block_author.value + + elif engine == b"aura": + aura_predigest = ( + self.runtime_config.create_scale_object( + type_string="RawAuraPreDigest", + data=ScaleBytes( + bytes(log_digest[1][1]) + 
), + ) + ) + + aura_predigest.decode(check_remaining=True) + + rank_validator = aura_predigest.value[ + "slot_number" + ] % len(validator_set) + + block_author = validator_set[rank_validator] + block_data["author"] = block_author.value + else: + raise NotImplementedError( + f"Cannot extract author for engine {log_digest.value['PreRuntime'][0]}" + ) + else: + if ( + log_digest.value["PreRuntime"]["engine"] + == "BABE" + ): + validator_set = await self.query( + "Session", + "Validators", + block_hash=block_hash, + ) + rank_validator = log_digest.value["PreRuntime"][ + "data" + ]["authority_index"] + + block_author = validator_set.elements[ + rank_validator + ] + block_data["author"] = block_author.value + else: + raise NotImplementedError( + f"Cannot extract author for engine {log_digest.value['PreRuntime']['engine']}" + ) + + except Exception: + if not ignore_decoding_errors: + raise + block_data["header"]["digest"]["logs"][idx] = None + + return block_data + + if callable(subscription_handler): + rpc_method_prefix = "Finalized" if finalized_only else "New" + + async def result_handler(message, update_nr, subscription_id): + new_block = await decode_block({"header": message["params"]["result"]}) + + subscription_result = subscription_handler( + new_block, update_nr, subscription_id + ) + + if subscription_result is not None: + # Handler returned end result: unsubscribe from further updates + self._forgettable_task = asyncio.create_task( + self.rpc_request( + f"chain_unsubscribe{rpc_method_prefix}Heads", + [subscription_id], + ) + ) + + return subscription_result + + result = await self._make_rpc_request( + [ + self.make_payload( + "_get_block_handler", + f"chain_subscribe{rpc_method_prefix}Heads", + [], + ) + ], + result_handler=result_handler, + ) + + return result + + else: + if header_only: + response = await self.rpc_request("chain_getHeader", [block_hash]) + return await decode_block( + {"header": response["result"]}, block_data_hash=block_hash + ) + + else: + 
response = await self.rpc_request("chain_getBlock", [block_hash]) + return await decode_block( + response["result"]["block"], block_data_hash=block_hash + ) + + async def get_block( + self, + block_hash: Optional[str] = None, + block_number: Optional[int] = None, + ignore_decoding_errors: bool = False, + include_author: bool = False, + finalized_only: bool = False, + ) -> Optional[dict]: + """ + Retrieves a block and decodes its containing extrinsics and log digest items. If `block_hash` and `block_number` + is omitted the chain tip will be retrieve, or the finalized head if `finalized_only` is set to true. + + Either `block_hash` or `block_number` should be set, or both omitted. + + Parameters + ---------- + block_hash: the hash of the block to be retrieved + block_number: the block number to retrieved + ignore_decoding_errors: When set this will catch all decoding errors, set the item to None and continue decoding + include_author: This will retrieve the block author from the validator set and add to the result + finalized_only: when no `block_hash` or `block_number` is set, this will retrieve the finalized head + + Returns + ------- + A dict containing the extrinsic and digest logs data + """ + if block_hash and block_number: + raise ValueError("Either block_hash or block_number should be be set") + + if block_number is not None: + block_hash = await self.get_block_hash(block_number) + + if block_hash is None: + return + + if block_hash and finalized_only: + raise ValueError( + "finalized_only cannot be True when block_hash is provided" + ) + + if block_hash is None: + # Retrieve block hash + if finalized_only: + block_hash = await self.get_chain_finalised_head() + else: + block_hash = await self.get_chain_head() + + return await self._get_block_handler( + block_hash=block_hash, + ignore_decoding_errors=ignore_decoding_errors, + header_only=False, + include_author=include_author, + ) + + async def get_events(self, block_hash: Optional[str] = None) -> list: + """ 
+ Convenience method to get events for a certain block (storage call for module 'System' and function 'Events') + + Parameters + ---------- + block_hash + + Returns + ------- + list + """ + + def convert_event_data(data): + # Extract phase information + phase_key, phase_value = next(iter(data["phase"].items())) + try: + extrinsic_idx = phase_value[0] + except IndexError: + extrinsic_idx = None + + # Extract event details + module_id, event_data = next(iter(data["event"].items())) + event_id, attributes_data = next(iter(event_data[0].items())) + + # Convert class and pays_fee dictionaries to their string equivalents if they exist + attributes = attributes_data + if isinstance(attributes, dict): + for key, value in attributes.items(): + if isinstance(value, dict): + # Convert nested single-key dictionaries to their keys as strings + sub_key = next(iter(value.keys())) + if value[sub_key] == (): + attributes[key] = sub_key + + # Create the converted dictionary + converted = { + "phase": phase_key, + "extrinsic_idx": extrinsic_idx, + "event": { + "module_id": module_id, + "event_id": event_id, + "attributes": attributes, + }, + "topics": list(data["topics"]), # Convert topics tuple to a list + } + + return converted + + events = [] + + if not block_hash: + block_hash = await self.get_chain_head() + + storage_obj = await self.query( + module="System", storage_function="Events", block_hash=block_hash + ) + if storage_obj: + for item in list(storage_obj): + # print("item!", item) + events.append(convert_event_data(item)) + # events += list(storage_obj) + return events + + async def get_block_runtime_version(self, block_hash: str) -> dict: + """ + Retrieve the runtime version id of given block_hash + """ + response = await self.rpc_request("state_getRuntimeVersion", [block_hash]) + return response.get("result") + + async def get_block_metadata( + self, block_hash: Optional[str] = None, decode: bool = True + ) -> Union[dict, ScaleType]: + """ + A pass-though to existing 
JSONRPC method `state_getMetadata`. + + Parameters + ---------- + block_hash + decode: True for decoded version + + Returns + ------- + + """ + params = None + if decode and not self.runtime_config: + raise ValueError( + "Cannot decode runtime configuration without a supplied runtime_config" + ) + + if block_hash: + params = [block_hash] + response = await self.rpc_request("state_getMetadata", params) + + if "error" in response: + raise SubstrateRequestException(response["error"]["message"]) + + if response.get("result") and decode: + metadata_decoder = self.runtime_config.create_scale_object( + "MetadataVersioned", data=ScaleBytes(response.get("result")) + ) + metadata_decoder.decode() + + return metadata_decoder + + return response + + async def _preprocess( + self, + query_for: Optional[list], + block_hash: Optional[str], + storage_function: str, + module: str, + ) -> Preprocessed: + """ + Creates a Preprocessed data object for passing to `_make_rpc_request` + """ + params = query_for if query_for else [] + # Search storage call in metadata + metadata_pallet = self.metadata.get_metadata_pallet(module) + + if not metadata_pallet: + raise SubstrateRequestException(f'Pallet "{module}" not found') + + storage_item = metadata_pallet.get_storage_function(storage_function) + + if not metadata_pallet or not storage_item: + raise SubstrateRequestException( + f'Storage function "{module}.{storage_function}" not found' + ) + + # SCALE type string of value + param_types = storage_item.get_params_type_string() + value_scale_type = storage_item.get_value_type_string() + + if len(params) != len(param_types): + raise ValueError( + f"Storage function requires {len(param_types)} parameters, {len(params)} given" + ) + + storage_key = StorageKey.create_from_storage_function( + module, + storage_item.value["name"], + params, + runtime_config=self.runtime_config, + metadata=self.metadata, + ) + method = "state_getStorageAt" + return Preprocessed( + str(query_for), + method, + 
[storage_key.to_hex(), block_hash], + value_scale_type, + storage_item, + ) + + async def _process_response( + self, + response: dict, + subscription_id: Union[int, str], + value_scale_type: Optional[str], + storage_item: Optional[ScaleType] = None, + runtime: Optional[Runtime] = None, + result_handler: Optional[ResultHandler] = None, + ) -> tuple[Union[ScaleType, dict], bool]: + """ + Processes the RPC call response by decoding it, returning it as is, or setting a handler for subscriptions, + depending on the specific call. + + :param response: the RPC call response + :param subscription_id: the subscription id for subscriptions, used only for subscriptions with a result handler + :param value_scale_type: Scale Type string used for decoding ScaleBytes results + :param storage_item: The ScaleType object used for decoding ScaleBytes results + :param runtime: the runtime object, used for decoding ScaleBytes results + :param result_handler: the result handler coroutine used for handling longer-running subscriptions + + :return: (decoded response, completion) + """ + result: Union[dict, ScaleType] = response + if value_scale_type and isinstance(storage_item, ScaleType): + if not runtime: + async with self._lock: + runtime = Runtime( + self.chain, + self.runtime_config, + self.metadata, + self.type_registry, + ) + if response.get("result") is not None: + query_value = response.get("result") + elif storage_item.value["modifier"] == "Default": + # Fallback to default value of storage function if no result + query_value = storage_item.value_object["default"].value_object + else: + # No result is interpreted as an Option<...> result + value_scale_type = f"Option<{value_scale_type}>" + query_value = storage_item.value_object["default"].value_object + if isinstance(query_value, str): + q = bytes.fromhex(query_value[2:]) + elif isinstance(query_value, bytearray): + q = bytes(query_value) + else: + q = query_value + obj = await self.decode_scale(value_scale_type, q, True) + 
result = obj + if asyncio.iscoroutinefunction(result_handler): + # For multipart responses as a result of subscriptions. + message, bool_result = await result_handler(response, subscription_id) + return message, bool_result + return result, True + + async def _make_rpc_request( + self, + payloads: list[dict], + value_scale_type: Optional[str] = None, + storage_item: Optional[ScaleType] = None, + runtime: Optional[Runtime] = None, + result_handler: Optional[ResultHandler] = None, + ) -> RequestManager.RequestResults: + request_manager = RequestManager(payloads) + + subscription_added = False + + async with self.ws as ws: + for item in payloads: + item_id = await ws.send(item["payload"]) + request_manager.add_request(item_id, item["id"]) + + while True: + for item_id in request_manager.response_map.keys(): + if ( + item_id not in request_manager.responses + or asyncio.iscoroutinefunction(result_handler) + ): + if response := await ws.retrieve(item_id): + if ( + asyncio.iscoroutinefunction(result_handler) + and not subscription_added + ): + # handles subscriptions, overwrites the previous mapping of {item_id : payload_id} + # with {subscription_id : payload_id} + try: + item_id = request_manager.overwrite_request( + item_id, response["result"] + ) + except KeyError: + raise SubstrateRequestException(str(response)) + decoded_response, complete = await self._process_response( + response, + item_id, + value_scale_type, + storage_item, + runtime, + result_handler, + ) + request_manager.add_response( + item_id, decoded_response, complete + ) + if ( + asyncio.iscoroutinefunction(result_handler) + and not subscription_added + ): + subscription_added = True + break + + if request_manager.is_complete: + break + + return request_manager.get_results() + + @staticmethod + def make_payload(id_: str, method: str, params: list) -> dict: + """ + Creates a payload for making an rpc_request with _make_rpc_request + + :param id_: a unique name you would like to give to this request + 
:param method: the method in the RPC request + :param params: the params in the RPC request + + :return: the payload dict + """ + return { + "id": id_, + "payload": {"jsonrpc": "2.0", "method": method, "params": params}, + } + + async def rpc_request( + self, + method: str, + params: Optional[list], + block_hash: Optional[str] = None, + reuse_block_hash: bool = False, + ) -> Any: + """ + Makes an RPC request to the subtensor. Use this only if ``self.query`` and ``self.query_multiple`` and + ``self.query_map`` do not meet your needs. + + :param method: str the method in the RPC request + :param params: list of the params in the RPC request + :param block_hash: optional str, the hash of the block — only supply this if not supplying the block + hash in the params, and not reusing the block hash + :param reuse_block_hash: optional bool, whether to reuse the block hash in the params — only mark as True + if not supplying the block hash in the params, or via the `block_hash` parameter + + :return: the response from the RPC request + """ + block_hash = await self._get_current_block_hash(block_hash, reuse_block_hash) + params = params or [] + payload_id = f"{method}{random.randint(0, 7000)}" + payloads = [ + self.make_payload( + payload_id, + method, + params + [block_hash] if block_hash else params, + ) + ] + runtime = Runtime( + self.chain, + self.runtime_config, + self.metadata, + self.type_registry, + ) + result = await self._make_rpc_request(payloads, runtime=runtime) + if "error" in result[payload_id][0]: + raise SubstrateRequestException(result[payload_id][0]["error"]["message"]) + if "result" in result[payload_id][0]: + return result[payload_id][0] + else: + raise SubstrateRequestException(result[payload_id][0]) + + async def get_block_hash(self, block_id: int) -> str: + return (await self.rpc_request("chain_getBlockHash", [block_id]))["result"] + + async def get_chain_head(self) -> str: + result = await self._make_rpc_request( + [ + self.make_payload( + 
"rpc_request", + "chain_getHead", + [], + ) + ], + runtime=Runtime( + self.chain, + self.runtime_config, + self.metadata, + self.type_registry, + ), + ) + self.last_block_hash = result["rpc_request"][0]["result"] + return result["rpc_request"][0]["result"] + + async def compose_call( + self, + call_module: str, + call_function: str, + call_params: Optional[dict] = None, + block_hash: Optional[str] = None, + ) -> GenericCall: + """ + Composes a call payload which can be used in an extrinsic. + + :param call_module: Name of the runtime module e.g. Balances + :param call_function: Name of the call function e.g. transfer + :param call_params: This is a dict containing the params of the call. e.g. + `{'dest': 'EaG2CRhJWPb7qmdcJvy3LiWdh26Jreu9Dx6R1rXxPmYXoDk', 'value': 1000000000000}` + :param block_hash: Use metadata at given block_hash to compose call + + :return: A composed call + """ + if call_params is None: + call_params = {} + + await self.init_runtime(block_hash=block_hash) + + call = self.runtime_config.create_scale_object( + type_string="Call", metadata=self.metadata + ) + + call.encode( + { + "call_module": call_module, + "call_function": call_function, + "call_args": call_params, + } + ) + + return call + + async def query_multiple( + self, + params: list, + storage_function: str, + module: str, + block_hash: Optional[str] = None, + reuse_block_hash: bool = False, + ) -> dict[str, ScaleType]: + """ + Queries the subtensor. 
Only use this when making multiple queries, else use ``self.query`` + """ + # By allowing for specifying the block hash, users, if they have multiple query types they want + # to do, can simply query the block hash first, and then pass multiple query_subtensor calls + # into an asyncio.gather, with the specified block hash + block_hash = await self._get_current_block_hash(block_hash, reuse_block_hash) + if block_hash: + self.last_block_hash = block_hash + runtime = await self.init_runtime(block_hash=block_hash) + preprocessed: tuple[Preprocessed] = await asyncio.gather( + *[ + self._preprocess([x], block_hash, storage_function, module) + for x in params + ] + ) + all_info = [ + self.make_payload(item.queryable, item.method, item.params) + for item in preprocessed + ] + # These will always be the same throughout the preprocessed list, so we just grab the first one + value_scale_type = preprocessed[0].value_scale_type + storage_item = preprocessed[0].storage_item + + responses = await self._make_rpc_request( + all_info, value_scale_type, storage_item, runtime + ) + return { + param: responses[p.queryable][0] for (param, p) in zip(params, preprocessed) + } + + async def query_multi( + self, storage_keys: list[StorageKey], block_hash: Optional[str] = None + ) -> list: + """ + Query multiple storage keys in one request. 
+ + Example: + + ``` + storage_keys = [ + substrate.create_storage_key( + "System", "Account", ["F4xQKRUagnSGjFqafyhajLs94e7Vvzvr8ebwYJceKpr8R7T"] + ), + substrate.create_storage_key( + "System", "Account", ["GSEX8kR4Kz5UZGhvRUCJG93D5hhTAoVZ5tAe6Zne7V42DSi"] + ) + ] + + result = substrate.query_multi(storage_keys) + ``` + + Parameters + ---------- + storage_keys: list of StorageKey objects + block_hash: Optional block_hash of state snapshot + + Returns + ------- + list of `(storage_key, scale_obj)` tuples + """ + + await self.init_runtime(block_hash=block_hash) + + # Retrieve corresponding value + response = await self.rpc_request( + "state_queryStorageAt", [[s.to_hex() for s in storage_keys], block_hash] + ) + + if "error" in response: + raise SubstrateRequestException(response["error"]["message"]) + + result = [] + + storage_key_map = {s.to_hex(): s for s in storage_keys} + + for result_group in response["result"]: + for change_storage_key, change_data in result_group["changes"]: + # Decode result for specified storage_key + storage_key = storage_key_map[change_storage_key] + if change_data is None: + change_data = b"\x00" + else: + change_data = bytes.fromhex(change_data[2:]) + result.append( + ( + storage_key, + await self.decode_scale( + storage_key.value_scale_type, change_data + ), + ) + ) + + return result + + async def create_scale_object( + self, + type_string: str, + data: Optional[ScaleBytes] = None, + block_hash: Optional[str] = None, + **kwargs, + ) -> "ScaleType": + """ + Convenience method to create a SCALE object of type `type_string`, this will initialize the runtime + automatically at moment of `block_hash`, or chain tip if omitted. 
+ + :param type_string: str Name of SCALE type to create + :param data: ScaleBytes Optional ScaleBytes to decode + :param block_hash: Optional block hash for moment of decoding, when omitted the chain tip will be used + :param kwargs: keyword args for the Scale Type constructor + + :return: The created Scale Type object + """ + runtime = await self.init_runtime(block_hash=block_hash) + if "metadata" not in kwargs: + kwargs["metadata"] = runtime.metadata + + return runtime.runtime_config.create_scale_object( + type_string, data=data, **kwargs + ) + + async def generate_signature_payload( + self, + call: GenericCall, + era=None, + nonce: int = 0, + tip: int = 0, + tip_asset_id: Optional[int] = None, + include_call_length: bool = False, + ) -> ScaleBytes: + # Retrieve genesis hash + genesis_hash = await self.get_block_hash(0) + + if not era: + era = "00" + + if era == "00": + # Immortal extrinsic + block_hash = genesis_hash + else: + # Determine mortality of extrinsic + era_obj = self.runtime_config.create_scale_object("Era") + + if isinstance(era, dict) and "current" not in era and "phase" not in era: + raise ValueError( + 'The era dict must contain either "current" or "phase" element to encode a valid era' + ) + + era_obj.encode(era) + block_hash = await self.get_block_hash( + block_id=era_obj.birth(era.get("current")) + ) + + # Create signature payload + signature_payload = self.runtime_config.create_scale_object( + "ExtrinsicPayloadValue" + ) + + # Process signed extensions in metadata + if "signed_extensions" in self.metadata[1][1]["extrinsic"]: + # Base signature payload + signature_payload.type_mapping = [["call", "CallBytes"]] + + # Add signed extensions to payload + signed_extensions = self.metadata.get_signed_extensions() + + if "CheckMortality" in signed_extensions: + signature_payload.type_mapping.append( + ["era", signed_extensions["CheckMortality"]["extrinsic"]] + ) + + if "CheckEra" in signed_extensions: + signature_payload.type_mapping.append( + 
["era", signed_extensions["CheckEra"]["extrinsic"]] + ) + + if "CheckNonce" in signed_extensions: + signature_payload.type_mapping.append( + ["nonce", signed_extensions["CheckNonce"]["extrinsic"]] + ) + + if "ChargeTransactionPayment" in signed_extensions: + signature_payload.type_mapping.append( + ["tip", signed_extensions["ChargeTransactionPayment"]["extrinsic"]] + ) + + if "ChargeAssetTxPayment" in signed_extensions: + signature_payload.type_mapping.append( + ["asset_id", signed_extensions["ChargeAssetTxPayment"]["extrinsic"]] + ) + + if "CheckMetadataHash" in signed_extensions: + signature_payload.type_mapping.append( + ["mode", signed_extensions["CheckMetadataHash"]["extrinsic"]] + ) + + if "CheckSpecVersion" in signed_extensions: + signature_payload.type_mapping.append( + [ + "spec_version", + signed_extensions["CheckSpecVersion"]["additional_signed"], + ] + ) + + if "CheckTxVersion" in signed_extensions: + signature_payload.type_mapping.append( + [ + "transaction_version", + signed_extensions["CheckTxVersion"]["additional_signed"], + ] + ) + + if "CheckGenesis" in signed_extensions: + signature_payload.type_mapping.append( + [ + "genesis_hash", + signed_extensions["CheckGenesis"]["additional_signed"], + ] + ) + + if "CheckMortality" in signed_extensions: + signature_payload.type_mapping.append( + [ + "block_hash", + signed_extensions["CheckMortality"]["additional_signed"], + ] + ) + + if "CheckEra" in signed_extensions: + signature_payload.type_mapping.append( + ["block_hash", signed_extensions["CheckEra"]["additional_signed"]] + ) + + if "CheckMetadataHash" in signed_extensions: + signature_payload.type_mapping.append( + [ + "metadata_hash", + signed_extensions["CheckMetadataHash"]["additional_signed"], + ] + ) + + if include_call_length: + length_obj = self.runtime_config.create_scale_object("Bytes") + call_data = str(length_obj.encode(str(call.data))) + + else: + call_data = str(call.data) + + payload_dict = { + "call": call_data, + "era": era, + "nonce": 
nonce, + "tip": tip, + "spec_version": self.runtime_version, + "genesis_hash": genesis_hash, + "block_hash": block_hash, + "transaction_version": self.transaction_version, + "asset_id": {"tip": tip, "asset_id": tip_asset_id}, + "metadata_hash": None, + "mode": "Disabled", + } + + signature_payload.encode(payload_dict) + + if signature_payload.data.length > 256: + return ScaleBytes( + data=blake2b(signature_payload.data.data, digest_size=32).digest() + ) + + return signature_payload.data + + async def create_signed_extrinsic( + self, + call: GenericCall, + keypair: Keypair, + era: Optional[dict] = None, + nonce: Optional[int] = None, + tip: int = 0, + tip_asset_id: Optional[int] = None, + signature: Optional[Union[bytes, str]] = None, + ) -> "GenericExtrinsic": + """ + Creates an extrinsic signed by given account details + + :param call: GenericCall to create extrinsic for + :param keypair: Keypair used to sign the extrinsic + :param era: Specify mortality in blocks in follow format: + {'period': [amount_blocks]} If omitted the extrinsic is immortal + :param nonce: nonce to include in extrinsics, if omitted the current nonce is retrieved on-chain + :param tip: The tip for the block author to gain priority during network congestion + :param tip_asset_id: Optional asset ID with which to pay the tip + :param signature: Optionally provide signature if externally signed + + :return: The signed Extrinsic + """ + await self.init_runtime() + + # Check requirements + if not isinstance(call, GenericCall): + raise TypeError("'call' must be of type Call") + + # Check if extrinsic version is supported + if self.metadata[1][1]["extrinsic"]["version"] != 4: # type: ignore + raise NotImplementedError( + f"Extrinsic version {self.metadata[1][1]['extrinsic']['version']} not supported" # type: ignore + ) + + # Retrieve nonce + if nonce is None: + nonce = await self.get_account_nonce(keypair.ss58_address) or 0 + + # Process era + if era is None: + era = "00" + else: + if 
isinstance(era, dict) and "current" not in era and "phase" not in era: + # Retrieve current block id + era["current"] = await self.get_block_number( + await self.get_chain_finalised_head() + ) + + if signature is not None: + if isinstance(signature, str) and signature[0:2] == "0x": + signature = bytes.fromhex(signature[2:]) + + # Check if signature is a MultiSignature and contains signature version + if len(signature) == 65: + signature_version = signature[0] + signature = signature[1:] + else: + signature_version = keypair.crypto_type + + else: + # Create signature payload + signature_payload = await self.generate_signature_payload( + call=call, era=era, nonce=nonce, tip=tip, tip_asset_id=tip_asset_id + ) + + # Set Signature version to crypto type of keypair + signature_version = keypair.crypto_type + + # Sign payload + signature = keypair.sign(signature_payload) + + # Create extrinsic + extrinsic = self.runtime_config.create_scale_object( + type_string="Extrinsic", metadata=self.metadata + ) + + value = { + "account_id": f"0x{keypair.public_key.hex()}", + "signature": f"0x{signature.hex()}", + "call_function": call.value["call_function"], + "call_module": call.value["call_module"], + "call_args": call.value["call_args"], + "nonce": nonce, + "era": era, + "tip": tip, + "asset_id": {"tip": tip, "asset_id": tip_asset_id}, + "mode": "Disabled", + } + + # Check if ExtrinsicSignature is MultiSignature, otherwise omit signature_version + signature_cls = self.runtime_config.get_decoder_class("ExtrinsicSignature") + if issubclass(signature_cls, self.runtime_config.get_decoder_class("Enum")): + value["signature_version"] = signature_version + + extrinsic.encode(value) + + return extrinsic + + async def get_chain_finalised_head(self): + """ + A pass-though to existing JSONRPC method `chain_getFinalizedHead` + + Returns + ------- + + """ + response = await self.rpc_request("chain_getFinalizedHead", []) + + if response is not None: + if "error" in response: + raise 
SubstrateRequestException(response["error"]["message"]) + + return response.get("result") + + async def runtime_call( + self, + api: str, + method: str, + params: Optional[Union[list, dict]] = None, + block_hash: Optional[str] = None, + ) -> ScaleType: + """ + Calls a runtime API method + + :param api: Name of the runtime API e.g. 'TransactionPaymentApi' + :param method: Name of the method e.g. 'query_fee_details' + :param params: List of parameters needed to call the runtime API + :param block_hash: Hash of the block at which to make the runtime API call + + :return: ScaleType from the runtime call + """ + await self.init_runtime() + + if params is None: + params = {} + + try: + runtime_call_def = self.runtime_config.type_registry["runtime_api"][api][ + "methods" + ][method] + runtime_api_types = self.runtime_config.type_registry["runtime_api"][ + api + ].get("types", {}) + except KeyError: + raise ValueError(f"Runtime API Call '{api}.{method}' not found in registry") + + if isinstance(params, list) and len(params) != len(runtime_call_def["params"]): + raise ValueError( + f"Number of parameter provided ({len(params)}) does not " + f"match definition {len(runtime_call_def['params'])}" + ) + + # Add runtime API types to registry + self.runtime_config.update_type_registry_types(runtime_api_types) + runtime = Runtime( + self.chain, + self.runtime_config, + self.metadata, + self.type_registry, + ) + + # Encode params + param_data = ScaleBytes(bytes()) + for idx, param in enumerate(runtime_call_def["params"]): + scale_obj = runtime.runtime_config.create_scale_object(param["type"]) + if isinstance(params, list): + param_data += scale_obj.encode(params[idx]) + else: + if param["name"] not in params: + raise ValueError(f"Runtime Call param '{param['name']}' is missing") + + param_data += scale_obj.encode(params[param["name"]]) + + # RPC request + result_data = await self.rpc_request( + "state_call", [f"{api}_{method}", str(param_data), block_hash] + ) + + # Decode result + 
# TODO update this to use bt-decode + result_obj = runtime.runtime_config.create_scale_object( + runtime_call_def["type"] + ) + result_obj.decode( + ScaleBytes(result_data["result"]), + check_remaining=self.config.get("strict_scale_decode"), + ) + + return result_obj + + async def get_account_nonce(self, account_address: str) -> int: + """ + Returns current nonce for given account address + + :param account_address: SS58 formatted address + + :return: Nonce for given account address + """ + nonce_obj = await self.runtime_call( + "AccountNonceApi", "account_nonce", [account_address] + ) + return nonce_obj.value + + async def get_metadata_constant(self, module_name, constant_name, block_hash=None): + """ + Retrieves the details of a constant for given module name, call function name and block_hash + (or chaintip if block_hash is omitted) + + Parameters + ---------- + module_name + constant_name + block_hash + + Returns + ------- + MetadataModuleConstants + """ + + # await self.init_runtime(block_hash=block_hash) + + for module in self.metadata.pallets: + if module_name == module.name and module.constants: + for constant in module.constants: + if constant_name == constant.value["name"]: + return constant + + async def get_constant( + self, + module_name: str, + constant_name: str, + block_hash: Optional[str] = None, + reuse_block_hash: bool = False, + ) -> Optional["ScaleType"]: + """ + Returns the decoded `ScaleType` object of the constant for given module name, call function name and block_hash + (or chaintip if block_hash is omitted) + + Parameters + ---------- + :param module_name: Name of the module to query + :param constant_name: Name of the constant to query + :param block_hash: Hash of the block at which to make the runtime API call + :param reuse_block_hash: Reuse last-used block hash if set to true + + :return: ScaleType from the runtime call + """ + block_hash = await self._get_current_block_hash(block_hash, reuse_block_hash) + constant = await 
self.get_metadata_constant( + module_name, constant_name, block_hash=block_hash + ) + if constant: + # Decode to ScaleType + return await self.decode_scale( + constant.type, + bytes(constant.constant_value), + return_scale_obj=True, + ) + else: + return None + + async def get_payment_info( + self, call: GenericCall, keypair: Keypair + ) -> dict[str, Any]: + """ + Retrieves fee estimation via RPC for given extrinsic + + Parameters + ---------- + call: Call object to estimate fees for + keypair: Keypair of the sender, does not have to include private key because no valid signature is required + + Returns + ------- + Dict with payment info + + E.g. `{'class': 'normal', 'partialFee': 151000000, 'weight': {'ref_time': 143322000}}` + + """ + + # Check requirements + if not isinstance(call, GenericCall): + raise TypeError("'call' must be of type Call") + + if not isinstance(keypair, Keypair): + raise TypeError("'keypair' must be of type Keypair") + + # No valid signature is required for fee estimation + signature = "0x" + "00" * 64 + + # Create extrinsic + extrinsic = await self.create_signed_extrinsic( + call=call, keypair=keypair, signature=signature + ) + extrinsic_len = self.runtime_config.create_scale_object("u32") + extrinsic_len.encode(len(extrinsic.data)) + + result = await self.runtime_call( + "TransactionPaymentApi", "query_info", [extrinsic, extrinsic_len] + ) + + return result.value + + async def query( + self, + module: str, + storage_function: str, + params: Optional[list] = None, + block_hash: Optional[str] = None, + raw_storage_key: Optional[bytes] = None, + subscription_handler=None, + reuse_block_hash: bool = False, + ) -> "ScaleType": + """ + Queries subtensor. This should only be used when making a single request. 
For multiple requests, + you should use ``self.query_multiple`` + """ + block_hash = await self._get_current_block_hash(block_hash, reuse_block_hash) + if block_hash: + self.last_block_hash = block_hash + runtime = await self.init_runtime(block_hash=block_hash) + preprocessed: Preprocessed = await self._preprocess( + params, block_hash, storage_function, module + ) + payload = [ + self.make_payload( + preprocessed.queryable, preprocessed.method, preprocessed.params + ) + ] + value_scale_type = preprocessed.value_scale_type + storage_item = preprocessed.storage_item + + responses = await self._make_rpc_request( + payload, + value_scale_type, + storage_item, + runtime, + result_handler=subscription_handler, + ) + return responses[preprocessed.queryable][0] + + async def query_map( + self, + module: str, + storage_function: str, + params: Optional[list] = None, + block_hash: Optional[str] = None, + max_results: Optional[int] = None, + start_key: Optional[str] = None, + page_size: int = 100, + ignore_decoding_errors: bool = False, + reuse_block_hash: bool = False, + ) -> "QueryMapResult": + """ + Iterates over all key-pairs located at the given module and storage_function. The storage + item must be a map. + + Example: + + ``` + result = await substrate.query_map('System', 'Account', max_results=100) + + async for account, account_info in result: + print(f"Free balance of account '{account.value}': {account_info.value['data']['free']}") + ``` + + Note: it is important that you do not use `for x in result.records`, as this will sidestep possible + pagination. You must do `async for x in result`. + + :param module: The module name in the metadata, e.g. System or Balances. + :param storage_function: The storage function name, e.g. Account or Locks. + :param params: The input parameters in case of for example a `DoubleMap` storage function + :param block_hash: Optional block hash for result at given block, when left to None the chain tip will be used. 
+ :param max_results: the maximum of results required, if set the query will stop fetching results when number is + reached + :param start_key: The storage key used as offset for the results, for pagination purposes + :param page_size: The results are fetched from the node RPC in chunks of this size + :param ignore_decoding_errors: When set this will catch all decoding errors, set the item to None and continue + decoding + :param reuse_block_hash: use True if you wish to make the query using the last-used block hash. Do not mark True + if supplying a block_hash + + :return: QueryMapResult object + """ + params = params or [] + block_hash = await self._get_current_block_hash(block_hash, reuse_block_hash) + if block_hash: + self.last_block_hash = block_hash + runtime = await self.init_runtime(block_hash=block_hash) + + metadata_pallet = runtime.metadata.get_metadata_pallet(module) + if not metadata_pallet: + raise ValueError(f'Pallet "{module}" not found') + storage_item = metadata_pallet.get_storage_function(storage_function) + + if not metadata_pallet or not storage_item: + raise ValueError( + f'Storage function "{module}.{storage_function}" not found' + ) + + value_type = storage_item.get_value_type_string() + param_types = storage_item.get_params_type_string() + key_hashers = storage_item.get_param_hashers() + + # Check MapType conditions + if len(param_types) == 0: + raise ValueError("Given storage function is not a map") + if len(params) > len(param_types) - 1: + raise ValueError( + f"Storage function map can accept max {len(param_types) - 1} parameters, {len(params)} given" + ) + + # Generate storage key prefix + storage_key = StorageKey.create_from_storage_function( + module, + storage_item.value["name"], + params, + runtime_config=runtime.runtime_config, + metadata=runtime.metadata, + ) + prefix = storage_key.to_hex() + + if not start_key: + start_key = prefix + + # Make sure if the max result is smaller than the page size, adjust the page size + if 
max_results is not None and max_results < page_size: + page_size = max_results + + # Retrieve storage keys + response = await self.rpc_request( + method="state_getKeysPaged", + params=[prefix, page_size, start_key, block_hash], + ) + + if "error" in response: + raise SubstrateRequestException(response["error"]["message"]) + + result_keys = response.get("result") + + result = [] + last_key = None + + def concat_hash_len(key_hasher: str) -> int: + """ + Helper function to avoid if statements + """ + if key_hasher == "Blake2_128Concat": + return 16 + elif key_hasher == "Twox64Concat": + return 8 + elif key_hasher == "Identity": + return 0 + else: + raise ValueError("Unsupported hash type") + + if len(result_keys) > 0: + last_key = result_keys[-1] + + # Retrieve corresponding value + response = await self.rpc_request( + method="state_queryStorageAt", params=[result_keys, block_hash] + ) + + if "error" in response: + raise SubstrateRequestException(response["error"]["message"]) + + for result_group in response["result"]: + for item in result_group["changes"]: + try: + # Determine type string + key_type_string = [] + for n in range(len(params), len(param_types)): + key_type_string.append( + f"[u8; {concat_hash_len(key_hashers[n])}]" + ) + key_type_string.append(param_types[n]) + + item_key_obj = await self.decode_scale( + type_string=f"({', '.join(key_type_string)})", + scale_bytes=bytes.fromhex(item[0][len(prefix) :]), + return_scale_obj=True, + ) + + # strip key_hashers to use as item key + if len(param_types) - len(params) == 1: + item_key = item_key_obj[1] + else: + item_key = tuple( + item_key_obj[key + 1] + for key in range(len(params), len(param_types) + 1, 2) + ) + + except Exception as _: + if not ignore_decoding_errors: + raise + item_key = None + + try: + try: + item_bytes = bytes.fromhex(item[1][2:]) + except ValueError: + item_bytes = bytes.fromhex(item[1]) + + item_value = await self.decode_scale( + type_string=value_type, + scale_bytes=item_bytes, + 
return_scale_obj=True, + ) + except Exception as _: + if not ignore_decoding_errors: + raise + item_value = None + + result.append([item_key, item_value]) + + return QueryMapResult( + records=result, + page_size=page_size, + module=module, + storage_function=storage_function, + params=params, + block_hash=block_hash, + substrate=self, + last_key=last_key, + max_results=max_results, + ignore_decoding_errors=ignore_decoding_errors, + ) + + async def submit_extrinsic( + self, + extrinsic: GenericExtrinsic, + wait_for_inclusion: bool = False, + wait_for_finalization: bool = False, + ) -> "ExtrinsicReceipt": + """ + Submit an extrinsic to the connected node, with the possibility to wait until the extrinsic is included + in a block and/or the block is finalized. The receipt returned provided information about the block and + triggered events + + Parameters + ---------- + extrinsic: Extrinsic The extrinsic to be sent to the network + wait_for_inclusion: wait until extrinsic is included in a block (only works for websocket connections) + wait_for_finalization: wait until extrinsic is finalized (only works for websocket connections) + + Returns + ------- + ExtrinsicReceipt + + """ + + # Check requirements + if not isinstance(extrinsic, GenericExtrinsic): + raise TypeError("'extrinsic' must be of type Extrinsics") + + async def result_handler(message: dict, subscription_id) -> tuple[dict, bool]: + """ + Result handler function passed as an arg to _make_rpc_request as the result_handler + to handle the results of the extrinsic rpc call, which are multipart, and require + subscribing to the message + + :param message: message received from the rpc call + :param subscription_id: subscription id received from the initial rpc call for the subscription + + :returns: tuple containing the dict of the block info for the subscription, and bool for whether + the subscription is completed. 
+ """ + # Check if extrinsic is included and finalized + if "params" in message and isinstance(message["params"]["result"], dict): + # Convert result enum to lower for backwards compatibility + message_result = { + k.lower(): v for k, v in message["params"]["result"].items() + } + + if "finalized" in message_result and wait_for_finalization: + # Created as a task because we don't actually care about the result + self._forgettable_task = asyncio.create_task( + self.rpc_request("author_unwatchExtrinsic", [subscription_id]) + ) + return { + "block_hash": message_result["finalized"], + "extrinsic_hash": "0x{}".format(extrinsic.extrinsic_hash.hex()), + "finalized": True, + }, True + elif ( + "inblock" in message_result + and wait_for_inclusion + and not wait_for_finalization + ): + # Created as a task because we don't actually care about the result + self._forgettable_task = asyncio.create_task( + self.rpc_request("author_unwatchExtrinsic", [subscription_id]) + ) + return { + "block_hash": message_result["inblock"], + "extrinsic_hash": "0x{}".format(extrinsic.extrinsic_hash.hex()), + "finalized": False, + }, True + return message, False + + if wait_for_inclusion or wait_for_finalization: + responses = ( + await self._make_rpc_request( + [ + self.make_payload( + "rpc_request", + "author_submitAndWatchExtrinsic", + [str(extrinsic.data)], + ) + ], + result_handler=result_handler, + ) + )["rpc_request"] + response = next( + (r for r in responses if "block_hash" in r and "extrinsic_hash" in r), + None, + ) + + if not response: + raise SubstrateRequestException(responses) + + # Also, this will be a multipart response, so maybe should change to everything after the first response? + # The following code implies this will be a single response after the initial subscription id. 
+ result = ExtrinsicReceipt( + substrate=self, + extrinsic_hash=response["extrinsic_hash"], + block_hash=response["block_hash"], + finalized=response["finalized"], + ) + + else: + response = await self.rpc_request( + "author_submitExtrinsic", [str(extrinsic.data)] + ) + + if "result" not in response: + raise SubstrateRequestException(response.get("error")) + + result = ExtrinsicReceipt(substrate=self, extrinsic_hash=response["result"]) + + return result + + async def get_metadata_call_function( + self, + module_name: str, + call_function_name: str, + block_hash: Optional[str] = None, + ) -> Optional[list]: + """ + Retrieves a list of all call functions in metadata active for given block_hash (or chaintip if block_hash + is omitted) + + :param module_name: name of the module + :param call_function_name: name of the call function + :param block_hash: optional block hash + + :return: list of call functions + """ + runtime = await self.init_runtime(block_hash=block_hash) + + for pallet in runtime.metadata.pallets: + if pallet.name == module_name and pallet.calls: + for call in pallet.calls: + if call.name == call_function_name: + return call + return None + + async def get_block_number(self, block_hash: Optional[str]) -> int: + """Async version of `substrateinterface.base.get_block_number` method.""" + response = await self.rpc_request("chain_getHeader", [block_hash]) + + if "error" in response: + raise SubstrateRequestException(response["error"]["message"]) + + elif "result" in response: + if response["result"]: + return int(response["result"]["number"], 16) + + async def close(self): + """ + Closes the substrate connection, and the websocket connection. 
+ """ + try: + await self.ws.shutdown() + except AttributeError: + pass From f24481db724a7fe04c54d6a0d9ecc9f75a65da79 Mon Sep 17 00:00:00 2001 From: Roman Date: Mon, 28 Oct 2024 17:34:02 -0700 Subject: [PATCH 03/27] update `bittensor.utils.format_error_message` to be compatible with async_subtensor --- bittensor/core/extrinsics/commit_weights.py | 4 +- bittensor/core/extrinsics/prometheus.py | 2 +- bittensor/core/extrinsics/registration.py | 4 +- bittensor/core/extrinsics/serving.py | 4 +- bittensor/core/extrinsics/set_weights.py | 2 +- bittensor/core/extrinsics/transfer.py | 2 +- bittensor/core/settings.py | 2 + bittensor/utils/__init__.py | 103 +++++++++++++++++--- requirements/prod.txt | 1 + tests/unit_tests/extrinsics/test_init.py | 13 +-- 10 files changed, 108 insertions(+), 29 deletions(-) diff --git a/bittensor/core/extrinsics/commit_weights.py b/bittensor/core/extrinsics/commit_weights.py index 5e9f2e9e19..8cb27e94b8 100644 --- a/bittensor/core/extrinsics/commit_weights.py +++ b/bittensor/core/extrinsics/commit_weights.py @@ -139,7 +139,7 @@ def commit_weights_extrinsic( logging.info(success_message) return True, success_message else: - error_message = format_error_message(error_message) + error_message = format_error_message(error_message, substrate=subtensor.substrate) logging.error(f"Failed to commit weights: {error_message}") return False, error_message @@ -269,6 +269,6 @@ def reveal_weights_extrinsic( logging.info(success_message) return True, success_message else: - error_message = format_error_message(error_message) + error_message = format_error_message(error_message, substrate=subtensor.substrate) logging.error(f"Failed to reveal weights: {error_message}") return False, error_message diff --git a/bittensor/core/extrinsics/prometheus.py b/bittensor/core/extrinsics/prometheus.py index a6ab1cfb16..30d9455d2e 100644 --- a/bittensor/core/extrinsics/prometheus.py +++ b/bittensor/core/extrinsics/prometheus.py @@ -180,7 +180,7 @@ def prometheus_extrinsic( 
return True else: bt_console.print( - f":cross_mark: [red]Failed[/red]: {format_error_message(error_message)}" + f":cross_mark: [red]Failed[/red]: {format_error_message(error_message, substrate=subtensor.substrate)}" ) return False else: diff --git a/bittensor/core/extrinsics/registration.py b/bittensor/core/extrinsics/registration.py index 2528368094..cdd7a44024 100644 --- a/bittensor/core/extrinsics/registration.py +++ b/bittensor/core/extrinsics/registration.py @@ -95,7 +95,7 @@ def make_substrate_call_with_retry(): # process if registration successful, try again if pow is still valid response.process_events() if not response.is_success: - return False, format_error_message(response.error_message) + return False, format_error_message(response.error_message, substrate=subtensor.substrate) # Successful registration else: return True, None @@ -340,7 +340,7 @@ def make_substrate_call_with_retry(): # process if registration successful, try again if pow is still valid response.process_events() if not response.is_success: - return False, format_error_message(response.error_message) + return False, format_error_message(response.error_message, substrate=subtensor.substrate) # Successful registration else: return True, None diff --git a/bittensor/core/extrinsics/serving.py b/bittensor/core/extrinsics/serving.py index 490f9c268e..31349650fc 100644 --- a/bittensor/core/extrinsics/serving.py +++ b/bittensor/core/extrinsics/serving.py @@ -186,7 +186,7 @@ def serve_extrinsic( ) return True else: - logging.error(f"Failed: {format_error_message(error_message)}") + logging.error(f"Failed: {format_error_message(error_message, substrate=subtensor.substrate)}") return False else: return True @@ -299,7 +299,7 @@ def publish_metadata( if response.is_success: return True else: - raise MetadataError(format_error_message(response.error_message)) + raise MetadataError(format_error_message(response.error_message, substrate=self.substrate)) # Community uses this function directly diff --git 
a/bittensor/core/extrinsics/set_weights.py b/bittensor/core/extrinsics/set_weights.py index 7680061c5b..436868ecbc 100644 --- a/bittensor/core/extrinsics/set_weights.py +++ b/bittensor/core/extrinsics/set_weights.py @@ -184,7 +184,7 @@ def set_weights_extrinsic( ) return True, "Successfully set weights and Finalized." else: - error_message = format_error_message(error_message) + error_message = format_error_message(error_message, substrate=subtensor.substrate) logging.error(error_message) return False, error_message diff --git a/bittensor/core/extrinsics/transfer.py b/bittensor/core/extrinsics/transfer.py index 896fecbf96..4ab28ffb80 100644 --- a/bittensor/core/extrinsics/transfer.py +++ b/bittensor/core/extrinsics/transfer.py @@ -201,7 +201,7 @@ def transfer_extrinsic( ) else: bt_console.print( - f":cross_mark: [red]Failed[/red]: {format_error_message(error_message)}" + f":cross_mark: [red]Failed[/red]: {format_error_message(error_message, substrate=subtensor.substrate)}" ) if success: diff --git a/bittensor/core/settings.py b/bittensor/core/settings.py index 29948b612e..d9bca4255b 100644 --- a/bittensor/core/settings.py +++ b/bittensor/core/settings.py @@ -53,6 +53,8 @@ def turn_console_on(): turn_console_off() bt_console = __console__ +bt_err_console = Console(stderr=True) +bt_verbose_console = Console(quiet=True) HOME_DIR = Path.home() diff --git a/bittensor/utils/__init__.py b/bittensor/utils/__init__.py index 6239d89808..63378b0ec5 100644 --- a/bittensor/utils/__init__.py +++ b/bittensor/utils/__init__.py @@ -15,19 +15,21 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. 
+import ast import hashlib -from typing import Literal, Union, Optional, TYPE_CHECKING +from typing import Any, Literal, Union, Optional, TYPE_CHECKING import scalecodec from bittensor_wallet import Keypair from substrateinterface.utils import ss58 -from bittensor.core.settings import SS58_FORMAT +from bittensor.core.settings import SS58_FORMAT, bt_err_console from bittensor.utils.btlogging import logging from .registration import torch, use_torch from .version import version_checking, check_version, VersionCheckError if TYPE_CHECKING: + from bittensor.utils.async_substrate_interface import AsyncSubstrateInterface from substrateinterface import SubstrateInterface RAOPERTAO = 1e9 @@ -141,15 +143,14 @@ def get_hash(content, encoding="utf-8"): return sha3.hexdigest() -def format_error_message( - error_message: dict, substrate: "SubstrateInterface" = None -) -> str: +def format_error_message(error_message: Union[dict, Exception], substrate: Union["AsyncSubstrateInterface", "SubstrateInterface"]) -> str: """ Formats an error message from the Subtensor error information for use in extrinsics. Args: - error_message (dict): A dictionary containing the error information from Subtensor. - substrate (SubstrateInterface, optional): The substrate interface to use. + error_message: A dictionary containing the error information from Subtensor, or a SubstrateRequestException + containing dictionary literal args. + substrate: The initialised SubstrateInterface object to use. Returns: str: A formatted error message string. 
@@ -158,6 +159,27 @@ def format_error_message( err_type = "UnknownType" err_description = "Unknown Description" + if isinstance(error_message, Exception): + # generally gotten through SubstrateRequestException args + new_error_message = None + for arg in error_message.args: + try: + d = ast.literal_eval(arg) + if isinstance(d, dict): + if "error" in d: + new_error_message = d["error"] + break + elif all(x in d for x in ["code", "message", "data"]): + new_error_message = d + break + except ValueError: + pass + if new_error_message is None: + return_val = " ".join(error_message.args) + return f"Subtensor returned: {return_val}" + else: + error_message = new_error_message + if isinstance(error_message, dict): # subtensor error structure if ( @@ -166,14 +188,11 @@ def format_error_message( and error_message.get("data") ): err_name = "SubstrateRequestException" - err_type = error_message.get("message") - err_data = error_message.get("data") + err_type = error_message.get("message", "") + err_data = error_message.get("data", "") # subtensor custom error marker if err_data.startswith("Custom error:") and substrate: - if not substrate.metadata: - substrate.get_metadata() - if substrate.metadata: try: pallet = substrate.metadata.get_metadata_pallet( @@ -185,8 +204,10 @@ def format_error_message( err_type = error_dict.get("message", err_type) err_docs = error_dict.get("docs", []) err_description = err_docs[0] if err_docs else err_description - except Exception: - logging.error("Substrate pallets data unavailable.") + except (AttributeError, IndexError): + bt_err_console.print( + "Substrate pallets data unavailable. This is usually caused by an uninitialized substrate." + ) else: err_description = err_data @@ -277,3 +298,57 @@ def is_valid_bittensor_address_or_public_key(address: Union[str, bytes]) -> bool else: # Invalid address type return False + + +def decode_hex_identity_dict(info_dictionary) -> dict[str, Any]: + """ + Decodes hex-encoded strings in a dictionary. 
+ + This function traverses the given dictionary, identifies hex-encoded strings, and decodes them into readable strings. It handles nested dictionaries and lists within the dictionary. + + Args: + info_dictionary (dict): The dictionary containing hex-encoded strings to decode. + + Returns: + dict: The dictionary with decoded strings. + + Examples: + input_dict = { + ... "name": {"value": "0x6a6f686e"}, + ... "additional": [ + ... [{"data": "0x64617461"}] + ... ] + ... } + decode_hex_identity_dict(input_dict) + {'name': 'john', 'additional': [('data', 'data')]} + """ + + def get_decoded(data: str) -> str: + """Decodes a hex-encoded string.""" + try: + return bytes.fromhex(data[2:]).decode() + except UnicodeDecodeError: + print(f"Could not decode: {key}: {item}") + + for key, value in info_dictionary.items(): + if isinstance(value, dict): + item = list(value.values())[0] + if isinstance(item, str) and item.startswith("0x"): + try: + info_dictionary[key] = get_decoded(item) + except UnicodeDecodeError: + print(f"Could not decode: {key}: {item}") + else: + info_dictionary[key] = item + if key == "additional": + additional = [] + for item in value: + additional.append( + tuple( + get_decoded(data=next(iter(sub_item.values()))) + for sub_item in item + ) + ) + info_dictionary[key] = additional + + return info_dictionary diff --git a/requirements/prod.txt b/requirements/prod.txt index bed65e9d2e..bb8e243948 100644 --- a/requirements/prod.txt +++ b/requirements/prod.txt @@ -1,6 +1,7 @@ wheel setuptools~=70.0.0 aiohttp~=3.9 +async-property==0.2.2 backoff bittensor-cli bt-decode diff --git a/tests/unit_tests/extrinsics/test_init.py b/tests/unit_tests/extrinsics/test_init.py index 8a2480a9b9..8ff60d2de6 100644 --- a/tests/unit_tests/extrinsics/test_init.py +++ b/tests/unit_tests/extrinsics/test_init.py @@ -1,9 +1,10 @@ """Tests for bittensor/extrinsics/__ini__ module.""" from bittensor.utils import format_error_message +from tests.unit_tests.extrinsics.test_commit_weights 
import subtensor -def test_format_error_message_with_right_error_message(): +def test_format_error_message_with_right_error_message(mocker): """Verify that error message from extrinsic response parses correctly.""" # Prep fake_error_message = { @@ -13,7 +14,7 @@ def test_format_error_message_with_right_error_message(): } # Call - result = format_error_message(fake_error_message) + result = format_error_message(fake_error_message, substrate=mocker.MagicMock()) # Assertions @@ -22,13 +23,13 @@ def test_format_error_message_with_right_error_message(): assert "Some error description." in result -def test_format_error_message_with_empty_error_message(): +def test_format_error_message_with_empty_error_message(mocker): """Verify that empty error message from extrinsic response parses correctly.""" # Prep fake_error_message = {} # Call - result = format_error_message(fake_error_message) + result = format_error_message(fake_error_message, substrate=mocker.MagicMock()) # Assertions @@ -37,13 +38,13 @@ def test_format_error_message_with_empty_error_message(): assert "Unknown Description" in result -def test_format_error_message_with_wrong_type_error_message(): +def test_format_error_message_with_wrong_type_error_message(mocker): """Verify that error message from extrinsic response with wrong type parses correctly.""" # Prep fake_error_message = None # Call - result = format_error_message(fake_error_message) + result = format_error_message(fake_error_message, substrate=mocker.MagicMock()) # Assertions From 873e2395897066041668aff9715fce7bdc0bddc5 Mon Sep 17 00:00:00 2001 From: Roman Date: Mon, 28 Oct 2024 18:02:44 -0700 Subject: [PATCH 04/27] update `bittensor.core.chain_data` --- bittensor/core/chain_data/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bittensor/core/chain_data/__init__.py b/bittensor/core/chain_data/__init__.py index 9ad1e38881..68936a6b5f 100644 --- a/bittensor/core/chain_data/__init__.py +++ 
b/bittensor/core/chain_data/__init__.py @@ -17,6 +17,6 @@ from .stake_info import StakeInfo from .subnet_hyperparameters import SubnetHyperparameters from .subnet_info import SubnetInfo -from .utils import custom_rpc_type_registry +from .utils import custom_rpc_type_registry, decode_account_id, process_stake_data ProposalCallData = GenericCall From c4e7d207272c4b3e6367402342f4a06d130ccc01 Mon Sep 17 00:00:00 2001 From: Roman Date: Mon, 28 Oct 2024 18:03:16 -0700 Subject: [PATCH 05/27] update `bittensor.core.async_subtensor.py` from btcli --- bittensor/core/async_subtensor.py | 1086 +++++++++++++++++++++++++++++ 1 file changed, 1086 insertions(+) create mode 100644 bittensor/core/async_subtensor.py diff --git a/bittensor/core/async_subtensor.py b/bittensor/core/async_subtensor.py new file mode 100644 index 0000000000..4f5874fdba --- /dev/null +++ b/bittensor/core/async_subtensor.py @@ -0,0 +1,1086 @@ +import asyncio +from typing import Optional, Any, Union, TypedDict, Iterable + +import aiohttp +import scalecodec +import typer +from bittensor_cli.src import Constants, defaults, TYPE_REGISTRY +from bittensor_cli.src import DelegatesDetails +from bittensor_cli.src.bittensor.async_substrate_interface import ( + AsyncSubstrateInterface, + TimeoutException, +) +from bittensor_cli.src.bittensor.balances import Balance +from bittensor_cli.src.bittensor.utils import ( + ss58_to_vec_u8, + format_error_message, + console, + err_console, + decode_hex_identity_dict, + validate_chain_endpoint, +) +from bittensor_wallet import Wallet +from bittensor_wallet.utils import SS58_FORMAT +from scalecodec import GenericCall +from scalecodec.base import RuntimeConfiguration +from scalecodec.type_registry import load_type_registry_preset +from substrateinterface.exceptions import SubstrateRequestException + +from bittensor.core.chain_data import ( + DelegateInfo, + custom_rpc_type_registry, + StakeInfo, + NeuronInfoLite, + NeuronInfo, + SubnetHyperparameters, + decode_account_id, +) + + 
+class ParamWithTypes(TypedDict): + name: str # Name of the parameter. + type: str # ScaleType string of the parameter. + + +class ProposalVoteData: + index: int + threshold: int + ayes: list[str] + nays: list[str] + end: int + + def __init__(self, proposal_dict: dict) -> None: + self.index = proposal_dict["index"] + self.threshold = proposal_dict["threshold"] + self.ayes = self.decode_ss58_tuples(proposal_dict["ayes"]) + self.nays = self.decode_ss58_tuples(proposal_dict["nays"]) + self.end = proposal_dict["end"] + + @staticmethod + def decode_ss58_tuples(l: tuple): + """ + Decodes a tuple of ss58 addresses formatted as bytes tuples + """ + return [decode_account_id(l[x][0]) for x in range(len(l))] + + +class AsyncSubtensor: + """Thin layer for interacting with Substrate Interface. Mostly a collection of frequently-used calls.""" + + def __init__(self, network): + if network in Constants.network_map: + self.chain_endpoint = Constants.network_map[network] + self.network = network + if network == "local": + console.log( + "[yellow]Warning[/yellow]: Verify your local subtensor is running on port 9944." + ) + else: + is_valid, _ = validate_chain_endpoint(network) + if is_valid: + self.chain_endpoint = network + if network in Constants.network_map.values(): + self.network = next( + key + for key, value in Constants.network_map.items() + if value == network + ) + else: + self.network = "custom" + else: + console.log( + f"Network not specified or not valid. Using default chain endpoint: " + f"{Constants.network_map[defaults.subtensor.network]}.\n" + f"You can set this for commands with the `--network` flag, or by setting this" + f" in the config." 
+ ) + self.chain_endpoint = Constants.network_map[defaults.subtensor.network] + self.network = defaults.subtensor.network + + self.substrate = AsyncSubstrateInterface( + chain_endpoint=self.chain_endpoint, + ss58_format=SS58_FORMAT, + type_registry=TYPE_REGISTRY, + chain_name="Bittensor", + ) + + def __str__(self): + return f"Network: {self.network}, Chain: {self.chain_endpoint}" + + async def __aenter__(self): + with console.status( + f"[yellow]Connecting to Substrate:[/yellow][bold white] {self}..." + ): + try: + async with self.substrate: + return self + except TimeoutException: + err_console.print( + "\n[red]Error[/red]: Timeout occurred connecting to substrate. " + f"Verify your chain and network settings: {self}" + ) + raise typer.Exit(code=1) + + async def __aexit__(self, exc_type, exc_val, exc_tb): + await self.substrate.close() + + async def encode_params( + self, + call_definition: list["ParamWithTypes"], + params: Union[list[Any], dict[str, Any]], + ) -> str: + """Returns a hex encoded string of the params using their types.""" + param_data = scalecodec.ScaleBytes(b"") + + for i, param in enumerate(call_definition["params"]): # type: ignore + scale_obj = await self.substrate.create_scale_object(param["type"]) + if isinstance(params, list): + param_data += scale_obj.encode(params[i]) + else: + if param["name"] not in params: + raise ValueError(f"Missing param {param['name']} in params dict.") + + param_data += scale_obj.encode(params[param["name"]]) + + return param_data.to_hex() + + async def get_all_subnet_netuids( + self, block_hash: Optional[str] = None + ) -> list[int]: + """ + Retrieves the list of all subnet unique identifiers (netuids) currently present in the Bittensor network. + + :param block_hash: The hash of the block to retrieve the subnet unique identifiers from. + :return: A list of subnet netuids. + + This function provides a comprehensive view of the subnets within the Bittensor network, + offering insights into its diversity and scale. 
+ """ + result = await self.substrate.query_map( + module="SubtensorModule", + storage_function="NetworksAdded", + block_hash=block_hash, + reuse_block_hash=True, + ) + return ( + [] + if result is None or not hasattr(result, "records") + else [netuid async for netuid, exists in result if exists] + ) + + async def is_hotkey_delegate( + self, + hotkey_ss58: str, + block_hash: Optional[str] = None, + reuse_block: Optional[bool] = False, + ) -> bool: + """ + Determines whether a given hotkey (public key) is a delegate on the Bittensor network. This function + checks if the neuron associated with the hotkey is part of the network's delegation system. + + :param hotkey_ss58: The SS58 address of the neuron's hotkey. + :param block_hash: The hash of the blockchain block number for the query. + :param reuse_block: Whether to reuse the last-used block hash. + + :return: `True` if the hotkey is a delegate, `False` otherwise. + + Being a delegate is a significant status within the Bittensor network, indicating a neuron's + involvement in consensus and governance processes. + """ + delegates = await self.get_delegates( + block_hash=block_hash, reuse_block=reuse_block + ) + return hotkey_ss58 in [info.hotkey_ss58 for info in delegates] + + async def get_delegates( + self, block_hash: Optional[str] = None, reuse_block: Optional[bool] = False + ) -> list[DelegateInfo]: + """ + Fetches all delegates on the chain + + :param block_hash: hash of the blockchain block number for the query. + :param reuse_block: whether to reuse the last-used block hash. + + :return: List of DelegateInfo objects, or an empty list if there are no delegates. 
+ """ + hex_bytes_result = await self.query_runtime_api( + runtime_api="DelegateInfoRuntimeApi", + method="get_delegates", + params=[], + block_hash=block_hash, + ) + if hex_bytes_result is not None: + try: + bytes_result = bytes.fromhex(hex_bytes_result[2:]) + except ValueError: + bytes_result = bytes.fromhex(hex_bytes_result) + + return DelegateInfo.list_from_vec_u8(bytes_result) + else: + return [] + + async def get_stake_info_for_coldkey( + self, + coldkey_ss58: str, + block_hash: Optional[str] = None, + reuse_block: bool = False, + ) -> list[StakeInfo]: + """ + Retrieves stake information associated with a specific coldkey. This function provides details + about the stakes held by an account, including the staked amounts and associated delegates. + + :param coldkey_ss58: The ``SS58`` address of the account's coldkey. + :param block_hash: The hash of the blockchain block number for the query. + :param reuse_block: Whether to reuse the last-used block hash. + + :return: A list of StakeInfo objects detailing the stake allocations for the account. + + Stake information is vital for account holders to assess their investment and participation + in the network's delegation and consensus processes. + """ + encoded_coldkey = ss58_to_vec_u8(coldkey_ss58) + + hex_bytes_result = await self.query_runtime_api( + runtime_api="StakeInfoRuntimeApi", + method="get_stake_info_for_coldkey", + params=[encoded_coldkey], + block_hash=block_hash, + reuse_block=reuse_block, + ) + + if hex_bytes_result is None: + return [] + + try: + bytes_result = bytes.fromhex(hex_bytes_result[2:]) + except ValueError: + bytes_result = bytes.fromhex(hex_bytes_result) + + return StakeInfo.list_from_vec_u8(bytes_result) + + async def get_stake_for_coldkey_and_hotkey( + self, hotkey_ss58: str, coldkey_ss58: str, block_hash: Optional[str] + ) -> Balance: + """ + Retrieves stake information associated with a specific coldkey and hotkey. 
+ :param hotkey_ss58: the hotkey SS58 address to query + :param coldkey_ss58: the coldkey SS58 address to query + :param block_hash: the hash of the blockchain block number for the query. + :return: Stake Balance for the given coldkey and hotkey + """ + _result = await self.substrate.query( + module="SubtensorModule", + storage_function="Stake", + params=[hotkey_ss58, coldkey_ss58], + block_hash=block_hash, + ) + return Balance.from_rao(_result or 0) + + async def query_runtime_api( + self, + runtime_api: str, + method: str, + params: Optional[Union[list[list[int]], dict[str, int]]], + block_hash: Optional[str] = None, + reuse_block: Optional[bool] = False, + ) -> Optional[str]: + """ + Queries the runtime API of the Bittensor blockchain, providing a way to interact with the underlying + runtime and retrieve data encoded in Scale Bytes format. This function is essential for advanced users + who need to interact with specific runtime methods and decode complex data types. + + :param runtime_api: The name of the runtime API to query. + :param method: The specific method within the runtime API to call. + :param params: The parameters to pass to the method call. + :param block_hash: The hash of the blockchain block number at which to perform the query. + :param reuse_block: Whether to reuse the last-used block hash. + + :return: The Scale Bytes encoded result from the runtime API call, or ``None`` if the call fails. + + This function enables access to the deeper layers of the Bittensor blockchain, allowing for detailed + and specific interactions with the network's runtime environment. 
+ """ + call_definition = TYPE_REGISTRY["runtime_api"][runtime_api]["methods"][method] + + data = ( + "0x" + if params is None + else await self.encode_params( + call_definition=call_definition, params=params + ) + ) + api_method = f"{runtime_api}_{method}" + + json_result = await self.substrate.rpc_request( + method="state_call", + params=[api_method, data, block_hash] if block_hash else [api_method, data], + ) + + if json_result is None: + return None + + return_type = call_definition["type"] + + as_scale_bytes = scalecodec.ScaleBytes(json_result["result"]) # type: ignore + + rpc_runtime_config = RuntimeConfiguration() + rpc_runtime_config.update_type_registry(load_type_registry_preset("legacy")) + rpc_runtime_config.update_type_registry(custom_rpc_type_registry) + + obj = rpc_runtime_config.create_scale_object(return_type, as_scale_bytes) + if obj.data.to_hex() == "0x0400": # RPC returned None result + return None + + return obj.decode() + + async def get_balance( + self, + *addresses: str, + block_hash: Optional[str] = None, + reuse_block: bool = False, + ) -> dict[str, Balance]: + """ + Retrieves the balance for given coldkey(s) + :param addresses: coldkey addresses(s) + :param block_hash: the block hash, optional + :param reuse_block: Whether to reuse the last-used block hash when retrieving info. + :return: dict of {address: Balance objects} + """ + calls = [ + ( + await self.substrate.create_storage_key( + "System", "Account", [address], block_hash=block_hash + ) + ) + for address in addresses + ] + batch_call = await self.substrate.query_multi(calls, block_hash=block_hash) + results = {} + for item in batch_call: + value = item[1] or {"data": {"free": 0}} + results.update({item[0].params[0]: Balance(value["data"]["free"])}) + return results + + async def get_total_stake_for_coldkey( + self, + *ss58_addresses, + block_hash: Optional[str] = None, + reuse_block: bool = False, + ) -> dict[str, Balance]: + """ + Returns the total stake held on a coldkey. 
+ + :param ss58_addresses: The SS58 address(es) of the coldkey(s) + :param block_hash: The hash of the block number to retrieve the stake from. + :param reuse_block: Whether to reuse the last-used block hash when retrieving info. + + :return: {address: Balance objects} + """ + calls = [ + ( + await self.substrate.create_storage_key( + "SubtensorModule", + "TotalColdkeyStake", + [address], + block_hash=block_hash, + ) + ) + for address in ss58_addresses + ] + batch_call = await self.substrate.query_multi(calls, block_hash=block_hash) + results = {} + for item in batch_call: + results.update({item[0].params[0]: Balance.from_rao(item[1] or 0)}) + return results + + async def get_total_stake_for_hotkey( + self, + *ss58_addresses, + block_hash: Optional[str] = None, + reuse_block: bool = False, + ) -> dict[str, Balance]: + """ + Returns the total stake held on a hotkey. + + :param ss58_addresses: The SS58 address(es) of the hotkey(s) + :param block_hash: The hash of the block number to retrieve the stake from. + :param reuse_block: Whether to reuse the last-used block hash when retrieving info. + + :return: {address: Balance objects} + """ + results = await self.substrate.query_multiple( + params=[s for s in ss58_addresses], + module="SubtensorModule", + storage_function="TotalHotkeyStake", + block_hash=block_hash, + reuse_block_hash=reuse_block, + ) + return {k: Balance.from_rao(r or 0) for (k, r) in results.items()} + + async def get_netuids_for_hotkey( + self, + hotkey_ss58: str, + block_hash: Optional[str] = None, + reuse_block: bool = False, + ) -> list[int]: + """ + Retrieves a list of subnet UIDs (netuids) for which a given hotkey is a member. This function + identifies the specific subnets within the Bittensor network where the neuron associated with + the hotkey is active. + + :param hotkey_ss58: The ``SS58`` address of the neuron's hotkey. + :param block_hash: The hash of the blockchain block number at which to perform the query. 
+ :param reuse_block: Whether to reuse the last-used block hash when retrieving info. + + :return: A list of netuids where the neuron is a member. + """ + + result = await self.substrate.query_map( + module="SubtensorModule", + storage_function="IsNetworkMember", + params=[hotkey_ss58], + block_hash=block_hash, + reuse_block_hash=reuse_block, + ) + return ( + [record[0] async for record in result if record[1]] + if result and hasattr(result, "records") + else [] + ) + + async def subnet_exists( + self, netuid: int, block_hash: Optional[str] = None, reuse_block: bool = False + ) -> bool: + """ + Checks if a subnet with the specified unique identifier (netuid) exists within the Bittensor network. + + :param netuid: The unique identifier of the subnet. + :param block_hash: The hash of the blockchain block number at which to check the subnet existence. + :param reuse_block: Whether to reuse the last-used block hash. + + :return: `True` if the subnet exists, `False` otherwise. + + This function is critical for verifying the presence of specific subnets in the network, + enabling a deeper understanding of the network's structure and composition. + """ + result = await self.substrate.query( + module="SubtensorModule", + storage_function="NetworksAdded", + params=[netuid], + block_hash=block_hash, + reuse_block_hash=reuse_block, + ) + return result + + async def get_hyperparameter( + self, + param_name: str, + netuid: int, + block_hash: Optional[str] = None, + reuse_block: bool = False, + ) -> Optional[Any]: + """ + Retrieves a specified hyperparameter for a specific subnet. + + :param param_name: The name of the hyperparameter to retrieve. + :param netuid: The unique identifier of the subnet. + :param block_hash: The hash of blockchain block number for the query. + :param reuse_block: Whether to reuse the last-used block hash. 
+ + :return: The value of the specified hyperparameter if the subnet exists, or None + """ + if not await self.subnet_exists(netuid, block_hash): + print("subnet does not exist") + return None + + result = await self.substrate.query( + module="SubtensorModule", + storage_function=param_name, + params=[netuid], + block_hash=block_hash, + reuse_block_hash=reuse_block, + ) + + if result is None: + return None + + return result + + async def filter_netuids_by_registered_hotkeys( + self, + all_netuids: Iterable[int], + filter_for_netuids: Iterable[int], + all_hotkeys: Iterable[Wallet], + block_hash: Optional[str] = None, + reuse_block: bool = False, + ) -> list[int]: + """ + Filters a given list of all netuids for certain specified netuids and hotkeys + + :param all_netuids: A list of netuids to filter. + :param filter_for_netuids: A subset of all_netuids to filter from the main list + :param all_hotkeys: Hotkeys to filter from the main list + :param block_hash: hash of the blockchain block number at which to perform the query. + :param reuse_block: whether to reuse the last-used blockchain hash when retrieving info. + + :return: the filtered list of netuids. 
+ """ + netuids_with_registered_hotkeys = [ + item + for sublist in await asyncio.gather( + *[ + self.get_netuids_for_hotkey( + wallet.hotkey.ss58_address, + reuse_block=reuse_block, + block_hash=block_hash, + ) + for wallet in all_hotkeys + ] + ) + for item in sublist + ] + + if not filter_for_netuids: + all_netuids = netuids_with_registered_hotkeys + + else: + filtered_netuids = [ + netuid for netuid in all_netuids if netuid in filter_for_netuids + ] + + registered_hotkeys_filtered = [ + netuid + for netuid in netuids_with_registered_hotkeys + if netuid in filter_for_netuids + ] + + # Combine both filtered lists + all_netuids = filtered_netuids + registered_hotkeys_filtered + + return list(set(all_netuids)) + + async def get_existential_deposit( + self, block_hash: Optional[str] = None, reuse_block: bool = False + ) -> Balance: + """ + Retrieves the existential deposit amount for the Bittensor blockchain. The existential deposit + is the minimum amount of TAO required for an account to exist on the blockchain. Accounts with + balances below this threshold can be reaped to conserve network resources. + + :param block_hash: Block hash at which to query the deposit amount. If `None`, the current block is used. + :param reuse_block: Whether to reuse the last-used blockchain block hash. + + :return: The existential deposit amount + + The existential deposit is a fundamental economic parameter in the Bittensor network, ensuring + efficient use of storage and preventing the proliferation of dust accounts. 
+ """ + result = await self.substrate.get_constant( + module_name="Balances", + constant_name="ExistentialDeposit", + block_hash=block_hash, + reuse_block_hash=reuse_block, + ) + + if result is None: + raise Exception("Unable to retrieve existential deposit amount.") + + return Balance.from_rao(result) + + async def neurons( + self, netuid: int, block_hash: Optional[str] = None + ) -> list[NeuronInfo]: + """ + Retrieves a list of all neurons within a specified subnet of the Bittensor network. This function + provides a snapshot of the subnet's neuron population, including each neuron's attributes and network + interactions. + + :param netuid: The unique identifier of the subnet. + :param block_hash: The hash of the blockchain block number for the query. + + :return: A list of NeuronInfo objects detailing each neuron's characteristics in the subnet. + + Understanding the distribution and status of neurons within a subnet is key to comprehending the + network's decentralized structure and the dynamics of its consensus and governance processes. + """ + neurons_lite, weights, bonds = await asyncio.gather( + self.neurons_lite(netuid=netuid, block_hash=block_hash), + self.weights(netuid=netuid, block_hash=block_hash), + self.bonds(netuid=netuid, block_hash=block_hash), + ) + + weights_as_dict = {uid: w for uid, w in weights} + bonds_as_dict = {uid: b for uid, b in bonds} + + neurons = [ + NeuronInfo.from_weights_bonds_and_neuron_lite( + neuron_lite, weights_as_dict, bonds_as_dict + ) + for neuron_lite in neurons_lite + ] + + return neurons + + async def neurons_lite( + self, netuid: int, block_hash: Optional[str] = None, reuse_block: bool = False + ) -> list[NeuronInfoLite]: + """ + Retrieves a list of neurons in a 'lite' format from a specific subnet of the Bittensor network. + This function provides a streamlined view of the neurons, focusing on key attributes such as stake + and network participation. + + :param netuid: The unique identifier of the subnet. 
+ :param block_hash: The hash of the blockchain block number for the query. + :param reuse_block: Whether to reuse the last-used blockchain block hash. + + :return: A list of simplified neuron information for the subnet. + + This function offers a quick overview of the neuron population within a subnet, facilitating + efficient analysis of the network's decentralized structure and neuron dynamics. + """ + hex_bytes_result = await self.query_runtime_api( + runtime_api="NeuronInfoRuntimeApi", + method="get_neurons_lite", + params=[ + netuid + ], # TODO check to see if this can accept more than one at a time + block_hash=block_hash, + reuse_block=reuse_block, + ) + + if hex_bytes_result is None: + return [] + + try: + bytes_result = bytes.fromhex(hex_bytes_result[2:]) + except ValueError: + bytes_result = bytes.fromhex(hex_bytes_result) + + return NeuronInfoLite.list_from_vec_u8(bytes_result) + + async def neuron_for_uid( + self, uid: Optional[int], netuid: int, block_hash: Optional[str] = None + ) -> NeuronInfo: + """ + Retrieves detailed information about a specific neuron identified by its unique identifier (UID) + within a specified subnet (netuid) of the Bittensor network. This function provides a comprehensive + view of a neuron's attributes, including its stake, rank, and operational status. + + + :param uid: The unique identifier of the neuron. + :param netuid: The unique identifier of the subnet. + :param block_hash: The hash of the blockchain block number for the query. + + :return: Detailed information about the neuron if found, a null neuron otherwise + + This function is crucial for analyzing individual neurons' contributions and status within a specific + subnet, offering insights into their roles in the network's consensus and validation mechanisms. 
+ """ + if uid is None: + return NeuronInfo.get_null_neuron() + + params = [netuid, uid, block_hash] if block_hash else [netuid, uid] + json_body = await self.substrate.rpc_request( + method="neuronInfo_getNeuron", + params=params, # custom rpc method + ) + + if not (result := json_body.get("result", None)): + return NeuronInfo.get_null_neuron() + + bytes_result = bytes(result) + return NeuronInfo.from_vec_u8(bytes_result) + + async def get_delegated( + self, + coldkey_ss58: str, + block_hash: Optional[str] = None, + reuse_block: bool = False, + ) -> list[tuple[DelegateInfo, Balance]]: + """ + Retrieves a list of delegates and their associated stakes for a given coldkey. This function + identifies the delegates that a specific account has staked tokens on. + + :param coldkey_ss58: The `SS58` address of the account's coldkey. + :param block_hash: The hash of the blockchain block number for the query. + :param reuse_block: Whether to reuse the last-used blockchain block hash. + + :return: A list of tuples, each containing a delegate's information and staked amount. + + This function is important for account holders to understand their stake allocations and their + involvement in the network's delegation and consensus mechanisms. + """ + + block_hash = ( + block_hash + if block_hash + else (self.substrate.last_block_hash if reuse_block else None) + ) + encoded_coldkey = ss58_to_vec_u8(coldkey_ss58) + json_body = await self.substrate.rpc_request( + method="delegateInfo_getDelegated", + params=([block_hash, encoded_coldkey] if block_hash else [encoded_coldkey]), + ) + + if not (result := json_body.get("result")): + return [] + + return DelegateInfo.delegated_list_from_vec_u8(bytes(result)) + + async def query_identity( + self, + key: str, + block_hash: Optional[str] = None, + reuse_block: bool = False, + ) -> dict: + """ + Queries the identity of a neuron on the Bittensor blockchain using the given key. 
This function retrieves + detailed identity information about a specific neuron, which is a crucial aspect of the network's decentralized + identity and governance system. + + Note: + See the `Bittensor CLI documentation `_ for supported identity + parameters. + + :param key: The key used to query the neuron's identity, typically the neuron's SS58 address. + :param block_hash: The hash of the blockchain block number at which to perform the query. + :param reuse_block: Whether to reuse the last-used blockchain block hash. + + :return: An object containing the identity information of the neuron if found, ``None`` otherwise. + + The identity information can include various attributes such as the neuron's stake, rank, and other + network-specific details, providing insights into the neuron's role and status within the Bittensor network. + """ + + def decode_hex_identity_dict(info_dictionary): + for k, v in info_dictionary.items(): + if isinstance(v, dict): + item = next(iter(v.values())) + else: + item = v + if isinstance(item, tuple) and item: + if len(item) > 1: + try: + info_dictionary[k] = ( + bytes(item).hex(sep=" ", bytes_per_sep=2).upper() + ) + except UnicodeDecodeError: + print(f"Could not decode: {k}: {item}") + else: + try: + info_dictionary[k] = bytes(item[0]).decode("utf-8") + except UnicodeDecodeError: + print(f"Could not decode: {k}: {item}") + else: + info_dictionary[k] = item + + return info_dictionary + + identity_info = await self.substrate.query( + module="Registry", + storage_function="IdentityOf", + params=[key], + block_hash=block_hash, + reuse_block_hash=reuse_block, + ) + try: + return decode_hex_identity_dict(identity_info["info"]) + except TypeError: + return {} + + async def weights( + self, netuid: int, block_hash: Optional[str] = None + ) -> list[tuple[int, list[tuple[int, int]]]]: + """ + Retrieves the weight distribution set by neurons within a specific subnet of the Bittensor network. 
+ This function maps each neuron's UID to the weights it assigns to other neurons, reflecting the + network's trust and value assignment mechanisms. + + Args: + :param netuid: The network UID of the subnet to query. + :param block_hash: The hash of the blockchain block for the query. + + :return: A list of tuples mapping each neuron's UID to its assigned weights. + + The weight distribution is a key factor in the network's consensus algorithm and the ranking of neurons, + influencing their influence and reward allocation within the subnet. + """ + # TODO look into seeing if we can speed this up with storage query + w_map_encoded = await self.substrate.query_map( + module="SubtensorModule", + storage_function="Weights", + params=[netuid], + block_hash=block_hash, + ) + w_map = [(uid, w or []) async for uid, w in w_map_encoded] + + return w_map + + async def bonds( + self, netuid: int, block_hash: Optional[str] = None + ) -> list[tuple[int, list[tuple[int, int]]]]: + """ + Retrieves the bond distribution set by neurons within a specific subnet of the Bittensor network. + Bonds represent the investments or commitments made by neurons in one another, indicating a level + of trust and perceived value. This bonding mechanism is integral to the network's market-based approach + to measuring and rewarding machine intelligence. + + :param netuid: The network UID of the subnet to query. + :param block_hash: The hash of the blockchain block number for the query. + + :return: list of tuples mapping each neuron's UID to its bonds with other neurons. + + Understanding bond distributions is crucial for analyzing the trust dynamics and market behavior + within the subnet. It reflects how neurons recognize and invest in each other's intelligence and + contributions, supporting diverse and niche systems within the Bittensor ecosystem. 
+ """ + b_map_encoded = await self.substrate.query_map( + module="SubtensorModule", + storage_function="Bonds", + params=[netuid], + block_hash=block_hash, + ) + b_map = [(uid, b) async for uid, b in b_map_encoded] + + return b_map + + async def does_hotkey_exist( + self, + hotkey_ss58: str, + block_hash: Optional[str] = None, + reuse_block: bool = False, + ) -> bool: + """ + Returns true if the hotkey is known by the chain and there are accounts. + + :param hotkey_ss58: The SS58 address of the hotkey. + :param block_hash: The hash of the block number to check the hotkey against. + :param reuse_block: Whether to reuse the last-used blockchain hash. + + :return: `True` if the hotkey is known by the chain and there are accounts, `False` otherwise. + """ + _result = await self.substrate.query( + module="SubtensorModule", + storage_function="Owner", + params=[hotkey_ss58], + block_hash=block_hash, + reuse_block_hash=reuse_block, + ) + result = decode_account_id(_result[0]) + return_val = ( + False + if result is None + else result != "5C4hrfjw9DjXZTzV3MwzrrAr9P1MJhSrvWGWqi1eSuyUpnhM" + ) + return return_val + + async def get_hotkey_owner( + self, hotkey_ss58: str, block_hash: str + ) -> Optional[str]: + hk_owner_query = await self.substrate.query( + module="SubtensorModule", + storage_function="Owner", + params=[hotkey_ss58], + block_hash=block_hash, + ) + val = decode_account_id(hk_owner_query[0]) + if val: + exists = await self.does_hotkey_exist(hotkey_ss58, block_hash=block_hash) + else: + exists = False + hotkey_owner = val if exists else None + return hotkey_owner + + async def sign_and_send_extrinsic( + self, + call: GenericCall, + wallet: Wallet, + wait_for_inclusion: bool = True, + wait_for_finalization: bool = False, + ) -> tuple[bool, str]: + """ + Helper method to sign and submit an extrinsic call to chain. 
+ + :param call: a prepared Call object + :param wallet: the wallet whose coldkey will be used to sign the extrinsic + :param wait_for_inclusion: whether to wait until the extrinsic call is included on the chain + :param wait_for_finalization: whether to wait until the extrinsic call is finalized on the chain + + :return: (success, error message) + """ + extrinsic = await self.substrate.create_signed_extrinsic( + call=call, keypair=wallet.coldkey + ) # sign with coldkey + try: + response = await self.substrate.submit_extrinsic( + extrinsic, + wait_for_inclusion=wait_for_inclusion, + wait_for_finalization=wait_for_finalization, + ) + # We only wait here if we expect finalization. + if not wait_for_finalization and not wait_for_inclusion: + return True, "" + await response.process_events() + if await response.is_success: + return True, "" + else: + return False, format_error_message( + await response.error_message, substrate=self.substrate + ) + except SubstrateRequestException as e: + return False, format_error_message(e, substrate=self.substrate) + + async def get_children(self, hotkey, netuid) -> tuple[bool, list, str]: + """ + This method retrieves the children of a given hotkey and netuid. It queries the SubtensorModule's ChildKeys + storage function to get the children and formats them before returning as a tuple. + + :param hotkey: The hotkey value. + :param netuid: The netuid value. 
+ + :return: A tuple containing a boolean indicating success or failure, a list of formatted children, and an error + message (if applicable) + """ + try: + children = await self.substrate.query( + module="SubtensorModule", + storage_function="ChildKeys", + params=[hotkey, netuid], + ) + if children: + formatted_children = [] + for proportion, child in children: + # Convert U64 to int + formatted_child = decode_account_id(child[0]) + int_proportion = int(proportion) + formatted_children.append((int_proportion, formatted_child)) + return True, formatted_children, "" + else: + return True, [], "" + except SubstrateRequestException as e: + return False, [], format_error_message(e, self.substrate) + + async def get_subnet_hyperparameters( + self, netuid: int, block_hash: Optional[str] = None + ) -> Optional[Union[list, SubnetHyperparameters]]: + """ + Retrieves the hyperparameters for a specific subnet within the Bittensor network. These hyperparameters + define the operational settings and rules governing the subnet's behavior. + + :param netuid: The network UID of the subnet to query. + :param block_hash: The hash of the blockchain block number for the query. + + :return: The subnet's hyperparameters, or `None` if not available. + + Understanding the hyperparameters is crucial for comprehending how subnets are configured and + managed, and how they interact with the network's consensus and incentive mechanisms. 
+ """ + hex_bytes_result = await self.query_runtime_api( + runtime_api="SubnetInfoRuntimeApi", + method="get_subnet_hyperparams", + params=[netuid], + block_hash=block_hash, + ) + + if hex_bytes_result is None: + return [] + + if hex_bytes_result.startswith("0x"): + bytes_result = bytes.fromhex(hex_bytes_result[2:]) + else: + bytes_result = bytes.fromhex(hex_bytes_result) + + return SubnetHyperparameters.from_vec_u8(bytes_result) + + async def get_vote_data( + self, + proposal_hash: str, + block_hash: Optional[str] = None, + reuse_block: bool = False, + ) -> Optional["ProposalVoteData"]: + """ + Retrieves the voting data for a specific proposal on the Bittensor blockchain. This data includes + information about how senate members have voted on the proposal. + + :param proposal_hash: The hash of the proposal for which voting data is requested. + :param block_hash: The hash of the blockchain block number to query the voting data. + :param reuse_block: Whether to reuse the last-used blockchain block hash. + + :return: An object containing the proposal's voting data, or `None` if not found. + + This function is important for tracking and understanding the decision-making processes within + the Bittensor network, particularly how proposals are received and acted upon by the governing body. + """ + vote_data = await self.substrate.query( + module="Triumvirate", + storage_function="Voting", + params=[proposal_hash], + block_hash=block_hash, + reuse_block_hash=reuse_block, + ) + if vote_data is None: + return None + else: + return ProposalVoteData(vote_data) + + async def get_delegate_identities( + self, block_hash: Optional[str] = None + ) -> dict[str, DelegatesDetails]: + """ + Fetches delegates identities from the chain and GitHub. Preference is given to chain data, and missing info + is filled-in by the info from GitHub. At some point, we want to totally move away from fetching this info + from GitHub, but chain data is still limited in that regard. 
+ + Args: + block_hash: the hash of the blockchain block for the query + + Returns: {ss58: DelegatesDetails, ...} + + """ + timeout = aiohttp.ClientTimeout(10.0) + async with aiohttp.ClientSession(timeout=timeout) as session: + identities_info, response = await asyncio.gather( + self.substrate.query_map( + module="Registry", + storage_function="IdentityOf", + block_hash=block_hash, + ), + session.get(Constants.delegates_detail_url), + ) + + all_delegates_details = { + decode_account_id(ss58_address[0]): DelegatesDetails.from_chain_data( + decode_hex_identity_dict(identity["info"]) + ) + for ss58_address, identity in identities_info + } + + if response.ok: + all_delegates: dict[str, Any] = await response.json(content_type=None) + + for delegate_hotkey, delegate_details in all_delegates.items(): + delegate_info = all_delegates_details.setdefault( + delegate_hotkey, + DelegatesDetails( + display=delegate_details.get("name", ""), + web=delegate_details.get("url", ""), + additional=delegate_details.get("description", ""), + pgp_fingerprint=delegate_details.get("fingerprint", ""), + ), + ) + delegate_info.display = ( + delegate_info.display or delegate_details.get("name", "") + ) + delegate_info.web = delegate_info.web or delegate_details.get( + "url", "" + ) + delegate_info.additional = ( + delegate_info.additional + or delegate_details.get("description", "") + ) + delegate_info.pgp_fingerprint = ( + delegate_info.pgp_fingerprint + or delegate_details.get("fingerprint", "") + ) + + return all_delegates_details From 3be23e566a99171078e93ebbb176cba0399e701e Mon Sep 17 00:00:00 2001 From: Roman Date: Mon, 28 Oct 2024 18:13:28 -0700 Subject: [PATCH 06/27] add DelegatesDetails for async_subtensor --- bittensor/utils/delegates_details.py | 43 ++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 bittensor/utils/delegates_details.py diff --git a/bittensor/utils/delegates_details.py b/bittensor/utils/delegates_details.py new file mode 100644 index 
from dataclasses import dataclass
from typing import Any, Optional


@dataclass
class DelegatesDetails:
    """On-chain identity details of a delegate (chain registry schema)."""

    display: str
    additional: list[tuple[str, str]]
    web: str
    legal: Optional[str] = None
    riot: Optional[str] = None
    email: Optional[str] = None
    pgp_fingerprint: Optional[str] = None
    image: Optional[str] = None
    twitter: Optional[str] = None

    @classmethod
    def from_chain_data(cls, data: dict[str, Any]) -> "DelegatesDetails":
        """Build a ``DelegatesDetails`` from raw chain registry data.

        Fields that are absent or fail to decode fall back to their defaults.

        :param data: raw registry mapping; values are dicts/tuples wrapping byte tuples,
            or plain ints.
        :return: populated ``DelegatesDetails`` instance.
        """

        def decode(key: str, default: Optional[Any] = ""):
            """Decode one registry field, returning ``default`` on any failure."""
            try:
                value = data.get(key)
                if isinstance(value, dict):
                    # BUG FIX: dict.values() is a view, not an iterator — the original
                    # `next(value.values())` raised TypeError (silently swallowed below),
                    # so dict-shaped fields always decoded to the default.
                    inner = next(iter(value.values()))
                    return bytes(inner[0]).decode("utf-8")
                elif isinstance(value, int):
                    return value
                elif isinstance(value, tuple):
                    return bytes(value[0]).decode("utf-8")
                else:
                    return default
            except (UnicodeDecodeError, TypeError):
                return default

        return cls(
            display=decode("display"),
            additional=decode("additional", []),
            web=decode("web"),
            legal=decode("legal"),
            riot=decode("riot"),
            email=decode("email"),
            pgp_fingerprint=decode("pgp_fingerprint", None),
            image=decode("image"),
            twitter=decode("twitter"),
        )
from urllib.parse import urlparse


def validate_chain_endpoint(endpoint_url: str) -> tuple[bool, str]:
    """Check whether ``endpoint_url`` is a usable WebSocket chain endpoint.

    :param endpoint_url: candidate endpoint URL.
    :return: ``(True, "")`` when the URL uses the ``ws``/``wss`` scheme and has a
        network location, otherwise ``(False, <error message>)``.
    """
    parts = urlparse(endpoint_url)
    if parts.scheme not in ("ws", "wss"):
        message = (
            f"Invalid URL or network name provided: [bright_cyan]({endpoint_url})[/bright_cyan].\n"
            "Allowed network names are [bright_cyan]finney, test, local[/bright_cyan]. "
            "Valid chain endpoints should use the scheme [bright_cyan]`ws` or `wss`[/bright_cyan].\n"
        )
        return False, message
    if not parts.netloc:
        return False, "Invalid URL passed as the endpoint"
    return True, ""
# Bittensor networks name
NETWORKS = ["finney", "test", "archive", "local"]

DEFAULT_ENDPOINT = "wss://entrypoint-finney.opentensor.ai:443"
DEFAULT_NETWORK = NETWORKS[0]

# Bittensor endpoints (Needs to use wss://)
FINNEY_ENTRYPOINT = "wss://entrypoint-finney.opentensor.ai:443"
ARCHIVE_ENTRYPOINT = "wss://archive.chain.opentensor.ai:443/"
LOCAL_ENTRYPOINT = os.getenv("BT_SUBTENSOR_CHAIN_ENDPOINT") or "ws://127.0.0.1:9946"

# Maps each public network name to its default websocket endpoint.
# NOTE(review): FINNEY_TEST_ENTRYPOINT is defined elsewhere in settings.py (not in view).
NETWORK_MAP = {
    NETWORKS[0]: FINNEY_ENTRYPOINT,
    NETWORKS[1]: FINNEY_TEST_ENTRYPOINT,
    NETWORKS[2]: ARCHIVE_ENTRYPOINT,
    NETWORKS[3]: LOCAL_ENTRYPOINT,
}
RAO_SYMBOL: str = chr(0x03C1) From b30352611b5fae7d6cb92a470ea0369f78b569fa Mon Sep 17 00:00:00 2001 From: Roman Date: Mon, 28 Oct 2024 18:49:34 -0700 Subject: [PATCH 10/27] fix format errors --- bittensor/core/extrinsics/registration.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bittensor/core/extrinsics/registration.py b/bittensor/core/extrinsics/registration.py index cdd7a44024..8c34891aaf 100644 --- a/bittensor/core/extrinsics/registration.py +++ b/bittensor/core/extrinsics/registration.py @@ -95,7 +95,7 @@ def make_substrate_call_with_retry(): # process if registration successful, try again if pow is still valid response.process_events() if not response.is_success: - return False, format_error_message(response.error_message, substrate=subtensor.substrate) + return False, format_error_message(response.error_message, substrate=subtensor.substrate) # Successful registration else: return True, None @@ -340,7 +340,7 @@ def make_substrate_call_with_retry(): # process if registration successful, try again if pow is still valid response.process_events() if not response.is_success: - return False, format_error_message(response.error_message, substrate=subtensor.substrate) + return False, format_error_message(response.error_message, substrate=subtensor.substrate) # Successful registration else: return True, None From 809cbf1442158d79b8feda6cf2c55b039f4b3609 Mon Sep 17 00:00:00 2001 From: Roman Date: Mon, 28 Oct 2024 18:49:48 -0700 Subject: [PATCH 11/27] fix annotations --- bittensor/utils/async_substrate_interface.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bittensor/utils/async_substrate_interface.py b/bittensor/utils/async_substrate_interface.py index b8e74c7310..ec5e267e63 100644 --- a/bittensor/utils/async_substrate_interface.py +++ b/bittensor/utils/async_substrate_interface.py @@ -2286,7 +2286,7 @@ async def get_constant( constant_name: str, block_hash: Optional[str] = None, reuse_block_hash: bool = False, - ) ->
Optional["ScaleType"]: + ) -> "ScaleType": """ Returns the decoded `ScaleType` object of the constant for given module name, call function name and block_hash (or chaintip if block_hash is omitted) @@ -2365,7 +2365,7 @@ async def query( raw_storage_key: Optional[bytes] = None, subscription_handler=None, reuse_block_hash: bool = False, - ) -> "ScaleType": + ) -> Union["ScaleType"]: """ Queries subtensor. This should only be used when making a single request. For multiple requests, you should use ``self.query_multiple`` From c8a45598af6fd6c28c7179f5e95b012d80909158 Mon Sep 17 00:00:00 2001 From: Roman Date: Mon, 28 Oct 2024 19:14:50 -0700 Subject: [PATCH 12/27] add async_subtensor.py with adaptation to SDK (all methods checked and work well) --- bittensor/core/async_subtensor.py | 59 +++++++++++++++---------------- 1 file changed, 28 insertions(+), 31 deletions(-) diff --git a/bittensor/core/async_subtensor.py b/bittensor/core/async_subtensor.py index 4f5874fdba..1646d542ab 100644 --- a/bittensor/core/async_subtensor.py +++ b/bittensor/core/async_subtensor.py @@ -4,21 +4,6 @@ import aiohttp import scalecodec import typer -from bittensor_cli.src import Constants, defaults, TYPE_REGISTRY -from bittensor_cli.src import DelegatesDetails -from bittensor_cli.src.bittensor.async_substrate_interface import ( - AsyncSubstrateInterface, - TimeoutException, -) -from bittensor_cli.src.bittensor.balances import Balance -from bittensor_cli.src.bittensor.utils import ( - ss58_to_vec_u8, - format_error_message, - console, - err_console, - decode_hex_identity_dict, - validate_chain_endpoint, -) from bittensor_wallet import Wallet from bittensor_wallet.utils import SS58_FORMAT from scalecodec import GenericCall @@ -35,6 +20,20 @@ SubnetHyperparameters, decode_account_id, ) +from bittensor.core.settings import bt_console as console, bt_err_console as err_console, TYPE_REGISTRY, DEFAULTS, \ + NETWORK_MAP, DELEGATES_DETAILS_URL, DEFAULT_NETWORK +from bittensor.utils import ( + 
ss58_to_vec_u8, + format_error_message, + decode_hex_identity_dict, + validate_chain_endpoint, +) +from bittensor.utils.async_substrate_interface import ( + AsyncSubstrateInterface, + TimeoutException, +) +from bittensor.utils.balance import Balance +from bittensor.utils.delegates_details import DelegatesDetails class ParamWithTypes(TypedDict): @@ -57,19 +56,17 @@ def __init__(self, proposal_dict: dict) -> None: self.end = proposal_dict["end"] @staticmethod - def decode_ss58_tuples(l: tuple): - """ - Decodes a tuple of ss58 addresses formatted as bytes tuples - """ - return [decode_account_id(l[x][0]) for x in range(len(l))] + def decode_ss58_tuples(line: tuple): + """Decodes a tuple of ss58 addresses formatted as bytes tuples.""" + return [decode_account_id(line[x][0]) for x in range(len(line))] class AsyncSubtensor: """Thin layer for interacting with Substrate Interface. Mostly a collection of frequently-used calls.""" - def __init__(self, network): - if network in Constants.network_map: - self.chain_endpoint = Constants.network_map[network] + def __init__(self, network: str = DEFAULT_NETWORK): + if network in NETWORK_MAP: + self.chain_endpoint = NETWORK_MAP[network] self.network = network if network == "local": console.log( @@ -79,10 +76,10 @@ def __init__(self, network): is_valid, _ = validate_chain_endpoint(network) if is_valid: self.chain_endpoint = network - if network in Constants.network_map.values(): + if network in NETWORK_MAP.values(): self.network = next( key - for key, value in Constants.network_map.items() + for key, value in NETWORK_MAP.items() if value == network ) else: @@ -90,12 +87,12 @@ def __init__(self, network): else: console.log( f"Network not specified or not valid. Using default chain endpoint: " - f"{Constants.network_map[defaults.subtensor.network]}.\n" + f"{NETWORK_MAP[DEFAULTS.subtensor.network]}.\n" f"You can set this for commands with the `--network` flag, or by setting this" f" in the config." 
def print_console(message: str, colour: str, title: str, console: Console):
    """Render a titled, colourised message to the given rich console."""
    console.print(
        f"[bold {colour}][{title}]:[/bold {colour}] [{colour}]{message}[/{colour}]\n"
    )


def print_verbose(message: str, status=None):
    """Print verbose messages while temporarily pausing the status spinner."""
    # Dedup: stop/start bracket the single print call instead of duplicating it per branch.
    if status:
        status.stop()
    print_console(message, "green", "Verbose", bt_verbose_console)
    if status:
        status.start()


def print_error(message: str, status=None):
    """Print error messages while temporarily pausing the status spinner."""
    if status:
        status.stop()
    print_console(message, "red", "Error", bt_err_console)
    if status:
        status.start()


# bittensor/core/async_subtensor.py — AsyncSubtensor extrinsic wrapper:
    async def transfer(
        self,
        wallet: Wallet,
        destination: str,
        amount: float,
        transfer_all: bool,
        prompt: bool,
    ) -> bool:
        """Transfer token of amount to destination.

        :param wallet: wallet whose coldkey signs the transfer.
        :param destination: recipient SS58 address.
        :param amount: amount in TAO.
        :param transfer_all: transfer the whole spendable balance.
        :param prompt: ask for user confirmation before submitting.
        :return: ``True`` on success (BUG FIX: the extrinsic's result was previously dropped).
        """
        return await transfer_extrinsic(
            self,
            wallet,
            destination,
            Balance.from_tao(amount),
            transfer_all,
            prompt=prompt,
        )
async def transfer_extrinsic(
    subtensor: "AsyncSubtensor",
    wallet: Wallet,
    destination: str,
    amount: Balance,
    transfer_all: bool = False,
    wait_for_inclusion: bool = True,
    wait_for_finalization: bool = False,
    keep_alive: bool = True,
    prompt: bool = False,
) -> bool:
    """Transfers funds from this wallet to the destination public key address.

    :param subtensor: initialized AsyncSubtensor object used for transfer
    :param wallet: Bittensor wallet object to make transfer from.
    :param destination: Destination public key address (ss58_address or ed25519) of recipient.
    :param amount: Amount to stake as Bittensor balance.
    :param transfer_all: Whether to transfer all funds from this wallet to the destination address.
    :param wait_for_inclusion: If set, waits for the extrinsic to enter a block before returning `True`,
        or returns `False` if the extrinsic fails to enter the block within the timeout.
    :param wait_for_finalization: If set, waits for the extrinsic to be finalized on the chain before
        returning `True`, or returns `False` if the extrinsic fails to be finalized within the timeout.
    :param keep_alive: If set, keeps the account alive by keeping the balance above the existential deposit.
    :param prompt: If `True`, the call waits for confirmation from the user before proceeding.
    :return: success: Flag is `True` if extrinsic was finalized or included in the block. If we did not
        wait for finalization / inclusion, the response is `True`, regardless of its inclusion.
    """

    async def get_transfer_fee() -> Balance:
        """
        Calculates the transaction fee for transferring tokens from a wallet to a specified
        destination address by simulating the transfer call.
        """
        call = await subtensor.substrate.compose_call(
            call_module="Balances",
            call_function="transfer_allow_death",
            call_params={"dest": destination, "value": amount.rao},
        )

        try:
            payment_info = await subtensor.substrate.get_payment_info(
                call=call, keypair=wallet.coldkeypub
            )
        except SubstrateRequestException as e:
            payment_info = {"partialFee": int(2e7)}  # assume 0.02 Tao
            err_console.print(
                f":cross_mark: [red]Failed to get payment info[/red]:[bold white]\n"
                f" {format_error_message(e, subtensor.substrate)}[/bold white]\n"
                f" Defaulting to default transfer fee: {payment_info['partialFee']}"
            )

        return Balance.from_rao(payment_info["partialFee"])

    async def do_transfer() -> tuple[bool, str, str]:
        """
        Makes transfer from wallet to destination public key address.
        :return: success, block hash, formatted error message
        """
        call = await subtensor.substrate.compose_call(
            call_module="Balances",
            call_function="transfer_allow_death",
            call_params={"dest": destination, "value": amount.rao},
        )
        extrinsic = await subtensor.substrate.create_signed_extrinsic(
            call=call, keypair=wallet.coldkey
        )
        response = await subtensor.substrate.submit_extrinsic(
            extrinsic,
            wait_for_inclusion=wait_for_inclusion,
            wait_for_finalization=wait_for_finalization,
        )
        # We only wait here if we expect finalization.
        if not wait_for_finalization and not wait_for_inclusion:
            return True, "", ""

        # Otherwise continue with finalization.
        await response.process_events()
        if await response.is_success:
            block_hash_ = response.block_hash
            return True, block_hash_, ""
        # CONSISTENCY FIX: pass the substrate instance like every other
        # format_error_message call site in this module.
        return False, "", format_error_message(
            await response.error_message, substrate=subtensor.substrate
        )

    # Validate destination address.
    if not is_valid_bittensor_address_or_public_key(destination):
        err_console.print(
            f":cross_mark: [red]Invalid destination SS58 address[/red]:[bold white]\n {destination}[/bold white]"
        )
        return False
    console.print(f"[dark_orange]Initiating transfer on network: {subtensor.network}")
    # Unlock wallet coldkey.
    try:
        wallet.unlock_coldkey()
    except KeyFileError:
        err_console.print("Error decrypting coldkey (possibly incorrect password)")
        return False

    # Check balance.
    with console.status(
        f":satellite: Checking balance and fees on chain [white]{subtensor.network}[/white]",
        spinner="aesthetic",
    ) as status:
        # check existential deposit and fee
        print_verbose("Fetching existential and fee", status)
        block_hash = await subtensor.substrate.get_chain_head()
        account_balance_, existential_deposit = await asyncio.gather(
            subtensor.get_balance(
                wallet.coldkeypub.ss58_address, block_hash=block_hash
            ),
            subtensor.get_existential_deposit(block_hash=block_hash),
        )
        account_balance = account_balance_[wallet.coldkeypub.ss58_address]
        fee = await get_transfer_fee()

        if not keep_alive:
            # Check if the transfer should keep_alive the account
            existential_deposit = Balance(0)

        # Check if we have enough balance.
        if transfer_all is True:
            amount = account_balance - fee - existential_deposit
            if amount < Balance(0):
                print_error("Not enough balance to transfer")
                return False

        if account_balance < (amount + fee + existential_deposit):
            err_console.print(
                ":cross_mark: [bold red]Not enough balance[/bold red]:\n\n"
                f"  balance: [bright_cyan]{account_balance}[/bright_cyan]\n"
                f"  amount: [bright_cyan]{amount}[/bright_cyan]\n"
                f"  for fee: [bright_cyan]{fee}[/bright_cyan]"
            )
            return False

    # Ask before moving on.
    if prompt:
        if not Confirm.ask(
            "Do you want to transfer:[bold white]\n"
            f"  amount: [bright_cyan]{amount}[/bright_cyan]\n"
            f"  from: [light_goldenrod2]{wallet.name}[/light_goldenrod2] : [bright_magenta]{wallet.coldkey.ss58_address}\n[/bright_magenta]"
            f"  to: [bright_magenta]{destination}[/bright_magenta]\n for fee: [bright_cyan]{fee}[/bright_cyan]"
        ):
            return False

    with console.status(":satellite: Transferring...", spinner="earth") as status:
        success, block_hash, err_msg = await do_transfer()

        if success:
            console.print(":white_heavy_check_mark: [green]Finalized[/green]")
            console.print(f"[green]Block Hash: {block_hash}[/green]")

            if subtensor.network == "finney":
                print_verbose("Fetching explorer URLs", status)
                explorer_urls = get_explorer_url_for_network(
                    subtensor.network, block_hash, NETWORK_EXPLORER_MAP
                )
                if explorer_urls != {} and explorer_urls:
                    console.print(
                        f"[green]Opentensor Explorer Link: {explorer_urls.get('opentensor')}[/green]"
                    )
                    console.print(
                        f"[green]Taostats Explorer Link: {explorer_urls.get('taostats')}[/green]"
                    )
        else:
            console.print(f":cross_mark: [red]Failed[/red]: {err_msg}")

    if success:
        with console.status(":satellite: Checking Balance...", spinner="aesthetic"):
            new_balance = await subtensor.get_balance(
                wallet.coldkeypub.ss58_address, reuse_block=False
            )
            # CONSISTENCY FIX: index by coldkeypub (the key used when fetching the
            # balances above); same address, but avoids touching the decrypted coldkey.
            console.print(
                f"Balance:\n"
                f"  [blue]{account_balance}[/blue] :arrow_right: [green]{new_balance[wallet.coldkeypub.ss58_address]}[/green]"
            )
        return True

    return False


# bittensor/core/async_subtensor.py — AsyncSubtensor extrinsic wrapper:
    async def pow_register(
        self: "AsyncSubtensor",
        wallet: Wallet,
        netuid,
        processors,
        update_interval,
        output_in_place,
        verbose,
        use_cuda,
        dev_id,
        threads_per_block,
    ):
        """Register neuron on the given subnet via proof-of-work.

        :return: result of ``register_extrinsic`` (``True`` on successful registration).
        """
        return await register_extrinsic(
            subtensor=self,
            wallet=wallet,
            netuid=netuid,
            prompt=True,
            tpb=threads_per_block,
            update_interval=update_interval,
            num_processes=processors,
            cuda=use_cuda,
            dev_id=dev_id,
            output_in_place=output_in_place,
            log_verbose=verbose,
        )
b/bittensor/core/extrinsics/async_registration.py
new file mode 100644
index 0000000000..38ffd79449
--- /dev/null
+++ b/bittensor/core/extrinsics/async_registration.py
@@ -0,0 +1,1612 @@
+import asyncio
+import binascii
+import functools
+import hashlib
+import io
+import math
+import multiprocessing as mp
+import os
+import random
+import subprocess
+import time
+import typing
+from contextlib import redirect_stdout
+from dataclasses import dataclass
+from datetime import timedelta
+from multiprocessing import Process, Event, Lock, Array, Value, Queue
+from multiprocessing.queues import Queue as Queue_Type
+from queue import Empty, Full
+from typing import Optional
+
+import backoff
+import numpy as np
+from Crypto.Hash import keccak
+from bittensor_wallet import Wallet
+from bittensor_wallet.errors import KeyFileError
+from rich.console import Console
+from rich.prompt import Confirm
+from rich.status import Status
+from substrateinterface.exceptions import SubstrateRequestException
+
+from bittensor.core.chain_data import NeuronInfo
+from bittensor.core.settings import bt_console as console, bt_err_console as err_console, print_verbose, print_error
+from bittensor.utils import format_error_message
+from bittensor.utils.formatting import millify, get_human_readable
+
+if typing.TYPE_CHECKING:
+    from bittensor.core.async_subtensor import AsyncSubtensor
+
+
+# TODO: compare and remove existing code (bittensor.utils.registration)
+
+def use_torch() -> bool:
+    """Force the use of torch over numpy for certain operations."""
+    return True if os.getenv("USE_TORCH") == "1" else False
+
+
+def legacy_torch_api_compat(func: typing.Callable):
+    """
+    Convert function operating on numpy Input&Output to legacy torch Input&Output API if `use_torch()` is True.
+
+    :param func: Function with numpy Input/Output to be decorated.
+ + :return: Decorated function + """ + + @functools.wraps(func) + def decorated(*args, **kwargs): + if use_torch(): + # if argument is a Torch tensor, convert it to numpy + args = [ + arg.cpu().numpy() if isinstance(arg, torch.Tensor) else arg + for arg in args + ] + kwargs = { + key: value.cpu().numpy() if isinstance(value, torch.Tensor) else value + for key, value in kwargs.items() + } + ret = func(*args, **kwargs) + if use_torch(): + # if return value is a numpy array, convert it to Torch tensor + if isinstance(ret, np.ndarray): + ret = torch.from_numpy(ret) + return ret + + return decorated + + +@functools.cache +def _get_real_torch(): + try: + import torch as _real_torch + except ImportError: + _real_torch = None + return _real_torch + + +def log_no_torch_error(): + err_console.print( + "This command requires torch. You can install torch" + " with `pip install torch` and run the command again." + ) + + +@dataclass +class POWSolution: + """A solution to the registration PoW problem.""" + + nonce: int + block_number: int + difficulty: int + seal: bytes + + async def is_stale(self, subtensor: "AsyncSubtensor") -> bool: + """Returns True if the POW is stale. + This means the block the POW is solved for is within 3 blocks of the current block. 
+ """ + current_block = await subtensor.substrate.get_block_number(None) + return self.block_number < current_block - 3 + + +@dataclass +class RegistrationStatistics: + """Statistics for a registration.""" + + time_spent_total: float + rounds_total: int + time_average: float + time_spent: float + hash_rate_perpetual: float + hash_rate: float + difficulty: int + block_number: int + block_hash: str + + +class RegistrationStatisticsLogger: + """Logs statistics for a registration.""" + + console: Console + status: Optional[Status] + + def __init__(self, console_: Console, output_in_place: bool = True) -> None: + self.console = console_ + + if output_in_place: + self.status = self.console.status("Solving") + else: + self.status = None + + def start(self) -> None: + if self.status is not None: + self.status.start() + + def stop(self) -> None: + if self.status is not None: + self.status.stop() + + @classmethod + def get_status_message( + cls, stats: RegistrationStatistics, verbose: bool = False + ) -> str: + """ + Provides a message of the current status of the block solving as a str for a logger or stdout + """ + message = ( + "Solving\n" + + f"Time Spent (total): [bold white]{timedelta(seconds=stats.time_spent_total)}[/bold white]\n" + + ( + f"Time Spent This Round: {timedelta(seconds=stats.time_spent)}\n" + + f"Time Spent Average: {timedelta(seconds=stats.time_average)}\n" + if verbose + else "" + ) + + f"Registration Difficulty: [bold white]{millify(stats.difficulty)}[/bold white]\n" + + f"Iters (Inst/Perp): [bold white]{get_human_readable(stats.hash_rate, 'H')}/s / " + + f"{get_human_readable(stats.hash_rate_perpetual, 'H')}/s[/bold white]\n" + + f"Block Number: [bold white]{stats.block_number}[/bold white]\n" + + f"Block Hash: [bold white]{stats.block_hash.encode('utf-8')}[/bold white]\n" + ) + return message + + def update(self, stats: RegistrationStatistics, verbose: bool = False) -> None: + """ + Passes the current status to the logger + """ + if self.status is 
not None: + self.status.update(self.get_status_message(stats, verbose=verbose)) + else: + self.console.log(self.get_status_message(stats, verbose=verbose)) + + +class _SolverBase(Process): + """ + A process that solves the registration PoW problem. + + :param proc_num: The number of the process being created. + :param num_proc: The total number of processes running. + :param update_interval: The number of nonces to try to solve before checking for a new block. + :param finished_queue: The queue to put the process number when a process finishes each update_interval. + Used for calculating the average time per update_interval across all processes. + :param solution_queue: The queue to put the solution the process has found during the pow solve. + :param stop_event: The event to set by the main process when all the solver processes should stop. + The solver process will check for the event after each update_interval. + The solver process will stop when the event is set. + Used to stop the solver processes when a solution is found. + :param curr_block: The array containing this process's current block hash. + The main process will set the array to the new block hash when a new block is finalized in the + network. The solver process will get the new block hash from this array when newBlockEvent is set + :param curr_block_num: The value containing this process's current block number. + The main process will set the value to the new block number when a new block is finalized in + the network. The solver process will get the new block number from this value when + new_block_event is set. + :param curr_diff: The array containing this process's current difficulty. The main process will set the array to + the new difficulty when a new block is finalized in the network. The solver process will get the + new difficulty from this array when newBlockEvent is set. 
+ :param check_block: The lock to prevent this process from getting the new block data while the main process is + updating the data. + :param limit: The limit of the pow solve for a valid solution. + + :var new_block_event: The event to set by the main process when a new block is finalized in the network. + The solver process will check for the event after each update_interval. + The solver process will get the new block hash and difficulty and start solving for a new + nonce. + """ + + proc_num: int + num_proc: int + update_interval: int + finished_queue: Queue_Type + solution_queue: Queue_Type + new_block_event: Event + stop_event: Event + hotkey_bytes: bytes + curr_block: Array + curr_block_num: Value + curr_diff: Array + check_block: Lock + limit: int + + def __init__( + self, + proc_num, + num_proc, + update_interval, + finished_queue, + solution_queue, + stop_event, + curr_block, + curr_block_num, + curr_diff, + check_block, + limit, + ): + Process.__init__(self, daemon=True) + self.proc_num = proc_num + self.num_proc = num_proc + self.update_interval = update_interval + self.finished_queue = finished_queue + self.solution_queue = solution_queue + self.new_block_event = Event() + self.new_block_event.clear() + self.curr_block = curr_block + self.curr_block_num = curr_block_num + self.curr_diff = curr_diff + self.check_block = check_block + self.stop_event = stop_event + self.limit = limit + + def run(self): + raise NotImplementedError("_SolverBase is an abstract class") + + @staticmethod + def create_shared_memory() -> tuple[Array, Value, Array]: + """Creates shared memory for the solver processes to use.""" + curr_block = Array("h", 32, lock=True) # byte array + curr_block_num = Value("i", 0, lock=True) # int + curr_diff = Array("Q", [0, 0], lock=True) # [high, low] + + return curr_block, curr_block_num, curr_diff + + +class _Solver(_SolverBase): + """ + Performs POW Solution + """ + + def run(self): + block_number: int + block_and_hotkey_hash_bytes: bytes 
+ block_difficulty: int + nonce_limit = int(math.pow(2, 64)) - 1 + + # Start at random nonce + nonce_start = random.randint(0, nonce_limit) + nonce_end = nonce_start + self.update_interval + while not self.stop_event.is_set(): + if self.new_block_event.is_set(): + with self.check_block: + block_number = self.curr_block_num.value + block_and_hotkey_hash_bytes = bytes(self.curr_block) + block_difficulty = _registration_diff_unpack(self.curr_diff) + + self.new_block_event.clear() + + # Do a block of nonces + solution = _solve_for_nonce_block( + nonce_start, + nonce_end, + block_and_hotkey_hash_bytes, + block_difficulty, + self.limit, + block_number, + ) + if solution is not None: + self.solution_queue.put(solution) + + try: + # Send time + self.finished_queue.put_nowait(self.proc_num) + except Full: + pass + + nonce_start = random.randint(0, nonce_limit) + nonce_start = nonce_start % nonce_limit + nonce_end = nonce_start + self.update_interval + + +class _CUDASolver(_SolverBase): + """ + Performs POW Solution using CUDA + """ + + dev_id: int + tpb: int + + def __init__( + self, + proc_num, + num_proc, + update_interval, + finished_queue, + solution_queue, + stop_event, + curr_block, + curr_block_num, + curr_diff, + check_block, + limit, + dev_id: int, + tpb: int, + ): + super().__init__( + proc_num, + num_proc, + update_interval, + finished_queue, + solution_queue, + stop_event, + curr_block, + curr_block_num, + curr_diff, + check_block, + limit, + ) + self.dev_id = dev_id + self.tpb = tpb + + def run(self): + block_number: int = 0 # dummy value + block_and_hotkey_hash_bytes: bytes = b"0" * 32 # dummy value + block_difficulty: int = int(math.pow(2, 64)) - 1 # dummy value + nonce_limit = int(math.pow(2, 64)) - 1 # U64MAX + + # Start at random nonce + nonce_start = random.randint(0, nonce_limit) + while not self.stop_event.is_set(): + if self.new_block_event.is_set(): + with self.check_block: + block_number = self.curr_block_num.value + block_and_hotkey_hash_bytes = 
bytes(self.curr_block) + block_difficulty = _registration_diff_unpack(self.curr_diff) + + self.new_block_event.clear() + + # Do a block of nonces + solution = _solve_for_nonce_block_cuda( + nonce_start, + self.update_interval, + block_and_hotkey_hash_bytes, + block_difficulty, + self.limit, + block_number, + self.dev_id, + self.tpb, + ) + if solution is not None: + self.solution_queue.put(solution) + + try: + # Signal that a nonce_block was finished using queue + # send our proc_num + self.finished_queue.put(self.proc_num) + except Full: + pass + + # increase nonce by number of nonces processed + nonce_start += self.update_interval * self.tpb + nonce_start = nonce_start % nonce_limit + + +class LazyLoadedTorch: + def __bool__(self): + return bool(_get_real_torch()) + + def __getattr__(self, name): + if real_torch := _get_real_torch(): + return getattr(real_torch, name) + else: + log_no_torch_error() + raise ImportError("torch not installed") + + +if typing.TYPE_CHECKING: + import torch +else: + torch = LazyLoadedTorch() + + +class MaxSuccessException(Exception): + """ + Raised when the POW Solver has reached the max number of successful solutions + """ + + +class MaxAttemptsException(Exception): + """ + Raised when the POW Solver has reached the max number of attempts + """ + + +async def is_hotkey_registered( + subtensor: "AsyncSubtensor", netuid: int, hotkey_ss58: str +) -> bool: + """Checks to see if the hotkey is registered on a given netuid""" + _result = await subtensor.substrate.query( + module="SubtensorModule", + storage_function="Uids", + params=[netuid, hotkey_ss58], + ) + if _result is not None: + return True + else: + return False + + +async def register_extrinsic( + subtensor: "AsyncSubtensor", + wallet: "Wallet", + netuid: int, + wait_for_inclusion: bool = False, + wait_for_finalization: bool = True, + prompt: bool = False, + max_allowed_attempts: int = 3, + output_in_place: bool = True, + cuda: bool = False, + dev_id: typing.Union[list[int], int] = 
0, + tpb: int = 256, + num_processes: Optional[int] = None, + update_interval: Optional[int] = None, + log_verbose: bool = False, +) -> bool: + """Registers the wallet to the chain. + + :param subtensor: initialized AsyncSubtensor object to use for chain interactions + :param wallet: Bittensor wallet object. + :param netuid: The ``netuid`` of the subnet to register on. + :param wait_for_inclusion: If set, waits for the extrinsic to enter a block before returning `True`, or returns + `False` if the extrinsic fails to enter the block within the timeout. + :param wait_for_finalization: If set, waits for the extrinsic to be finalized on the chain before returning `True`, + or returns `False` if the extrinsic fails to be finalized within the timeout. + :param prompt: If `True`, the call waits for confirmation from the user before proceeding. + :param max_allowed_attempts: Maximum number of attempts to register the wallet. + :param output_in_place: Whether the POW solving should be outputted to the console as it goes along. + :param cuda: If `True`, the wallet should be registered using CUDA device(s). + :param dev_id: The CUDA device id to use, or a list of device ids. + :param tpb: The number of threads per block (CUDA). + :param num_processes: The number of processes to use to register. + :param update_interval: The number of nonces to solve between updates. + :param log_verbose: If `True`, the registration process will log more information. + + :return: `True` if extrinsic was finalized or included in the block. If we did not wait for finalization/inclusion, + the response is `True`. 
+ """ + + async def get_neuron_for_pubkey_and_subnet(): + uid = await subtensor.substrate.query( + "SubtensorModule", "Uids", [netuid, wallet.hotkey.ss58_address] + ) + if uid is None: + return NeuronInfo.get_null_neuron() + + params = [netuid, uid] + json_body = await subtensor.substrate.rpc_request( + method="neuronInfo_getNeuron", + params=params, + ) + + if not (result := json_body.get("result", None)): + return NeuronInfo.get_null_neuron() + + return NeuronInfo.from_vec_u8(bytes(result)) + + print_verbose("Checking subnet status") + if not await subtensor.subnet_exists(netuid): + err_console.print( + f":cross_mark: [red]Failed[/red]: error: [bold white]subnet:{netuid}[/bold white] does not exist." + ) + return False + + with console.status( + f":satellite: Checking Account on [bold]subnet:{netuid}[/bold]...", + spinner="aesthetic", + ) as status: + neuron = await get_neuron_for_pubkey_and_subnet() + if not neuron.is_null: + print_error( + f"Wallet {wallet} is already registered on subnet {neuron.netuid} with uid {neuron.uid}", + status, + ) + return True + + if prompt: + if not Confirm.ask( + f"Continue Registration?\n" + f" hotkey ({wallet.hotkey_str}):\t[bold white]{wallet.hotkey.ss58_address}[/bold white]\n" + f" coldkey ({wallet.name}):\t[bold white]{wallet.coldkeypub.ss58_address}[/bold white]\n" + f" network:\t\t[bold white]{subtensor.network}[/bold white]" + ): + return False + + if not torch: + log_no_torch_error() + return False + + # Attempt rolling registration. + attempts = 1 + pow_result: Optional[POWSolution] + while True: + console.print( + ":satellite: Registering...({}/{})".format(attempts, max_allowed_attempts) + ) + # Solve latest POW. 
+ if cuda: + if not torch.cuda.is_available(): + if prompt: + console.print("CUDA is not available.") + return False + pow_result = await create_pow( + subtensor, + wallet, + netuid, + output_in_place, + cuda=cuda, + dev_id=dev_id, + tpb=tpb, + num_processes=num_processes, + update_interval=update_interval, + log_verbose=log_verbose, + ) + else: + pow_result = await create_pow( + subtensor, + wallet, + netuid, + output_in_place, + cuda=cuda, + num_processes=num_processes, + update_interval=update_interval, + log_verbose=log_verbose, + ) + + # pow failed + if not pow_result: + # might be registered already on this subnet + is_registered = await is_hotkey_registered( + subtensor, netuid=netuid, hotkey_ss58=wallet.hotkey.ss58_address + ) + if is_registered: + err_console.print( + f":white_heavy_check_mark: [green]Already registered on netuid:{netuid}[/green]" + ) + return True + + # pow successful, proceed to submit pow to chain for registration + else: + with console.status(":satellite: Submitting POW..."): + # check if pow result is still valid + while not await pow_result.is_stale(subtensor=subtensor): + call = await subtensor.substrate.compose_call( + call_module="SubtensorModule", + call_function="register", + call_params={ + "netuid": netuid, + "block_number": pow_result.block_number, + "nonce": pow_result.nonce, + "work": [int(byte_) for byte_ in pow_result.seal], + "hotkey": wallet.hotkey.ss58_address, + "coldkey": wallet.coldkeypub.ss58_address, + }, + ) + extrinsic = await subtensor.substrate.create_signed_extrinsic( + call=call, keypair=wallet.hotkey + ) + response = await subtensor.substrate.submit_extrinsic( + extrinsic, + wait_for_inclusion=wait_for_inclusion, + wait_for_finalization=wait_for_finalization, + ) + if not wait_for_finalization and not wait_for_inclusion: + success, err_msg = True, "" + else: + await response.process_events() + success = await response.is_success + if not success: + success, err_msg = ( + False, + format_error_message( + 
await response.error_message,
+                                    substrate=subtensor.substrate,
+                                ),
+                            )
+                            # Look error here
+                            # https://github.com/opentensor/subtensor/blob/development/pallets/subtensor/src/errors.rs
+
+                            if "HotKeyAlreadyRegisteredInSubNet" in err_msg:
+                                console.print(
+                                    f":white_heavy_check_mark: [green]Already Registered on "
+                                    f"[bold]subnet:{netuid}[/bold][/green]"
+                                )
+                                return True
+                            err_console.print(
+                                f":cross_mark: [red]Failed[/red]: {err_msg}"
+                            )
+                            await asyncio.sleep(0.5)
+
+                    # Successful registration, final check for neuron and pubkey
+                    if success:
+                        console.print(":satellite: Checking Registration status...")
+                        is_registered = await is_hotkey_registered(
+                            subtensor,
+                            netuid=netuid,
+                            hotkey_ss58=wallet.hotkey.ss58_address,
+                        )
+                        if is_registered:
+                            console.print(
+                                ":white_heavy_check_mark: [green]Registered[/green]"
+                            )
+                            return True
+                        else:
+                            # neuron not found, try again
+                            err_console.print(
+                                ":cross_mark: [red]Unknown error. Neuron not found.[/red]"
+                            )
+                            continue
+                else:
+                    # Exited loop because pow is no longer valid.
+                    err_console.print("[red]POW is stale.[/red]")
+                    # Try again.
+                    continue
+
+        if attempts < max_allowed_attempts:
+            # Failed registration, retry pow
+            attempts += 1
+            err_console.print(
+                f":satellite: Failed registration, retrying pow ...({attempts}/{max_allowed_attempts})"
+            )
+        else:
+            # Failed to register after max attempts.
+            err_console.print("[red]No more attempts.[/red]")
+            return False
+
+
+async def run_faucet_extrinsic(
+    subtensor: "AsyncSubtensor",
+    wallet: Wallet,
+    wait_for_inclusion: bool = False,
+    wait_for_finalization: bool = True,
+    prompt: bool = False,
+    max_allowed_attempts: int = 3,
+    output_in_place: bool = True,
+    cuda: bool = False,
+    dev_id: int = 0,
+    tpb: int = 256,
+    num_processes: Optional[int] = None,
+    update_interval: Optional[int] = None,
+    log_verbose: bool = False,
+    max_successes: int = 3,
+) -> tuple[bool, str]:
+    r"""Runs a continual POW to get a faucet of TAO on the test net.
+ + :param subtensor: The subtensor interface object used to run the extrinsic + :param wallet: Bittensor wallet object. + :param prompt: If `True`, the call waits for confirmation from the user before proceeding. + :param wait_for_inclusion: If set, waits for the extrinsic to enter a block before returning `True`, + or returns `False` if the extrinsic fails to enter the block within the timeout. + :param wait_for_finalization: If set, waits for the extrinsic to be finalized on the chain before returning `True`, + or returns `False` if the extrinsic fails to be finalized within the timeout. + :param max_allowed_attempts: Maximum number of attempts to register the wallet. + :param output_in_place: Whether to output logging data as the process runs. + :param cuda: If `True`, the wallet should be registered using CUDA device(s). + :param dev_id: The CUDA device id to use + :param tpb: The number of threads per block (CUDA). + :param num_processes: The number of processes to use to register. + :param update_interval: The number of nonces to solve between updates. + :param log_verbose: If `True`, the registration process will log more information. + :param max_successes: The maximum number of successful faucet runs for the wallet. + + :return: `True` if extrinsic was finalized or included in the block. If we did not wait for + finalization/inclusion, the response is also `True` + """ + if prompt: + if not Confirm.ask( + "Run Faucet?\n" + f" wallet name: [bold white]{wallet.name}[/bold white]\n" + f" coldkey: [bold white]{wallet.coldkeypub.ss58_address}[/bold white]\n" + f" network: [bold white]{subtensor}[/bold white]" + ): + return False, "" + + if not torch: + log_no_torch_error() + return False, "Requires torch" + + # Unlock coldkey + try: + wallet.unlock_coldkey() + except KeyFileError: + return False, "There was an error unlocking your coldkey" + + # Get previous balance. 
+    old_balance = await subtensor.get_balance(wallet.coldkeypub.ss58_address)
+
+    # Attempt rolling registration.
+    attempts = 1
+    successes = 1
+    while True:
+        try:
+            pow_result = None
+            while pow_result is None or await pow_result.is_stale(subtensor=subtensor):
+                # Solve latest POW.
+                if cuda:
+                    if not torch.cuda.is_available():
+                        if prompt:
+                            err_console.print("CUDA is not available.")
+                        return False, "CUDA is not available."
+                    pow_result: Optional[POWSolution] = await create_pow(
+                        subtensor,
+                        wallet,
+                        -1,
+                        output_in_place,
+                        cuda=cuda,
+                        dev_id=dev_id,
+                        tpb=tpb,
+                        num_processes=num_processes,
+                        update_interval=update_interval,
+                        log_verbose=log_verbose,
+                    )
+                else:
+                    pow_result: Optional[POWSolution] = await create_pow(
+                        subtensor,
+                        wallet,
+                        -1,
+                        output_in_place,
+                        cuda=cuda,
+                        num_processes=num_processes,
+                        update_interval=update_interval,
+                        log_verbose=log_verbose,
+                    )
+            call = await subtensor.substrate.compose_call(
+                call_module="SubtensorModule",
+                call_function="faucet",
+                call_params={
+                    "block_number": pow_result.block_number,
+                    "nonce": pow_result.nonce,
+                    "work": [int(byte_) for byte_ in pow_result.seal],
+                },
+            )
+            extrinsic = await subtensor.substrate.create_signed_extrinsic(
+                call=call, keypair=wallet.coldkey
+            )
+            response = await subtensor.substrate.submit_extrinsic(
+                extrinsic,
+                wait_for_inclusion=wait_for_inclusion,
+                wait_for_finalization=wait_for_finalization,
+            )
+
+            # process if registration successful, try again if pow is still valid
+            await response.process_events()
+            if not await response.is_success:
+                err_console.print(
+                    f":cross_mark: [red]Failed[/red]: "
+                    f"{format_error_message(await response.error_message, subtensor.substrate)}"
+                )
+                if attempts == max_allowed_attempts:
+                    raise MaxAttemptsException
+                attempts += 1
+                # Wait a bit before trying again
+                await asyncio.sleep(1)
+
+            # Successful registration
+            else:
+                new_balance = await subtensor.get_balance(
+                    wallet.coldkeypub.ss58_address
+                )
+                console.print(
+                    f"Balance: 
[blue]{old_balance[wallet.coldkeypub.ss58_address]}[/blue] :arrow_right:"
+                    f" [green]{new_balance[wallet.coldkeypub.ss58_address]}[/green]"
+                )
+                old_balance = new_balance
+
+                if successes == max_successes:
+                    raise MaxSuccessException
+
+                attempts = 1  # Reset attempts on success
+                successes += 1
+
+        except KeyboardInterrupt:
+            return True, "Done"
+
+        except MaxSuccessException:
+            return True, f"Max successes reached: {max_successes}"
+
+        except MaxAttemptsException:
+            return False, f"Max attempts reached: {max_allowed_attempts}"
+
+
+async def _check_for_newest_block_and_update(
+    subtensor: "AsyncSubtensor",
+    netuid: int,
+    old_block_number: int,
+    hotkey_bytes: bytes,
+    curr_diff: Array,
+    curr_block: Array,
+    curr_block_num: Value,
+    update_curr_block: typing.Callable,
+    check_block: Lock,
+    solvers: list[_Solver],
+    curr_stats: RegistrationStatistics,
+) -> int:
+    """
+    Checks for a new block and updates the current block information if a new block is found.
+
+    :param subtensor: The subtensor object to use for getting the current block.
+    :param netuid: The netuid to use for retrieving the difficulty.
+    :param old_block_number: The old block number to check against.
+    :param hotkey_bytes: The bytes of the hotkey's pubkey.
+    :param curr_diff: The current difficulty as a multiprocessing array.
+    :param curr_block: Where the current block is stored as a multiprocessing array.
+    :param curr_block_num: Where the current block number is stored as a multiprocessing value.
+    :param update_curr_block: A function that updates the current block.
+    :param check_block: A mp lock that is used to check for a new block.
+    :param solvers: A list of solvers to update the current block for.
+    :param curr_stats: The current registration statistics to update.
+
+    :return: The current block number.
+ """ + block_number = await subtensor.substrate.get_block_number(None) + if block_number != old_block_number: + old_block_number = block_number + # update block information + block_number, difficulty, block_hash = await _get_block_with_retry( + subtensor=subtensor, netuid=netuid + ) + block_bytes = bytes.fromhex(block_hash[2:]) + + update_curr_block( + curr_diff, + curr_block, + curr_block_num, + block_number, + block_bytes, + difficulty, + hotkey_bytes, + check_block, + ) + # Set new block events for each solver + + for worker in solvers: + worker.new_block_event.set() + + # update stats + curr_stats.block_number = block_number + curr_stats.block_hash = block_hash + curr_stats.difficulty = difficulty + + return old_block_number + + +async def _block_solver( + subtensor: "AsyncSubtensor", + wallet: Wallet, + num_processes: int, + netuid: int, + dev_id: list[int], + tpb: int, + update_interval: int, + curr_block, + curr_block_num, + curr_diff, + n_samples, + alpha_, + output_in_place, + log_verbose, + cuda: bool, +): + """ + Shared code used by the Solvers to solve the POW solution + """ + limit = int(math.pow(2, 256)) - 1 + + # Establish communication queues + ## See the _Solver class for more information on the queues. 
+ stop_event = Event() + stop_event.clear() + + solution_queue = Queue() + finished_queues = [Queue() for _ in range(num_processes)] + check_block = Lock() + + hotkey_bytes = ( + wallet.coldkeypub.public_key if netuid == -1 else wallet.hotkey.public_key + ) + + if cuda: + ## Create a worker per CUDA device + num_processes = len(dev_id) + solvers = [ + _CUDASolver( + i, + num_processes, + update_interval, + finished_queues[i], + solution_queue, + stop_event, + curr_block, + curr_block_num, + curr_diff, + check_block, + limit, + dev_id[i], + tpb, + ) + for i in range(num_processes) + ] + else: + # Start consumers + solvers = [ + _Solver( + i, + num_processes, + update_interval, + finished_queues[i], + solution_queue, + stop_event, + curr_block, + curr_block_num, + curr_diff, + check_block, + limit, + ) + for i in range(num_processes) + ] + + # Get first block + block_number, difficulty, block_hash = await _get_block_with_retry( + subtensor=subtensor, netuid=netuid + ) + + block_bytes = bytes.fromhex(block_hash[2:]) + old_block_number = block_number + # Set to current block + _update_curr_block( + curr_diff, + curr_block, + curr_block_num, + block_number, + block_bytes, + difficulty, + hotkey_bytes, + check_block, + ) + + # Set new block events for each solver to start at the initial block + for worker in solvers: + worker.new_block_event.set() + + for worker in solvers: + worker.start() # start the solver processes + + start_time = time.time() # time that the registration started + time_last = start_time # time that the last work blocks completed + + curr_stats = RegistrationStatistics( + time_spent_total=0.0, + time_average=0.0, + rounds_total=0, + time_spent=0.0, + hash_rate_perpetual=0.0, + hash_rate=0.0, + difficulty=difficulty, + block_number=block_number, + block_hash=block_hash, + ) + + start_time_perpetual = time.time() + + logger = RegistrationStatisticsLogger(console, output_in_place) + logger.start() + + solution = None + + hash_rates = [0] * n_samples # 
The last n true hash_rates + weights = [alpha_**i for i in range(n_samples)] # weights decay by alpha + + timeout = 0.15 if cuda else 0.15 + while netuid == -1 or not await is_hotkey_registered( + subtensor, netuid, wallet.hotkey.ss58_address + ): + # Wait until a solver finds a solution + try: + solution = solution_queue.get(block=True, timeout=timeout) + if solution is not None: + break + except Empty: + # No solution found, try again + pass + + # check for new block + old_block_number = await _check_for_newest_block_and_update( + subtensor=subtensor, + netuid=netuid, + hotkey_bytes=hotkey_bytes, + old_block_number=old_block_number, + curr_diff=curr_diff, + curr_block=curr_block, + curr_block_num=curr_block_num, + curr_stats=curr_stats, + update_curr_block=_update_curr_block, + check_block=check_block, + solvers=solvers, + ) + + num_time = 0 + for finished_queue in finished_queues: + try: + finished_queue.get(timeout=0.1) + num_time += 1 + + except Empty: + continue + + time_now = time.time() # get current time + time_since_last = time_now - time_last # get time since last work block(s) + if num_time > 0 and time_since_last > 0.0: + # create EWMA of the hash_rate to make measure more robust + + if cuda: + hash_rate_ = (num_time * tpb * update_interval) / time_since_last + else: + hash_rate_ = (num_time * update_interval) / time_since_last + hash_rates.append(hash_rate_) + hash_rates.pop(0) # remove the 0th data point + curr_stats.hash_rate = sum( + [hash_rates[i] * weights[i] for i in range(n_samples)] + ) / (sum(weights)) + + # update time last to now + time_last = time_now + + curr_stats.time_average = ( + curr_stats.time_average * curr_stats.rounds_total + + curr_stats.time_spent + ) / (curr_stats.rounds_total + num_time) + curr_stats.rounds_total += num_time + + # Update stats + curr_stats.time_spent = time_since_last + new_time_spent_total = time_now - start_time_perpetual + if cuda: + curr_stats.hash_rate_perpetual = ( + curr_stats.rounds_total * (tpb * 
update_interval) + ) / new_time_spent_total + else: + curr_stats.hash_rate_perpetual = ( + curr_stats.rounds_total * update_interval + ) / new_time_spent_total + curr_stats.time_spent_total = new_time_spent_total + + # Update the logger + logger.update(curr_stats, verbose=log_verbose) + + # exited while, solution contains the nonce or wallet is registered + stop_event.set() # stop all other processes + logger.stop() + + # terminate and wait for all solvers to exit + _terminate_workers_and_wait_for_exit(solvers) + + return solution + + +async def _solve_for_difficulty_fast_cuda( + subtensor: "AsyncSubtensor", + wallet: Wallet, + netuid: int, + output_in_place: bool = True, + update_interval: int = 50_000, + tpb: int = 512, + dev_id: typing.Union[list[int], int] = 0, + n_samples: int = 10, + alpha_: float = 0.80, + log_verbose: bool = False, +) -> Optional[POWSolution]: + """ + Solves the registration fast using CUDA + + :param subtensor: The subtensor node to grab blocks + :param wallet: The wallet to register + :param netuid: The netuid of the subnet to register to. + :param output_in_place: If true, prints the output in place, otherwise prints to new lines + :param update_interval: The number of nonces to try before checking for more blocks + :param tpb: The number of threads per block. CUDA param that should match the GPU capability + :param dev_id: The CUDA device IDs to execute the registration on, either a single device or a list of devices + :param n_samples: The number of samples of the hash_rate to keep for the EWMA + :param alpha_: The alpha for the EWMA for the hash_rate calculation + :param log_verbose: If true, prints more verbose logging of the registration metrics. + + Note: The hash rate is calculated as an exponentially weighted moving average in order to make the measure more + robust. 
+ """ + if isinstance(dev_id, int): + dev_id = [dev_id] + elif dev_id is None: + dev_id = [0] + + if update_interval is None: + update_interval = 50_000 + + if not torch.cuda.is_available(): + raise Exception("CUDA not available") + + # Set mp start to use spawn so CUDA doesn't complain + with _UsingSpawnStartMethod(force=True): + curr_block, curr_block_num, curr_diff = _CUDASolver.create_shared_memory() + + solution = await _block_solver( + subtensor=subtensor, + wallet=wallet, + num_processes=None, + netuid=netuid, + dev_id=dev_id, + tpb=tpb, + update_interval=update_interval, + curr_block=curr_block, + curr_block_num=curr_block_num, + curr_diff=curr_diff, + n_samples=n_samples, + alpha_=alpha_, + output_in_place=output_in_place, + log_verbose=log_verbose, + cuda=True, + ) + + return solution + + +async def _solve_for_difficulty_fast( + subtensor, + wallet: Wallet, + netuid: int, + output_in_place: bool = True, + num_processes: Optional[int] = None, + update_interval: Optional[int] = None, + n_samples: int = 10, + alpha_: float = 0.80, + log_verbose: bool = False, +) -> Optional[POWSolution]: + """ + Solves the POW for registration using multiprocessing. + + :param subtensor: Subtensor to connect to for block information and to submit. + :param wallet: wallet to use for registration. + :param netuid: The netuid of the subnet to register to. + :param output_in_place: If true, prints the status in place. Otherwise, prints the status on a new line. + :param num_processes: Number of processes to use. + :param update_interval: Number of nonces to solve before updating block information. + :param n_samples: The number of samples of the hash_rate to keep for the EWMA + :param alpha_: The alpha for the EWMA for the hash_rate calculation + :param log_verbose: If true, prints more verbose logging of the registration metrics. + + Notes: + + - The hash rate is calculated as an exponentially weighted moving average in order to make the measure more robust. 
+ - We can also modify the update interval to do smaller blocks of work, while still updating the block information + after a different number of nonces, to increase the transparency of the process while still keeping the speed. + """ + if not num_processes: + # get the number of allowed processes for this process + num_processes = min(1, get_cpu_count()) + + if update_interval is None: + update_interval = 50_000 + + curr_block, curr_block_num, curr_diff = _Solver.create_shared_memory() + + solution = await _block_solver( + subtensor=subtensor, + wallet=wallet, + num_processes=num_processes, + netuid=netuid, + dev_id=None, + tpb=None, + update_interval=update_interval, + curr_block=curr_block, + curr_block_num=curr_block_num, + curr_diff=curr_diff, + n_samples=n_samples, + alpha_=alpha_, + output_in_place=output_in_place, + log_verbose=log_verbose, + cuda=False, + ) + + return solution + + +def _terminate_workers_and_wait_for_exit( + workers: list[typing.Union[Process, Queue_Type]], +) -> None: + for worker in workers: + if isinstance(worker, Queue_Type): + worker.join_thread() + else: + try: + worker.join(3.0) + except subprocess.TimeoutExpired: + worker.terminate() + try: + worker.close() + except ValueError: + worker.terminate() + + +# TODO verify this works with async +@backoff.on_exception(backoff.constant, Exception, interval=1, max_tries=3) +async def _get_block_with_retry( + subtensor: "AsyncSubtensor", netuid: int +) -> tuple[int, int, bytes]: + """ + Gets the current block number, difficulty, and block hash from the substrate node. + + :param subtensor: The subtensor object to use to get the block number, difficulty, and block hash. + :param netuid: The netuid of the network to get the block number, difficulty, and block hash from. + + :return: The current block number, difficulty of the subnet, block hash + + :raises Exception: If the block hash is None. + :raises ValueError: If the difficulty is None. 
+ """ + block_number = await subtensor.substrate.get_block_number(None) + block_hash = await subtensor.substrate.get_block_hash( + block_number + ) # TODO check if I need to do all this + try: + difficulty = ( + 1_000_000 + if netuid == -1 + else int( + await subtensor.get_hyperparameter( + param_name="Difficulty", netuid=netuid, block_hash=block_hash + ) + ) + ) + except TypeError: + raise ValueError("Chain error. Difficulty is None") + except SubstrateRequestException: + raise Exception( + "Network error. Could not connect to substrate to get block hash" + ) + return block_number, difficulty, block_hash + + +def _registration_diff_unpack(packed_diff: Array) -> int: + """Unpacks the packed two 32-bit integers into one 64-bit integer. Little endian.""" + return int(packed_diff[0] << 32 | packed_diff[1]) + + +def _registration_diff_pack(diff: int, packed_diff: Array): + """Packs the difficulty into two 32-bit integers. Little endian.""" + packed_diff[0] = diff >> 32 + packed_diff[1] = diff & 0xFFFFFFFF # low 32 bits + + +class _UsingSpawnStartMethod: + def __init__(self, force: bool = False): + self._old_start_method = None + self._force = force + + def __enter__(self): + self._old_start_method = mp.get_start_method(allow_none=True) + if self._old_start_method is None: + self._old_start_method = "spawn" # default to spawn + + mp.set_start_method("spawn", force=self._force) + + def __exit__(self, *args): + # restore the old start method + mp.set_start_method(self._old_start_method, force=True) + + +async def create_pow( + subtensor: "AsyncSubtensor", + wallet: Wallet, + netuid: int, + output_in_place: bool = True, + cuda: bool = False, + dev_id: typing.Union[list[int], int] = 0, + tpb: int = 256, + num_processes: int = None, + update_interval: int = None, + log_verbose: bool = False, +) -> Optional[dict[str, typing.Any]]: + """ + Creates a proof of work for the given subtensor and wallet. + + :param subtensor: The subtensor to create a proof of work for. 
+ :param wallet: The wallet to create a proof of work for. + :param netuid: The netuid for the subnet to create a proof of work for. + :param output_in_place: If true, prints the progress of the proof of work to the console + in-place. Meaning the progress is printed on the same lines. + :param cuda: If true, uses CUDA to solve the proof of work. + :param dev_id: The CUDA device id(s) to use. If cuda is true and dev_id is a list, + then multiple CUDA devices will be used to solve the proof of work. + :param tpb: The number of threads per block to use when solving the proof of work. Should be a multiple of 32. + :param num_processes: The number of processes to use when solving the proof of work. + If None, then the number of processes is equal to the number of CPU cores. + :param update_interval: The number of nonces to run before checking for a new block. + :param log_verbose: If true, prints the progress of the proof of work more verbosely. + + :return: The proof of work solution or None if the wallet is already registered or there is a different error. + + :raises ValueError: If the subnet does not exist. 
+ """ + if netuid != -1: + if not await subtensor.subnet_exists(netuid=netuid): + raise ValueError(f"Subnet {netuid} does not exist") + + if cuda: + solution: Optional[POWSolution] = await _solve_for_difficulty_fast_cuda( + subtensor, + wallet, + netuid=netuid, + output_in_place=output_in_place, + dev_id=dev_id, + tpb=tpb, + update_interval=update_interval, + log_verbose=log_verbose, + ) + else: + solution: Optional[POWSolution] = await _solve_for_difficulty_fast( + subtensor, + wallet, + netuid=netuid, + output_in_place=output_in_place, + num_processes=num_processes, + update_interval=update_interval, + log_verbose=log_verbose, + ) + + return solution + + +def _solve_for_nonce_block_cuda( + nonce_start: int, + update_interval: int, + block_and_hotkey_hash_bytes: bytes, + difficulty: int, + limit: int, + block_number: int, + dev_id: int, + tpb: int, +) -> Optional[POWSolution]: + """ + Tries to solve the POW on a CUDA device for a block of nonces (nonce_start, nonce_start + update_interval * tpb + """ + solution, seal = solve_cuda( + nonce_start, + update_interval, + tpb, + block_and_hotkey_hash_bytes, + difficulty, + limit, + dev_id, + ) + + if solution != -1: + # Check if solution is valid (i.e. not -1) + return POWSolution(solution, block_number, difficulty, seal) + + return None + + +def _solve_for_nonce_block( + nonce_start: int, + nonce_end: int, + block_and_hotkey_hash_bytes: bytes, + difficulty: int, + limit: int, + block_number: int, +) -> Optional[POWSolution]: + """ + Tries to solve the POW for a block of nonces (nonce_start, nonce_end) + """ + for nonce in range(nonce_start, nonce_end): + # Create seal. + seal = _create_seal_hash(block_and_hotkey_hash_bytes, nonce) + + # Check if seal meets difficulty + if _seal_meets_difficulty(seal, difficulty, limit): + # Found a solution, save it. 
+ return POWSolution(nonce, block_number, difficulty, seal) + + return None + + +class CUDAException(Exception): + """An exception raised when an error occurs in the CUDA environment.""" + + +def _hex_bytes_to_u8_list(hex_bytes: bytes): + hex_chunks = [int(hex_bytes[i : i + 2], 16) for i in range(0, len(hex_bytes), 2)] + return hex_chunks + + +def _create_seal_hash(block_and_hotkey_hash_bytes: bytes, nonce: int) -> bytes: + """ + Create a cryptographic seal hash from the given block and hotkey hash bytes and nonce. + + This function generates a seal hash by combining the given block and hotkey hash bytes with a nonce. + It first converts the nonce to a byte representation, then concatenates it with the first 64 hex + characters of the block and hotkey hash bytes. The result is then hashed using SHA-256 followed by + the Keccak-256 algorithm to produce the final seal hash. + + :param block_and_hotkey_hash_bytes: The combined hash bytes of the block and hotkey. + :param nonce: The nonce value used for hashing. + + :return: The resulting seal hash. 
+ """ + nonce_bytes = binascii.hexlify(nonce.to_bytes(8, "little")) + pre_seal = nonce_bytes + binascii.hexlify(block_and_hotkey_hash_bytes)[:64] + seal_sh256 = hashlib.sha256(bytearray(_hex_bytes_to_u8_list(pre_seal))).digest() + kec = keccak.new(digest_bits=256) + seal = kec.update(seal_sh256).digest() + return seal + + +def _seal_meets_difficulty(seal: bytes, difficulty: int, limit: int) -> bool: + """Determines if a seal meets the specified difficulty""" + seal_number = int.from_bytes(seal, "big") + product = seal_number * difficulty + return product < limit + + +def _hash_block_with_hotkey(block_bytes: bytes, hotkey_bytes: bytes) -> bytes: + """Hashes the block with the hotkey using Keccak-256 to get 32 bytes""" + kec = keccak.new(digest_bits=256) + kec = kec.update(bytearray(block_bytes + hotkey_bytes)) + block_and_hotkey_hash_bytes = kec.digest() + return block_and_hotkey_hash_bytes + + +def _update_curr_block( + curr_diff: Array, + curr_block: Array, + curr_block_num: Value, + block_number: int, + block_bytes: bytes, + diff: int, + hotkey_bytes: bytes, + lock: Lock, +): + """ + Update the current block data with the provided block information and difficulty. + + This function updates the current block and its difficulty in a thread-safe manner. It sets the current block + number, hashes the block with the hotkey, updates the current block bytes, and packs the difficulty. + + :param curr_diff: Shared array to store the current difficulty. + :param curr_block: Shared array to store the current block data. + :param curr_block_num: Shared value to store the current block number. + :param block_number: The block number to set as the current block number. + :param block_bytes: The block data bytes to be hashed with the hotkey. + :param diff: The difficulty value to be packed into the current difficulty array. + :param hotkey_bytes: The hotkey bytes used for hashing the block. + :param lock: A lock to ensure thread-safe updates. 
+ """ + with lock: + curr_block_num.value = block_number + # Hash the block with the hotkey + block_and_hotkey_hash_bytes = _hash_block_with_hotkey(block_bytes, hotkey_bytes) + for i in range(32): + curr_block[i] = block_and_hotkey_hash_bytes[i] + _registration_diff_pack(diff, curr_diff) + + +def get_cpu_count() -> int: + try: + return len(os.sched_getaffinity(0)) + except AttributeError: + # macOS does not have sched_getaffinity + return os.cpu_count() + + +@dataclass +class RegistrationStatistics: + """Statistics for a registration.""" + + time_spent_total: float + rounds_total: int + time_average: float + time_spent: float + hash_rate_perpetual: float + hash_rate: float + difficulty: int + block_number: int + block_hash: bytes + + +def solve_cuda( + nonce_start: np.int64, + update_interval: np.int64, + tpb: int, + block_and_hotkey_hash_bytes: bytes, + difficulty: int, + limit: int, + dev_id: int = 0, +) -> tuple[np.int64, bytes]: + """ + Solves the PoW problem using CUDA. + + :param nonce_start: Starting nonce. + :param update_interval: Number of nonces to solve before updating block information. + :param tpb: Threads per block. + :param block_and_hotkey_hash_bytes: Keccak(Bytes of the block hash + bytes of the hotkey) 64 bytes. + :param difficulty: Difficulty of the PoW problem. + :param limit: Upper limit of the nonce. + :param dev_id: The CUDA device ID + + :return: (nonce, seal) corresponding to the solution. Returns -1 for nonce if no solution is found. 
+ """ + + try: + import cubit + except ImportError: + raise ImportError("Please install cubit") + + upper = int(limit // difficulty) + + upper_bytes = upper.to_bytes(32, byteorder="little", signed=False) + + # Call cython function + # int blockSize, uint64 nonce_start, uint64 update_interval, const unsigned char[:] limit, + # const unsigned char[:] block_bytes, int dev_id + block_and_hotkey_hash_hex = binascii.hexlify(block_and_hotkey_hash_bytes)[:64] + + solution = cubit.solve_cuda( + tpb, + nonce_start, + update_interval, + upper_bytes, + block_and_hotkey_hash_hex, + dev_id, + ) # 0 is first GPU + seal = None + if solution != -1: + seal = _create_seal_hash(block_and_hotkey_hash_hex, solution) + if _seal_meets_difficulty(seal, difficulty, limit): + return solution, seal + else: + return -1, b"\x00" * 32 + + return solution, seal + + +def reset_cuda(): + """ + Resets the CUDA environment. + """ + try: + import cubit + except ImportError: + raise ImportError("Please install cubit") + + cubit.reset_cuda() + + +def log_cuda_errors() -> str: + """ + Logs any CUDA errors. 
+ """ + try: + import cubit + except ImportError: + raise ImportError("Please install cubit") + + f = io.StringIO() + with redirect_stdout(f): + cubit.log_cuda_errors() + + s = f.getvalue() + + return s From 0d2dea7ac7af24cb13a0964127c2911db68f0fe8 Mon Sep 17 00:00:00 2001 From: Roman Date: Mon, 28 Oct 2024 20:28:38 -0700 Subject: [PATCH 16/27] add async_root extrinsics --- bittensor/core/async_subtensor.py | 103 +++++++++- bittensor/core/extrinsics/async_root.py | 244 ++++++++++++++++++++++++ 2 files changed, 343 insertions(+), 4 deletions(-) create mode 100644 bittensor/core/extrinsics/async_root.py diff --git a/bittensor/core/async_subtensor.py b/bittensor/core/async_subtensor.py index 308bca7a94..e8491a1148 100644 --- a/bittensor/core/async_subtensor.py +++ b/bittensor/core/async_subtensor.py @@ -2,10 +2,12 @@ from typing import Optional, Any, Union, TypedDict, Iterable import aiohttp +import numpy as np import scalecodec import typer from bittensor_wallet import Wallet from bittensor_wallet.utils import SS58_FORMAT +from rich.prompt import Confirm from scalecodec import GenericCall from scalecodec.base import RuntimeConfiguration from scalecodec.type_registry import load_type_registry_preset @@ -21,9 +23,18 @@ decode_account_id, ) from bittensor.core.extrinsics.async_registration import register_extrinsic +from bittensor.core.extrinsics.async_root import set_root_weights_extrinsic, root_register_extrinsic from bittensor.core.extrinsics.async_transfer import transfer_extrinsic -from bittensor.core.settings import bt_console as console, bt_err_console as err_console, TYPE_REGISTRY, DEFAULTS, \ - NETWORK_MAP, DELEGATES_DETAILS_URL, DEFAULT_NETWORK +from bittensor.core.settings import ( + bt_console as console, + bt_err_console as err_console, + TYPE_REGISTRY, + DEFAULTS, + NETWORK_MAP, + DELEGATES_DETAILS_URL, + DEFAULT_NETWORK, + print_verbose +) from bittensor.utils import ( ss58_to_vec_u8, format_error_message, @@ -1084,6 +1095,20 @@ async def 
get_delegate_identities( return all_delegates_details + async def is_hotkey_registered( + self, netuid: int, hotkey_ss58: str + ) -> bool: + """Checks to see if the hotkey is registered on a given netuid""" + _result = await self.substrate.query( + module="SubtensorModule", + storage_function="Uids", + params=[netuid, hotkey_ss58], + ) + if _result is not None: + return True + else: + return False + # extrinsics async def transfer( @@ -1095,7 +1120,7 @@ async def transfer( prompt: bool, ): """Transfer token of amount to destination.""" - await transfer_extrinsic( + return await transfer_extrinsic( self, wallet, destination, @@ -1104,6 +1129,53 @@ async def transfer( prompt=prompt, ) + async def register(self, wallet: Wallet, prompt: bool): + """Register neuron by recycling some TAO.""" + console.print( + f"Registering on [dark_orange]netuid 0[/dark_orange] on network: [dark_orange]{self.network}" + ) + + # Check current recycle amount + print_verbose("Fetching recycle amount & balance") + recycle_call, balance_ = await asyncio.gather( + self.get_hyperparameter(param_name="Burn", netuid=0, reuse_block=True), + self.get_balance(wallet.coldkeypub.ss58_address, reuse_block=True), + ) + current_recycle = Balance.from_rao(int(recycle_call)) + try: + balance: Balance = balance_[wallet.coldkeypub.ss58_address] + except TypeError as e: + err_console.print(f"Unable to retrieve current recycle. {e}") + return False + except KeyError: + err_console.print("Unable to retrieve current balance.") + return False + + # Check balance is sufficient + if balance < current_recycle: + err_console.print( + f"[red]Insufficient balance {balance} to register neuron. 
" + f"Current recycle is {current_recycle} TAO[/red]" + ) + return False + + if prompt: + if not Confirm.ask( + f"Your balance is: [bold green]{balance}[/bold green]\n" + f"The cost to register by recycle is [bold red]{current_recycle}[/bold red]\n" + f"Do you want to continue?", + default=False, + ): + return False + + return await root_register_extrinsic( + self, + wallet, + wait_for_inclusion=True, + wait_for_finalization=True, + prompt=prompt, + ) + async def pow_register( self: "AsyncSubtensor", wallet: Wallet, @@ -1129,4 +1201,27 @@ async def pow_register( dev_id=dev_id, output_in_place=output_in_place, log_verbose=verbose, - ) \ No newline at end of file + ) + + async def set_weights( + self, + wallet: "Wallet", + netuids: list[int], + weights: list[float], + prompt: bool, + ): + """Set weights for root network.""" + netuids_ = np.array(netuids, dtype=np.int64) + weights_ = np.array(weights, dtype=np.float32) + console.print(f"Setting weights in [dark_orange]network: {self.network}") + # Run the set weights operation. 
+ return await set_root_weights_extrinsic( + subtensor=self, + wallet=wallet, + netuids=netuids_, + weights=weights_, + version_key=0, + prompt=prompt, + wait_for_finalization=True, + wait_for_inclusion=True, + ) diff --git a/bittensor/core/extrinsics/async_root.py b/bittensor/core/extrinsics/async_root.py new file mode 100644 index 0000000000..47aaf10a56 --- /dev/null +++ b/bittensor/core/extrinsics/async_root.py @@ -0,0 +1,244 @@ +import asyncio +import time +from typing import Union, TYPE_CHECKING + +import numpy as np +from bittensor_wallet import Wallet +from bittensor_wallet.errors import KeyFileError +from numpy.typing import NDArray +from rich.prompt import Confirm +from rich.table import Table, Column +from substrateinterface.exceptions import SubstrateRequestException + +from bittensor.core.settings import bt_console as console, bt_err_console as err_console, print_verbose +from bittensor.utils import u16_normalized_float, format_error_message +from bittensor.utils.weight_utils import normalize_max_weight, convert_weights_and_uids_for_emit + + +if TYPE_CHECKING: + from bittensor.core.async_subtensor import AsyncSubtensor + + +async def get_limits(subtensor: AsyncSubtensor) -> tuple[int, float]: + # Get weight restrictions. + maw, mwl = await asyncio.gather( + subtensor.get_hyperparameter("MinAllowedWeights", netuid=0), + subtensor.get_hyperparameter("MaxWeightsLimit", netuid=0), + ) + min_allowed_weights = int(maw) + max_weight_limit = u16_normalized_float(int(mwl)) + return min_allowed_weights, max_weight_limit + + +async def root_register_extrinsic( + subtensor: AsyncSubtensor, + wallet: Wallet, + wait_for_inclusion: bool = True, + wait_for_finalization: bool = True, + prompt: bool = False, +) -> bool: + """Registers the wallet to root network. + + :param subtensor: The AsyncSubtensor object + :param wallet: Bittensor wallet object. 
+ :param wait_for_inclusion: If set, waits for the extrinsic to enter a block before returning `True`, or returns `False` if the extrinsic fails to enter the block within the timeout. + :param wait_for_finalization: If set, waits for the extrinsic to be finalized on the chain before returning `True`, or returns `False` if the extrinsic fails to be finalized within the timeout. + :param prompt: If `True`, the call waits for confirmation from the user before proceeding. + + :return: `True` if extrinsic was finalized or included in the block. If we did not wait for finalization/inclusion, the response is `True`. + """ + + try: + wallet.unlock_coldkey() + except KeyFileError: + err_console.print("Error decrypting coldkey (possibly incorrect password)") + return False + + print_verbose(f"Checking if hotkey ({wallet.hotkey_str}) is registered on root") + is_registered = await subtensor.is_hotkey_registered( + netuid=0, hotkey_ss58=wallet.hotkey.ss58_address + ) + if is_registered: + console.print( + ":white_heavy_check_mark: [green]Already registered on root network.[/green]" + ) + return True + + with console.status(":satellite: Registering to root network...", spinner="earth"): + call = await subtensor.substrate.compose_call( + call_module="SubtensorModule", + call_function="root_register", + call_params={"hotkey": wallet.hotkey.ss58_address}, + ) + success, err_msg = await subtensor.sign_and_send_extrinsic( + call, + wallet=wallet, + wait_for_inclusion=wait_for_inclusion, + wait_for_finalization=wait_for_finalization, + ) + + if not success: + err_console.print(f":cross_mark: [red]Failed[/red]: {err_msg}") + time.sleep(0.5) + return False + + # Successful registration, final check for neuron and pubkey + else: + uid = await subtensor.substrate.query( + module="SubtensorModule", + storage_function="Uids", + params=[0, wallet.hotkey.ss58_address], + ) + if uid is not None: + console.print( + f":white_heavy_check_mark: [green]Registered with UID {uid}[/green]" + ) + 
return True + else: + # neuron not found, try again + err_console.print( + ":cross_mark: [red]Unknown error. Neuron not found.[/red]" + ) + return False + + +async def set_root_weights_extrinsic( + subtensor: AsyncSubtensor, + wallet: Wallet, + netuids: Union[NDArray[np.int64], list[int]], + weights: Union[NDArray[np.float32], list[float]], + version_key: int = 0, + wait_for_inclusion: bool = False, + wait_for_finalization: bool = False, + prompt: bool = False, +) -> bool: + """Sets the given weights and values on chain for wallet hotkey account. + + :param subtensor: The AsyncSubtensor object + :param wallet: Bittensor wallet object. + :param netuids: The `netuid` of the subnet to set weights for. + :param weights: Weights to set. These must be `float` s and must correspond to the passed `netuid` s. + :param version_key: The version key of the validator. + :param wait_for_inclusion: If set, waits for the extrinsic to enter a block before returning `True`, or returns + `False` if the extrinsic fails to enter the block within the timeout. + :param wait_for_finalization: If set, waits for the extrinsic to be finalized on the chain before returning `True`, + or returns `False` if the extrinsic fails to be finalized within the timeout. + :param prompt: If `True`, the call waits for confirmation from the user before proceeding. + :return: `True` if extrinsic was finalized or included in the block. If we did not wait for finalization/inclusion, + the response is `True`. 
+ """ + + async def _do_set_weights(): + call = await subtensor.substrate.compose_call( + call_module="SubtensorModule", + call_function="set_root_weights", + call_params={ + "dests": weight_uids, + "weights": weight_vals, + "netuid": 0, + "version_key": version_key, + "hotkey": wallet.hotkey.ss58_address, + }, + ) + # Period dictates how long the extrinsic will stay as part of waiting pool + extrinsic = await subtensor.substrate.create_signed_extrinsic( + call=call, + keypair=wallet.coldkey, + era={"period": 5}, + ) + response = await subtensor.substrate.submit_extrinsic( + extrinsic, + wait_for_inclusion=wait_for_inclusion, + wait_for_finalization=wait_for_finalization, + ) + # We only wait here if we expect finalization. + if not wait_for_finalization and not wait_for_inclusion: + return True, "Not waiting for finalization or inclusion." + + await response.process_events() + if await response.is_success: + return True, "Successfully set weights." + else: + return False, await response.error_message + + my_uid = await subtensor.substrate.query( + "SubtensorModule", "Uids", [0, wallet.hotkey.ss58_address] + ) + + if my_uid is None: + err_console.print("Your hotkey is not registered to the root network") + return False + + try: + wallet.unlock_coldkey() + except KeyFileError: + err_console.print("Error decrypting coldkey (possibly incorrect password)") + return False + + # First convert types. + if isinstance(netuids, list): + netuids = np.array(netuids, dtype=np.int64) + if isinstance(weights, list): + weights = np.array(weights, dtype=np.float32) + + print_verbose("Fetching weight limits") + min_allowed_weights, max_weight_limit = await get_limits(subtensor) + + # Get non zero values. 
+ non_zero_weight_idx = np.argwhere(weights > 0).squeeze(axis=1) + non_zero_weights = weights[non_zero_weight_idx] + if non_zero_weights.size < min_allowed_weights: + raise ValueError( + "The minimum number of weights required to set weights is {}, got {}".format( + min_allowed_weights, non_zero_weights.size + ) + ) + + # Normalize the weights to max value. + print_verbose("Normalizing weights") + formatted_weights = normalize_max_weight(x=weights, limit=max_weight_limit) + console.print( + f"\nRaw weights -> Normalized weights: \n\t{weights} -> \n\t{formatted_weights}\n" + ) + + # Ask before moving on. + if prompt: + table = Table( + Column("[dark_orange]Netuid", justify="center", style="bold green"), + Column( + "[dark_orange]Weight", justify="center", style="bold light_goldenrod2" + ), + expand=False, + show_edge=False, + ) + + for netuid, weight in zip(netuids, formatted_weights): + table.add_row(str(netuid), f"{weight:.8f}") + + console.print(table) + if not Confirm.ask("\nDo you want to set these root weights?"): + return False + + try: + with console.status("Setting root weights...", spinner="earth"): + weight_uids, weight_vals = convert_weights_and_uids_for_emit( + netuids, weights + ) + + success, error_message = await _do_set_weights() + + if not wait_for_finalization and not wait_for_inclusion: + return True + + if success is True: + console.print(":white_heavy_check_mark: [green]Finalized[/green]") + return True + else: + fmt_err = format_error_message(error_message, subtensor.substrate) + err_console.print(f":cross_mark: [red]Failed[/red]: {fmt_err}") + return False + + except SubstrateRequestException as e: + fmt_err = format_error_message(e, subtensor.substrate) + err_console.print(":cross_mark: [red]Failed[/red]: error:{}".format(fmt_err)) + return False From a03209eefc8b91ee298d6565934764616522e272 Mon Sep 17 00:00:00 2001 From: Roman Date: Mon, 28 Oct 2024 20:38:22 -0700 Subject: [PATCH 17/27] ruff --- bittensor/core/async_subtensor.py | 25 
+++++++++---------- .../core/extrinsics/async_registration.py | 8 +++++- bittensor/core/extrinsics/async_root.py | 11 ++++++-- bittensor/core/extrinsics/async_transfer.py | 8 +++++- bittensor/core/extrinsics/commit_weights.py | 8 ++++-- bittensor/core/extrinsics/registration.py | 8 ++++-- bittensor/core/extrinsics/serving.py | 8 ++++-- bittensor/core/extrinsics/set_weights.py | 4 ++- bittensor/utils/__init__.py | 5 +++- 9 files changed, 60 insertions(+), 25 deletions(-) diff --git a/bittensor/core/async_subtensor.py b/bittensor/core/async_subtensor.py index e8491a1148..4c54092bf2 100644 --- a/bittensor/core/async_subtensor.py +++ b/bittensor/core/async_subtensor.py @@ -23,7 +23,10 @@ decode_account_id, ) from bittensor.core.extrinsics.async_registration import register_extrinsic -from bittensor.core.extrinsics.async_root import set_root_weights_extrinsic, root_register_extrinsic +from bittensor.core.extrinsics.async_root import ( + set_root_weights_extrinsic, + root_register_extrinsic, +) from bittensor.core.extrinsics.async_transfer import transfer_extrinsic from bittensor.core.settings import ( bt_console as console, @@ -33,7 +36,7 @@ NETWORK_MAP, DELEGATES_DETAILS_URL, DEFAULT_NETWORK, - print_verbose + print_verbose, ) from bittensor.utils import ( ss58_to_vec_u8, @@ -91,9 +94,7 @@ def __init__(self, network: str = DEFAULT_NETWORK): self.chain_endpoint = network if network in NETWORK_MAP.values(): self.network = next( - key - for key, value in NETWORK_MAP.items() - if value == network + key for key, value in NETWORK_MAP.items() if value == network ) else: self.network = "custom" @@ -1095,9 +1096,7 @@ async def get_delegate_identities( return all_delegates_details - async def is_hotkey_registered( - self, netuid: int, hotkey_ss58: str - ) -> bool: + async def is_hotkey_registered(self, netuid: int, hotkey_ss58: str) -> bool: """Checks to see if the hotkey is registered on a given netuid""" _result = await self.substrate.query( module="SubtensorModule", @@ 
-1109,7 +1108,7 @@ async def is_hotkey_registered( else: return False -# extrinsics + # extrinsics async def transfer( self, @@ -1161,10 +1160,10 @@ async def register(self, wallet: Wallet, prompt: bool): if prompt: if not Confirm.ask( - f"Your balance is: [bold green]{balance}[/bold green]\n" - f"The cost to register by recycle is [bold red]{current_recycle}[/bold red]\n" - f"Do you want to continue?", - default=False, + f"Your balance is: [bold green]{balance}[/bold green]\n" + f"The cost to register by recycle is [bold red]{current_recycle}[/bold red]\n" + f"Do you want to continue?", + default=False, ): return False diff --git a/bittensor/core/extrinsics/async_registration.py b/bittensor/core/extrinsics/async_registration.py index 38ffd79449..b89f69a4f4 100644 --- a/bittensor/core/extrinsics/async_registration.py +++ b/bittensor/core/extrinsics/async_registration.py @@ -29,7 +29,12 @@ from substrateinterface.exceptions import SubstrateRequestException from bittensor.core.chain_data import NeuronInfo -from bittensor.core.settings import bt_console as console, bt_err_console as err_console, print_verbose, print_error +from bittensor.core.settings import ( + bt_console as console, + bt_err_console as err_console, + print_verbose, + print_error, +) from bittensor.utils import format_error_message from bittensor.utils.formatting import millify, get_human_readable @@ -39,6 +44,7 @@ # TODO: compair and remove existing code (bittensor.utils.registration) + def use_torch() -> bool: """Force the use of torch over numpy for certain operations.""" return True if os.getenv("USE_TORCH") == "1" else False diff --git a/bittensor/core/extrinsics/async_root.py b/bittensor/core/extrinsics/async_root.py index 47aaf10a56..123e1df395 100644 --- a/bittensor/core/extrinsics/async_root.py +++ b/bittensor/core/extrinsics/async_root.py @@ -10,9 +10,16 @@ from rich.table import Table, Column from substrateinterface.exceptions import SubstrateRequestException -from bittensor.core.settings 
import bt_console as console, bt_err_console as err_console, print_verbose +from bittensor.core.settings import ( + bt_console as console, + bt_err_console as err_console, + print_verbose, +) from bittensor.utils import u16_normalized_float, format_error_message -from bittensor.utils.weight_utils import normalize_max_weight, convert_weights_and_uids_for_emit +from bittensor.utils.weight_utils import ( + normalize_max_weight, + convert_weights_and_uids_for_emit, +) if TYPE_CHECKING: diff --git a/bittensor/core/extrinsics/async_transfer.py b/bittensor/core/extrinsics/async_transfer.py index 2959f149e6..0bc45309ce 100644 --- a/bittensor/core/extrinsics/async_transfer.py +++ b/bittensor/core/extrinsics/async_transfer.py @@ -5,7 +5,13 @@ from rich.prompt import Confirm from substrateinterface.exceptions import SubstrateRequestException -from bittensor.core.settings import NETWORK_EXPLORER_MAP, bt_console as console, bt_err_console as err_console, print_verbose, print_error +from bittensor.core.settings import ( + NETWORK_EXPLORER_MAP, + bt_console as console, + bt_err_console as err_console, + print_verbose, + print_error, +) from bittensor.utils.balance import Balance from bittensor.utils import ( diff --git a/bittensor/core/extrinsics/commit_weights.py b/bittensor/core/extrinsics/commit_weights.py index 8cb27e94b8..3dcfd5b2c2 100644 --- a/bittensor/core/extrinsics/commit_weights.py +++ b/bittensor/core/extrinsics/commit_weights.py @@ -139,7 +139,9 @@ def commit_weights_extrinsic( logging.info(success_message) return True, success_message else: - error_message = format_error_message(error_message, substrate=subtensor.substrate) + error_message = format_error_message( + error_message, substrate=subtensor.substrate + ) logging.error(f"Failed to commit weights: {error_message}") return False, error_message @@ -269,6 +271,8 @@ def reveal_weights_extrinsic( logging.info(success_message) return True, success_message else: - error_message = format_error_message(error_message, 
substrate=subtensor.substrate) + error_message = format_error_message( + error_message, substrate=subtensor.substrate + ) logging.error(f"Failed to reveal weights: {error_message}") return False, error_message diff --git a/bittensor/core/extrinsics/registration.py b/bittensor/core/extrinsics/registration.py index 8c34891aaf..6b21378dfa 100644 --- a/bittensor/core/extrinsics/registration.py +++ b/bittensor/core/extrinsics/registration.py @@ -95,7 +95,9 @@ def make_substrate_call_with_retry(): # process if registration successful, try again if pow is still valid response.process_events() if not response.is_success: - return False, format_error_message(response.error_message, substrate=self.substrate) + return False, format_error_message( + response.error_message, substrate=self.substrate + ) # Successful registration else: return True, None @@ -340,7 +342,9 @@ def make_substrate_call_with_retry(): # process if registration successful, try again if pow is still valid response.process_events() if not response.is_success: - return False, format_error_message(response.error_message, substrate=self.substrate) + return False, format_error_message( + response.error_message, substrate=self.substrate + ) # Successful registration else: return True, None diff --git a/bittensor/core/extrinsics/serving.py b/bittensor/core/extrinsics/serving.py index 31349650fc..c4151bbe7a 100644 --- a/bittensor/core/extrinsics/serving.py +++ b/bittensor/core/extrinsics/serving.py @@ -186,7 +186,9 @@ def serve_extrinsic( ) return True else: - logging.error(f"Failed: {format_error_message(error_message, substrate=subtensor.substrate)}") + logging.error( + f"Failed: {format_error_message(error_message, substrate=subtensor.substrate)}" + ) return False else: return True @@ -299,7 +301,9 @@ def publish_metadata( if response.is_success: return True else: - raise MetadataError(format_error_message(response.error_message, substrate=self.substrate)) + raise MetadataError( + 
format_error_message(response.error_message, substrate=self.substrate) + ) # Community uses this function directly diff --git a/bittensor/core/extrinsics/set_weights.py b/bittensor/core/extrinsics/set_weights.py index 436868ecbc..3ada279f03 100644 --- a/bittensor/core/extrinsics/set_weights.py +++ b/bittensor/core/extrinsics/set_weights.py @@ -184,7 +184,9 @@ def set_weights_extrinsic( ) return True, "Successfully set weights and Finalized." else: - error_message = format_error_message(error_message, substrate=subtensor.substrate) + error_message = format_error_message( + error_message, substrate=subtensor.substrate + ) logging.error(error_message) return False, error_message diff --git a/bittensor/utils/__init__.py b/bittensor/utils/__init__.py index 701f34849f..1371110efb 100644 --- a/bittensor/utils/__init__.py +++ b/bittensor/utils/__init__.py @@ -144,7 +144,10 @@ def get_hash(content, encoding="utf-8"): return sha3.hexdigest() -def format_error_message(error_message: Union[dict, Exception], substrate: Union["AsyncSubstrateInterface", "SubstrateInterface"]) -> str: +def format_error_message( + error_message: Union[dict, Exception], + substrate: Union["AsyncSubstrateInterface", "SubstrateInterface"], +) -> str: """ Formats an error message from the Subtensor error information for use in extrinsics. 
From d1e182dfc91a404c46f44336e9203ad098c6c699 Mon Sep 17 00:00:00 2001 From: Roman <167799377+roman-opentensor@users.noreply.github.com> Date: Tue, 29 Oct 2024 14:46:00 -0700 Subject: [PATCH 18/27] Update bittensor/core/extrinsics/async_transfer.py Co-authored-by: Benjamin Himes <37844818+thewhaleking@users.noreply.github.com> --- bittensor/core/extrinsics/async_transfer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bittensor/core/extrinsics/async_transfer.py b/bittensor/core/extrinsics/async_transfer.py index 0bc45309ce..3ecdcf7bda 100644 --- a/bittensor/core/extrinsics/async_transfer.py +++ b/bittensor/core/extrinsics/async_transfer.py @@ -198,7 +198,7 @@ async def do_transfer() -> tuple[bool, str, str]: ) console.print( f"Balance:\n" - f" [blue]{account_balance}[/blue] :arrow_right: [green]{new_balance[wallet.coldkey.ss58_address]}[/green]" + f" [blue]{account_balance}[/blue] :arrow_right: [green]{new_balance[wallet.coldkeypub.ss58_address]}[/green]" ) return True From 2063b21af1ed7c67dd6dc94b143afecfa1ea2779 Mon Sep 17 00:00:00 2001 From: Roman Date: Fri, 1 Nov 2024 00:38:11 -0700 Subject: [PATCH 19/27] fix comments review --- bittensor/core/async_subtensor.py | 56 +++-- .../core/extrinsics/async_registration.py | 213 +++++++++--------- bittensor/core/extrinsics/async_root.py | 124 +++++----- bittensor/core/extrinsics/async_transfer.py | 125 +++++----- bittensor/core/settings.py | 28 --- bittensor/core/subtensor_async.py | 7 +- bittensor/utils/__init__.py | 6 +- bittensor/utils/async_substrate_interface.py | 6 +- 8 files changed, 257 insertions(+), 308 deletions(-) diff --git a/bittensor/core/async_subtensor.py b/bittensor/core/async_subtensor.py index 4c54092bf2..2848b0d600 100644 --- a/bittensor/core/async_subtensor.py +++ b/bittensor/core/async_subtensor.py @@ -13,6 +13,7 @@ from scalecodec.type_registry import load_type_registry_preset from substrateinterface.exceptions import SubstrateRequestException +from bittensor import logging 
from bittensor.core.chain_data import ( DelegateInfo, custom_rpc_type_registry, @@ -29,14 +30,11 @@ ) from bittensor.core.extrinsics.async_transfer import transfer_extrinsic from bittensor.core.settings import ( - bt_console as console, - bt_err_console as err_console, TYPE_REGISTRY, DEFAULTS, NETWORK_MAP, DELEGATES_DETAILS_URL, DEFAULT_NETWORK, - print_verbose, ) from bittensor.utils import ( ss58_to_vec_u8, @@ -85,7 +83,7 @@ def __init__(self, network: str = DEFAULT_NETWORK): self.chain_endpoint = NETWORK_MAP[network] self.network = network if network == "local": - console.log( + logging.warning( "[yellow]Warning[/yellow]: Verify your local subtensor is running on port 9944." ) else: @@ -99,11 +97,11 @@ def __init__(self, network: str = DEFAULT_NETWORK): else: self.network = "custom" else: - console.log( - f"Network not specified or not valid. Using default chain endpoint: " - f"{NETWORK_MAP[DEFAULTS.subtensor.network]}.\n" - f"You can set this for commands with the `--network` flag, or by setting this" - f" in the config." + logging.info( + f"Network not specified or not valid. Using default chain endpoint: {NETWORK_MAP[DEFAULTS.subtensor.network]}." + ) + logging.info( + "You can set this for commands with the --network flag, or by setting this in the config." ) self.chain_endpoint = NETWORK_MAP[DEFAULTS.subtensor.network] self.network = DEFAULTS.subtensor.network @@ -119,18 +117,17 @@ def __str__(self): return f"Network: {self.network}, Chain: {self.chain_endpoint}" async def __aenter__(self): - with console.status( - f"[yellow]Connecting to Substrate:[/yellow][bold white] {self}..." - ): - try: - async with self.substrate: - return self - except TimeoutException: - err_console.print( - "\n[red]Error[/red]: Timeout occurred connecting to substrate. " - f"Verify your chain and network settings: {self}" - ) - raise typer.Exit(code=1) + logging.info( + f"Connecting to Substrate: {self}..." 
+ ) + try: + async with self.substrate: + return self + except TimeoutException: + logging.error( + f"Error: Timeout occurred connecting to substrate. Verify your chain and network settings: {self}" + ) + raise typer.Exit(code=1) async def __aexit__(self, exc_type, exc_val, exc_tb): await self.substrate.close() @@ -1130,12 +1127,12 @@ async def transfer( async def register(self, wallet: Wallet, prompt: bool): """Register neuron by recycling some TAO.""" - console.print( - f"Registering on [dark_orange]netuid 0[/dark_orange] on network: [dark_orange]{self.network}" + logging.info( + f"Registering on netuid 0 on network: {self.network}" ) # Check current recycle amount - print_verbose("Fetching recycle amount & balance") + logging.info("Fetching recycle amount & balance.") recycle_call, balance_ = await asyncio.gather( self.get_hyperparameter(param_name="Burn", netuid=0, reuse_block=True), self.get_balance(wallet.coldkeypub.ss58_address, reuse_block=True), @@ -1144,17 +1141,16 @@ async def register(self, wallet: Wallet, prompt: bool): try: balance: Balance = balance_[wallet.coldkeypub.ss58_address] except TypeError as e: - err_console.print(f"Unable to retrieve current recycle. {e}") + logging.error(f"Unable to retrieve current recycle. {e}") return False except KeyError: - err_console.print("Unable to retrieve current balance.") + logging.error("Unable to retrieve current balance.") return False # Check balance is sufficient if balance < current_recycle: - err_console.print( - f"[red]Insufficient balance {balance} to register neuron. " - f"Current recycle is {current_recycle} TAO[/red]" + logging.error( + f"Insufficient balance {balance} to register neuron. 
Current recycle is {current_recycle} TAO" ) return False @@ -1212,7 +1208,7 @@ async def set_weights( """Set weights for root network.""" netuids_ = np.array(netuids, dtype=np.int64) weights_ = np.array(weights, dtype=np.float32) - console.print(f"Setting weights in [dark_orange]network: {self.network}") + logging.info(f"Setting weights in network: {self.network}") # Run the set weights operation. return await set_root_weights_extrinsic( subtensor=self, diff --git a/bittensor/core/extrinsics/async_registration.py b/bittensor/core/extrinsics/async_registration.py index b89f69a4f4..4da7785b1b 100644 --- a/bittensor/core/extrinsics/async_registration.py +++ b/bittensor/core/extrinsics/async_registration.py @@ -29,13 +29,8 @@ from substrateinterface.exceptions import SubstrateRequestException from bittensor.core.chain_data import NeuronInfo -from bittensor.core.settings import ( - bt_console as console, - bt_err_console as err_console, - print_verbose, - print_error, -) from bittensor.utils import format_error_message +from bittensor.utils.btlogging import logging from bittensor.utils.formatting import millify, get_human_readable if typing.TYPE_CHECKING: @@ -91,9 +86,8 @@ def _get_real_torch(): def log_no_torch_error(): - err_console.print( - "This command requires torch. You can install torch" - " with `pip install torch` and run the command again." + logging.info( + "This command requires torch. You can install torch with `pip install torch` and run the command again." 
) @@ -135,7 +129,11 @@ class RegistrationStatisticsLogger: console: Console status: Optional[Status] - def __init__(self, console_: Console, output_in_place: bool = True) -> None: + def __init__( + self, console_: Optional["Console"] = None, output_in_place: bool = True + ) -> None: + if console_ is None: + console_ = Console() self.console = console_ if output_in_place: @@ -506,24 +504,22 @@ async def get_neuron_for_pubkey_and_subnet(): return NeuronInfo.from_vec_u8(bytes(result)) - print_verbose("Checking subnet status") + logging.debug("Checking subnet status") if not await subtensor.subnet_exists(netuid): - err_console.print( - f":cross_mark: [red]Failed[/red]: error: [bold white]subnet:{netuid}[/bold white] does not exist." + logging.error( + f":cross_mark: Failed error: subnet {netuid} does not exist." ) return False - with console.status( - f":satellite: Checking Account on [bold]subnet:{netuid}[/bold]...", - spinner="aesthetic", - ) as status: - neuron = await get_neuron_for_pubkey_and_subnet() - if not neuron.is_null: - print_error( - f"Wallet {wallet} is already registered on subnet {neuron.netuid} with uid {neuron.uid}", - status, - ) - return True + logging.info( + f":satellite: Checking Account on subnet {netuid} ..." + ) + neuron = await get_neuron_for_pubkey_and_subnet() + if not neuron.is_null: + logging.debug( + f"Wallet {wallet} is already registered on subnet {neuron.netuid} with uid{neuron.uid}." + ) + return True if prompt: if not Confirm.ask( @@ -542,14 +538,14 @@ async def get_neuron_for_pubkey_and_subnet(): attempts = 1 pow_result: Optional[POWSolution] while True: - console.print( - ":satellite: Registering...({}/{})".format(attempts, max_allowed_attempts) + logging.info( + f":satellite: Registering... ({attempts}/{max_allowed_attempts})" ) # Solve latest POW. 
if cuda: if not torch.cuda.is_available(): if prompt: - console.print("CUDA is not available.") + logging.info("CUDA is not available.") return False pow_result = await create_pow( subtensor, @@ -582,97 +578,94 @@ async def get_neuron_for_pubkey_and_subnet(): subtensor, netuid=netuid, hotkey_ss58=wallet.hotkey.ss58_address ) if is_registered: - err_console.print( - f":white_heavy_check_mark: [green]Already registered on netuid:{netuid}[/green]" + logging.error( + f":white_heavy_check_mark: Already registered on netuid: {netuid}" ) return True # pow successful, proceed to submit pow to chain for registration else: - with console.status(":satellite: Submitting POW..."): - # check if pow result is still valid - while not await pow_result.is_stale(subtensor=subtensor): - call = await subtensor.substrate.compose_call( - call_module="SubtensorModule", - call_function="register", - call_params={ - "netuid": netuid, - "block_number": pow_result.block_number, - "nonce": pow_result.nonce, - "work": [int(byte_) for byte_ in pow_result.seal], - "hotkey": wallet.hotkey.ss58_address, - "coldkey": wallet.coldkeypub.ss58_address, - }, - ) - extrinsic = await subtensor.substrate.create_signed_extrinsic( - call=call, keypair=wallet.hotkey - ) - response = await subtensor.substrate.submit_extrinsic( - extrinsic, - wait_for_inclusion=wait_for_inclusion, - wait_for_finalization=wait_for_finalization, - ) - if not wait_for_finalization and not wait_for_inclusion: - success, err_msg = True, "" - else: - await response.process_events() - success = await response.is_success - if not success: - success, err_msg = ( - False, - format_error_message( - await response.error_message, - substrate=subtensor.substrate, - ), - ) - # Look error here - # https://github.com/opentensor/subtensor/blob/development/pallets/subtensor/src/errors.rs - - if "HotKeyAlreadyRegisteredInSubNet" in err_msg: - console.print( - f":white_heavy_check_mark: [green]Already Registered on " - 
f"[bold]subnet:{netuid}[/bold][/green]" - ) - return True - err_console.print( - f":cross_mark: [red]Failed[/red]: {err_msg}" - ) - await asyncio.sleep(0.5) - - # Successful registration, final check for neuron and pubkey - if success: - console.print(":satellite: Checking Registration status...") - is_registered = await is_hotkey_registered( - subtensor, - netuid=netuid, - hotkey_ss58=wallet.hotkey.ss58_address, + logging.info(":satellite: Submitting POW...") + # check if pow result is still valid + while not await pow_result.is_stale(subtensor=subtensor): + call = await subtensor.substrate.compose_call( + call_module="SubtensorModule", + call_function="register", + call_params={ + "netuid": netuid, + "block_number": pow_result.block_number, + "nonce": pow_result.nonce, + "work": [int(byte_) for byte_ in pow_result.seal], + "hotkey": wallet.hotkey.ss58_address, + "coldkey": wallet.coldkeypub.ss58_address, + }, + ) + extrinsic = await subtensor.substrate.create_signed_extrinsic( + call=call, keypair=wallet.hotkey + ) + response = await subtensor.substrate.submit_extrinsic( + extrinsic, + wait_for_inclusion=wait_for_inclusion, + wait_for_finalization=wait_for_finalization, + ) + if not wait_for_finalization and not wait_for_inclusion: + success, err_msg = True, "" + else: + await response.process_events() + success = await response.is_success + if not success: + success, err_msg = ( + False, + format_error_message( + await response.error_message, + substrate=subtensor.substrate, + ), ) - if is_registered: - console.print( - ":white_heavy_check_mark: [green]Registered[/green]" + # Look error here + # https://github.com/opentensor/subtensor/blob/development/pallets/subtensor/src/errors.rs + + if "HotKeyAlreadyRegisteredInSubNet" in err_msg: + logging.info( + f":white_heavy_check_mark: Already Registered on subnet: {netuid}." ) return True - else: - # neuron not found, try again - err_console.print( - ":cross_mark: [red]Unknown error. 
Neuron not found.[/red]" - ) - continue - else: - # Exited loop because pow is no longer valid. - err_console.print("[red]POW is stale.[/red]") - # Try again. - continue + logging.error(f":cross_mark: Failed: {err_msg}") + await asyncio.sleep(0.5) + + # Successful registration, final check for neuron and pubkey + if success: + logging.info(":satellite: Checking Registration status...") + is_registered = await is_hotkey_registered( + subtensor, + netuid=netuid, + hotkey_ss58=wallet.hotkey.ss58_address, + ) + if is_registered: + logging.success( + ":white_heavy_check_mark: Registered" + ) + return True + else: + # neuron not found, try again + logging.error( + ":cross_mark: Unknown error. Neuron not found." + ) + continue + else: + # Exited loop because pow is no longer valid. + logging.error("POW is stale.") + # Try again. + continue if attempts < max_allowed_attempts: # Failed registration, retry pow attempts += 1 - err_console.print( - ":satellite: Failed registration, retrying pow ...({attempts}/{max_allowed_attempts})" + logging.error( + f":satellite: Failed registration, retrying pow ... ({attempts}/{max_allowed_attempts})" ) else: # Failed to register after max attempts. - err_console.print("[red]No more attempts.[/red]") + logging.error("No more attempts.") return False @@ -747,7 +740,7 @@ async def run_faucet_extrinsic( if cuda: if not torch.cuda.is_available(): if prompt: - err_console.print("CUDA is not available.") + logging.error("CUDA is not available.") return False, "CUDA is not available." 
pow_result: Optional[POWSolution] = await create_pow( subtensor, @@ -793,9 +786,8 @@ async def run_faucet_extrinsic( # process if registration successful, try again if pow is still valid await response.process_events() if not await response.is_success: - err_console.print( - f":cross_mark: [red]Failed[/red]: " - f"{format_error_message(await response.error_message, subtensor.substrate)}" + logging.error( + f":cross_mark: Failed: {format_error_message(await response.error_message, subtensor.substrate)}" ) if attempts == max_allowed_attempts: raise MaxAttemptsException @@ -808,9 +800,8 @@ async def run_faucet_extrinsic( new_balance = await subtensor.get_balance( wallet.coldkeypub.ss58_address ) - console.print( - f"Balance: [blue]{old_balance[wallet.coldkeypub.ss58_address]}[/blue] :arrow_right:" - f" [green]{new_balance[wallet.coldkeypub.ss58_address]}[/green]" + logging.info( + f"Balance: {old_balance[wallet.coldkeypub.ss58_address]} :arrow_right: {new_balance[wallet.coldkeypub.ss58_address]}" ) old_balance = new_balance @@ -1010,7 +1001,7 @@ async def _block_solver( start_time_perpetual = time.time() - logger = RegistrationStatisticsLogger(console, output_in_place) + logger = RegistrationStatisticsLogger(output_in_place=output_in_place) logger.start() solution = None diff --git a/bittensor/core/extrinsics/async_root.py b/bittensor/core/extrinsics/async_root.py index 123e1df395..9127b9a813 100644 --- a/bittensor/core/extrinsics/async_root.py +++ b/bittensor/core/extrinsics/async_root.py @@ -10,18 +10,13 @@ from rich.table import Table, Column from substrateinterface.exceptions import SubstrateRequestException -from bittensor.core.settings import ( - bt_console as console, - bt_err_console as err_console, - print_verbose, -) from bittensor.utils import u16_normalized_float, format_error_message +from bittensor.utils.btlogging import logging from bittensor.utils.weight_utils import ( normalize_max_weight, convert_weights_and_uids_for_emit, ) - if TYPE_CHECKING: from 
bittensor.core.async_subtensor import AsyncSubtensor @@ -58,55 +53,55 @@ async def root_register_extrinsic( try: wallet.unlock_coldkey() except KeyFileError: - err_console.print("Error decrypting coldkey (possibly incorrect password)") + logging.error("Error decrypting coldkey (possibly incorrect password)") return False - print_verbose(f"Checking if hotkey ({wallet.hotkey_str}) is registered on root") + logging.debug( + f"Checking if hotkey ({wallet.hotkey_str}) is registered on root." + ) is_registered = await subtensor.is_hotkey_registered( netuid=0, hotkey_ss58=wallet.hotkey.ss58_address ) if is_registered: - console.print( - ":white_heavy_check_mark: [green]Already registered on root network.[/green]" + logging.error( + ":white_heavy_check_mark: Already registered on root network." ) return True - with console.status(":satellite: Registering to root network...", spinner="earth"): - call = await subtensor.substrate.compose_call( - call_module="SubtensorModule", - call_function="root_register", - call_params={"hotkey": wallet.hotkey.ss58_address}, - ) - success, err_msg = await subtensor.sign_and_send_extrinsic( - call, - wallet=wallet, - wait_for_inclusion=wait_for_inclusion, - wait_for_finalization=wait_for_finalization, - ) + logging.info(":satellite: Registering to root network...") + call = await subtensor.substrate.compose_call( + call_module="SubtensorModule", + call_function="root_register", + call_params={"hotkey": wallet.hotkey.ss58_address}, + ) + success, err_msg = await subtensor.sign_and_send_extrinsic( + call, + wallet=wallet, + wait_for_inclusion=wait_for_inclusion, + wait_for_finalization=wait_for_finalization, + ) - if not success: - err_console.print(f":cross_mark: [red]Failed[/red]: {err_msg}") - time.sleep(0.5) - return False + if not success: + logging.error(f":cross_mark: Failed: {err_msg}") + time.sleep(0.5) + return False - # Successful registration, final check for neuron and pubkey - else: - uid = await subtensor.substrate.query( - 
module="SubtensorModule", - storage_function="Uids", - params=[0, wallet.hotkey.ss58_address], + # Successful registration, final check for neuron and pubkey + else: + uid = await subtensor.substrate.query( + module="SubtensorModule", + storage_function="Uids", + params=[0, wallet.hotkey.ss58_address], + ) + if uid is not None: + logging.info( + f":white_heavy_check_mark: Registered with UID {uid}" ) - if uid is not None: - console.print( - f":white_heavy_check_mark: [green]Registered with UID {uid}[/green]" - ) - return True - else: - # neuron not found, try again - err_console.print( - ":cross_mark: [red]Unknown error. Neuron not found.[/red]" - ) - return False + return True + else: + # neuron not found, try again + logging.error(":cross_mark: Unknown error. Neuron not found.") + return False async def set_root_weights_extrinsic( @@ -173,13 +168,13 @@ async def _do_set_weights(): ) if my_uid is None: - err_console.print("Your hotkey is not registered to the root network") + logging.error("Your hotkey is not registered to the root network") return False try: wallet.unlock_coldkey() except KeyFileError: - err_console.print("Error decrypting coldkey (possibly incorrect password)") + logging.error("Error decrypting coldkey (possibly incorrect password)") return False # First convert types. @@ -188,7 +183,7 @@ async def _do_set_weights(): if isinstance(weights, list): weights = np.array(weights, dtype=np.float32) - print_verbose("Fetching weight limits") + logging.debug("Fetching weight limits") min_allowed_weights, max_weight_limit = await get_limits(subtensor) # Get non zero values. @@ -202,10 +197,10 @@ async def _do_set_weights(): ) # Normalize the weights to max value. 
- print_verbose("Normalizing weights") + logging.info("Normalizing weights") formatted_weights = normalize_max_weight(x=weights, limit=max_weight_limit) - console.print( - f"\nRaw weights -> Normalized weights: \n\t{weights} -> \n\t{formatted_weights}\n" + logging.info( + f"Raw weights -> Normalized weights: {weights} -> {formatted_weights}" ) # Ask before moving on. @@ -218,34 +213,33 @@ async def _do_set_weights(): expand=False, show_edge=False, ) + print("Netuid | Weight") for netuid, weight in zip(netuids, formatted_weights): table.add_row(str(netuid), f"{weight:.8f}") + print(f"{netuid} | {weight}") - console.print(table) if not Confirm.ask("\nDo you want to set these root weights?"): return False try: - with console.status("Setting root weights...", spinner="earth"): - weight_uids, weight_vals = convert_weights_and_uids_for_emit( - netuids, weights - ) + logging.info(":satellite: Setting root weights...") + weight_uids, weight_vals = convert_weights_and_uids_for_emit(netuids, weights) - success, error_message = await _do_set_weights() + success, error_message = await _do_set_weights() - if not wait_for_finalization and not wait_for_inclusion: - return True + if not wait_for_finalization and not wait_for_inclusion: + return True - if success is True: - console.print(":white_heavy_check_mark: [green]Finalized[/green]") - return True - else: - fmt_err = format_error_message(error_message, subtensor.substrate) - err_console.print(f":cross_mark: [red]Failed[/red]: {fmt_err}") - return False + if success is True: + logging.info(":white_heavy_check_mark: Finalized") + return True + else: + fmt_err = format_error_message(error_message, subtensor.substrate) + logging.error(f":cross_mark: Failed: {fmt_err}") + return False except SubstrateRequestException as e: fmt_err = format_error_message(e, subtensor.substrate) - err_console.print(":cross_mark: [red]Failed[/red]: error:{}".format(fmt_err)) + logging.error(f":cross_mark: Failed: error:{fmt_err}") return False diff 
--git a/bittensor/core/extrinsics/async_transfer.py b/bittensor/core/extrinsics/async_transfer.py index 3ecdcf7bda..d796380333 100644 --- a/bittensor/core/extrinsics/async_transfer.py +++ b/bittensor/core/extrinsics/async_transfer.py @@ -5,13 +5,8 @@ from rich.prompt import Confirm from substrateinterface.exceptions import SubstrateRequestException -from bittensor.core.settings import ( - NETWORK_EXPLORER_MAP, - bt_console as console, - bt_err_console as err_console, - print_verbose, - print_error, -) +from bittensor import logging +from bittensor.core.settings import NETWORK_EXPLORER_MAP from bittensor.utils.balance import Balance from bittensor.utils import ( @@ -70,10 +65,10 @@ async def get_transfer_fee() -> Balance: ) except SubstrateRequestException as e: payment_info = {"partialFee": int(2e7)} # assume 0.02 Tao - err_console.print( - f":cross_mark: [red]Failed to get payment info[/red]:[bold white]\n" - f" {format_error_message(e, subtensor.substrate)}[/bold white]\n" - f" Defaulting to default transfer fee: {payment_info['partialFee']}" + logging.error(f":cross_mark: Failed to get payment info:") + logging.error(f"\t\t{format_error_message(e, subtensor.substrate)}") + logging.error( + f"\t\tDefaulting to default transfer fee: {payment_info['partialFee']}" ) return Balance.from_rao(payment_info["partialFee"]) @@ -106,38 +101,41 @@ async def do_transfer() -> tuple[bool, str, str]: block_hash_ = response.block_hash return True, block_hash_, "" else: - return False, "", format_error_message(await response.error_message) + return ( + False, + "", + format_error_message( + await response.error_message, substrate=subtensor.substrate + ), + ) # Validate destination address. 
if not is_valid_bittensor_address_or_public_key(destination): - err_console.print( - f":cross_mark: [red]Invalid destination SS58 address[/red]:[bold white]\n {destination}[/bold white]" + logging.error( + f":cross_mark: Invalid destination SS58 address:[bold white]\n {destination}[/bold white]" ) return False - console.print(f"[dark_orange]Initiating transfer on network: {subtensor.network}") + logging.info(f"Initiating transfer on network: {subtensor.network}") # Unlock wallet coldkey. try: wallet.unlock_coldkey() except KeyFileError: - err_console.print("Error decrypting coldkey (possibly incorrect password)") + logging.error("Error decrypting coldkey (possibly incorrect password)") return False # Check balance. - with console.status( - f":satellite: Checking balance and fees on chain [white]{subtensor.network}[/white]", - spinner="aesthetic", - ) as status: - # check existential deposit and fee - print_verbose("Fetching existential and fee", status) - block_hash = await subtensor.substrate.get_chain_head() - account_balance_, existential_deposit = await asyncio.gather( - subtensor.get_balance( - wallet.coldkeypub.ss58_address, block_hash=block_hash - ), - subtensor.get_existential_deposit(block_hash=block_hash), - ) - account_balance = account_balance_[wallet.coldkeypub.ss58_address] - fee = await get_transfer_fee() + logging.info( + f":satellite: Checking balance and fees on chain {subtensor.network}" + ) + # check existential deposit and fee + logging.debug("Fetching existential and fee") + block_hash = await subtensor.substrate.get_chain_head() + account_balance_, existential_deposit = await asyncio.gather( + subtensor.get_balance(wallet.coldkeypub.ss58_address, block_hash=block_hash), + subtensor.get_existential_deposit(block_hash=block_hash), + ) + account_balance = account_balance_[wallet.coldkeypub.ss58_address] + fee = await get_transfer_fee() if not keep_alive: # Check if the transfer should keep_alive the account @@ -147,16 +145,14 @@ async def 
do_transfer() -> tuple[bool, str, str]: if transfer_all is True: amount = account_balance - fee - existential_deposit if amount < Balance(0): - print_error("Not enough balance to transfer") + logging.error("Not enough balance to transfer") return False if account_balance < (amount + fee + existential_deposit): - err_console.print( - ":cross_mark: [bold red]Not enough balance[/bold red]:\n\n" - f" balance: [bright_cyan]{account_balance}[/bright_cyan]\n" - f" amount: [bright_cyan]{amount}[/bright_cyan]\n" - f" for fee: [bright_cyan]{fee}[/bright_cyan]" - ) + logging.error(":cross_mark: Not enough balance") + logging.error(f"\t\tBalance:\t{account_balance}") + logging.error(f"\t\tAmount:\t{amount}") + logging.error(f"\t\tFor fee:\t{fee}") return False # Ask before moving on. @@ -169,37 +165,36 @@ async def do_transfer() -> tuple[bool, str, str]: ): return False - with console.status(":satellite: Transferring...", spinner="earth") as status: - success, block_hash, err_msg = await do_transfer() + logging.info(":satellite: Transferring...") + logging.info(f"[green]Block Hash: {block_hash}") - if subtensor.network == "finney": - print_verbose("Fetching explorer URLs", status) - explorer_urls = get_explorer_url_for_network( - subtensor.network, block_hash, NETWORK_EXPLORER_MAP + if subtensor.network == "finney": + logging.debug("Fetching explorer URLs") + explorer_urls = get_explorer_url_for_network( + subtensor.network, block_hash, NETWORK_EXPLORER_MAP + ) + if explorer_urls != {} and explorer_urls: + logging.info( + f"[green]Opentensor Explorer Link: {explorer_urls.get('opentensor')}" ) - if explorer_urls != {} and explorer_urls: - console.print( - f"[green]Opentensor Explorer Link: {explorer_urls.get('opentensor')}[/green]" - ) - console.print( - f"[green]Taostats Explorer Link: {explorer_urls.get('taostats')}[/green]" - ) - else: - console.print(f":cross_mark: [red]Failed[/red]: {err_msg}") + logging.info( + f"[green]Taostats Explorer Link: 
{explorer_urls.get('taostats')}" + ) + else: + logging.error(f":cross_mark: Failed: {err_msg}") if success: - with console.status(":satellite: Checking Balance...", spinner="aesthetic"): - new_balance = await subtensor.get_balance( - wallet.coldkeypub.ss58_address, reuse_block=False - ) - console.print( - f"Balance:\n" - f" [blue]{account_balance}[/blue] :arrow_right: [green]{new_balance[wallet.coldkeypub.ss58_address]}[/green]" - ) - return True + logging.info(":satellite: Checking Balance...") + new_balance = await subtensor.get_balance( + wallet.coldkeypub.ss58_address, reuse_block=False + ) + logging.info( + f"Balance: [blue]{account_balance} :arrow_right: [green]{new_balance[wallet.coldkeypub.ss58_address]}" + ) + return True return False diff --git a/bittensor/core/settings.py b/bittensor/core/settings.py index 81542fa6d8..311cce23e0 100644 --- a/bittensor/core/settings.py +++ b/bittensor/core/settings.py @@ -53,34 +53,6 @@ def turn_console_on(): turn_console_off() bt_console = __console__ -bt_err_console = Console(stderr=True) -bt_verbose_console = Console(quiet=True) - - -def print_console(message: str, colour: str, title: str, console: Console): - console.print( - f"[bold {colour}][{title}]:[/bold {colour}] [{colour}]{message}[/{colour}]\n" - ) - - -def print_verbose(message: str, status=None): - """Print verbose messages while temporarily pausing the status spinner.""" - if status: - status.stop() - print_console(message, "green", "Verbose", bt_verbose_console) - status.start() - else: - print_console(message, "green", "Verbose", bt_verbose_console) - - -def print_error(message: str, status=None): - """Print error messages while temporarily pausing the status spinner.""" - if status: - status.stop() - print_console(message, "red", "Error", bt_err_console) - status.start() - else: - print_console(message, "red", "Error", bt_err_console) HOME_DIR = Path.home() diff --git a/bittensor/core/subtensor_async.py b/bittensor/core/subtensor_async.py index 
31ff5216b5..debafad3ed 100644 --- a/bittensor/core/subtensor_async.py +++ b/bittensor/core/subtensor_async.py @@ -68,6 +68,7 @@ transfer_extrinsic, ) from bittensor.core.metagraph import Metagraph +from bittensor.utils import AsyncSubstrateInterface from bittensor.utils import torch from bittensor.utils import u16_normalized_float, networking from bittensor.utils.balance import Balance @@ -189,7 +190,7 @@ def __init__( self.log_verbose = log_verbose self._connection_timeout = connection_timeout - self.substrate: "SubstrateInterface" = None + self.substrate: "AsyncSubstrateInterface" = None self._get_substrate() def __str__(self) -> str: @@ -1665,8 +1666,8 @@ def get_transfer_fee( call=call, keypair=wallet.coldkeypub ) except Exception as e: - settings.bt_console.print( - f":cross_mark: [red]Failed to get payment info[/red]:[bold white]\n {e}[/bold white]" + logging.error( + f":cross_mark: Failed to get payment info: {e}" ) payment_info = {"partialFee": int(2e7)} # assume 0.02 Tao diff --git a/bittensor/utils/__init__.py b/bittensor/utils/__init__.py index 1371110efb..745726c264 100644 --- a/bittensor/utils/__init__.py +++ b/bittensor/utils/__init__.py @@ -24,7 +24,7 @@ from bittensor_wallet import Keypair from substrateinterface.utils import ss58 -from bittensor.core.settings import SS58_FORMAT, bt_err_console +from bittensor.core.settings import SS58_FORMAT from bittensor.utils.btlogging import logging from .registration import torch, use_torch from .version import version_checking, check_version, VersionCheckError @@ -209,8 +209,8 @@ def format_error_message( err_docs = error_dict.get("docs", []) err_description = err_docs[0] if err_docs else err_description except (AttributeError, IndexError): - bt_err_console.print( - "Substrate pallets data unavailable. This is usually caused by an uninitialized substrate." + logging.error( + "Substrate pallets data unavailable. This is usually caused by an uninitialized substrate." 
) else: err_description = err_data diff --git a/bittensor/utils/async_substrate_interface.py b/bittensor/utils/async_substrate_interface.py index ec5e267e63..de0547e7b5 100644 --- a/bittensor/utils/async_substrate_interface.py +++ b/bittensor/utils/async_substrate_interface.py @@ -6,20 +6,20 @@ from hashlib import blake2b from typing import Optional, Any, Union, Callable, Awaitable, cast -from bt_decode import PortableRegistry, decode as decode_by_type_string, MetadataV15 +import websockets from async_property import async_property +from bittensor_wallet import Keypair +from bt_decode import PortableRegistry, decode as decode_by_type_string, MetadataV15 from scalecodec import GenericExtrinsic from scalecodec.base import ScaleBytes, ScaleType, RuntimeConfigurationObject from scalecodec.type_registry import load_type_registry_preset from scalecodec.types import GenericCall -from bittensor_wallet import Keypair from substrateinterface.exceptions import ( SubstrateRequestException, ExtrinsicNotFound, BlockNotFound, ) from substrateinterface.storage import StorageKey -import websockets ResultHandler = Callable[[dict, Any], Awaitable[tuple[dict, bool]]] From 43fc02d2b74a1b2e7b1b875f446b32189b666233 Mon Sep 17 00:00:00 2001 From: Roman Date: Fri, 1 Nov 2024 02:06:23 -0700 Subject: [PATCH 20/27] avoid non-direct import within inner code (fix circular import) --- bittensor/core/async_subtensor.py | 2 +- bittensor/core/extrinsics/async_root.py | 6 ++--- bittensor/core/extrinsics/async_transfer.py | 6 ++--- bittensor/utils/deprecated.py | 2 ++ tests/e2e_tests/conftest.py | 2 +- tests/e2e_tests/test_axon.py | 2 +- tests/e2e_tests/test_commit_weights.py | 13 ++++++----- tests/e2e_tests/test_dendrite.py | 25 +++++++++++---------- tests/e2e_tests/test_liquid_alpha.py | 11 ++++----- tests/e2e_tests/test_metagraph.py | 13 ++++++----- tests/e2e_tests/test_subtensor_functions.py | 8 +++---- tests/e2e_tests/utils/chain_interactions.py | 2 +- 12 files changed, 49 insertions(+), 43 
deletions(-) diff --git a/bittensor/core/async_subtensor.py b/bittensor/core/async_subtensor.py index 2848b0d600..aa2b65fb30 100644 --- a/bittensor/core/async_subtensor.py +++ b/bittensor/core/async_subtensor.py @@ -13,7 +13,6 @@ from scalecodec.type_registry import load_type_registry_preset from substrateinterface.exceptions import SubstrateRequestException -from bittensor import logging from bittensor.core.chain_data import ( DelegateInfo, custom_rpc_type_registry, @@ -47,6 +46,7 @@ TimeoutException, ) from bittensor.utils.balance import Balance +from bittensor.utils.btlogging import logging from bittensor.utils.delegates_details import DelegatesDetails diff --git a/bittensor/core/extrinsics/async_root.py b/bittensor/core/extrinsics/async_root.py index 9127b9a813..9e73f98a30 100644 --- a/bittensor/core/extrinsics/async_root.py +++ b/bittensor/core/extrinsics/async_root.py @@ -21,7 +21,7 @@ from bittensor.core.async_subtensor import AsyncSubtensor -async def get_limits(subtensor: AsyncSubtensor) -> tuple[int, float]: +async def get_limits(subtensor: "AsyncSubtensor") -> tuple[int, float]: # Get weight restrictions. 
maw, mwl = await asyncio.gather( subtensor.get_hyperparameter("MinAllowedWeights", netuid=0), @@ -33,7 +33,7 @@ async def get_limits(subtensor: AsyncSubtensor) -> tuple[int, float]: async def root_register_extrinsic( - subtensor: AsyncSubtensor, + subtensor: "AsyncSubtensor", wallet: Wallet, wait_for_inclusion: bool = True, wait_for_finalization: bool = True, @@ -105,7 +105,7 @@ async def root_register_extrinsic( async def set_root_weights_extrinsic( - subtensor: AsyncSubtensor, + subtensor: "AsyncSubtensor", wallet: Wallet, netuids: Union[NDArray[np.int64], list[int]], weights: Union[NDArray[np.float32], list[float]], diff --git a/bittensor/core/extrinsics/async_transfer.py b/bittensor/core/extrinsics/async_transfer.py index d796380333..b9072ae9b8 100644 --- a/bittensor/core/extrinsics/async_transfer.py +++ b/bittensor/core/extrinsics/async_transfer.py @@ -1,19 +1,19 @@ import asyncio from typing import TYPE_CHECKING + from bittensor_wallet import Wallet from bittensor_wallet.errors import KeyFileError from rich.prompt import Confirm from substrateinterface.exceptions import SubstrateRequestException -from bittensor import logging from bittensor.core.settings import NETWORK_EXPLORER_MAP -from bittensor.utils.balance import Balance - from bittensor.utils import ( format_error_message, get_explorer_url_for_network, is_valid_bittensor_address_or_public_key, ) +from bittensor.utils.balance import Balance +from bittensor.utils.btlogging import logging if TYPE_CHECKING: from bittensor.core.async_subtensor import AsyncSubtensor diff --git a/bittensor/utils/deprecated.py b/bittensor/utils/deprecated.py index 146e8395d0..124c0daac9 100644 --- a/bittensor/utils/deprecated.py +++ b/bittensor/utils/deprecated.py @@ -45,6 +45,7 @@ from bittensor_wallet import Keypair # noqa: F401 from bittensor.core import settings +from bittensor.core.async_subtensor import AsyncSubtensor from bittensor.core.axon import Axon from bittensor.core.chain_data import ( # noqa: F401 AxonInfo, @@ 
-116,6 +117,7 @@ from bittensor.utils.subnets import SubnetsAPI # noqa: F401 # Backwards compatibility with previous bittensor versions. +async_subtensor = AsyncSubtensor axon = Axon config = Config dendrite = Dendrite diff --git a/tests/e2e_tests/conftest.py b/tests/e2e_tests/conftest.py index 59170c9512..4a7b2ccf62 100644 --- a/tests/e2e_tests/conftest.py +++ b/tests/e2e_tests/conftest.py @@ -8,7 +8,7 @@ import pytest from substrateinterface import SubstrateInterface -from bittensor import logging +from bittensor.utils.btlogging import logging from tests.e2e_tests.utils.e2e_test_utils import ( clone_or_update_templates, install_templates, diff --git a/tests/e2e_tests/test_axon.py b/tests/e2e_tests/test_axon.py index 853719f85d..b5d18c5729 100644 --- a/tests/e2e_tests/test_axon.py +++ b/tests/e2e_tests/test_axon.py @@ -4,8 +4,8 @@ import pytest import bittensor -from bittensor import logging from bittensor.utils import networking +from bittensor.utils.btlogging import logging from tests.e2e_tests.utils.chain_interactions import register_neuron, register_subnet from tests.e2e_tests.utils.e2e_test_utils import ( setup_wallet, diff --git a/tests/e2e_tests/test_commit_weights.py b/tests/e2e_tests/test_commit_weights.py index ca9b0a0a2c..962a061a9a 100644 --- a/tests/e2e_tests/test_commit_weights.py +++ b/tests/e2e_tests/test_commit_weights.py @@ -3,8 +3,9 @@ import numpy as np import pytest -import bittensor -from bittensor import logging +from bittensor.core.subtensor import Subtensor +from bittensor.utils.balance import Balance +from bittensor.utils.btlogging import logging from bittensor.utils.weight_utils import convert_weights_and_uids_for_emit from tests.e2e_tests.utils.chain_interactions import ( add_stake, @@ -48,7 +49,7 @@ async def test_commit_and_reveal_weights(local_chain): ), "Unable to register Alice as a neuron" # Stake to become to top neuron after the first epoch - add_stake(local_chain, alice_wallet, bittensor.Balance.from_tao(100_000)) + 
add_stake(local_chain, alice_wallet, Balance.from_tao(100_000)) # Enable commit_reveal on the subnet assert sudo_set_hyperparameter_bool( @@ -59,7 +60,7 @@ async def test_commit_and_reveal_weights(local_chain): netuid, ), "Unable to enable commit reveal on the subnet" - subtensor = bittensor.Subtensor(network="ws://localhost:9945") + subtensor = Subtensor(network="ws://localhost:9945") assert subtensor.get_subnet_hyperparameters( netuid=netuid ).commit_reveal_weights_enabled, "Failed to enable commit/reveal" @@ -73,7 +74,7 @@ async def test_commit_and_reveal_weights(local_chain): return_error_message=True, ) - subtensor = bittensor.Subtensor(network="ws://localhost:9945") + subtensor = Subtensor(network="ws://localhost:9945") assert ( subtensor.get_subnet_hyperparameters( netuid=netuid @@ -92,7 +93,7 @@ async def test_commit_and_reveal_weights(local_chain): call_params={"netuid": netuid, "weights_set_rate_limit": "0"}, return_error_message=True, ) - subtensor = bittensor.Subtensor(network="ws://localhost:9945") + subtensor = Subtensor(network="ws://localhost:9945") assert ( subtensor.get_subnet_hyperparameters(netuid=netuid).weights_rate_limit == 0 ), "Failed to set weights_rate_limit" diff --git a/tests/e2e_tests/test_dendrite.py b/tests/e2e_tests/test_dendrite.py index e075326ca5..279e151346 100644 --- a/tests/e2e_tests/test_dendrite.py +++ b/tests/e2e_tests/test_dendrite.py @@ -3,20 +3,21 @@ import pytest -import bittensor -from bittensor import logging, Subtensor - -from tests.e2e_tests.utils.e2e_test_utils import ( - setup_wallet, - template_path, - templates_repo, -) +from bittensor.core.metagraph import Metagraph +from bittensor.core.subtensor import Subtensor +from bittensor.utils.balance import Balance +from bittensor.utils.btlogging import logging from tests.e2e_tests.utils.chain_interactions import ( register_neuron, register_subnet, add_stake, wait_epoch, ) +from tests.e2e_tests.utils.e2e_test_utils import ( + setup_wallet, + template_path, + 
templates_repo, +) @pytest.mark.asyncio @@ -56,7 +57,7 @@ async def test_dendrite(local_chain): local_chain, bob_wallet, netuid ), f"Neuron wasn't registered to subnet {netuid}" - metagraph = bittensor.Metagraph(netuid=netuid, network="ws://localhost:9945") + metagraph = Metagraph(netuid=netuid, network="ws://localhost:9945") subtensor = Subtensor(network="ws://localhost:9945") # Assert one neuron is Bob @@ -69,10 +70,10 @@ async def test_dendrite(local_chain): assert neuron.stake.tao == 0 # Stake to become to top neuron after the first epoch - assert add_stake(local_chain, bob_wallet, bittensor.Balance.from_tao(10_000)) + assert add_stake(local_chain, bob_wallet, Balance.from_tao(10_000)) # Refresh metagraph - metagraph = bittensor.Metagraph(netuid=netuid, network="ws://localhost:9945") + metagraph = Metagraph(netuid=netuid, network="ws://localhost:9945") old_neuron = metagraph.neurons[0] # Assert stake is 10000 @@ -121,7 +122,7 @@ async def test_dendrite(local_chain): await wait_epoch(subtensor, netuid=netuid) # Refresh metagraph - metagraph = bittensor.Metagraph(netuid=netuid, network="ws://localhost:9945") + metagraph = Metagraph(netuid=netuid, network="ws://localhost:9945") # Refresh validator neuron updated_neuron = metagraph.neurons[0] diff --git a/tests/e2e_tests/test_liquid_alpha.py b/tests/e2e_tests/test_liquid_alpha.py index d73162fbb4..4725704f61 100644 --- a/tests/e2e_tests/test_liquid_alpha.py +++ b/tests/e2e_tests/test_liquid_alpha.py @@ -1,5 +1,6 @@ -import bittensor -from bittensor import logging +from bittensor.core.subtensor import Subtensor +from bittensor.utils.balance import Balance +from bittensor.utils.btlogging import logging from tests.e2e_tests.utils.chain_interactions import ( add_stake, register_neuron, @@ -49,10 +50,10 @@ def test_liquid_alpha(local_chain): ), "Unable to register Alice as a neuron" # Stake to become to top neuron after the first epoch - add_stake(local_chain, alice_wallet, bittensor.Balance.from_tao(100_000)) + 
add_stake(local_chain, alice_wallet, Balance.from_tao(100_000)) # Assert liquid alpha is disabled - subtensor = bittensor.Subtensor(network="ws://localhost:9945") + subtensor = Subtensor(network="ws://localhost:9945") assert ( subtensor.get_subnet_hyperparameters(netuid=netuid).liquid_alpha_enabled is False @@ -118,7 +119,7 @@ def test_liquid_alpha(local_chain): alpha_high_too_high = u16_max + 1 # One more than the max acceptable value call_params = liquid_alpha_call_params(netuid, f"6553, {alpha_high_too_high}") try: - result, error_message = sudo_set_hyperparameter_values( + sudo_set_hyperparameter_values( local_chain, alice_wallet, call_function="sudo_set_alpha_values", diff --git a/tests/e2e_tests/test_metagraph.py b/tests/e2e_tests/test_metagraph.py index ff16dde369..8999b30358 100644 --- a/tests/e2e_tests/test_metagraph.py +++ b/tests/e2e_tests/test_metagraph.py @@ -1,7 +1,8 @@ import time -import bittensor -from bittensor import logging +from bittensor.core.subtensor import Subtensor +from bittensor.utils.balance import Balance +from bittensor.utils.btlogging import logging from tests.e2e_tests.utils.chain_interactions import ( add_stake, register_neuron, @@ -64,7 +65,7 @@ def test_metagraph(local_chain): ).serialize(), "Subnet wasn't created successfully" # Initialize metagraph - subtensor = bittensor.Subtensor(network="ws://localhost:9945") + subtensor = Subtensor(network="ws://localhost:9945") metagraph = subtensor.metagraph(netuid=1) # Assert metagraph is empty @@ -129,17 +130,17 @@ def test_metagraph(local_chain): # Test staking with low balance assert not add_stake( - local_chain, dave_wallet, bittensor.Balance.from_tao(10_000) + local_chain, dave_wallet, Balance.from_tao(10_000) ), "Low balance stake should fail" # Add stake by Bob assert add_stake( - local_chain, bob_wallet, bittensor.Balance.from_tao(10_000) + local_chain, bob_wallet, Balance.from_tao(10_000) ), "Failed to add stake for Bob" # Assert stake is added after updating metagraph 
metagraph.sync(subtensor=subtensor) - assert metagraph.neurons[0].stake == bittensor.Balance.from_tao( + assert metagraph.neurons[0].stake == Balance.from_tao( 10_000 ), "Bob's stake not updated in metagraph" diff --git a/tests/e2e_tests/test_subtensor_functions.py b/tests/e2e_tests/test_subtensor_functions.py index 32d0f6e14d..ffa7b716ee 100644 --- a/tests/e2e_tests/test_subtensor_functions.py +++ b/tests/e2e_tests/test_subtensor_functions.py @@ -3,8 +3,8 @@ import pytest -import bittensor -from bittensor import logging +from bittensor.core.subtensor import Subtensor +from bittensor.utils.btlogging import logging from tests.e2e_tests.utils.chain_interactions import ( register_neuron, register_subnet, @@ -31,7 +31,7 @@ async def test_subtensor_extrinsics(local_chain): AssertionError: If any of the checks or verifications fail """ netuid = 1 - subtensor = bittensor.Subtensor(network="ws://localhost:9945") + subtensor = Subtensor(network="ws://localhost:9945") # Subnets 0 and 3 are bootstrapped from the start assert subtensor.get_subnets() == [0, 3] @@ -139,7 +139,7 @@ async def test_subtensor_extrinsics(local_chain): await asyncio.sleep( 5 ) # wait for 5 seconds for the metagraph and subtensor to refresh with latest data - subtensor = bittensor.Subtensor(network="ws://localhost:9945") + subtensor = Subtensor(network="ws://localhost:9945") # Verify neuron info is updated after running as a validator neuron_info = subtensor.get_neuron_for_pubkey_and_subnet( diff --git a/tests/e2e_tests/utils/chain_interactions.py b/tests/e2e_tests/utils/chain_interactions.py index aad53812c8..20e4a65dea 100644 --- a/tests/e2e_tests/utils/chain_interactions.py +++ b/tests/e2e_tests/utils/chain_interactions.py @@ -6,7 +6,7 @@ import asyncio from typing import Union, Optional, TYPE_CHECKING -from bittensor import logging +from bittensor.utils.btlogging import logging # for typing purposes if TYPE_CHECKING: From d4d1e9c89e077d834bd78eaf9ec131162f2d59a4 Mon Sep 17 00:00:00 2001 From: Roman 
Date: Fri, 1 Nov 2024 10:34:43 -0700 Subject: [PATCH 21/27] del unused code --- bittensor/core/subtensor_async.py | 1891 ----------------------------- 1 file changed, 1891 deletions(-) delete mode 100644 bittensor/core/subtensor_async.py diff --git a/bittensor/core/subtensor_async.py b/bittensor/core/subtensor_async.py deleted file mode 100644 index debafad3ed..0000000000 --- a/bittensor/core/subtensor_async.py +++ /dev/null @@ -1,1891 +0,0 @@ -# The MIT License (MIT) -# Copyright © 2024 Opentensor Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated -# documentation files (the “Software”), to deal in the Software without restriction, including without limitation -# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, -# and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all copies or substantial portions of -# the Software. -# -# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO -# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL -# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -# DEALINGS IN THE SOFTWARE. - -""" -Async version of `bittensor.core.subtensor.Subtensor` module. 
-""" - -import argparse -import copy -import socket -from typing import Union, Optional, TypedDict, Any - -import numpy as np -import scalecodec -from bittensor_wallet import Wallet -from numpy.typing import NDArray -from retry import retry -from scalecodec.base import RuntimeConfiguration -from scalecodec.exceptions import RemainingScaleBytesNotEmptyException -from scalecodec.type_registry import load_type_registry_preset -from scalecodec.types import ScaleType -from substrateinterface.base import QueryMapResult, SubstrateInterface - -from bittensor.core import settings -from bittensor.core.axon import Axon -from bittensor.core.chain_data import ( - NeuronInfo, - PrometheusInfo, - SubnetHyperparameters, - NeuronInfoLite, - custom_rpc_type_registry, -) -from bittensor.core.config import Config -from bittensor.core.extrinsics.commit_weights import ( - commit_weights_extrinsic, - reveal_weights_extrinsic, -) -from bittensor.core.extrinsics.prometheus import ( - do_serve_prometheus, - prometheus_extrinsic, -) -from bittensor.core.extrinsics.registration import ( - burned_register_extrinsic, - register_extrinsic, -) -from bittensor.core.extrinsics.serving import ( - do_serve_axon, - serve_axon_extrinsic, - publish_metadata, - get_metadata, -) -from bittensor.core.extrinsics.set_weights import set_weights_extrinsic -from bittensor.core.extrinsics.transfer import ( - transfer_extrinsic, -) -from bittensor.core.metagraph import Metagraph -from bittensor.utils import AsyncSubstrateInterface -from bittensor.utils import torch -from bittensor.utils import u16_normalized_float, networking -from bittensor.utils.balance import Balance -from bittensor.utils.btlogging import logging -from bittensor.utils.weight_utils import generate_weight_hash - -KEY_NONCE: dict[str, int] = {} - - -class ParamWithTypes(TypedDict): - name: str # Name of the parameter. - type: str # ScaleType string of the parameter. 
- - -class Subtensor: - """ - The Subtensor class in Bittensor serves as a crucial interface for interacting with the Bittensor blockchain, - facilitating a range of operations essential for the decentralized machine learning network. - - This class enables neurons (network participants) to engage in activities such as registering on the network, - managing staked weights, setting inter-neuronal weights, and participating in consensus mechanisms. - - The Bittensor network operates on a digital ledger where each neuron holds stakes (S) and learns a set - of inter-peer weights (W). These weights, set by the neurons themselves, play a critical role in determining - the ranking and incentive mechanisms within the network. Higher-ranked neurons, as determined by their - contributions and trust within the network, receive more incentives. - - The Subtensor class connects to various Bittensor networks like the main ``finney`` network or local test - networks, providing a gateway to the blockchain layer of Bittensor. It leverages a staked weighted trust - system and consensus to ensure fair and distributed incentive mechanisms, where incentives (I) are - primarily allocated to neurons that are trusted by the majority of the network. - - Additionally, Bittensor introduces a speculation-based reward mechanism in the form of bonds (B), allowing - neurons to accumulate bonds in other neurons, speculating on their future value. This mechanism aligns - with market-based speculation, incentivizing neurons to make judicious decisions in their inter-neuronal - investments. - - Example Usage:: - - from bittensor.core.subtensor import Subtensor - - # Connect to the main Bittensor network (Finney). - finney_subtensor = Subtensor(network='finney') - - # Close websocket connection with the Bittensor network. - finney_subtensor.close() - - # Register a new neuron on the network. - wallet = bittensor_wallet.Wallet(...) # Assuming a wallet instance is created. 
- netuid = 1 - success = finney_subtensor.register(wallet=wallet, netuid=netuid) - - # Set inter-neuronal weights for collaborative learning. - success = finney_subtensor.set_weights(wallet=wallet, netuid=netuid, uids=[...], weights=[...]) - - # Get the metagraph for a specific subnet using given subtensor connection - metagraph = finney_subtensor.metagraph(netuid=netuid) - - By facilitating these operations, the Subtensor class is instrumental in maintaining the decentralized - intelligence and dynamic learning environment of the Bittensor network, as envisioned in its foundational - principles and mechanisms described in the `NeurIPS paper - `_. paper. - """ - - def __init__( - self, - network: Optional[str] = None, - config: Optional["Config"] = None, - _mock: bool = False, - log_verbose: bool = False, - connection_timeout: int = 600, - ) -> None: - """ - Initializes a Subtensor interface for interacting with the Bittensor blockchain. - - NOTE: - Currently subtensor defaults to the ``finney`` network. This will change in a future release. - - We strongly encourage users to run their own local subtensor node whenever possible. This increases decentralization and resilience of the network. In a future release, local subtensor will become the default and the fallback to ``finney`` removed. Please plan ahead for this change. We will provide detailed instructions on how to run a local subtensor node in the documentation in a subsequent release. - - Args: - network (Optional[str]): The network name to connect to (e.g., ``finney``, ``local``). This can also be the chain endpoint (e.g., ``wss://entrypoint-finney.opentensor.ai:443``) and will be correctly parsed into the network and chain endpoint. If not specified, defaults to the main Bittensor network. - config (Optional[bittensor.core.config.Config]): Configuration object for the subtensor. If not provided, a default configuration is used. 
- _mock (bool): If set to ``True``, uses a mocked connection for testing purposes. Default is ``False``. - log_verbose (bool): Whether to enable verbose logging. If set to ``True``, detailed log information about the connection and network operations will be provided. Default is ``True``. - connection_timeout (int): The maximum time in seconds to keep the connection alive. Default is ``600``. - - This initialization sets up the connection to the specified Bittensor network, allowing for various blockchain operations such as neuron registration, stake management, and setting weights. - """ - # Determine config.subtensor.chain_endpoint and config.subtensor.network config. - # If chain_endpoint is set, we override the network flag, otherwise, the chain_endpoint is assigned by the - # network. - # Argument importance: network > chain_endpoint > config.subtensor.chain_endpoint > config.subtensor.network - - if config is None: - config = Subtensor.config() - self._config = copy.deepcopy(config) - - # Setup config.subtensor.network and config.subtensor.chain_endpoint - self.chain_endpoint, self.network = Subtensor.setup_config( - network, self._config - ) - - if ( - self.network == "finney" - or self.chain_endpoint == settings.FINNEY_ENTRYPOINT - ) and log_verbose: - logging.info( - f"You are connecting to {self.network} network with endpoint {self.chain_endpoint}." - ) - logging.debug( - "We strongly encourage running a local subtensor node whenever possible. " - "This increases decentralization and resilience of the network." - ) - logging.debug( - "In a future release, local subtensor will become the default endpoint. " - "To get ahead of this change, please run a local subtensor node and point to it." 
- ) - - self.log_verbose = log_verbose - self._connection_timeout = connection_timeout - self.substrate: "AsyncSubstrateInterface" = None - self._get_substrate() - - def __str__(self) -> str: - if self.network == self.chain_endpoint: - # Connecting to chain endpoint without network known. - return f"subtensor({self.chain_endpoint})" - else: - # Connecting to network with endpoint known. - return f"subtensor({self.network}, {self.chain_endpoint})" - - def __repr__(self) -> str: - return self.__str__() - - def close(self): - """Cleans up resources for this subtensor instance like active websocket connection and active extensions.""" - if self.substrate: - self.substrate.close() - - def _get_substrate(self): - """Establishes a connection to the Substrate node using configured parameters.""" - try: - # Set up params. - self.substrate = SubstrateInterface( - ss58_format=settings.SS58_FORMAT, - use_remote_preset=True, - url=self.chain_endpoint, - type_registry=settings.TYPE_REGISTRY, - ) - if self.log_verbose: - logging.debug( - f"Connected to {self.network} network and {self.chain_endpoint}." - ) - - try: - self.substrate.websocket.settimeout(self._connection_timeout) - except (AttributeError, TypeError, socket.error, OSError) as e: - logging.warning(f"Error setting timeout: {e}") - - except ConnectionRefusedError as error: - logging.error( - f"Could not connect to {self.network} network with {self.chain_endpoint} chain endpoint.", - ) - logging.info( - "You can check if you have connectivity by running this command: nc -vz localhost " - f"{self.chain_endpoint}" - ) - raise ConnectionRefusedError(error.args) - - @staticmethod - def config() -> "Config": - """ - Creates and returns a Bittensor configuration object. - - Returns: - config (bittensor.core.config.Config): A Bittensor configuration object configured with arguments added by the `subtensor.add_args` method. 
- """ - parser = argparse.ArgumentParser() - Subtensor.add_args(parser) - return Config(parser, args=[]) - - @staticmethod - def setup_config(network: Optional[str], config: "Config"): - """ - Sets up and returns the configuration for the Subtensor network and endpoint. - - This method determines the appropriate network and chain endpoint based on the provided network string or - configuration object. It evaluates the network and endpoint in the following order of precedence: - 1. Provided network string. - 2. Configured chain endpoint in the `config` object. - 3. Configured network in the `config` object. - 4. Default chain endpoint. - 5. Default network. - - Args: - network (Optional[str]): The name of the Subtensor network. If None, the network and endpoint will be determined from the `config` object. - config (bittensor.core.config.Config): The configuration object containing the network and chain endpoint settings. - - Returns: - tuple: A tuple containing the formatted WebSocket endpoint URL and the evaluated network name. 
- """ - if network is not None: - ( - evaluated_network, - evaluated_endpoint, - ) = Subtensor.determine_chain_endpoint_and_network(network) - else: - if config.is_set("subtensor.chain_endpoint"): - ( - evaluated_network, - evaluated_endpoint, - ) = Subtensor.determine_chain_endpoint_and_network( - config.subtensor.chain_endpoint - ) - - elif config.is_set("subtensor.network"): - ( - evaluated_network, - evaluated_endpoint, - ) = Subtensor.determine_chain_endpoint_and_network( - config.subtensor.network - ) - - elif config.subtensor.get("chain_endpoint"): - ( - evaluated_network, - evaluated_endpoint, - ) = Subtensor.determine_chain_endpoint_and_network( - config.subtensor.chain_endpoint - ) - - elif config.subtensor.get("network"): - ( - evaluated_network, - evaluated_endpoint, - ) = Subtensor.determine_chain_endpoint_and_network( - config.subtensor.network - ) - - else: - ( - evaluated_network, - evaluated_endpoint, - ) = Subtensor.determine_chain_endpoint_and_network( - settings.DEFAULTS.subtensor.network - ) - - return ( - networking.get_formatted_ws_endpoint_url(evaluated_endpoint), - evaluated_network, - ) - - @classmethod - def help(cls): - """Print help to stdout.""" - parser = argparse.ArgumentParser() - cls.add_args(parser) - print(cls.__new__.__doc__) - parser.print_help() - - @classmethod - def add_args(cls, parser: "argparse.ArgumentParser", prefix: Optional[str] = None): - """ - Adds command-line arguments to the provided ArgumentParser for configuring the Subtensor settings. - - Args: - parser (argparse.ArgumentParser): The ArgumentParser object to which the Subtensor arguments will be added. - prefix (Optional[str]): An optional prefix for the argument names. If provided, the prefix is prepended to each argument name. - - Arguments added: - --subtensor.network: The Subtensor network flag. Possible values are 'finney', 'test', 'archive', and 'local'. Overrides the chain endpoint if set. - --subtensor.chain_endpoint: The Subtensor chain endpoint flag. 
If set, it overrides the network flag. - --subtensor._mock: If true, uses a mocked connection to the chain. - - Example: - parser = argparse.ArgumentParser() - Subtensor.add_args(parser) - """ - prefix_str = "" if prefix is None else f"{prefix}." - try: - default_network = settings.DEFAULT_NETWORK - default_chain_endpoint = settings.FINNEY_ENTRYPOINT - - parser.add_argument( - f"--{prefix_str}subtensor.network", - default=default_network, - type=str, - help="""The subtensor network flag. The likely choices are: - -- finney (main network) - -- test (test network) - -- archive (archive network +300 blocks) - -- local (local running network) - If this option is set it overloads subtensor.chain_endpoint with - an entry point node from that network. - """, - ) - parser.add_argument( - f"--{prefix_str}subtensor.chain_endpoint", - default=default_chain_endpoint, - type=str, - help="""The subtensor endpoint flag. If set, overrides the --network flag.""", - ) - parser.add_argument( - f"--{prefix_str}subtensor._mock", - default=False, - type=bool, - help="""If true, uses a mocked connection to the chain.""", - ) - - except argparse.ArgumentError: - # re-parsing arguments. 
- pass - - # Inner private functions - @networking.ensure_connected - def _encode_params( - self, - call_definition: list["ParamWithTypes"], - params: Union[list[Any], dict[str, Any]], - ) -> str: - """Returns a hex encoded string of the params using their types.""" - param_data = scalecodec.ScaleBytes(b"") - - for i, param in enumerate(call_definition["params"]): # type: ignore - scale_obj = self.substrate.create_scale_object(param["type"]) - if type(params) is list: - param_data += scale_obj.encode(params[i]) - else: - if param["name"] not in params: - raise ValueError(f"Missing param {param['name']} in params dict.") - - param_data += scale_obj.encode(params[param["name"]]) - - return param_data.to_hex() - - def _get_hyperparameter( - self, param_name: str, netuid: int, block: Optional[int] = None - ) -> Optional[Any]: - """ - Retrieves a specified hyperparameter for a specific subnet. - - Args: - param_name (str): The name of the hyperparameter to retrieve. - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number for the query. - - Returns: - Optional[Union[int, float]]: The value of the specified hyperparameter if the subnet exists, ``None`` otherwise. - """ - if not self.subnet_exists(netuid, block): - return None - - result = self.query_subtensor(param_name, block, [netuid]) - if result is None or not hasattr(result, "value"): - return None - - return result.value - - # Calls methods - @networking.ensure_connected - def query_subtensor( - self, name: str, block: Optional[int] = None, params: Optional[list] = None - ) -> "ScaleType": - """ - Queries named storage from the Subtensor module on the Bittensor blockchain. This function is used to retrieve specific data or parameters from the blockchain, such as stake, rank, or other neuron-specific attributes. - - Args: - name (str): The name of the storage function to query. - block (Optional[int]): The blockchain block number at which to perform the query. 
- params (Optional[list[object]]): A list of parameters to pass to the query function. - - Returns: - query_response (scalecodec.ScaleType): An object containing the requested data. - - This query function is essential for accessing detailed information about the network and its neurons, providing valuable insights into the state and dynamics of the Bittensor ecosystem. - """ - - @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=logging) - def make_substrate_call_with_retry() -> "ScaleType": - return self.substrate.query( - module="SubtensorModule", - storage_function=name, - params=params, - block_hash=( - None if block is None else self.substrate.get_block_hash(block) - ), - ) - - return make_substrate_call_with_retry() - - @networking.ensure_connected - def query_map_subtensor( - self, name: str, block: Optional[int] = None, params: Optional[list] = None - ) -> "QueryMapResult": - """ - Queries map storage from the Subtensor module on the Bittensor blockchain. This function is designed to retrieve a map-like data structure, which can include various neuron-specific details or network-wide attributes. - - Args: - name (str): The name of the map storage function to query. - block (Optional[int]): The blockchain block number at which to perform the query. - params (Optional[list[object]]): A list of parameters to pass to the query function. - - Returns: - QueryMapResult (substrateinterface.base.QueryMapResult): An object containing the map-like data structure, or ``None`` if not found. - - This function is particularly useful for analyzing and understanding complex network structures and relationships within the Bittensor ecosystem, such as inter-neuronal connections and stake distributions. 
- """ - - @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=logging) - def make_substrate_call_with_retry(): - return self.substrate.query_map( - module="SubtensorModule", - storage_function=name, - params=params, - block_hash=( - None if block is None else self.substrate.get_block_hash(block) - ), - ) - - return make_substrate_call_with_retry() - - def query_runtime_api( - self, - runtime_api: str, - method: str, - params: Optional[Union[list[int], dict[str, int]]], - block: Optional[int] = None, - ) -> Optional[str]: - """ - Queries the runtime API of the Bittensor blockchain, providing a way to interact with the underlying runtime and retrieve data encoded in Scale Bytes format. This function is essential for advanced users who need to interact with specific runtime methods and decode complex data types. - - Args: - runtime_api (str): The name of the runtime API to query. - method (str): The specific method within the runtime API to call. - params (Optional[list[ParamWithTypes]]): The parameters to pass to the method call. - block (Optional[int]): The blockchain block number at which to perform the query. - - Returns: - Optional[str]: The Scale Bytes encoded result from the runtime API call, or ``None`` if the call fails. - - This function enables access to the deeper layers of the Bittensor blockchain, allowing for detailed and specific interactions with the network's runtime environment. 
- """ - call_definition = settings.TYPE_REGISTRY["runtime_api"][runtime_api]["methods"][ - method - ] - - json_result = self.state_call( - method=f"{runtime_api}_{method}", - data=( - "0x" - if params is None - else self._encode_params(call_definition=call_definition, params=params) - ), - block=block, - ) - - if json_result is None: - return None - - return_type = call_definition["type"] - - as_scale_bytes = scalecodec.ScaleBytes(json_result["result"]) - - rpc_runtime_config = RuntimeConfiguration() - rpc_runtime_config.update_type_registry(load_type_registry_preset("legacy")) - rpc_runtime_config.update_type_registry(custom_rpc_type_registry) - - obj = rpc_runtime_config.create_scale_object(return_type, as_scale_bytes) - if obj.data.to_hex() == "0x0400": # RPC returned None result - return None - - return obj.decode() - - @networking.ensure_connected - def state_call( - self, method: str, data: str, block: Optional[int] = None - ) -> dict[Any, Any]: - """ - Makes a state call to the Bittensor blockchain, allowing for direct queries of the blockchain's state. This function is typically used for advanced queries that require specific method calls and data inputs. - - Args: - method (str): The method name for the state call. - data (str): The data to be passed to the method. - block (Optional[int]): The blockchain block number at which to perform the state call. - - Returns: - result (dict[Any, Any]): The result of the rpc call. - - The state call function provides a more direct and flexible way of querying blockchain data, useful for specific use cases where standard queries are insufficient. 
- """ - - @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=logging) - def make_substrate_call_with_retry() -> dict[Any, Any]: - block_hash = None if block is None else self.substrate.get_block_hash(block) - return self.substrate.rpc_request( - method="state_call", - params=[method, data, block_hash] if block_hash else [method, data], - ) - - return make_substrate_call_with_retry() - - @networking.ensure_connected - def query_map( - self, - module: str, - name: str, - block: Optional[int] = None, - params: Optional[list] = None, - ) -> "QueryMapResult": - """ - Queries map storage from any module on the Bittensor blockchain. This function retrieves data structures that represent key-value mappings, essential for accessing complex and structured data within the blockchain modules. - - Args: - module (str): The name of the module from which to query the map storage. - name (str): The specific storage function within the module to query. - block (Optional[int]): The blockchain block number at which to perform the query. - params (Optional[list[object]]): Parameters to be passed to the query. - - Returns: - result (substrateinterface.base.QueryMapResult): A data structure representing the map storage if found, ``None`` otherwise. - - This function is particularly useful for retrieving detailed and structured data from various blockchain modules, offering insights into the network's state and the relationships between its different components. 
- """ - - @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=logging) - def make_substrate_call_with_retry() -> "QueryMapResult": - return self.substrate.query_map( - module=module, - storage_function=name, - params=params, - block_hash=( - None if block is None else self.substrate.get_block_hash(block) - ), - ) - - return make_substrate_call_with_retry() - - @networking.ensure_connected - def query_constant( - self, module_name: str, constant_name: str, block: Optional[int] = None - ) -> Optional["ScaleType"]: - """ - Retrieves a constant from the specified module on the Bittensor blockchain. This function is used to access fixed parameters or values defined within the blockchain's modules, which are essential for understanding the network's configuration and rules. - - Args: - module_name (str): The name of the module containing the constant. - constant_name (str): The name of the constant to retrieve. - block (Optional[int]): The blockchain block number at which to query the constant. - - Returns: - Optional[scalecodec.ScaleType]: The value of the constant if found, ``None`` otherwise. - - Constants queried through this function can include critical network parameters such as inflation rates, consensus rules, or validation thresholds, providing a deeper understanding of the Bittensor network's operational parameters. - """ - - @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=logging) - def make_substrate_call_with_retry(): - return self.substrate.get_constant( - module_name=module_name, - constant_name=constant_name, - block_hash=( - None if block is None else self.substrate.get_block_hash(block) - ), - ) - - return make_substrate_call_with_retry() - - @networking.ensure_connected - def query_module( - self, - module: str, - name: str, - block: Optional[int] = None, - params: Optional[list] = None, - ) -> "ScaleType": - """ - Queries any module storage on the Bittensor blockchain with the specified parameters and block number. 
This function is a generic query interface that allows for flexible and diverse data retrieval from various blockchain modules. - - Args: - module (str): The name of the module from which to query data. - name (str): The name of the storage function within the module. - block (Optional[int]): The blockchain block number at which to perform the query. - params (Optional[list[object]]): A list of parameters to pass to the query function. - - Returns: - Optional[scalecodec.ScaleType]: An object containing the requested data if found, ``None`` otherwise. - - This versatile query function is key to accessing a wide range of data and insights from different parts of the Bittensor blockchain, enhancing the understanding and analysis of the network's state and dynamics. - """ - - @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=logging) - def make_substrate_call_with_retry() -> "ScaleType": - return self.substrate.query( - module=module, - storage_function=name, - params=params, - block_hash=( - None if block is None else self.substrate.get_block_hash(block) - ), - ) - - return make_substrate_call_with_retry() - - # Common subtensor methods - def metagraph( - self, netuid: int, lite: bool = True, block: Optional[int] = None - ) -> "Metagraph": # type: ignore - """ - Returns a synced metagraph for a specified subnet within the Bittensor network. The metagraph represents the network's structure, including neuron connections and interactions. - - Args: - netuid (int): The network UID of the subnet to query. - lite (bool): If true, returns a metagraph using a lightweight sync (no weights, no bonds). Default is ``True``. - block (Optional[int]): Block number for synchronization, or ``None`` for the latest block. - - Returns: - bittensor.core.metagraph.Metagraph: The metagraph representing the subnet's structure and neuron relationships. 
- - The metagraph is an essential tool for understanding the topology and dynamics of the Bittensor network's decentralized architecture, particularly in relation to neuron interconnectivity and consensus processes. - """ - metagraph = Metagraph( - network=self.network, netuid=netuid, lite=lite, sync=False - ) - metagraph.sync(block=block, lite=lite, subtensor=self) - - return metagraph - - @staticmethod - def determine_chain_endpoint_and_network( - network: str, - ) -> tuple[Optional[str], Optional[str]]: - """Determines the chain endpoint and network from the passed network or chain_endpoint. - - Args: - network (str): The network flag. The choices are: ``finney`` (main network), ``archive`` (archive network +300 blocks), ``local`` (local running network), ``test`` (test network). - - Returns: - tuple[Optional[str], Optional[str]]: The network and chain endpoint flag. If passed, overrides the ``network`` argument. - """ - - if network is None: - return None, None - if network in ["finney", "local", "test", "archive"]: - if network == "finney": - # Kiru Finney staging network. 
- return network, settings.FINNEY_ENTRYPOINT - elif network == "local": - return network, settings.LOCAL_ENTRYPOINT - elif network == "test": - return network, settings.FINNEY_TEST_ENTRYPOINT - elif network == "archive": - return network, settings.ARCHIVE_ENTRYPOINT - else: - if ( - network == settings.FINNEY_ENTRYPOINT - or "entrypoint-finney.opentensor.ai" in network - ): - return "finney", settings.FINNEY_ENTRYPOINT - elif ( - network == settings.FINNEY_TEST_ENTRYPOINT - or "test.finney.opentensor.ai" in network - ): - return "test", settings.FINNEY_TEST_ENTRYPOINT - elif ( - network == settings.ARCHIVE_ENTRYPOINT - or "archive.chain.opentensor.ai" in network - ): - return "archive", settings.ARCHIVE_ENTRYPOINT - elif "127.0.0.1" in network or "localhost" in network: - return "local", network - else: - return "unknown", network - return None, None - - def get_netuids_for_hotkey( - self, hotkey_ss58: str, block: Optional[int] = None - ) -> list[int]: - """ - Retrieves a list of subnet UIDs (netuids) for which a given hotkey is a member. This function identifies the specific subnets within the Bittensor network where the neuron associated with the hotkey is active. - - Args: - hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - block (Optional[int]): The blockchain block number at which to perform the query. - - Returns: - list[int]: A list of netuids where the neuron is a member. - """ - result = self.query_map_subtensor("IsNetworkMember", block, [hotkey_ss58]) - return ( - [record[0].value for record in result if record[1]] - if result and hasattr(result, "records") - else [] - ) - - @networking.ensure_connected - def get_current_block(self) -> int: - """ - Returns the current block number on the Bittensor blockchain. This function provides the latest block number, indicating the most recent state of the blockchain. - - Returns: - int: The current chain block number. 
- - Knowing the current block number is essential for querying real-time data and performing time-sensitive operations on the blockchain. It serves as a reference point for network activities and data synchronization. - """ - - @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=logging) - def make_substrate_call_with_retry(): - return self.substrate.get_block_number(None) # type: ignore - - return make_substrate_call_with_retry() - - def is_hotkey_registered_any( - self, hotkey_ss58: str, block: Optional[int] = None - ) -> bool: - """ - Checks if a neuron's hotkey is registered on any subnet within the Bittensor network. - - Args: - hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - block (Optional[int]): The blockchain block number at which to perform the check. - - Returns: - bool: ``True`` if the hotkey is registered on any subnet, False otherwise. - - This function is essential for determining the network-wide presence and participation of a neuron. - """ - return len(self.get_netuids_for_hotkey(hotkey_ss58, block)) > 0 - - def is_hotkey_registered_on_subnet( - self, hotkey_ss58: str, netuid: int, block: Optional[int] = None - ) -> bool: - """ - Checks if a neuron's hotkey is registered on a specific subnet within the Bittensor network. - - Args: - hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number at which to perform the check. - - Returns: - bool: ``True`` if the hotkey is registered on the specified subnet, False otherwise. - - This function helps in assessing the participation of a neuron in a particular subnet, indicating its specific area of operation or influence within the network. 
- """ - return self.get_uid_for_hotkey_on_subnet(hotkey_ss58, netuid, block) is not None - - def is_hotkey_registered( - self, - hotkey_ss58: str, - netuid: Optional[int] = None, - block: Optional[int] = None, - ) -> bool: - """ - Determines whether a given hotkey (public key) is registered in the Bittensor network, either globally across any subnet or specifically on a specified subnet. This function checks the registration status of a neuron identified by its hotkey, which is crucial for validating its participation and activities within the network. - - Args: - hotkey_ss58 (str): The SS58 address of the neuron's hotkey. - netuid (Optional[int]): The unique identifier of the subnet to check the registration. If ``None``, the registration is checked across all subnets. - block (Optional[int]): The blockchain block number at which to perform the query. - - Returns: - bool: ``True`` if the hotkey is registered in the specified context (either any subnet or a specific subnet), ``False`` otherwise. - - This function is important for verifying the active status of neurons in the Bittensor network. It aids in understanding whether a neuron is eligible to participate in network processes such as consensus, validation, and incentive distribution based on its registration status. - """ - if netuid is None: - return self.is_hotkey_registered_any(hotkey_ss58, block) - else: - return self.is_hotkey_registered_on_subnet(hotkey_ss58, netuid, block) - - # Not used in Bittensor, but is actively used by the community in almost all subnets - def set_weights( - self, - wallet: "Wallet", - netuid: int, - uids: Union[NDArray[np.int64], "torch.LongTensor", list], - weights: Union[NDArray[np.float32], "torch.FloatTensor", list], - version_key: int = settings.version_as_int, - wait_for_inclusion: bool = False, - wait_for_finalization: bool = False, - prompt: bool = False, - max_retries: int = 5, - ) -> tuple[bool, str]: - """ - Sets the inter-neuronal weights for the specified neuron. 
This process involves specifying the influence or trust a neuron places on other neurons in the network, which is a fundamental aspect of Bittensor's decentralized learning architecture. - - Args: - wallet (bittensor_wallet.Wallet): The wallet associated with the neuron setting the weights. - netuid (int): The unique identifier of the subnet. - uids (Union[NDArray[np.int64], torch.LongTensor, list]): The list of neuron UIDs that the weights are being set for. - weights (Union[NDArray[np.float32], torch.FloatTensor, list]): The corresponding weights to be set for each UID. - version_key (int): Version key for compatibility with the network. Default is ``int representation of Bittensor version.``. - wait_for_inclusion (bool): Waits for the transaction to be included in a block. Default is ``False``. - wait_for_finalization (bool): Waits for the transaction to be finalized on the blockchain. Default is ``False``. - prompt (bool): If ``True``, prompts for user confirmation before proceeding. Default is ``False``. - max_retries (int): The number of maximum attempts to set weights. Default is ``5``. - - Returns: - tuple[bool, str]: ``True`` if the setting of weights is successful, False otherwise. And `msg`, a string value describing the success or potential error. - - This function is crucial in shaping the network's collective intelligence, where each neuron's learning and contribution are influenced by the weights it sets towards others【81†source】. - """ - uid = self.get_uid_for_hotkey_on_subnet(wallet.hotkey.ss58_address, netuid) - retries = 0 - success = False - message = "No attempt made. Perhaps it is too soon to set weights!" - while ( - self.blocks_since_last_update(netuid, uid) > self.weights_rate_limit(netuid) # type: ignore - and retries < max_retries - ): - try: - logging.info( - f"Setting weights for subnet #{netuid}. Attempt {retries + 1} of {max_retries}." 
- ) - success, message = set_weights_extrinsic( - subtensor=self, - wallet=wallet, - netuid=netuid, - uids=uids, - weights=weights, - version_key=version_key, - wait_for_inclusion=wait_for_inclusion, - wait_for_finalization=wait_for_finalization, - prompt=prompt, - ) - except Exception as e: - logging.error(f"Error setting weights: {e}") - finally: - retries += 1 - - return success, message - - def register( - self, - wallet: "Wallet", - netuid: int, - wait_for_inclusion: bool = False, - wait_for_finalization: bool = True, - prompt: bool = False, - max_allowed_attempts: int = 3, - output_in_place: bool = True, - cuda: bool = False, - dev_id: Union[list[int], int] = 0, - tpb: int = 256, - num_processes: Optional[int] = None, - update_interval: Optional[int] = None, - log_verbose: bool = False, - ) -> bool: - """ - Registers a neuron on the Bittensor network using the provided wallet. - - Registration is a critical step for a neuron to become an active participant in the network, enabling it to stake, set weights, and receive incentives. - - Args: - wallet (bittensor_wallet.Wallet): The wallet associated with the neuron to be registered. - netuid (int): The unique identifier of the subnet. - wait_for_inclusion (bool): Waits for the transaction to be included in a block. Defaults to `False`. - wait_for_finalization (bool): Waits for the transaction to be finalized on the blockchain. Defaults to `True`. - prompt (bool): If ``True``, prompts for user confirmation before proceeding. - max_allowed_attempts (int): Maximum number of attempts to register the wallet. - output_in_place (bool): If true, prints the progress of the proof of work to the console in-place. Meaning the progress is printed on the same lines. Defaults to `True`. - cuda (bool): If ``true``, the wallet should be registered using CUDA device(s). Defaults to `False`. - dev_id (Union[List[int], int]): The CUDA device id to use, or a list of device ids. Defaults to `0` (zero). 
- tpb (int): The number of threads per block (CUDA). Default to `256`. - num_processes (Optional[int]): The number of processes to use to register. Default to `None`. - update_interval (Optional[int]): The number of nonces to solve between updates. Default to `None`. - log_verbose (bool): If ``true``, the registration process will log more information. Default to `False`. - - Returns: - bool: ``True`` if the registration is successful, False otherwise. - - This function facilitates the entry of new neurons into the network, supporting the decentralized - growth and scalability of the Bittensor ecosystem. - """ - return register_extrinsic( - subtensor=self, - wallet=wallet, - netuid=netuid, - wait_for_inclusion=wait_for_inclusion, - wait_for_finalization=wait_for_finalization, - prompt=prompt, - max_allowed_attempts=max_allowed_attempts, - output_in_place=output_in_place, - cuda=cuda, - dev_id=dev_id, - tpb=tpb, - num_processes=num_processes, - update_interval=update_interval, - log_verbose=log_verbose, - ) - - def burned_register( - self, - wallet: "Wallet", - netuid: int, - wait_for_inclusion: bool = False, - wait_for_finalization: bool = True, - prompt: bool = False, - ) -> bool: - """ - Registers a neuron on the Bittensor network by recycling TAO. This method of registration involves recycling TAO tokens, allowing them to be re-mined by performing work on the network. - - Args: - wallet (bittensor_wallet.Wallet): The wallet associated with the neuron to be registered. - netuid (int): The unique identifier of the subnet. - wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. Defaults to `False`. - wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. Defaults to `True`. - prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. Defaults to `False`. - - Returns: - bool: ``True`` if the registration is successful, False otherwise. 
- """ - return burned_register_extrinsic( - subtensor=self, - wallet=wallet, - netuid=netuid, - wait_for_inclusion=wait_for_inclusion, - wait_for_finalization=wait_for_finalization, - prompt=prompt, - ) - - def serve_axon( - self, - netuid: int, - axon: "Axon", - wait_for_inclusion: bool = False, - wait_for_finalization: bool = True, - ) -> bool: - """ - Registers an ``Axon`` serving endpoint on the Bittensor network for a specific neuron. This function is used to set up the Axon, a key component of a neuron that handles incoming queries and data processing tasks. - - Args: - netuid (int): The unique identifier of the subnetwork. - axon (bittensor.core.axon.Axon): The Axon instance to be registered for serving. - wait_for_inclusion (bool): Waits for the transaction to be included in a block. Default is ``False``. - wait_for_finalization (bool): Waits for the transaction to be finalized on the blockchain. Default is ``True``. - - Returns: - bool: ``True`` if the Axon serve registration is successful, False otherwise. - - By registering an Axon, the neuron becomes an active part of the network's distributed computing infrastructure, contributing to the collective intelligence of Bittensor. - """ - return serve_axon_extrinsic( - self, netuid, axon, wait_for_inclusion, wait_for_finalization - ) - - # metagraph - @property - def block(self) -> int: - """Returns current chain block. - - Returns: - block (int): Current chain block. - """ - return self.get_current_block() - - def blocks_since_last_update(self, netuid: int, uid: int) -> Optional[int]: - """ - Returns the number of blocks since the last update for a specific UID in the subnetwork. - - Args: - netuid (int): The unique identifier of the subnetwork. - uid (int): The unique identifier of the neuron. - - Returns: - Optional[int]: The number of blocks since the last update, or ``None`` if the subnetwork or UID does not exist. 
- """ - call = self._get_hyperparameter(param_name="LastUpdate", netuid=netuid) - return None if call is None else self.get_current_block() - int(call[uid]) - - @networking.ensure_connected - def get_block_hash(self, block_id: int) -> str: - """ - Retrieves the hash of a specific block on the Bittensor blockchain. The block hash is a unique identifier representing the cryptographic hash of the block's content, ensuring its integrity and immutability. - - Args: - block_id (int): The block number for which the hash is to be retrieved. - - Returns: - str: The cryptographic hash of the specified block. - - The block hash is a fundamental aspect of blockchain technology, providing a secure reference to each block's data. It is crucial for verifying transactions, ensuring data consistency, and maintaining the trustworthiness of the blockchain. - """ - return self.substrate.get_block_hash(block_id=block_id) - - def weights_rate_limit(self, netuid: int) -> Optional[int]: - """ - Returns network WeightsSetRateLimit hyperparameter. - - Args: - netuid (int): The unique identifier of the subnetwork. - - Returns: - Optional[int]: The value of the WeightsSetRateLimit hyperparameter, or ``None`` if the subnetwork does not exist or the parameter is not found. - """ - call = self._get_hyperparameter(param_name="WeightsSetRateLimit", netuid=netuid) - return None if call is None else int(call) - - # Keep backwards compatibility for community usage. - # Make some commitment on-chain about arbitrary data. - def commit(self, wallet, netuid: int, data: str): - """ - Commits arbitrary data to the Bittensor network by publishing metadata. - - Args: - wallet (bittensor_wallet.Wallet): The wallet associated with the neuron committing the data. - netuid (int): The unique identifier of the subnetwork. - data (str): The data to be committed to the network. - """ - publish_metadata(self, wallet, netuid, f"Raw{len(data)}", data.encode()) - - # Keep backwards compatibility for community usage. 
- def subnetwork_n(self, netuid: int, block: Optional[int] = None) -> Optional[int]: - """ - Returns network SubnetworkN hyperparameter. - - Args: - netuid (int): The unique identifier of the subnetwork. - block (Optional[int]): The block number to retrieve the parameter from. If ``None``, the latest block is used. Default is ``None``. - - Returns: - Optional[int]: The value of the SubnetworkN hyperparameter, or ``None`` if the subnetwork does not exist or the parameter is not found. - """ - call = self._get_hyperparameter( - param_name="SubnetworkN", netuid=netuid, block=block - ) - return None if call is None else int(call) - - # Community uses this method - def transfer( - self, - wallet: "Wallet", - dest: str, - amount: Union["Balance", float], - wait_for_inclusion: bool = True, - wait_for_finalization: bool = False, - prompt: bool = False, - ) -> bool: - """ - Executes a transfer of funds from the provided wallet to the specified destination address. This function is used to move TAO tokens within the Bittensor network, facilitating transactions between neurons. - - Args: - wallet (bittensor_wallet.Wallet): The wallet from which funds are being transferred. - dest (str): The destination public key address. - amount (Union[bittensor.utils.balance.Balance, float]): The amount of TAO to be transferred. - wait_for_inclusion (bool): Waits for the transaction to be included in a block. Default is ``True``. - wait_for_finalization (bool): Waits for the transaction to be finalized on the blockchain. Default is ``False``. - prompt (bool): If ``True``, prompts for user confirmation before proceeding. Default is ``False``. - - Returns: - transfer_extrinsic (bool): ``True`` if the transfer is successful, False otherwise. - - This function is essential for the fluid movement of tokens in the network, supporting various economic activities such as staking, delegation, and reward distribution. 
- """ - return transfer_extrinsic( - subtensor=self, - wallet=wallet, - dest=dest, - amount=amount, - wait_for_inclusion=wait_for_inclusion, - wait_for_finalization=wait_for_finalization, - prompt=prompt, - ) - - # Community uses this method via `bittensor.api.extrinsics.prometheus.prometheus_extrinsic` - def get_neuron_for_pubkey_and_subnet( - self, hotkey_ss58: str, netuid: int, block: Optional[int] = None - ) -> Optional["NeuronInfo"]: - """ - Retrieves information about a neuron based on its public key (hotkey SS58 address) and the specific subnet UID (netuid). This function provides detailed neuron information for a particular subnet within the Bittensor network. - - Args: - hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number at which to perform the query. - - Returns: - Optional[bittensor.core.chain_data.neuron_info.NeuronInfo]: Detailed information about the neuron if found, ``None`` otherwise. - - This function is crucial for accessing specific neuron data and understanding its status, stake, and other attributes within a particular subnet of the Bittensor ecosystem. - """ - return self.neuron_for_uid( - self.get_uid_for_hotkey_on_subnet(hotkey_ss58, netuid, block=block), - netuid, - block=block, - ) - - @networking.ensure_connected - def neuron_for_uid( - self, uid: Optional[int], netuid: int, block: Optional[int] = None - ) -> "NeuronInfo": - """ - Retrieves detailed information about a specific neuron identified by its unique identifier (UID) within a specified subnet (netuid) of the Bittensor network. This function provides a comprehensive view of a neuron's attributes, including its stake, rank, and operational status. - - Args: - uid (Optional[int]): The unique identifier of the neuron. - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number for the query. 
- - Returns: - bittensor.core.chain_data.neuron_info.NeuronInfo: Detailed information about the neuron if found, ``None`` otherwise. - - This function is crucial for analyzing individual neurons' contributions and status within a specific subnet, offering insights into their roles in the network's consensus and validation mechanisms. - """ - if uid is None: - return NeuronInfo.get_null_neuron() - - @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=logging) - def make_substrate_call_with_retry(): - block_hash = None if block is None else self.substrate.get_block_hash(block) - params = [netuid, uid] - if block_hash: - params = params + [block_hash] - return self.substrate.rpc_request( - method="neuronInfo_getNeuron", - params=params, # custom rpc method - ) - - json_body = make_substrate_call_with_retry() - - if not (result := json_body.get("result", None)): - return NeuronInfo.get_null_neuron() - - return NeuronInfo.from_vec_u8(result) - - # Community uses this method - def serve_prometheus( - self, - wallet: "Wallet", - port: int, - netuid: int, - wait_for_inclusion: bool = False, - wait_for_finalization: bool = True, - ) -> bool: - """ - Serves Prometheus metrics by submitting an extrinsic to a blockchain network via the specified wallet. The function allows configuring whether to wait for the transaction's inclusion in a block and its finalization. - - Args: - wallet (bittensor_wallet.Wallet): Bittensor wallet instance used for submitting the extrinsic. - port (int): The port number on which Prometheus metrics are served. - netuid (int): The unique identifier of the subnetwork. - wait_for_inclusion (bool): If True, waits for the transaction to be included in a block. Defaults to ``False``. - wait_for_finalization (bool): If True, waits for the transaction to be finalized. Defaults to ``True``. - - Returns: - bool: Returns True if the Prometheus extrinsic is successfully processed, otherwise False. 
- """ - return prometheus_extrinsic( - self, - wallet=wallet, - port=port, - netuid=netuid, - wait_for_inclusion=wait_for_inclusion, - wait_for_finalization=wait_for_finalization, - ) - - # Community uses this method - def get_subnet_hyperparameters( - self, netuid: int, block: Optional[int] = None - ) -> Optional[Union[list, "SubnetHyperparameters"]]: - """ - Retrieves the hyperparameters for a specific subnet within the Bittensor network. These hyperparameters define the operational settings and rules governing the subnet's behavior. - - Args: - netuid (int): The network UID of the subnet to query. - block (Optional[int]): The blockchain block number for the query. - - Returns: - Optional[bittensor.core.chain_data.subnet_hyperparameters.SubnetHyperparameters]: The subnet's hyperparameters, or ``None`` if not available. - - Understanding the hyperparameters is crucial for comprehending how subnets are configured and managed, and how they interact with the network's consensus and incentive mechanisms. - """ - hex_bytes_result = self.query_runtime_api( - runtime_api="SubnetInfoRuntimeApi", - method="get_subnet_hyperparams", - params=[netuid], - block=block, - ) - - if hex_bytes_result is None: - return [] - - if hex_bytes_result.startswith("0x"): - bytes_result = bytes.fromhex(hex_bytes_result[2:]) - else: - bytes_result = bytes.fromhex(hex_bytes_result) - - return SubnetHyperparameters.from_vec_u8(bytes_result) # type: ignore - - # Community uses this method - # Returns network ImmunityPeriod hyper parameter. - def immunity_period( - self, netuid: int, block: Optional[int] = None - ) -> Optional[int]: - """ - Retrieves the 'ImmunityPeriod' hyperparameter for a specific subnet. This parameter defines the duration during which new neurons are protected from certain network penalties or restrictions. - - Args: - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number for the query. 
- - Returns: - Optional[int]: The value of the 'ImmunityPeriod' hyperparameter if the subnet exists, ``None`` otherwise. - - The 'ImmunityPeriod' is a critical aspect of the network's governance system, ensuring that new participants have a grace period to establish themselves and contribute to the network without facing immediate punitive actions. - """ - call = self._get_hyperparameter( - param_name="ImmunityPeriod", netuid=netuid, block=block - ) - return None if call is None else int(call) - - # Community uses this method - def get_uid_for_hotkey_on_subnet( - self, hotkey_ss58: str, netuid: int, block: Optional[int] = None - ) -> Optional[int]: - """ - Retrieves the unique identifier (UID) for a neuron's hotkey on a specific subnet. - - Args: - hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number for the query. - - Returns: - Optional[int]: The UID of the neuron if it is registered on the subnet, ``None`` otherwise. - - The UID is a critical identifier within the network, linking the neuron's hotkey to its operational and governance activities on a particular subnet. - """ - _result = self.query_subtensor("Uids", block, [netuid, hotkey_ss58]) - return getattr(_result, "value", None) - - # Community uses this method - def tempo(self, netuid: int, block: Optional[int] = None) -> Optional[int]: - """ - Returns network Tempo hyperparameter. - - Args: - netuid (int): The unique identifier of the subnetwork. - block (Optional[int]): The block number to retrieve the parameter from. If ``None``, the latest block is used. Default is ``None``. - - Returns: - Optional[int]: The value of the Tempo hyperparameter, or ``None`` if the subnetwork does not exist or the parameter is not found. 
- """ - call = self._get_hyperparameter(param_name="Tempo", netuid=netuid, block=block) - return None if call is None else int(call) - - # Community uses this method - def get_commitment(self, netuid: int, uid: int, block: Optional[int] = None) -> str: - """ - Retrieves the on-chain commitment for a specific neuron in the Bittensor network. - - Args: - netuid (int): The unique identifier of the subnetwork. - uid (int): The unique identifier of the neuron. - block (Optional[int]): The block number to retrieve the commitment from. If None, the latest block is used. Default is ``None``. - - Returns: - str: The commitment data as a string. - """ - metagraph = self.metagraph(netuid) - hotkey = metagraph.hotkeys[uid] # type: ignore - - metadata = get_metadata(self, netuid, hotkey, block) - commitment = metadata["info"]["fields"][0] # type: ignore - hex_data = commitment[list(commitment.keys())[0]][2:] # type: ignore - - return bytes.fromhex(hex_data).decode() - - # Community uses this via `bittensor.utils.weight_utils.process_weights_for_netuid` function. - def min_allowed_weights( - self, netuid: int, block: Optional[int] = None - ) -> Optional[int]: - """ - Returns network MinAllowedWeights hyperparameter. - - Args: - netuid (int): The unique identifier of the subnetwork. - block (Optional[int]): The block number to retrieve the parameter from. If ``None``, the latest block is used. Default is ``None``. - - Returns: - Optional[int]: The value of the MinAllowedWeights hyperparameter, or ``None`` if the subnetwork does not exist or the parameter is not found. - """ - call = self._get_hyperparameter( - param_name="MinAllowedWeights", block=block, netuid=netuid - ) - return None if call is None else int(call) - - # Community uses this via `bittensor.utils.weight_utils.process_weights_for_netuid` function. - def max_weight_limit( - self, netuid: int, block: Optional[int] = None - ) -> Optional[float]: - """ - Returns network MaxWeightsLimit hyperparameter. 
- - Args: - netuid (int): The unique identifier of the subnetwork. - block (Optional[int]): The block number to retrieve the parameter from. If ``None``, the latest block is used. Default is ``None``. - - Returns: - Optional[float]: The value of the MaxWeightsLimit hyperparameter, or ``None`` if the subnetwork does not exist or the parameter is not found. - """ - call = self._get_hyperparameter( - param_name="MaxWeightsLimit", block=block, netuid=netuid - ) - return None if call is None else u16_normalized_float(int(call)) - - # # Community uses this method. It is used in subtensor in neuron_info, and serving. - def get_prometheus_info( - self, netuid: int, hotkey_ss58: str, block: Optional[int] = None - ) -> Optional["PrometheusInfo"]: - """ - Returns the prometheus information for this hotkey account. - - Args: - netuid (int): The unique identifier of the subnetwork. - hotkey_ss58 (str): The SS58 address of the hotkey. - block (Optional[int]): The block number to retrieve the prometheus information from. If ``None``, the latest block is used. Default is ``None``. - - Returns: - Optional[bittensor.core.chain_data.prometheus_info.PrometheusInfo]: A PrometheusInfo object containing the prometheus information, or ``None`` if the prometheus information is not found. - """ - result = self.query_subtensor("Prometheus", block, [netuid, hotkey_ss58]) - if result is not None and hasattr(result, "value"): - return PrometheusInfo( - ip=networking.int_to_ip(result.value["ip"]), - ip_type=result.value["ip_type"], - port=result.value["port"], - version=result.value["version"], - block=result.value["block"], - ) - return None - - # Community uses this method - def subnet_exists(self, netuid: int, block: Optional[int] = None) -> bool: - """ - Checks if a subnet with the specified unique identifier (netuid) exists within the Bittensor network. - - Args: - netuid (int): The unique identifier of the subnet. 
- block (Optional[int]): The blockchain block number at which to check the subnet's existence. - - Returns: - bool: ``True`` if the subnet exists, False otherwise. - - This function is critical for verifying the presence of specific subnets in the network, enabling a deeper understanding of the network's structure and composition. - """ - _result = self.query_subtensor("NetworksAdded", block, [netuid]) - return getattr(_result, "value", False) - - # Metagraph uses this method - def bonds( - self, netuid: int, block: Optional[int] = None - ) -> list[tuple[int, list[tuple[int, int]]]]: - """ - Retrieves the bond distribution set by neurons within a specific subnet of the Bittensor network. Bonds represent the investments or commitments made by neurons in one another, indicating a level of trust and perceived value. This bonding mechanism is integral to the network's market-based approach to measuring and rewarding machine intelligence. - - Args: - netuid (int): The network UID of the subnet to query. - block (Optional[int]): The blockchain block number for the query. - - Returns: - list[tuple[int, list[tuple[int, int]]]]: A list of tuples mapping each neuron's UID to its bonds with other neurons. - - Understanding bond distributions is crucial for analyzing the trust dynamics and market behavior within the subnet. It reflects how neurons recognize and invest in each other's intelligence and contributions, supporting diverse and niche systems within the Bittensor ecosystem. - """ - b_map = [] - b_map_encoded = self.query_map_subtensor( - name="Bonds", block=block, params=[netuid] - ) - if b_map_encoded.records: - for uid, b in b_map_encoded: - b_map.append((uid.serialize(), b.serialize())) - - return b_map - - def get_subnet_burn_cost(self, block: Optional[int] = None) -> Optional[str]: - """ - Retrieves the burn cost for registering a new subnet within the Bittensor network. 
This cost represents the amount of Tao that needs to be locked or burned to establish a new subnet. - - Args: - block (Optional[int]): The blockchain block number for the query. - - Returns: - int: The burn cost for subnet registration. - - The subnet burn cost is an important economic parameter, reflecting the network's mechanisms for controlling the proliferation of subnets and ensuring their commitment to the network's long-term viability. - """ - lock_cost = self.query_runtime_api( - runtime_api="SubnetRegistrationRuntimeApi", - method="get_network_registration_cost", - params=[], - block=block, - ) - - if lock_cost is None: - return None - - return lock_cost - - # Metagraph uses this method - def neurons(self, netuid: int, block: Optional[int] = None) -> list["NeuronInfo"]: - """ - Retrieves a list of all neurons within a specified subnet of the Bittensor network. This function provides a snapshot of the subnet's neuron population, including each neuron's attributes and network interactions. - - Args: - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number for the query. - - Returns: - list[bittensor.core.chain_data.neuron_info.NeuronInfo]: A list of NeuronInfo objects detailing each neuron's characteristics in the subnet. - - Understanding the distribution and status of neurons within a subnet is key to comprehending the network's decentralized structure and the dynamics of its consensus and governance processes. 
- """ - neurons_lite = self.neurons_lite(netuid=netuid, block=block) - weights = self.weights(block=block, netuid=netuid) - bonds = self.bonds(block=block, netuid=netuid) - - weights_as_dict = {uid: w for uid, w in weights} - bonds_as_dict = {uid: b for uid, b in bonds} - - neurons = [ - NeuronInfo.from_weights_bonds_and_neuron_lite( - neuron_lite, weights_as_dict, bonds_as_dict - ) - for neuron_lite in neurons_lite - ] - - return neurons - - # Metagraph uses this method - def get_total_subnets(self, block: Optional[int] = None) -> Optional[int]: - """ - Retrieves the total number of subnets within the Bittensor network as of a specific blockchain block. - - Args: - block (Optional[int]): The blockchain block number for the query. - - Returns: - Optional[int]: The total number of subnets in the network. - - Understanding the total number of subnets is essential for assessing the network's growth and the extent of its decentralized infrastructure. - """ - _result = self.query_subtensor("TotalNetworks", block) - return getattr(_result, "value", None) - - # Metagraph uses this method - def get_subnets(self, block: Optional[int] = None) -> list[int]: - """ - Retrieves a list of all subnets currently active within the Bittensor network. This function provides an overview of the various subnets and their identifiers. - - Args: - block (Optional[int]): The blockchain block number for the query. - - Returns: - list[int]: A list of network UIDs representing each active subnet. - - This function is valuable for understanding the network's structure and the diversity of subnets available for neuron participation and collaboration. 
- """ - result = self.query_map_subtensor("NetworksAdded", block) - return ( - [network[0].value for network in result.records if network[1]] - if result and hasattr(result, "records") - else [] - ) - - # Metagraph uses this method - def neurons_lite( - self, netuid: int, block: Optional[int] = None - ) -> list["NeuronInfoLite"]: - """ - Retrieves a list of neurons in a 'lite' format from a specific subnet of the Bittensor network. This function provides a streamlined view of the neurons, focusing on key attributes such as stake and network participation. - - Args: - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number for the query. - - Returns: - list[bittensor.core.chain_data.neuron_info_lite.NeuronInfoLite]: A list of simplified neuron information for the subnet. - - This function offers a quick overview of the neuron population within a subnet, facilitating efficient analysis of the network's decentralized structure and neuron dynamics. - """ - hex_bytes_result = self.query_runtime_api( - runtime_api="NeuronInfoRuntimeApi", - method="get_neurons_lite", - params=[netuid], - block=block, - ) - - if hex_bytes_result is None: - return [] - - if hex_bytes_result.startswith("0x"): - bytes_result = bytes.fromhex(hex_bytes_result[2:]) - else: - bytes_result = bytes.fromhex(hex_bytes_result) - - return NeuronInfoLite.list_from_vec_u8(bytes_result) # type: ignore - - # Used in the `neurons` method which is used in metagraph.py - def weights( - self, netuid: int, block: Optional[int] = None - ) -> list[tuple[int, list[tuple[int, int]]]]: - """ - Retrieves the weight distribution set by neurons within a specific subnet of the Bittensor network. This function maps each neuron's UID to the weights it assigns to other neurons, reflecting the network's trust and value assignment mechanisms. - - Args: - netuid (int): The network UID of the subnet to query. - block (Optional[int]): The blockchain block number for the query. 
- - Returns: - list[tuple[int, list[tuple[int, int]]]]: A list of tuples mapping each neuron's UID to its assigned weights. - - The weight distribution is a key factor in the network's consensus algorithm and the ranking of neurons, influencing their influence and reward allocation within the subnet. - """ - w_map = [] - w_map_encoded = self.query_map_subtensor( - name="Weights", block=block, params=[netuid] - ) - if w_map_encoded.records: - for uid, w in w_map_encoded: - w_map.append((uid.serialize(), w.serialize())) - - return w_map - - # Used by community via `transfer_extrinsic` - @networking.ensure_connected - def get_balance(self, address: str, block: Optional[int] = None) -> "Balance": - """ - Retrieves the token balance of a specific address within the Bittensor network. This function queries the blockchain to determine the amount of Tao held by a given account. - - Args: - address (str): The Substrate address in ``ss58`` format. - block (Optional[int]): The blockchain block number at which to perform the query. - - Returns: - bittensor.utils.balance.Balance: The account balance at the specified block, represented as a Balance object. - - This function is important for monitoring account holdings and managing financial transactions within the Bittensor ecosystem. It helps in assessing the economic status and capacity of network participants. - """ - try: - - @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=logging) - def make_substrate_call_with_retry(): - return self.substrate.query( - module="System", - storage_function="Account", - params=[address], - block_hash=( - None if block is None else self.substrate.get_block_hash(block) - ), - ) - - result = make_substrate_call_with_retry() - - except RemainingScaleBytesNotEmptyException: - logging.error( - "Received a corrupted message. This likely points to an error with the network or subnet." 
- ) - return Balance(1000) - return Balance(result.value["data"]["free"]) - - # Used in community via `bittensor.core.subtensor.Subtensor.transfer` - @networking.ensure_connected - def get_transfer_fee( - self, wallet: "Wallet", dest: str, value: Union["Balance", float, int] - ) -> "Balance": - """ - Calculates the transaction fee for transferring tokens from a wallet to a specified destination address. This function simulates the transfer to estimate the associated cost, taking into account the current network conditions and transaction complexity. - - Args: - wallet (bittensor_wallet.Wallet): The wallet from which the transfer is initiated. - dest (str): The ``SS58`` address of the destination account. - value (Union[bittensor.utils.balance.Balance, float, int]): The amount of tokens to be transferred, specified as a Balance object, or in Tao (float) or Rao (int) units. - - Returns: - bittensor.utils.balance.Balance: The estimated transaction fee for the transfer, represented as a Balance object. - - Estimating the transfer fee is essential for planning and executing token transactions, ensuring that the wallet has sufficient funds to cover both the transfer amount and the associated costs. This function provides a crucial tool for managing financial operations within the Bittensor network. 
- """ - if isinstance(value, float): - value = Balance.from_tao(value) - elif isinstance(value, int): - value = Balance.from_rao(value) - - if isinstance(value, Balance): - call = self.substrate.compose_call( - call_module="Balances", - call_function="transfer_allow_death", - call_params={"dest": dest, "value": value.rao}, - ) - - try: - payment_info = self.substrate.get_payment_info( - call=call, keypair=wallet.coldkeypub - ) - except Exception as e: - logging.error( - f":cross_mark: Failed to get payment info: {e}" - ) - payment_info = {"partialFee": int(2e7)} # assume 0.02 Tao - - fee = Balance.from_rao(payment_info["partialFee"]) - return fee - else: - fee = Balance.from_rao(int(2e7)) - logging.error( - "To calculate the transaction fee, the value must be Balance, float, or int. Received type: %s. Fee " - "is %s", - type(value), - 2e7, - ) - return fee - - # Used in community via `bittensor.core.subtensor.Subtensor.transfer` - def get_existential_deposit( - self, block: Optional[int] = None - ) -> Optional["Balance"]: - """ - Retrieves the existential deposit amount for the Bittensor blockchain. The existential deposit is the minimum amount of TAO required for an account to exist on the blockchain. Accounts with balances below this threshold can be reaped to conserve network resources. - - Args: - block (Optional[int]): Block number at which to query the deposit amount. If ``None``, the current block is used. - - Returns: - Optional[bittensor.utils.balance.Balance]: The existential deposit amount, or ``None`` if the query fails. - - The existential deposit is a fundamental economic parameter in the Bittensor network, ensuring efficient use of storage and preventing the proliferation of dust accounts. 
- """ - result = self.query_constant( - module_name="Balances", constant_name="ExistentialDeposit", block=block - ) - if result is None or not hasattr(result, "value"): - return None - return Balance.from_rao(result.value) - - # Community uses this method - def commit_weights( - self, - wallet: "Wallet", - netuid: int, - salt: list[int], - uids: Union[NDArray[np.int64], list], - weights: Union[NDArray[np.int64], list], - version_key: int = settings.version_as_int, - wait_for_inclusion: bool = False, - wait_for_finalization: bool = False, - prompt: bool = False, - max_retries: int = 5, - ) -> tuple[bool, str]: - """ - Commits a hash of the neuron's weights to the Bittensor blockchain using the provided wallet. - This action serves as a commitment or snapshot of the neuron's current weight distribution. - - Args: - wallet (bittensor_wallet.Wallet): The wallet associated with the neuron committing the weights. - netuid (int): The unique identifier of the subnet. - salt (list[int]): list of randomly generated integers as salt to generated weighted hash. - uids (np.ndarray): NumPy array of neuron UIDs for which weights are being committed. - weights (np.ndarray): NumPy array of weight values corresponding to each UID. - version_key (int): Version key for compatibility with the network. Default is ``int representation of Bittensor version.``. - wait_for_inclusion (bool): Waits for the transaction to be included in a block. Default is ``False``. - wait_for_finalization (bool): Waits for the transaction to be finalized on the blockchain. Default is ``False``. - prompt (bool): If ``True``, prompts for user confirmation before proceeding. Default is ``False``. - max_retries (int): The number of maximum attempts to commit weights. Default is ``5``. - - Returns: - tuple[bool, str]: ``True`` if the weight commitment is successful, False otherwise. And `msg`, a string - value describing the success or potential error. 
- - This function allows neurons to create a tamper-proof record of their weight distribution at a specific point in time, - enhancing transparency and accountability within the Bittensor network. - """ - retries = 0 - success = False - message = "No attempt made. Perhaps it is too soon to commit weights!" - - logging.info( - f"Committing weights with params: netuid={netuid}, uids={uids}, weights={weights}, version_key={version_key}" - ) - - # Generate the hash of the weights - commit_hash = generate_weight_hash( - address=wallet.hotkey.ss58_address, - netuid=netuid, - uids=list(uids), - values=list(weights), - salt=salt, - version_key=version_key, - ) - - logging.info(f"Commit Hash: {commit_hash}") - - while retries < max_retries: - try: - success, message = commit_weights_extrinsic( - subtensor=self, - wallet=wallet, - netuid=netuid, - commit_hash=commit_hash, - wait_for_inclusion=wait_for_inclusion, - wait_for_finalization=wait_for_finalization, - prompt=prompt, - ) - if success: - break - except Exception as e: - logging.error(f"Error committing weights: {e}") - finally: - retries += 1 - - return success, message - - # Community uses this method - def reveal_weights( - self, - wallet: "Wallet", - netuid: int, - uids: Union[NDArray[np.int64], list], - weights: Union[NDArray[np.int64], list], - salt: Union[NDArray[np.int64], list], - version_key: int = settings.version_as_int, - wait_for_inclusion: bool = False, - wait_for_finalization: bool = False, - prompt: bool = False, - max_retries: int = 5, - ) -> tuple[bool, str]: - """ - Reveals the weights for a specific subnet on the Bittensor blockchain using the provided wallet. - This action serves as a revelation of the neuron's previously committed weight distribution. - - Args: - wallet (bittensor_wallet.Wallet): The wallet associated with the neuron revealing the weights. - netuid (int): The unique identifier of the subnet. - uids (np.ndarray): NumPy array of neuron UIDs for which weights are being revealed. 
- weights (np.ndarray): NumPy array of weight values corresponding to each UID. - salt (np.ndarray): NumPy array of salt values corresponding to the hash function. - version_key (int): Version key for compatibility with the network. Default is ``int representation of Bittensor version``. - wait_for_inclusion (bool): Waits for the transaction to be included in a block. Default is ``False``. - wait_for_finalization (bool): Waits for the transaction to be finalized on the blockchain. Default is ``False``. - prompt (bool): If ``True``, prompts for user confirmation before proceeding. Default is ``False``. - max_retries (int): The number of maximum attempts to reveal weights. Default is ``5``. - - Returns: - tuple[bool, str]: ``True`` if the weight revelation is successful, False otherwise. And `msg`, a string - value describing the success or potential error. - - This function allows neurons to reveal their previously committed weight distribution, ensuring transparency - and accountability within the Bittensor network. - """ - - retries = 0 - success = False - message = "No attempt made. Perhaps it is too soon to reveal weights!" - - while retries < max_retries: - try: - success, message = reveal_weights_extrinsic( - subtensor=self, - wallet=wallet, - netuid=netuid, - uids=list(uids), - weights=list(weights), - salt=list(salt), - version_key=version_key, - wait_for_inclusion=wait_for_inclusion, - wait_for_finalization=wait_for_finalization, - prompt=prompt, - ) - if success: - break - except Exception as e: - logging.error(f"Error revealing weights: {e}") - finally: - retries += 1 - - return success, message - - def difficulty(self, netuid: int, block: Optional[int] = None) -> Optional[int]: - """ - Retrieves the 'Difficulty' hyperparameter for a specified subnet in the Bittensor network. - - This parameter is instrumental in determining the computational challenge required for neurons to participate in consensus and validation processes. 
- - Args: - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number for the query. - - Returns: - Optional[int]: The value of the 'Difficulty' hyperparameter if the subnet exists, ``None`` otherwise. - - The 'Difficulty' parameter directly impacts the network's security and integrity by setting the computational effort required for validating transactions and participating in the network's consensus mechanism. - """ - call = self._get_hyperparameter( - param_name="Difficulty", netuid=netuid, block=block - ) - if call is None: - return None - return int(call) - - def recycle(self, netuid: int, block: Optional[int] = None) -> Optional["Balance"]: - """ - Retrieves the 'Burn' hyperparameter for a specified subnet. The 'Burn' parameter represents the amount of Tao that is effectively recycled within the Bittensor network. - - Args: - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number for the query. - - Returns: - Optional[Balance]: The value of the 'Burn' hyperparameter if the subnet exists, None otherwise. - - Understanding the 'Burn' rate is essential for analyzing the network registration usage, particularly how it is correlated with user activity and the overall cost of participation in a given subnet. 
- """ - call = self._get_hyperparameter(param_name="Burn", netuid=netuid, block=block) - return None if call is None else Balance.from_rao(int(call)) - - # Subnet 27 uses this method - _do_serve_prometheus = do_serve_prometheus - # Subnet 27 uses this method name - _do_serve_axon = do_serve_axon From fc4dcf1b748a621c5a8970a00ec649d7ba034478 Mon Sep 17 00:00:00 2001 From: Roman Date: Fri, 1 Nov 2024 14:42:20 -0700 Subject: [PATCH 22/27] update AsyncSubtensor (add methods, fix tests) --- bittensor/core/async_subtensor.py | 278 +++++++++++++++++- bittensor/core/extrinsics/async_weights.py | 257 ++++++++++++++++ bittensor/core/extrinsics/set_weights.py | 9 +- .../unit_tests/extrinsics/test_set_weights.py | 4 +- 4 files changed, 538 insertions(+), 10 deletions(-) create mode 100644 bittensor/core/extrinsics/async_weights.py diff --git a/bittensor/core/async_subtensor.py b/bittensor/core/async_subtensor.py index aa2b65fb30..9cff7aee1d 100644 --- a/bittensor/core/async_subtensor.py +++ b/bittensor/core/async_subtensor.py @@ -7,6 +7,7 @@ import typer from bittensor_wallet import Wallet from bittensor_wallet.utils import SS58_FORMAT +from numpy.typing import NDArray from rich.prompt import Confirm from scalecodec import GenericCall from scalecodec.base import RuntimeConfiguration @@ -28,6 +29,10 @@ root_register_extrinsic, ) from bittensor.core.extrinsics.async_transfer import transfer_extrinsic +from bittensor.core.extrinsics.async_weights import ( + commit_weights_extrinsic, + set_weights_extrinsic, +) from bittensor.core.settings import ( TYPE_REGISTRY, DEFAULTS, @@ -35,7 +40,9 @@ DELEGATES_DETAILS_URL, DEFAULT_NETWORK, ) +from bittensor.core.settings import version_as_int from bittensor.utils import ( + torch, ss58_to_vec_u8, format_error_message, decode_hex_identity_dict, @@ -152,9 +159,79 @@ async def encode_params( return param_data.to_hex() - async def get_all_subnet_netuids( - self, block_hash: Optional[str] = None - ) -> list[int]: + async def 
async def get_current_block(self) -> int:
    """
    Returns the current block number on the Bittensor blockchain. This function provides the latest block number, indicating the most recent state of the blockchain.

    Returns:
        int: The current chain block number.

    Knowing the current block number is essential for querying real-time data and performing time-sensitive operations on the blockchain. It serves as a reference point for network activities and data synchronization.
    """
    return await self.substrate.get_block_number()

async def is_hotkey_registered_any(
    self, hotkey_ss58: str, block: Optional[int] = None
) -> bool:
    """
    Checks if a neuron's hotkey is registered on any subnet within the Bittensor network.

    Args:
        hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey.
        block (Optional[int]): The blockchain block number at which to perform the check.

    Returns:
        bool: ``True`` if the hotkey is registered on any subnet, False otherwise.

    This function is essential for determining the network-wide presence and participation of a neuron.
    """
    # Downstream lookups key on a block *hash*, not a block number — translate
    # here instead of passing the raw int through as a hash.
    block_hash = (
        None if block is None else await self.substrate.get_block_hash(block)
    )
    return len(await self.get_netuids_for_hotkey(hotkey_ss58, block_hash)) > 0

async def get_subnet_burn_cost(self, block: Optional[int] = None) -> Optional[str]:
    """
    Retrieves the burn cost for registering a new subnet within the Bittensor network. This cost represents the amount of Tao that needs to be locked or burned to establish a new subnet.

    Args:
        block (Optional[int]): The blockchain block number for the query.

    Returns:
        Optional[str]: The burn cost for subnet registration, or ``None`` if it could not be retrieved.

    The subnet burn cost is an important economic parameter, reflecting the network's mechanisms for controlling the proliferation of subnets and ensuring their commitment to the network's long-term viability.
    """
    # `query_runtime_api` expects a block hash; convert the block number first.
    block_hash = (
        None if block is None else await self.substrate.get_block_hash(block)
    )
    return await self.query_runtime_api(
        runtime_api="SubnetRegistrationRuntimeApi",
        method="get_network_registration_cost",
        params=[],
        block_hash=block_hash,
    )

async def get_total_subnets(self, block: Optional[int] = None) -> Optional[int]:
    """
    Retrieves the total number of subnets within the Bittensor network as of a specific blockchain block.

    Args:
        block (Optional[int]): The blockchain block number for the query.

    Returns:
        Optional[int]: The total number of subnets in the network.

    Understanding the total number of subnets is essential for assessing the network's growth and the extent of its decentralized infrastructure.
    """
    # `substrate.query` expects a block hash; convert the block number first.
    block_hash = (
        None if block is None else await self.substrate.get_block_hash(block)
    )
    result = await self.substrate.query(
        module="SubtensorModule",
        storage_function="TotalNetworks",
        params=[],
        block_hash=block_hash,
    )
    return result

async def get_transfer_fee(
    self, wallet: "Wallet", dest: str, value: Union["Balance", float, int]
) -> "Balance":
    """
    Calculates the transaction fee for transferring tokens from a wallet to a specified destination address. This function simulates the transfer to estimate the associated cost, taking into account the current network conditions and transaction complexity.

    Args:
        wallet (bittensor_wallet.Wallet): The wallet from which the transfer is initiated.
        dest (str): The ``SS58`` address of the destination account.
        value (Union[bittensor.utils.balance.Balance, float, int]): The amount of tokens to be transferred, specified as a Balance object, or in Tao (float) or Rao (int) units.

    Returns:
        bittensor.utils.balance.Balance: The estimated transaction fee for the transfer, represented as a Balance object.

    Estimating the transfer fee is essential for planning and executing token transactions, ensuring that the wallet has sufficient funds to cover both the transfer amount and the associated costs. This function provides a crucial tool for managing financial operations within the Bittensor network.
    """
    # Normalize the amount: floats are interpreted as Tao, ints as Rao.
    if isinstance(value, float):
        value = Balance.from_tao(value)
    elif isinstance(value, int):
        value = Balance.from_rao(value)

    if isinstance(value, Balance):
        call = await self.substrate.compose_call(
            call_module="Balances",
            call_function="transfer_allow_death",
            call_params={"dest": dest, "value": value.rao},
        )

        try:
            payment_info = await self.substrate.get_payment_info(
                call=call, keypair=wallet.coldkeypub
            )
        except Exception as e:
            logging.error(f":cross_mark: Failed to get payment info: {e}")
            # Fall back to a conservative default so callers still get an estimate.
            payment_info = {"partialFee": int(2e7)}  # assume 0.02 Tao

        return Balance.from_rao(payment_info["partialFee"])

    # Unsupported type: report it and return the conservative default fee.
    logging.error(
        "To calculate the transaction fee, the value must be Balance, float, or int. Received type: %s. Fee "
        "is %s",
        type(value),
        2e7,
    )
    return Balance.from_rao(int(2e7))
async def get_uid_for_hotkey_on_subnet(
    self, hotkey_ss58: str, netuid: int, block: Optional[int] = None
) -> Optional[int]:
    """
    Retrieves the unique identifier (UID) for a neuron's hotkey on a specific subnet.

    Args:
        hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey.
        netuid (int): The unique identifier of the subnet.
        block (Optional[int]): The blockchain block number for the query.

    Returns:
        Optional[int]: The UID of the neuron if it is registered on the subnet, ``None`` otherwise.

    The UID is a critical identifier within the network, linking the neuron's hotkey to its operational and governance activities on a particular subnet.
    """
    # `substrate.query` is a coroutine on the async interface: without the
    # `await`, callers received a bare coroutine object instead of the UID.
    return await self.substrate.query(
        module="SubtensorModule",
        storage_function="Uids",
        params=[netuid, hotkey_ss58],
        block_hash=(
            None if block is None else await self.substrate.get_block_hash(block)
        ),
    )

# extrinsics

async def set_weights(
    self,
    wallet: "Wallet",
    netuid: int,
    uids: Union[NDArray[np.int64], "torch.LongTensor", list],
    weights: Union[NDArray[np.float32], "torch.FloatTensor", list],
    version_key: int = version_as_int,
    wait_for_inclusion: bool = False,
    wait_for_finalization: bool = False,
    max_retries: int = 5,
) -> tuple[bool, str]:
    """
    Sets the inter-neuronal weights for the specified neuron. This process involves specifying the influence or trust a neuron places on other neurons in the network, which is a fundamental aspect of Bittensor's decentralized learning architecture.

    Args:
        wallet (bittensor_wallet.Wallet): The wallet associated with the neuron setting the weights.
        netuid (int): The unique identifier of the subnet.
        uids (Union[NDArray[np.int64], torch.LongTensor, list]): The list of neuron UIDs that the weights are being set for.
        weights (Union[NDArray[np.float32], torch.FloatTensor, list]): The corresponding weights to be set for each UID.
        version_key (int): Version key for compatibility with the network. Default is ``int representation of Bittensor version.``.
        wait_for_inclusion (bool): Waits for the transaction to be included in a block. Default is ``False``.
        wait_for_finalization (bool): Waits for the transaction to be finalized on the blockchain. Default is ``False``.
        max_retries (int): The number of maximum attempts to set weights. Default is ``5``.

    Returns:
        tuple[bool, str]: ``True`` if the setting of weights is successful, False otherwise. And `msg`, a string value describing the success or potential error.

    This function is crucial in shaping the network's collective intelligence, where each neuron's learning and contribution are influenced by the weights it sets towards others.
    """
    # `get_uid_for_hotkey_on_subnet` is a coroutine and must be awaited.
    uid = await self.get_uid_for_hotkey_on_subnet(wallet.hotkey.ss58_address, netuid)
    retries = 0
    success = False
    message = "No attempt made. Perhaps it is too soon to set weights!"
    while (
        # NOTE(review): assumes `blocks_since_last_update` and
        # `weights_rate_limit` are coroutines on AsyncSubtensor — confirm.
        await self.blocks_since_last_update(netuid, uid)
        > await self.weights_rate_limit(netuid)
        and retries < max_retries
    ):
        try:
            logging.info(
                f"Setting weights for subnet #{netuid}. Attempt {retries + 1} of {max_retries}."
            )
            success, message = await set_weights_extrinsic(
                subtensor=self,
                wallet=wallet,
                netuid=netuid,
                uids=uids,
                weights=weights,
                version_key=version_key,
                wait_for_inclusion=wait_for_inclusion,
                wait_for_finalization=wait_for_finalization,
            )
            # Stop retrying once an attempt succeeds (mirrors `commit_weights`).
            if success:
                break
        except Exception as e:
            logging.error(f"Error setting weights: {e}")
        finally:
            retries += 1

    return success, message
async def commit_weights(
    self,
    wallet: "Wallet",
    netuid: int,
    salt: list[int],
    uids: Union[NDArray[np.int64], list],
    weights: Union[NDArray[np.int64], list],
    version_key: int = version_as_int,
    wait_for_inclusion: bool = False,
    wait_for_finalization: bool = False,
    max_retries: int = 5,
) -> tuple[bool, str]:
    """
    Commits a hash of the neuron's weights to the Bittensor blockchain using the provided wallet.
    This action serves as a commitment or snapshot of the neuron's current weight distribution.

    Args:
        wallet (bittensor_wallet.Wallet): The wallet associated with the neuron committing the weights.
        netuid (int): The unique identifier of the subnet.
        salt (list[int]): list of randomly generated integers as salt to generated weighted hash.
        uids (np.ndarray): NumPy array of neuron UIDs for which weights are being committed.
        weights (np.ndarray): NumPy array of weight values corresponding to each UID.
        version_key (int): Version key for compatibility with the network. Default is ``int representation of Bittensor version.``.
        wait_for_inclusion (bool): Waits for the transaction to be included in a block. Default is ``False``.
        wait_for_finalization (bool): Waits for the transaction to be finalized on the blockchain. Default is ``False``.
        max_retries (int): The number of maximum attempts to commit weights. Default is ``5``.

    Returns:
        tuple[bool, str]: ``True`` if the weight commitment is successful, False otherwise. And `msg`, a string value describing the success or potential error.

    This function allows neurons to create a tamper-proof record of their weight distribution at a specific point in time, enhancing transparency and accountability within the Bittensor network.
    """
    retries = 0
    success = False
    message = "No attempt made. Perhaps it is too soon to commit weights!"

    logging.info(
        f"Committing weights with params: netuid={netuid}, uids={uids}, weights={weights}, version_key={version_key}"
    )

    # The async `commit_weights_extrinsic` takes a single `commit_hash`, not
    # the raw uids/weights/salt — derive the hash the chain expects here.
    # NOTE(review): kwarg names per `bittensor.utils.weight_utils.generate_weight_hash` — confirm.
    commit_hash = generate_weight_hash(
        address=wallet.hotkey.ss58_address,
        netuid=netuid,
        uids=list(uids),
        values=list(weights),
        salt=salt,
        version_key=version_key,
    )

    while retries < max_retries:
        try:
            # The extrinsic helper is a coroutine and must be awaited.
            success, message = await commit_weights_extrinsic(
                subtensor=self,
                wallet=wallet,
                netuid=netuid,
                commit_hash=commit_hash,
                wait_for_inclusion=wait_for_inclusion,
                wait_for_finalization=wait_for_finalization,
            )
            if success:
                break
        except Exception as e:
            logging.error(f"Error committing weights: {e}")
        finally:
            retries += 1

    return success, message
async def _do_set_weights(
    subtensor: "AsyncSubtensor",
    wallet: "Wallet",
    uids: list[int],
    vals: list[int],
    netuid: int,
    version_key: int = version_as_int,
    wait_for_inclusion: bool = False,
    wait_for_finalization: bool = False,
) -> tuple[bool, Optional[str]]:  # (success, error_message)
    """
    Internal method to send a transaction to the Bittensor blockchain, setting weights
    for specified neurons. This method constructs and submits the transaction, handling
    retries and blockchain communication.

    Args:
        subtensor (bittensor.core.async_subtensor.AsyncSubtensor): Async Subtensor instance.
        wallet (bittensor_wallet.Wallet): The wallet associated with the neuron setting the weights.
        uids (list[int]): List of neuron UIDs for which weights are being set.
        vals (list[int]): List of weight values corresponding to each UID.
        netuid (int): Unique identifier for the network.
        version_key (int, optional): Version key for compatibility with the network.
        wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block.
        wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain.

    Returns:
        tuple[bool, Optional[str]]: A tuple containing a success flag and an optional error message.

    This method is vital for the dynamic weighting mechanism in Bittensor, where neurons adjust their
    trust in other neurons based on observed performance and contributions.
    """
    call = await subtensor.substrate.compose_call(
        call_module="SubtensorModule",
        call_function="set_weights",
        call_params={
            "dests": uids,
            "weights": vals,
            "netuid": netuid,
            "version_key": version_key,
        },
    )
    # Period dictates how long the extrinsic will stay as part of waiting pool
    extrinsic = await subtensor.substrate.create_signed_extrinsic(
        call=call,
        keypair=wallet.hotkey,
        era={"period": 5},
    )
    response = await subtensor.substrate.submit_extrinsic(
        extrinsic,
        wait_for_inclusion=wait_for_inclusion,
        wait_for_finalization=wait_for_finalization,
    )
    # We only wait here if we expect finalization.
    if not wait_for_finalization and not wait_for_inclusion:
        return True, "Not waiting for finalization or inclusion."

    # On the async substrate interface both of these are awaitable; without
    # the awaits the success check always took the truthy (coroutine) branch.
    await response.process_events()
    if await response.is_success:
        return True, "Successfully set weights."
    else:
        return False, format_error_message(
            response.error_message, substrate=subtensor.substrate
        )


async def set_weights_extrinsic(
    subtensor: "AsyncSubtensor",
    wallet: "Wallet",
    netuid: int,
    uids: Union[NDArray[np.int64], "torch.LongTensor", list],
    weights: Union[NDArray[np.float32], "torch.FloatTensor", list],
    version_key: int = 0,
    wait_for_inclusion: bool = False,
    wait_for_finalization: bool = False,
) -> tuple[bool, str]:
    """Sets the given weights and values on chain for wallet hotkey account.

    Args:
        subtensor (bittensor.core.async_subtensor.AsyncSubtensor): Async Subtensor instance.
        wallet (bittensor_wallet.Wallet): Bittensor wallet object.
        netuid (int): The ``netuid`` of the subnet to set weights for.
        uids (Union[NDArray[np.int64], torch.LongTensor, list]): The ``uint64`` uids of destination neurons.
        weights (Union[NDArray[np.float32], torch.FloatTensor, list]): The weights to set. These must be ``float`` s and correspond to the passed ``uid`` s.
        version_key (int): The version key of the validator.
        wait_for_inclusion (bool): If set, waits for the extrinsic to enter a block before returning ``true``, or returns ``false`` if the extrinsic fails to enter the block within the timeout.
        wait_for_finalization (bool): If set, waits for the extrinsic to be finalized on the chain before returning ``true``, or returns ``false`` if the extrinsic fails to be finalized within the timeout.

    Returns:
        success (bool): Flag is ``true`` if extrinsic was finalized or included in the block. If we did not wait for finalization / inclusion, the response is ``true``.
    """
    # First convert types.
    if use_torch():
        if isinstance(uids, list):
            uids = torch.tensor(uids, dtype=torch.int64)
        if isinstance(weights, list):
            weights = torch.tensor(weights, dtype=torch.float32)
    else:
        if isinstance(uids, list):
            uids = np.array(uids, dtype=np.int64)
        if isinstance(weights, list):
            weights = np.array(weights, dtype=np.float32)

    # Reformat and normalize.
    weight_uids, weight_vals = weight_utils.convert_weights_and_uids_for_emit(
        uids, weights
    )

    # f-prefix was missing here, so the literal "{subtensor.network}" was logged.
    logging.info(f":satellite: Setting weights on {subtensor.network} ...")
    try:
        success, error_message = await _do_set_weights(
            subtensor=subtensor,
            wallet=wallet,
            netuid=netuid,
            uids=weight_uids,
            vals=weight_vals,
            version_key=version_key,
            wait_for_finalization=wait_for_finalization,
            wait_for_inclusion=wait_for_inclusion,
        )

        if not wait_for_finalization and not wait_for_inclusion:
            return True, "Not waiting for finalization or inclusion."

        if success is True:
            message = "Successfully set weights and Finalized."
            logging.success(f":white_heavy_check_mark: {message}")
            return True, message
        else:
            logging.error(f"Failed set weights. Error: {error_message}")
            return False, error_message

    except Exception as error:
        logging.error(f":cross_mark: Failed set weights. Error: {error}")
        return False, str(error)
async def _do_commit_weights(
    subtensor: "AsyncSubtensor",
    wallet: "Wallet",
    netuid: int,
    commit_hash: str,
    wait_for_inclusion: bool = False,
    wait_for_finalization: bool = False,
) -> tuple[bool, Optional[str]]:
    """
    Internal method to send a transaction to the Bittensor blockchain, committing the hash of a neuron's weights.
    This method constructs and submits the transaction, handling retries and blockchain communication.

    Args:
        subtensor (bittensor.core.async_subtensor.AsyncSubtensor): Async subtensor instance used for blockchain interaction.
        wallet (bittensor_wallet.Wallet): The wallet associated with the neuron committing the weights.
        netuid (int): The unique identifier of the subnet.
        commit_hash (str): The hash of the neuron's weights to be committed.
        wait_for_inclusion (bool): Waits for the transaction to be included in a block.
        wait_for_finalization (bool): Waits for the transaction to be finalized on the blockchain.

    Returns:
        tuple[bool, Optional[str]]: A tuple containing a success flag and an optional error message.

    This method ensures that the weight commitment is securely recorded on the Bittensor blockchain, providing a verifiable record of the neuron's weight distribution at a specific point in time.
    """
    call = await subtensor.substrate.compose_call(
        call_module="SubtensorModule",
        call_function="commit_weights",
        call_params={
            "netuid": netuid,
            "commit_hash": commit_hash,
        },
    )
    extrinsic = await subtensor.substrate.create_signed_extrinsic(
        call=call,
        keypair=wallet.hotkey,
    )
    # `substrate.submit_extrinsic` takes the extrinsic and the wait flags only;
    # the stray `substrate=` keyword previously passed here is not part of its
    # signature (compare `_do_set_weights` in this module).
    response = await subtensor.substrate.submit_extrinsic(
        extrinsic,
        wait_for_inclusion=wait_for_inclusion,
        wait_for_finalization=wait_for_finalization,
    )

    if not wait_for_finalization and not wait_for_inclusion:
        return True, None

    await response.process_events()
    if await response.is_success:
        return True, None
    else:
        return False, format_error_message(
            response.error_message, substrate=subtensor.substrate
        )


async def commit_weights_extrinsic(
    subtensor: "AsyncSubtensor",
    wallet: "Wallet",
    netuid: int,
    commit_hash: str,
    wait_for_inclusion: bool = False,
    wait_for_finalization: bool = False,
) -> tuple[bool, str]:
    """
    Commits a hash of the neuron's weights to the Bittensor blockchain using the provided wallet.
    This function is a wrapper around the `_do_commit_weights` method, handling logging and error messages.

    Args:
        subtensor (bittensor.core.async_subtensor.AsyncSubtensor): Async subtensor instance used for blockchain interaction.
        wallet (bittensor_wallet.Wallet): The wallet associated with the neuron committing the weights.
        netuid (int): The unique identifier of the subnet.
        commit_hash (str): The hash of the neuron's weights to be committed.
        wait_for_inclusion (bool): Waits for the transaction to be included in a block.
        wait_for_finalization (bool): Waits for the transaction to be finalized on the blockchain.

    Returns:
        tuple[bool, str]: ``True`` if the weight commitment is successful, False otherwise. And `msg`, a string
        value describing the success or potential error.

    This function provides a user-friendly interface for committing weights to the Bittensor blockchain, ensuring proper error handling and user interaction when required.
    """
    success, error_message = await _do_commit_weights(
        subtensor=subtensor,
        wallet=wallet,
        netuid=netuid,
        commit_hash=commit_hash,
        wait_for_inclusion=wait_for_inclusion,
        wait_for_finalization=wait_for_finalization,
    )

    if success:
        success_message = "Successfully committed weights."
        logging.info(success_message)
        return True, success_message
    else:
        logging.error(f"Failed to commit weights: {error_message}")
        return False, error_message
@@ -99,7 +99,9 @@ def make_substrate_call_with_retry(): if response.is_success: return True, "Successfully set weights." else: - return False, response.error_message + return False, format_error_message( + response.error_message, substrate=self.substrate + ) return make_substrate_call_with_retry() @@ -184,9 +186,6 @@ def set_weights_extrinsic( ) return True, "Successfully set weights and Finalized." else: - error_message = format_error_message( - error_message, substrate=subtensor.substrate - ) logging.error(error_message) return False, error_message diff --git a/tests/unit_tests/extrinsics/test_set_weights.py b/tests/unit_tests/extrinsics/test_set_weights.py index 9c32fc9bdf..fbeee34dc0 100644 --- a/tests/unit_tests/extrinsics/test_set_weights.py +++ b/tests/unit_tests/extrinsics/test_set_weights.py @@ -61,7 +61,7 @@ def mock_wallet(): True, True, False, - "Subtensor returned `UnknownError(UnknownType)` error. This means: `Unknown Description`.", + "Mock error message", ), ([1, 2], [0.5, 0.5], 0, True, True, True, False, False, "Prompt refused."), ], @@ -226,7 +226,7 @@ def test_do_set_weights_is_not_success(mock_subtensor, mocker): mock_subtensor.substrate.submit_extrinsic.return_value.process_events.assert_called_once() assert result == ( False, - mock_subtensor.substrate.submit_extrinsic.return_value.error_message, + "Subtensor returned `UnknownError(UnknownType)` error. 
This means: `Unknown Description`.", ) From 318436befc913570966b73f04b70ec120e299a5c Mon Sep 17 00:00:00 2001 From: Roman <167799377+roman-opentensor@users.noreply.github.com> Date: Mon, 4 Nov 2024 09:14:35 -0800 Subject: [PATCH 23/27] Update bittensor/core/async_subtensor.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Paweł Polewicz --- bittensor/core/async_subtensor.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/bittensor/core/async_subtensor.py b/bittensor/core/async_subtensor.py index 9cff7aee1d..75166bc685 100644 --- a/bittensor/core/async_subtensor.py +++ b/bittensor/core/async_subtensor.py @@ -206,9 +206,6 @@ async def get_subnet_burn_cost(self, block: Optional[int] = None) -> Optional[st block_hash=block, ) - if lock_cost is None: - return None - return lock_cost async def get_total_subnets(self, block: Optional[int] = None) -> Optional[int]: From fc6f1e76e5f893c192c371772e9ffbf4e4674dd1 Mon Sep 17 00:00:00 2001 From: Roman Date: Mon, 4 Nov 2024 11:13:02 -0800 Subject: [PATCH 24/27] fix await properties and remove double format_error_message call --- bittensor/core/extrinsics/async_weights.py | 4 ++-- bittensor/core/extrinsics/set_weights.py | 3 --- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/bittensor/core/extrinsics/async_weights.py b/bittensor/core/extrinsics/async_weights.py index 031cdfa9cd..82f2dc6dc3 100644 --- a/bittensor/core/extrinsics/async_weights.py +++ b/bittensor/core/extrinsics/async_weights.py @@ -73,8 +73,8 @@ async def _do_set_weights( if not wait_for_finalization and not wait_for_inclusion: return True, "Not waiting for finalization or inclusion." - response.process_events() - if response.is_success: + await response.process_events() + if await response.is_success: return True, "Successfully set weights." 
else: return False, format_error_message( diff --git a/bittensor/core/extrinsics/set_weights.py b/bittensor/core/extrinsics/set_weights.py index efdd16e0a6..ceab305b42 100644 --- a/bittensor/core/extrinsics/set_weights.py +++ b/bittensor/core/extrinsics/set_weights.py @@ -181,9 +181,6 @@ def set_weights_extrinsic( logging.success(f"Finalized! Set weights: {str(success)}") return True, "Successfully set weights and Finalized." else: - error_message = format_error_message( - error_message, substrate=subtensor.substrate - ) logging.error(error_message) return False, error_message From dbf91f2ac3ba1be1ea5ed71ad58cf91d8cf73d14 Mon Sep 17 00:00:00 2001 From: Roman Date: Mon, 4 Nov 2024 14:10:49 -0800 Subject: [PATCH 25/27] fix review comments --- bittensor/core/async_subtensor.py | 554 ++++++++++++++---------- bittensor/core/extrinsics/async_root.py | 92 ++-- 2 files changed, 363 insertions(+), 283 deletions(-) diff --git a/bittensor/core/async_subtensor.py b/bittensor/core/async_subtensor.py index 75166bc685..e7aecde124 100644 --- a/bittensor/core/async_subtensor.py +++ b/bittensor/core/async_subtensor.py @@ -8,7 +8,6 @@ from bittensor_wallet import Wallet from bittensor_wallet.utils import SS58_FORMAT from numpy.typing import NDArray -from rich.prompt import Confirm from scalecodec import GenericCall from scalecodec.base import RuntimeConfiguration from scalecodec.type_registry import load_type_registry_preset @@ -55,6 +54,7 @@ from bittensor.utils.balance import Balance from bittensor.utils.btlogging import logging from bittensor.utils.delegates_details import DelegatesDetails +from bittensor.utils.weight_utils import generate_weight_hash class ParamWithTypes(TypedDict): @@ -170,29 +170,47 @@ async def get_current_block(self): """ return await self.substrate.get_block_number() + async def get_block_hash(self, block_id: Optional[int] = None): + """ + Retrieves the hash of a specific block on the Bittensor blockchain. 
async def get_block_hash(self, block_id: Optional[int] = None) -> str:
    """
    Retrieves the hash of a specific block on the Bittensor blockchain. The block hash is a unique identifier representing the cryptographic hash of the block's content, ensuring its integrity and immutability.

    Args:
        block_id (Optional[int]): The block number for which the hash is to be retrieved. Defaults to the current block.

    Returns:
        str: The cryptographic hash of the specified block.

    The block hash is a fundamental aspect of blockchain technology, providing a secure reference to each block's data. It is crucial for verifying transactions, ensuring data consistency, and maintaining the trustworthiness of the blockchain.
    """
    # Compare against None explicitly: block 0 (the genesis block) is falsy,
    # and a plain truthiness test silently substituted the current block.
    return await self.substrate.get_block_hash(
        block_id if block_id is not None else await self.get_current_block()
    )

async def is_hotkey_registered_any(
    self, hotkey_ss58: str, block_hash: Optional[str] = None
) -> bool:
    """
    Checks if a neuron's hotkey is registered on any subnet within the Bittensor network.

    Args:
        hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey.
        block_hash (Optional[str]): The blockchain block_hash representation of block id.

    Returns:
        bool: ``True`` if the hotkey is registered on any subnet, False otherwise.

    This function is essential for determining the network-wide presence and participation of a neuron.
    """
    return len(await self.get_netuids_for_hotkey(hotkey_ss58, block_hash)) > 0

async def get_subnet_burn_cost(
    self, block_hash: Optional[str] = None
) -> Optional[str]:
    """
    Retrieves the burn cost for registering a new subnet within the Bittensor network. This cost represents the amount of Tao that needs to be locked or burned to establish a new subnet.

    Args:
        block_hash (Optional[str]): The blockchain block_hash of the block id.

    Returns:
        Optional[str]: The burn cost for subnet registration, or ``None`` if it could not be retrieved.

    The subnet burn cost is an important economic parameter, reflecting the network's mechanisms for controlling the proliferation of subnets and ensuring their commitment to the network's long-term viability.
    """
    lock_cost = await self.query_runtime_api(
        runtime_api="SubnetRegistrationRuntimeApi",
        method="get_network_registration_cost",
        params=[],
        block_hash=block_hash,
    )
    return lock_cost

async def get_total_subnets(
    self, block_hash: Optional[str] = None
) -> Optional[int]:
    """
    Retrieves the total number of subnets within the Bittensor network as of a specific blockchain block.

    Args:
        block_hash (Optional[str]): The blockchain block_hash representation of block id.

    Returns:
        Optional[int]: The total number of subnets in the network.

    Understanding the total number of subnets is essential for assessing the network's growth and the extent of its decentralized infrastructure.
    """
    # `get_block_hash()` already defaults to the current block, so the nested
    # `get_block_hash(get_current_block())` round-trip is unnecessary.
    result = await self.substrate.query(
        module="SubtensorModule",
        storage_function="TotalNetworks",
        params=[],
        block_hash=(
            block_hash if block_hash is not None else await self.get_block_hash()
        ),
    )
    return result
@@ -254,20 +279,20 @@ async def is_hotkey_delegate( self, hotkey_ss58: str, block_hash: Optional[str] = None, - reuse_block: Optional[bool] = False, + reuse_block: bool = False, ) -> bool: """ - Determines whether a given hotkey (public key) is a delegate on the Bittensor network. This function - checks if the neuron associated with the hotkey is part of the network's delegation system. + Determines whether a given hotkey (public key) is a delegate on the Bittensor network. This function checks if the neuron associated with the hotkey is part of the network's delegation system. - :param hotkey_ss58: The SS58 address of the neuron's hotkey. - :param block_hash: The hash of the blockchain block number for the query. - :param reuse_block: Whether to reuse the last-used block hash. + Arguments: + hotkey_ss58 (str): The SS58 address of the neuron's hotkey. + block_hash (Optional[str]): The hash of the blockchain block number for the query. + reuse_block (Optional[bool]): Whether to reuse the last-used block hash. - :return: `True` if the hotkey is a delegate, `False` otherwise. + Returns: + `True` if the hotkey is a delegate, `False` otherwise. - Being a delegate is a significant status within the Bittensor network, indicating a neuron's - involvement in consensus and governance processes. + Being a delegate is a significant status within the Bittensor network, indicating a neuron's involvement in consensus and governance processes. """ delegates = await self.get_delegates( block_hash=block_hash, reuse_block=reuse_block @@ -275,21 +300,24 @@ async def is_hotkey_delegate( return hotkey_ss58 in [info.hotkey_ss58 for info in delegates] async def get_delegates( - self, block_hash: Optional[str] = None, reuse_block: Optional[bool] = False + self, block_hash: Optional[str] = None, reuse_block: bool = False ) -> list[DelegateInfo]: """ Fetches all delegates on the chain - :param block_hash: hash of the blockchain block number for the query. 
- :param reuse_block: whether to reuse the last-used block hash. + Arguments: + block_hash (Optional[str]): hash of the blockchain block number for the query. + reuse_block (Optional[bool]): whether to reuse the last-used block hash. - :return: List of DelegateInfo objects, or an empty list if there are no delegates. + Returns: + List of DelegateInfo objects, or an empty list if there are no delegates. """ hex_bytes_result = await self.query_runtime_api( runtime_api="DelegateInfoRuntimeApi", method="get_delegates", params=[], block_hash=block_hash, + reuse_block=reuse_block, ) if hex_bytes_result is not None: try: @@ -308,17 +336,17 @@ async def get_stake_info_for_coldkey( reuse_block: bool = False, ) -> list[StakeInfo]: """ - Retrieves stake information associated with a specific coldkey. This function provides details - about the stakes held by an account, including the staked amounts and associated delegates. + Retrieves stake information associated with a specific coldkey. This function provides details about the stakes held by an account, including the staked amounts and associated delegates. - :param coldkey_ss58: The ``SS58`` address of the account's coldkey. - :param block_hash: The hash of the blockchain block number for the query. - :param reuse_block: Whether to reuse the last-used block hash. + Arguments: + coldkey_ss58 (str): The ``SS58`` address of the account's coldkey. + block_hash (Optional[str]): The hash of the blockchain block number for the query. + reuse_block (bool): Whether to reuse the last-used block hash. - :return: A list of StakeInfo objects detailing the stake allocations for the account. + Returns: + A list of StakeInfo objects detailing the stake allocations for the account. - Stake information is vital for account holders to assess their investment and participation - in the network's delegation and consensus processes. 
+ Stake information is vital for account holders to assess their investment and participation in the network's delegation and consensus processes. """ encoded_coldkey = ss58_to_vec_u8(coldkey_ss58) @@ -341,14 +369,18 @@ async def get_stake_info_for_coldkey( return StakeInfo.list_from_vec_u8(bytes_result) async def get_stake_for_coldkey_and_hotkey( - self, hotkey_ss58: str, coldkey_ss58: str, block_hash: Optional[str] + self, hotkey_ss58: str, coldkey_ss58: str, block_hash: Optional[str] = None ) -> Balance: """ Retrieves stake information associated with a specific coldkey and hotkey. - :param hotkey_ss58: the hotkey SS58 address to query - :param coldkey_ss58: the coldkey SS58 address to query - :param block_hash: the hash of the blockchain block number for the query. - :return: Stake Balance for the given coldkey and hotkey + + Arguments: + hotkey_ss58 (str): the hotkey SS58 address to query + coldkey_ss58 (str): the coldkey SS58 address to query + block_hash (Optional[str]): the hash of the blockchain block number for the query. + + Returns: + Stake Balance for the given coldkey and hotkey """ _result = await self.substrate.query( module="SubtensorModule", @@ -362,25 +394,24 @@ async def query_runtime_api( self, runtime_api: str, method: str, - params: Optional[Union[list[list[int]], dict[str, int]]], + params: Optional[Union[list[list[int]], dict[str, int], list[int]]], block_hash: Optional[str] = None, - reuse_block: Optional[bool] = False, + reuse_block: bool = False, ) -> Optional[str]: """ - Queries the runtime API of the Bittensor blockchain, providing a way to interact with the underlying - runtime and retrieve data encoded in Scale Bytes format. This function is essential for advanced users - who need to interact with specific runtime methods and decode complex data types. + Queries the runtime API of the Bittensor blockchain, providing a way to interact with the underlying runtime and retrieve data encoded in Scale Bytes format. 
This function is essential for advanced users who need to interact with specific runtime methods and decode complex data types. - :param runtime_api: The name of the runtime API to query. - :param method: The specific method within the runtime API to call. - :param params: The parameters to pass to the method call. - :param block_hash: The hash of the blockchain block number at which to perform the query. - :param reuse_block: Whether to reuse the last-used block hash. + Arguments: + runtime_api (str): The name of the runtime API to query. + method (str): The specific method within the runtime API to call. + params (Optional[Union[list[list[int]], dict[str, int]]]): The parameters to pass to the method call. + block_hash (Optional[str]): The hash of the blockchain block number at which to perform the query. + reuse_block (bool): Whether to reuse the last-used block hash. - :return: The Scale Bytes encoded result from the runtime API call, or ``None`` if the call fails. + Returns: + The Scale Bytes encoded result from the runtime API call, or ``None`` if the call fails. - This function enables access to the deeper layers of the Bittensor blockchain, allowing for detailed - and specific interactions with the network's runtime environment. + This function enables access to the deeper layers of the Bittensor blockchain, allowing for detailed and specific interactions with the network's runtime environment. 
""" call_definition = TYPE_REGISTRY["runtime_api"][runtime_api]["methods"][method] @@ -396,6 +427,7 @@ async def query_runtime_api( json_result = await self.substrate.rpc_request( method="state_call", params=[api_method, data, block_hash] if block_hash else [api_method, data], + reuse_block_hash=reuse_block, ) if json_result is None: @@ -419,14 +451,16 @@ async def get_balance( self, *addresses: str, block_hash: Optional[str] = None, - reuse_block: bool = False, ) -> dict[str, Balance]: """ Retrieves the balance for given coldkey(s) - :param addresses: coldkey addresses(s) - :param block_hash: the block hash, optional - :param reuse_block: Whether to reuse the last-used block hash when retrieving info. - :return: dict of {address: Balance objects} + + Arguments: + addresses (str): coldkey addresses(s). + block_hash (Optional[str]): the block hash, optional. + + Returns: + Dict of {address: Balance objects}. """ calls = [ ( @@ -497,16 +531,16 @@ async def get_total_stake_for_coldkey( self, *ss58_addresses, block_hash: Optional[str] = None, - reuse_block: bool = False, ) -> dict[str, Balance]: """ Returns the total stake held on a coldkey. - :param ss58_addresses: The SS58 address(es) of the coldkey(s) - :param block_hash: The hash of the block number to retrieve the stake from. - :param reuse_block: Whether to reuse the last-used block hash when retrieving info. + Arguments: + ss58_addresses (tuple[str]): The SS58 address(es) of the coldkey(s) + block_hash (str): The hash of the block number to retrieve the stake from. - :return: {address: Balance objects} + Returns: + Dict in view {address: Balance objects}. """ calls = [ ( @@ -534,11 +568,13 @@ async def get_total_stake_for_hotkey( """ Returns the total stake held on a hotkey. - :param ss58_addresses: The SS58 address(es) of the hotkey(s) - :param block_hash: The hash of the block number to retrieve the stake from. - :param reuse_block: Whether to reuse the last-used block hash when retrieving info. 
+ Arguments: + ss58_addresses (tuple[str]): The SS58 address(es) of the hotkey(s) + block_hash (str): The hash of the block number to retrieve the stake from. + reuse_block (bool): Whether to reuse the last-used block hash when retrieving info. - :return: {address: Balance objects} + Returns: + Dict {address: Balance objects}. """ results = await self.substrate.query_multiple( params=[s for s in ss58_addresses], @@ -556,15 +592,15 @@ async def get_netuids_for_hotkey( reuse_block: bool = False, ) -> list[int]: """ - Retrieves a list of subnet UIDs (netuids) for which a given hotkey is a member. This function - identifies the specific subnets within the Bittensor network where the neuron associated with - the hotkey is active. + Retrieves a list of subnet UIDs (netuids) for which a given hotkey is a member. This function identifies the specific subnets within the Bittensor network where the neuron associated with the hotkey is active. - :param hotkey_ss58: The ``SS58`` address of the neuron's hotkey. - :param block_hash: The hash of the blockchain block number at which to perform the query. - :param reuse_block: Whether to reuse the last-used block hash when retrieving info. + Arguments: + hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. + block_hash (Optional[str]): The hash of the blockchain block number at which to perform the query. + reuse_block (Optional[bool]): Whether to reuse the last-used block hash when retrieving info. - :return: A list of netuids where the neuron is a member. + Returns: + A list of netuids where the neuron is a member. """ result = await self.substrate.query_map( @@ -586,11 +622,13 @@ async def subnet_exists( """ Checks if a subnet with the specified unique identifier (netuid) exists within the Bittensor network. - :param netuid: The unique identifier of the subnet. - :param block_hash: The hash of the blockchain block number at which to check the subnet existence. 
- :param reuse_block: Whether to reuse the last-used block hash. + Arguments: + netuid (int): The unique identifier of the subnet. + block_hash (Optional[str]): The hash of the blockchain block number at which to check the subnet existence. + reuse_block (bool): Whether to reuse the last-used block hash. - :return: `True` if the subnet exists, `False` otherwise. + Returns: + `True` if the subnet exists, `False` otherwise. This function is critical for verifying the presence of specific subnets in the network, enabling a deeper understanding of the network's structure and composition. @@ -614,12 +652,14 @@ async def get_hyperparameter( """ Retrieves a specified hyperparameter for a specific subnet. - :param param_name: The name of the hyperparameter to retrieve. - :param netuid: The unique identifier of the subnet. - :param block_hash: The hash of blockchain block number for the query. - :param reuse_block: Whether to reuse the last-used block hash. + Arguments: + param_name (str): The name of the hyperparameter to retrieve. + netuid (int): The unique identifier of the subnet. + block_hash (Optional[str]): The hash of blockchain block number for the query. + reuse_block (bool): Whether to reuse the last-used block hash. - :return: The value of the specified hyperparameter if the subnet exists, or None + Returns: + The value of the specified hyperparameter if the subnet exists, or None """ if not await self.subnet_exists(netuid, block_hash): print("subnet does not exist") @@ -649,13 +689,15 @@ async def filter_netuids_by_registered_hotkeys( """ Filters a given list of all netuids for certain specified netuids and hotkeys - :param all_netuids: A list of netuids to filter. - :param filter_for_netuids: A subset of all_netuids to filter from the main list - :param all_hotkeys: Hotkeys to filter from the main list - :param block_hash: hash of the blockchain block number at which to perform the query. 
- :param reuse_block: whether to reuse the last-used blockchain hash when retrieving info. + Arguments: + all_netuids (Iterable[int]): A list of netuids to filter. + filter_for_netuids (Iterable[int]): A subset of all_netuids to filter from the main list + all_hotkeys (Iterable[Wallet]): Hotkeys to filter from the main list + block_hash (str): hash of the blockchain block number at which to perform the query. + reuse_block (bool): whether to reuse the last-used blockchain hash when retrieving info. - :return: the filtered list of netuids. + Returns: + The filtered list of netuids. """ netuids_with_registered_hotkeys = [ item @@ -695,17 +737,18 @@ async def get_existential_deposit( self, block_hash: Optional[str] = None, reuse_block: bool = False ) -> Balance: """ - Retrieves the existential deposit amount for the Bittensor blockchain. The existential deposit - is the minimum amount of TAO required for an account to exist on the blockchain. Accounts with - balances below this threshold can be reaped to conserve network resources. + Retrieves the existential deposit amount for the Bittensor blockchain. + The existential deposit is the minimum amount of TAO required for an account to exist on the blockchain. + Accounts with balances below this threshold can be reaped to conserve network resources. - :param block_hash: Block hash at which to query the deposit amount. If `None`, the current block is used. - :param reuse_block: Whether to reuse the last-used blockchain block hash. + Arguments: + block_hash (str): Block hash at which to query the deposit amount. If `None`, the current block is used. + reuse_block (bool): Whether to reuse the last-used blockchain block hash. - :return: The existential deposit amount + Returns: + The existential deposit amount. - The existential deposit is a fundamental economic parameter in the Bittensor network, ensuring - efficient use of storage and preventing the proliferation of dust accounts. 
+ The existential deposit is a fundamental economic parameter in the Bittensor network, ensuring efficient use of storage and preventing the proliferation of dust accounts. """ result = await self.substrate.get_constant( module_name="Balances", @@ -723,17 +766,17 @@ async def neurons( self, netuid: int, block_hash: Optional[str] = None ) -> list[NeuronInfo]: """ - Retrieves a list of all neurons within a specified subnet of the Bittensor network. This function - provides a snapshot of the subnet's neuron population, including each neuron's attributes and network - interactions. + Retrieves a list of all neurons within a specified subnet of the Bittensor network. + This function provides a snapshot of the subnet's neuron population, including each neuron's attributes and network interactions. - :param netuid: The unique identifier of the subnet. - :param block_hash: The hash of the blockchain block number for the query. + Arguments: + netuid (int): The unique identifier of the subnet. + block_hash (str): The hash of the blockchain block number for the query. - :return: A list of NeuronInfo objects detailing each neuron's characteristics in the subnet. + Returns: + A list of NeuronInfo objects detailing each neuron's characteristics in the subnet. - Understanding the distribution and status of neurons within a subnet is key to comprehending the - network's decentralized structure and the dynamics of its consensus and governance processes. + Understanding the distribution and status of neurons within a subnet is key to comprehending the network's decentralized structure and the dynamics of its consensus and governance processes. """ neurons_lite, weights, bonds = await asyncio.gather( self.neurons_lite(netuid=netuid, block_hash=block_hash), @@ -758,17 +801,17 @@ async def neurons_lite( ) -> list[NeuronInfoLite]: """ Retrieves a list of neurons in a 'lite' format from a specific subnet of the Bittensor network. 
- This function provides a streamlined view of the neurons, focusing on key attributes such as stake - and network participation. + This function provides a streamlined view of the neurons, focusing on key attributes such as stake and network participation. - :param netuid: The unique identifier of the subnet. - :param block_hash: The hash of the blockchain block number for the query. - :param reuse_block: Whether to reuse the last-used blockchain block hash. + Arguments: + netuid (int): The unique identifier of the subnet. + block_hash (str): The hash of the blockchain block number for the query. + reuse_block (bool): Whether to reuse the last-used blockchain block hash. - :return: A list of simplified neuron information for the subnet. + Returns: + A list of simplified neuron information for the subnet. - This function offers a quick overview of the neuron population within a subnet, facilitating - efficient analysis of the network's decentralized structure and neuron dynamics. + This function offers a quick overview of the neuron population within a subnet, facilitating efficient analysis of the network's decentralized structure and neuron dynamics. """ hex_bytes_result = await self.query_runtime_api( runtime_api="NeuronInfoRuntimeApi", @@ -794,19 +837,17 @@ async def neuron_for_uid( self, uid: Optional[int], netuid: int, block_hash: Optional[str] = None ) -> NeuronInfo: """ - Retrieves detailed information about a specific neuron identified by its unique identifier (UID) - within a specified subnet (netuid) of the Bittensor network. This function provides a comprehensive - view of a neuron's attributes, including its stake, rank, and operational status. + Retrieves detailed information about a specific neuron identified by its unique identifier (UID) within a specified subnet (netuid) of the Bittensor network. This function provides a comprehensive view of a neuron's attributes, including its stake, rank, and operational status. 
+ Arguments: + uid (int): The unique identifier of the neuron. + netuid (int): The unique identifier of the subnet. + block_hash (str): The hash of the blockchain block number for the query. - :param uid: The unique identifier of the neuron. - :param netuid: The unique identifier of the subnet. - :param block_hash: The hash of the blockchain block number for the query. - - :return: Detailed information about the neuron if found, a null neuron otherwise + Returns: + Detailed information about the neuron if found, a null neuron otherwise - This function is crucial for analyzing individual neurons' contributions and status within a specific - subnet, offering insights into their roles in the network's consensus and validation mechanisms. + This function is crucial for analyzing individual neurons' contributions and status within a specific subnet, offering insights into their roles in the network's consensus and validation mechanisms. """ if uid is None: return NeuronInfo.get_null_neuron() @@ -830,17 +871,17 @@ async def get_delegated( reuse_block: bool = False, ) -> list[tuple[DelegateInfo, Balance]]: """ - Retrieves a list of delegates and their associated stakes for a given coldkey. This function - identifies the delegates that a specific account has staked tokens on. + Retrieves a list of delegates and their associated stakes for a given coldkey. This function identifies the delegates that a specific account has staked tokens on. - :param coldkey_ss58: The `SS58` address of the account's coldkey. - :param block_hash: The hash of the blockchain block number for the query. - :param reuse_block: Whether to reuse the last-used blockchain block hash. + Arguments: + coldkey_ss58 (str): The `SS58` address of the account's coldkey. + block_hash (Optional[str]): The hash of the blockchain block number for the query. + reuse_block (bool): Whether to reuse the last-used blockchain block hash. 
- :return: A list of tuples, each containing a delegate's information and staked amount. + Returns: + A list of tuples, each containing a delegate's information and staked amount. - This function is important for account holders to understand their stake allocations and their - involvement in the network's delegation and consensus mechanisms. + This function is important for account holders to understand their stake allocations and their involvement in the network's delegation and consensus mechanisms. """ block_hash = ( @@ -866,22 +907,20 @@ async def query_identity( reuse_block: bool = False, ) -> dict: """ - Queries the identity of a neuron on the Bittensor blockchain using the given key. This function retrieves - detailed identity information about a specific neuron, which is a crucial aspect of the network's decentralized - identity and governance system. + Queries the identity of a neuron on the Bittensor blockchain using the given key. This function retrieves detailed identity information about a specific neuron, which is a crucial aspect of the network's decentralized identity and governance system. - Note: - See the `Bittensor CLI documentation `_ for supported identity - parameters. + Arguments: + key (str): The key used to query the neuron's identity, typically the neuron's SS58 address. + block_hash (str): The hash of the blockchain block number at which to perform the query. + reuse_block (bool): Whether to reuse the last-used blockchain block hash. - :param key: The key used to query the neuron's identity, typically the neuron's SS58 address. - :param block_hash: The hash of the blockchain block number at which to perform the query. - :param reuse_block: Whether to reuse the last-used blockchain block hash. + Returns: + An object containing the identity information of the neuron if found, ``None`` otherwise. - :return: An object containing the identity information of the neuron if found, ``None`` otherwise. 
+ The identity information can include various attributes such as the neuron's stake, rank, and other network-specific details, providing insights into the neuron's role and status within the Bittensor network. - The identity information can include various attributes such as the neuron's stake, rank, and other - network-specific details, providing insights into the neuron's role and status within the Bittensor network. + Note: + See the `Bittensor CLI documentation `_ for supported identity parameters. """ def decode_hex_identity_dict_(info_dictionary): @@ -925,17 +964,16 @@ async def weights( ) -> list[tuple[int, list[tuple[int, int]]]]: """ Retrieves the weight distribution set by neurons within a specific subnet of the Bittensor network. - This function maps each neuron's UID to the weights it assigns to other neurons, reflecting the - network's trust and value assignment mechanisms. + This function maps each neuron's UID to the weights it assigns to other neurons, reflecting the network's trust and value assignment mechanisms. - Args: - :param netuid: The network UID of the subnet to query. - :param block_hash: The hash of the blockchain block for the query. + Arguments: + netuid (int): The network UID of the subnet to query. + block_hash (str): The hash of the blockchain block for the query. - :return: A list of tuples mapping each neuron's UID to its assigned weights. + Returns: + A list of tuples mapping each neuron's UID to its assigned weights. - The weight distribution is a key factor in the network's consensus algorithm and the ranking of neurons, - influencing their influence and reward allocation within the subnet. + The weight distribution is a key factor in the network's consensus algorithm and the ranking of neurons, influencing their influence and reward allocation within the subnet. 
""" # TODO look into seeing if we can speed this up with storage query w_map_encoded = await self.substrate.query_map( @@ -953,18 +991,16 @@ async def bonds( ) -> list[tuple[int, list[tuple[int, int]]]]: """ Retrieves the bond distribution set by neurons within a specific subnet of the Bittensor network. - Bonds represent the investments or commitments made by neurons in one another, indicating a level - of trust and perceived value. This bonding mechanism is integral to the network's market-based approach - to measuring and rewarding machine intelligence. + Bonds represent the investments or commitments made by neurons in one another, indicating a level of trust and perceived value. This bonding mechanism is integral to the network's market-based approach to measuring and rewarding machine intelligence. - :param netuid: The network UID of the subnet to query. - :param block_hash: The hash of the blockchain block number for the query. + Arguments: + netuid (int): The network UID of the subnet to query. + block_hash (Optional[str]): The hash of the blockchain block number for the query. - :return: list of tuples mapping each neuron's UID to its bonds with other neurons. + Returns: + List of tuples mapping each neuron's UID to its bonds with other neurons. - Understanding bond distributions is crucial for analyzing the trust dynamics and market behavior - within the subnet. It reflects how neurons recognize and invest in each other's intelligence and - contributions, supporting diverse and niche systems within the Bittensor ecosystem. + Understanding bond distributions is crucial for analyzing the trust dynamics and market behavior within the subnet. It reflects how neurons recognize and invest in each other's intelligence and contributions, supporting diverse and niche systems within the Bittensor ecosystem. 
""" b_map_encoded = await self.substrate.query_map( module="SubtensorModule", @@ -985,11 +1021,13 @@ async def does_hotkey_exist( """ Returns true if the hotkey is known by the chain and there are accounts. - :param hotkey_ss58: The SS58 address of the hotkey. - :param block_hash: The hash of the block number to check the hotkey against. - :param reuse_block: Whether to reuse the last-used blockchain hash. + Arguments: + hotkey_ss58 (str): The SS58 address of the hotkey. + block_hash (Optional[str]): The hash of the block number to check the hotkey against. + reuse_block (bool): Whether to reuse the last-used blockchain hash. - :return: `True` if the hotkey is known by the chain and there are accounts, `False` otherwise. + Returns: + `True` if the hotkey is known by the chain and there are accounts, `False` otherwise. """ _result = await self.substrate.query( module="SubtensorModule", @@ -1009,6 +1047,17 @@ async def does_hotkey_exist( async def get_hotkey_owner( self, hotkey_ss58: str, block_hash: str ) -> Optional[str]: + """ + Retrieves the owner of the given hotkey at a specific block hash. + This function queries the blockchain for the owner of the provided hotkey. If the hotkey does not exist at the specified block hash, it returns None. + + Arguments: + hotkey_ss58 (str): The SS58 address of the hotkey. + block_hash (str): The hash of the block at which to check the hotkey ownership. + + Returns: + Optional[str]: The SS58 address of the owner if the hotkey exists, or None if it doesn't. + """ hk_owner_query = await self.substrate.query( module="SubtensorModule", storage_function="Owner", @@ -1025,20 +1074,22 @@ async def get_hotkey_owner( async def sign_and_send_extrinsic( self, - call: GenericCall, - wallet: Wallet, + call: "GenericCall", + wallet: "Wallet", wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> tuple[bool, str]: """ Helper method to sign and submit an extrinsic call to chain. 
- :param call: a prepared Call object - :param wallet: the wallet whose coldkey will be used to sign the extrinsic - :param wait_for_inclusion: whether to wait until the extrinsic call is included on the chain - :param wait_for_finalization: whether to wait until the extrinsic call is finalized on the chain + Arguments: + call (scalecodec.types.GenericCall): a prepared Call object + wallet (bittensor_wallet.Wallet): the wallet whose coldkey will be used to sign the extrinsic + wait_for_inclusion (bool): whether to wait until the extrinsic call is included on the chain + wait_for_finalization (bool): whether to wait until the extrinsic call is finalized on the chain - :return: (success, error message) + Returns: + (success, error message) """ extrinsic = await self.substrate.create_signed_extrinsic( call=call, keypair=wallet.coldkey @@ -1062,16 +1113,16 @@ async def sign_and_send_extrinsic( except SubstrateRequestException as e: return False, format_error_message(e, substrate=self.substrate) - async def get_children(self, hotkey, netuid) -> tuple[bool, list, str]: + async def get_children(self, hotkey: str, netuid: int) -> tuple[bool, list, str]: """ - This method retrieves the children of a given hotkey and netuid. It queries the SubtensorModule's ChildKeys - storage function to get the children and formats them before returning as a tuple. + This method retrieves the children of a given hotkey and netuid. It queries the SubtensorModule's ChildKeys storage function to get the children and formats them before returning as a tuple. - :param hotkey: The hotkey value. - :param netuid: The netuid value. + Arguments: + hotkey (str): The hotkey value. + netuid (int): The netuid value. 
- :return: A tuple containing a boolean indicating success or failure, a list of formatted children, and an error - message (if applicable) + Returns: + A tuple containing a boolean indicating success or failure, a list of formatted children, and an error message (if applicable) """ try: children = await self.substrate.query( @@ -1096,16 +1147,16 @@ async def get_subnet_hyperparameters( self, netuid: int, block_hash: Optional[str] = None ) -> Optional[Union[list, SubnetHyperparameters]]: """ - Retrieves the hyperparameters for a specific subnet within the Bittensor network. These hyperparameters - define the operational settings and rules governing the subnet's behavior. + Retrieves the hyperparameters for a specific subnet within the Bittensor network. These hyperparameters define the operational settings and rules governing the subnet's behavior. - :param netuid: The network UID of the subnet to query. - :param block_hash: The hash of the blockchain block number for the query. + Arguments: + netuid (int): The network UID of the subnet to query. + block_hash (Optional[str]): The hash of the blockchain block number for the query. - :return: The subnet's hyperparameters, or `None` if not available. + Returns: + The subnet's hyperparameters, or `None` if not available. - Understanding the hyperparameters is crucial for comprehending how subnets are configured and - managed, and how they interact with the network's consensus and incentive mechanisms. + Understanding the hyperparameters is crucial for comprehending how subnets are configured and managed, and how they interact with the network's consensus and incentive mechanisms. """ hex_bytes_result = await self.query_runtime_api( runtime_api="SubnetInfoRuntimeApi", @@ -1131,17 +1182,17 @@ async def get_vote_data( reuse_block: bool = False, ) -> Optional["ProposalVoteData"]: """ - Retrieves the voting data for a specific proposal on the Bittensor blockchain. 
This data includes - information about how senate members have voted on the proposal. + Retrieves the voting data for a specific proposal on the Bittensor blockchain. This data includes information about how senate members have voted on the proposal. - :param proposal_hash: The hash of the proposal for which voting data is requested. - :param block_hash: The hash of the blockchain block number to query the voting data. - :param reuse_block: Whether to reuse the last-used blockchain block hash. + Arguments: + proposal_hash (str): The hash of the proposal for which voting data is requested. + block_hash (Optional[str]): The hash of the blockchain block number to query the voting data. + reuse_block (bool): Whether to reuse the last-used blockchain block hash. - :return: An object containing the proposal's voting data, or `None` if not found. + Returns: + An object containing the proposal's voting data, or `None` if not found. - This function is important for tracking and understanding the decision-making processes within - the Bittensor network, particularly how proposals are received and acted upon by the governing body. + This function is important for tracking and understanding the decision-making processes within the Bittensor network, particularly how proposals are received and acted upon by the governing body. """ vote_data = await self.substrate.query( module="Triumvirate", @@ -1159,14 +1210,13 @@ async def get_delegate_identities( self, block_hash: Optional[str] = None ) -> dict[str, DelegatesDetails]: """ - Fetches delegates identities from the chain and GitHub. Preference is given to chain data, and missing info - is filled-in by the info from GitHub. At some point, we want to totally move away from fetching this info - from GitHub, but chain data is still limited in that regard. + Fetches delegates identities from the chain and GitHub. Preference is given to chain data, and missing info is filled-in by the info from GitHub. 
At some point, we want to totally move away from fetching this info from GitHub, but chain data is still limited in that regard. Args: - block_hash: the hash of the blockchain block for the query + block_hash (str): the hash of the blockchain block for the query - Returns: {ss58: DelegatesDetails, ...} + Returns: + Dict {ss58: DelegatesDetails, ...} """ timeout = aiohttp.ClientTimeout(10.0) @@ -1230,7 +1280,7 @@ async def is_hotkey_registered(self, netuid: int, hotkey_ss58: str) -> bool: return False async def get_uid_for_hotkey_on_subnet( - self, hotkey_ss58: str, netuid: int, block: Optional[int] = None + self, hotkey_ss58: str, netuid: int, block_hash: Optional[str] = None ): """ Retrieves the unique identifier (UID) for a neuron's hotkey on a specific subnet. @@ -1238,7 +1288,7 @@ async def get_uid_for_hotkey_on_subnet( Args: hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number for the query. + block_hash (Optional[str]): The blockchain block_hash representation of the block id. Returns: Optional[int]: The UID of the neuron if it is registered on the subnet, ``None`` otherwise. @@ -1249,22 +1299,34 @@ async def get_uid_for_hotkey_on_subnet( module="SubtensorModule", storage_function="Uids", params=[netuid, hotkey_ss58], - block_hash=( - None if block is None else await self.substrate.get_block_hash(block) - ), + block_hash=block_hash + if block_hash is not None + else await self.get_block_hash(), ) # extrinsics async def transfer( self, - wallet: Wallet, + wallet: "Wallet", destination: str, amount: float, transfer_all: bool, prompt: bool, - ): - """Transfer token of amount to destination.""" + ) -> bool: + """ + Transfer token of amount to destination. + + Arguments: + wallet (bittensor_wallet.Wallet): Source wallet for the transfer. + destination (str): Destination address for the transfer. + amount (float): Amount of tokens to transfer. 
+            transfer_all (bool): Flag to transfer all tokens.
+            prompt (bool): Flag to prompt user for confirmation before transferring.
+
+        Returns:
+            `True` if the transferring was successful, otherwise `False`.
+        """
         return await transfer_extrinsic(
             self,
             wallet,
@@ -1274,17 +1336,37 @@ async def transfer(
             prompt=prompt,
         )
 
-    async def register(self, wallet: Wallet, prompt: bool):
-        """Register neuron by recycling some TAO."""
+    async def register(
+        self,
+        wallet: "Wallet",
+        netuid: int,
+        block_hash: Optional[str] = None,
+        wait_for_inclusion: bool = True,
+        wait_for_finalization: bool = True,
+    ) -> bool:
+        """
+        Register neuron by recycling some TAO.
+
+        Arguments:
+            wallet (bittensor_wallet.Wallet): Bittensor wallet instance.
+            netuid (int): Subnet uniq id.
+            block_hash (Optional[str]): The hash of the blockchain block for the query.
+            wait_for_inclusion (bool): Waits for the transaction to be included in a block. Default is ``True``.
+            wait_for_finalization (bool): Waits for the transaction to be finalized on the blockchain. Default is ``True``.
+
+        Returns:
+            `True` if registration was successful, otherwise `False`.
+        """
         logging.info(
             f"Registering on netuid 0 on network: {self.network}"
         )  # Check current recycle amount
         logging.info("Fetching recycle amount & balance.")
+        block_hash = block_hash if block_hash else await self.get_block_hash()
         recycle_call, balance_ = await asyncio.gather(
-            self.get_hyperparameter(param_name="Burn", netuid=0, reuse_block=True),
-            self.get_balance(wallet.coldkeypub.ss58_address, reuse_block=True),
+            self.get_hyperparameter(param_name="Burn", netuid=netuid, reuse_block=True),
+            self.get_balance(wallet.coldkeypub.ss58_address, block_hash=block_hash),
         )
         current_recycle = Balance.from_rao(int(recycle_call))
         try:
@@ -1299,25 +1381,16 @@ async def register(self, wallet: Wallet, prompt: bool):
         # Check balance is sufficient
         if balance < current_recycle:
             logging.error(
-                f"Insufficient balance {balance} to register neuron. 
Current recycle is {current_recycle} TAO" + f"Insufficient balance {balance} to register neuron. Current recycle is {current_recycle} TAO." ) return False - if prompt: - if not Confirm.ask( - f"Your balance is: [bold green]{balance}[/bold green]\n" - f"The cost to register by recycle is [bold red]{current_recycle}[/bold red]\n" - f"Do you want to continue?", - default=False, - ): - return False - return await root_register_extrinsic( - self, - wallet, - wait_for_inclusion=True, - wait_for_finalization=True, - prompt=prompt, + subtensor=self, + wallet=wallet, + netuid=netuid, + wait_for_inclusion=wait_for_inclusion, + wait_for_finalization=wait_for_finalization, ) async def pow_register( @@ -1410,9 +1483,18 @@ async def root_set_weights( wallet: "Wallet", netuids: list[int], weights: list[float], - prompt: bool, - ): - """Set weights for root network.""" + ) -> bool: + """ + Set weights for root network. + + Arguments: + wallet (bittensor_wallet.Wallet): bittensor wallet instance. + netuids (list[int]): The list of subnet uids. + weights (list[float]): The list of weights to be set. + + Returns: + `True` if the setting of weights is successful, `False` otherwise. 
+ """ netuids_ = np.array(netuids, dtype=np.int64) weights_ = np.array(weights, dtype=np.float32) logging.info(f"Setting weights in network: {self.network}") @@ -1423,7 +1505,6 @@ async def root_set_weights( netuids=netuids_, weights=weights_, version_key=0, - prompt=prompt, wait_for_finalization=True, wait_for_inclusion=True, ) @@ -1468,16 +1549,23 @@ async def commit_weights( f"Committing weights with params: netuid={netuid}, uids={uids}, weights={weights}, version_key={version_key}" ) + # Generate the hash of the weights + commit_hash = generate_weight_hash( + address=wallet.hotkey.ss58_address, + netuid=netuid, + uids=list(uids), + values=list(weights), + salt=salt, + version_key=version_key, + ) + while retries < max_retries: try: - success, message = commit_weights_extrinsic( + success, message = await commit_weights_extrinsic( subtensor=self, wallet=wallet, netuid=netuid, - uids=uids, - weights=weights, - salt=salt, - version_key=version_key, + commit_hash=commit_hash, wait_for_inclusion=wait_for_inclusion, wait_for_finalization=wait_for_finalization, ) diff --git a/bittensor/core/extrinsics/async_root.py b/bittensor/core/extrinsics/async_root.py index 9e73f98a30..dd44a55f2a 100644 --- a/bittensor/core/extrinsics/async_root.py +++ b/bittensor/core/extrinsics/async_root.py @@ -6,8 +6,6 @@ from bittensor_wallet import Wallet from bittensor_wallet.errors import KeyFileError from numpy.typing import NDArray -from rich.prompt import Confirm -from rich.table import Table, Column from substrateinterface.exceptions import SubstrateRequestException from bittensor.utils import u16_normalized_float, format_error_message @@ -22,6 +20,19 @@ async def get_limits(subtensor: "AsyncSubtensor") -> tuple[int, float]: + """ + Retrieves the minimum allowed weights and maximum weight limit for the given subnet. + + These values are fetched asynchronously using `asyncio.gather` to run both requests concurrently. 
+ + Args: + subtensor (AsyncSubtensor): The AsyncSubtensor object used to interface with the network's substrate node. + + Returns: + tuple[int, float]: A tuple containing: + - `min_allowed_weights` (int): The minimum allowed weights. + - `max_weight_limit` (float): The maximum weight limit, normalized to a float value. + """ # Get weight restrictions. maw, mwl = await asyncio.gather( subtensor.get_hyperparameter("MinAllowedWeights", netuid=0), @@ -35,19 +46,21 @@ async def get_limits(subtensor: "AsyncSubtensor") -> tuple[int, float]: async def root_register_extrinsic( subtensor: "AsyncSubtensor", wallet: Wallet, + netuid: int, wait_for_inclusion: bool = True, wait_for_finalization: bool = True, - prompt: bool = False, ) -> bool: """Registers the wallet to root network. - :param subtensor: The AsyncSubtensor object - :param wallet: Bittensor wallet object. - :param wait_for_inclusion: If set, waits for the extrinsic to enter a block before returning `True`, or returns `False` if the extrinsic fails to enter the block within the timeout. - :param wait_for_finalization: If set, waits for the extrinsic to be finalized on the chain before returning `True`, or returns `False` if the extrinsic fails to be finalized within the timeout. - :param prompt: If `True`, the call waits for confirmation from the user before proceeding. + Arguments: + subtensor (bittensor.core.async_subtensor.AsyncSubtensor): The AsyncSubtensor object + wallet (bittensor_wallet.Wallet): Bittensor wallet object. + netuid (int): Subnet uid. + wait_for_inclusion (bool): If set, waits for the extrinsic to enter a block before returning `True`, or returns `False` if the extrinsic fails to enter the block within the timeout. + wait_for_finalization (bool): If set, waits for the extrinsic to be finalized on the chain before returning `True`, or returns `False` if the extrinsic fails to be finalized within the timeout. - :return: `True` if extrinsic was finalized or included in the block. 
If we did not wait for finalization/inclusion, the response is `True`. + Returns: + `True` if extrinsic was finalized or included in the block. If we did not wait for finalization/inclusion, the response is `True`. """ try: @@ -60,7 +73,7 @@ async def root_register_extrinsic( f"Checking if hotkey ({wallet.hotkey_str}) is registered on root." ) is_registered = await subtensor.is_hotkey_registered( - netuid=0, hotkey_ss58=wallet.hotkey.ss58_address + netuid=netuid, hotkey_ss58=wallet.hotkey.ss58_address ) if is_registered: logging.error( @@ -82,7 +95,7 @@ async def root_register_extrinsic( ) if not success: - logging.error(f":cross_mark: Failed: {err_msg}") + logging.error(f":cross_mark: Failed error: {err_msg}") time.sleep(0.5) return False @@ -91,11 +104,11 @@ async def root_register_extrinsic( uid = await subtensor.substrate.query( module="SubtensorModule", storage_function="Uids", - params=[0, wallet.hotkey.ss58_address], + params=[netuid, wallet.hotkey.ss58_address], ) if uid is not None: logging.info( - f":white_heavy_check_mark: Registered with UID {uid}" + f":white_heavy_check_mark: Registered with UID {uid}." ) return True else: @@ -106,28 +119,26 @@ async def root_register_extrinsic( async def set_root_weights_extrinsic( subtensor: "AsyncSubtensor", - wallet: Wallet, + wallet: "Wallet", netuids: Union[NDArray[np.int64], list[int]], weights: Union[NDArray[np.float32], list[float]], version_key: int = 0, wait_for_inclusion: bool = False, wait_for_finalization: bool = False, - prompt: bool = False, ) -> bool: """Sets the given weights and values on chain for wallet hotkey account. - :param subtensor: The AsyncSubtensor object - :param wallet: Bittensor wallet object. - :param netuids: The `netuid` of the subnet to set weights for. - :param weights: Weights to set. These must be `float` s and must correspond to the passed `netuid` s. - :param version_key: The version key of the validator. 
- :param wait_for_inclusion: If set, waits for the extrinsic to enter a block before returning `True`, or returns - `False` if the extrinsic fails to enter the block within the timeout. - :param wait_for_finalization: If set, waits for the extrinsic to be finalized on the chain before returning `True`, - or returns `False` if the extrinsic fails to be finalized within the timeout. - :param prompt: If `True`, the call waits for confirmation from the user before proceeding. - :return: `True` if extrinsic was finalized or included in the block. If we did not wait for finalization/inclusion, - the response is `True`. + Arguments: + subtensor (bittensor.core.async_subtensor.AsyncSubtensor): The AsyncSubtensor object + wallet (bittensor_wallet.Wallet): Bittensor wallet object. + netuids (Union[NDArray[np.int64], list[int]]): The `netuid` of the subnet to set weights for. + weights (Union[NDArray[np.float32], list[float]]): Weights to set. These must be `float` s and must correspond to the passed `netuid` s. + version_key (int): The version key of the validator. + wait_for_inclusion (bool): If set, waits for the extrinsic to enter a block before returning `True`, or returns `False` if the extrinsic fails to enter the block within the timeout. + wait_for_finalization (bool): If set, waits for the extrinsic to be finalized on the chain before returning `True`, or returns `False` if the extrinsic fails to be finalized within the timeout. + + Returns: + `True` if extrinsic was finalized or included in the block. If we did not wait for finalization/inclusion, the response is `True`. 
""" async def _do_set_weights(): @@ -168,13 +179,13 @@ async def _do_set_weights(): ) if my_uid is None: - logging.error("Your hotkey is not registered to the root network") + logging.error("Your hotkey is not registered to the root network.") return False try: wallet.unlock_coldkey() except KeyFileError: - logging.error("Error decrypting coldkey (possibly incorrect password)") + logging.error("Error decrypting coldkey (possibly incorrect password).") return False # First convert types. @@ -203,25 +214,6 @@ async def _do_set_weights(): f"Raw weights -> Normalized weights: {weights} -> {formatted_weights}" ) - # Ask before moving on. - if prompt: - table = Table( - Column("[dark_orange]Netuid", justify="center", style="bold green"), - Column( - "[dark_orange]Weight", justify="center", style="bold light_goldenrod2" - ), - expand=False, - show_edge=False, - ) - print("Netuid | Weight") - - for netuid, weight in zip(netuids, formatted_weights): - table.add_row(str(netuid), f"{weight:.8f}") - print(f"{netuid} | {weight}") - - if not Confirm.ask("\nDo you want to set these root weights?"): - return False - try: logging.info(":satellite: Setting root weights...") weight_uids, weight_vals = convert_weights_and_uids_for_emit(netuids, weights) @@ -236,10 +228,10 @@ async def _do_set_weights(): return True else: fmt_err = format_error_message(error_message, subtensor.substrate) - logging.error(f":cross_mark: Failed: {fmt_err}") + logging.error(f":cross_mark: Failed error: {fmt_err}") return False except SubstrateRequestException as e: fmt_err = format_error_message(e, subtensor.substrate) - logging.error(f":cross_mark: Failed: error:{fmt_err}") + logging.error(f":cross_mark: Failed error: {fmt_err}") return False From 41259954aed503a7fad481b5edb80069b13b5bcf Mon Sep 17 00:00:00 2001 From: Roman Date: Mon, 4 Nov 2024 14:28:05 -0800 Subject: [PATCH 26/27] improve docstrings --- bittensor/core/async_subtensor.py | 58 +++++++++++++++---------------- 1 file changed, 29 
insertions(+), 29 deletions(-) diff --git a/bittensor/core/async_subtensor.py b/bittensor/core/async_subtensor.py index e7aecde124..66b008882e 100644 --- a/bittensor/core/async_subtensor.py +++ b/bittensor/core/async_subtensor.py @@ -254,7 +254,7 @@ async def get_subnets(self, block_hash: Optional[str] = None) -> list[int]: """ Retrieves the list of all subnet unique identifiers (netuids) currently present in the Bittensor network. - Arguments: + Args: block_hash (Optional[str]): The hash of the block to retrieve the subnet unique identifiers from. Returns: @@ -284,7 +284,7 @@ async def is_hotkey_delegate( """ Determines whether a given hotkey (public key) is a delegate on the Bittensor network. This function checks if the neuron associated with the hotkey is part of the network's delegation system. - Arguments: + Args: hotkey_ss58 (str): The SS58 address of the neuron's hotkey. block_hash (Optional[str]): The hash of the blockchain block number for the query. reuse_block (Optional[bool]): Whether to reuse the last-used block hash. @@ -305,7 +305,7 @@ async def get_delegates( """ Fetches all delegates on the chain - Arguments: + Args: block_hash (Optional[str]): hash of the blockchain block number for the query. reuse_block (Optional[bool]): whether to reuse the last-used block hash. @@ -338,7 +338,7 @@ async def get_stake_info_for_coldkey( """ Retrieves stake information associated with a specific coldkey. This function provides details about the stakes held by an account, including the staked amounts and associated delegates. - Arguments: + Args: coldkey_ss58 (str): The ``SS58`` address of the account's coldkey. block_hash (Optional[str]): The hash of the blockchain block number for the query. reuse_block (bool): Whether to reuse the last-used block hash. @@ -374,7 +374,7 @@ async def get_stake_for_coldkey_and_hotkey( """ Retrieves stake information associated with a specific coldkey and hotkey. 
- Arguments: + Args: hotkey_ss58 (str): the hotkey SS58 address to query coldkey_ss58 (str): the coldkey SS58 address to query block_hash (Optional[str]): the hash of the blockchain block number for the query. @@ -401,7 +401,7 @@ async def query_runtime_api( """ Queries the runtime API of the Bittensor blockchain, providing a way to interact with the underlying runtime and retrieve data encoded in Scale Bytes format. This function is essential for advanced users who need to interact with specific runtime methods and decode complex data types. - Arguments: + Args: runtime_api (str): The name of the runtime API to query. method (str): The specific method within the runtime API to call. params (Optional[Union[list[list[int]], dict[str, int]]]): The parameters to pass to the method call. @@ -455,7 +455,7 @@ async def get_balance( """ Retrieves the balance for given coldkey(s) - Arguments: + Args: addresses (str): coldkey addresses(s). block_hash (Optional[str]): the block hash, optional. @@ -535,7 +535,7 @@ async def get_total_stake_for_coldkey( """ Returns the total stake held on a coldkey. - Arguments: + Args: ss58_addresses (tuple[str]): The SS58 address(es) of the coldkey(s) block_hash (str): The hash of the block number to retrieve the stake from. @@ -568,7 +568,7 @@ async def get_total_stake_for_hotkey( """ Returns the total stake held on a hotkey. - Arguments: + Args: ss58_addresses (tuple[str]): The SS58 address(es) of the hotkey(s) block_hash (str): The hash of the block number to retrieve the stake from. reuse_block (bool): Whether to reuse the last-used block hash when retrieving info. @@ -594,7 +594,7 @@ async def get_netuids_for_hotkey( """ Retrieves a list of subnet UIDs (netuids) for which a given hotkey is a member. This function identifies the specific subnets within the Bittensor network where the neuron associated with the hotkey is active. - Arguments: + Args: hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. 
block_hash (Optional[str]): The hash of the blockchain block number at which to perform the query. reuse_block (Optional[bool]): Whether to reuse the last-used block hash when retrieving info. @@ -622,7 +622,7 @@ async def subnet_exists( """ Checks if a subnet with the specified unique identifier (netuid) exists within the Bittensor network. - Arguments: + Args: netuid (int): The unique identifier of the subnet. block_hash (Optional[str]): The hash of the blockchain block number at which to check the subnet existence. reuse_block (bool): Whether to reuse the last-used block hash. @@ -652,7 +652,7 @@ async def get_hyperparameter( """ Retrieves a specified hyperparameter for a specific subnet. - Arguments: + Args: param_name (str): The name of the hyperparameter to retrieve. netuid (int): The unique identifier of the subnet. block_hash (Optional[str]): The hash of blockchain block number for the query. @@ -741,7 +741,7 @@ async def get_existential_deposit( The existential deposit is the minimum amount of TAO required for an account to exist on the blockchain. Accounts with balances below this threshold can be reaped to conserve network resources. - Arguments: + Args: block_hash (str): Block hash at which to query the deposit amount. If `None`, the current block is used. reuse_block (bool): Whether to reuse the last-used blockchain block hash. @@ -769,7 +769,7 @@ async def neurons( Retrieves a list of all neurons within a specified subnet of the Bittensor network. This function provides a snapshot of the subnet's neuron population, including each neuron's attributes and network interactions. - Arguments: + Args: netuid (int): The unique identifier of the subnet. block_hash (str): The hash of the blockchain block number for the query. @@ -803,7 +803,7 @@ async def neurons_lite( Retrieves a list of neurons in a 'lite' format from a specific subnet of the Bittensor network. 
This function provides a streamlined view of the neurons, focusing on key attributes such as stake and network participation. - Arguments: + Args: netuid (int): The unique identifier of the subnet. block_hash (str): The hash of the blockchain block number for the query. reuse_block (bool): Whether to reuse the last-used blockchain block hash. @@ -839,7 +839,7 @@ async def neuron_for_uid( """ Retrieves detailed information about a specific neuron identified by its unique identifier (UID) within a specified subnet (netuid) of the Bittensor network. This function provides a comprehensive view of a neuron's attributes, including its stake, rank, and operational status. - Arguments: + Args: uid (int): The unique identifier of the neuron. netuid (int): The unique identifier of the subnet. block_hash (str): The hash of the blockchain block number for the query. @@ -873,7 +873,7 @@ async def get_delegated( """ Retrieves a list of delegates and their associated stakes for a given coldkey. This function identifies the delegates that a specific account has staked tokens on. - Arguments: + Args: coldkey_ss58 (str): The `SS58` address of the account's coldkey. block_hash (Optional[str]): The hash of the blockchain block number for the query. reuse_block (bool): Whether to reuse the last-used blockchain block hash. @@ -909,7 +909,7 @@ async def query_identity( """ Queries the identity of a neuron on the Bittensor blockchain using the given key. This function retrieves detailed identity information about a specific neuron, which is a crucial aspect of the network's decentralized identity and governance system. - Arguments: + Args: key (str): The key used to query the neuron's identity, typically the neuron's SS58 address. block_hash (str): The hash of the blockchain block number at which to perform the query. reuse_block (bool): Whether to reuse the last-used blockchain block hash. 
@@ -966,7 +966,7 @@ async def weights( Retrieves the weight distribution set by neurons within a specific subnet of the Bittensor network. This function maps each neuron's UID to the weights it assigns to other neurons, reflecting the network's trust and value assignment mechanisms. - Arguments: + Args: netuid (int): The network UID of the subnet to query. block_hash (str): The hash of the blockchain block for the query. @@ -993,7 +993,7 @@ async def bonds( Retrieves the bond distribution set by neurons within a specific subnet of the Bittensor network. Bonds represent the investments or commitments made by neurons in one another, indicating a level of trust and perceived value. This bonding mechanism is integral to the network's market-based approach to measuring and rewarding machine intelligence. - Arguments: + Args: netuid (int): The network UID of the subnet to query. block_hash (Optional[str]): The hash of the blockchain block number for the query. @@ -1021,7 +1021,7 @@ async def does_hotkey_exist( """ Returns true if the hotkey is known by the chain and there are accounts. - Arguments: + Args: hotkey_ss58 (str): The SS58 address of the hotkey. block_hash (Optional[str]): The hash of the block number to check the hotkey against. reuse_block (bool): Whether to reuse the last-used blockchain hash. @@ -1051,7 +1051,7 @@ async def get_hotkey_owner( Retrieves the owner of the given hotkey at a specific block hash. This function queries the blockchain for the owner of the provided hotkey. If the hotkey does not exist at the specified block hash, it returns None. - Arguments: + Args: hotkey_ss58 (str): The SS58 address of the hotkey. block_hash (str): The hash of the block at which to check the hotkey ownership. @@ -1082,7 +1082,7 @@ async def sign_and_send_extrinsic( """ Helper method to sign and submit an extrinsic call to chain. 
- Arguments: + Args: call (scalecodec.types.GenericCall): a prepared Call object wallet (bittensor_wallet.Wallet): the wallet whose coldkey will be used to sign the extrinsic wait_for_inclusion (bool): whether to wait until the extrinsic call is included on the chain @@ -1117,7 +1117,7 @@ async def get_children(self, hotkey: str, netuid: int) -> tuple[bool, list, str] """ This method retrieves the children of a given hotkey and netuid. It queries the SubtensorModule's ChildKeys storage function to get the children and formats them before returning as a tuple. - Arguments: + Args: hotkey (str): The hotkey value. netuid (int): The netuid value. @@ -1149,7 +1149,7 @@ async def get_subnet_hyperparameters( """ Retrieves the hyperparameters for a specific subnet within the Bittensor network. These hyperparameters define the operational settings and rules governing the subnet's behavior. - Arguments: + Args: netuid (int): The network UID of the subnet to query. block_hash (Optional[str]): The hash of the blockchain block number for the query. @@ -1184,7 +1184,7 @@ async def get_vote_data( """ Retrieves the voting data for a specific proposal on the Bittensor blockchain. This data includes information about how senate members have voted on the proposal. - Arguments: + Args: proposal_hash (str): The hash of the proposal for which voting data is requested. block_hash (Optional[str]): The hash of the blockchain block number to query the voting data. reuse_block (bool): Whether to reuse the last-used blockchain block hash. @@ -1317,7 +1317,7 @@ async def transfer( """ Transfer token of amount to destination. - Arguments: + Args: wallet (bittensor_wallet.Wallet): Source wallet for the transfer. destination (str): Destination address for the transfer. amount (float): Amount of tokens to transfer. @@ -1347,7 +1347,7 @@ async def register( """ Register neuron by recycling some TAO. - Arguments: + Args: wallet (bittensor_wallet.Wallet): Bittensor wallet instance. 
netuid (int): Subnet uniq id. block_hash (Optional[str]): The hash of the blockchain block for the query. @@ -1487,7 +1487,7 @@ async def root_set_weights( """ Set weights for root network. - Arguments: + Args: wallet (bittensor_wallet.Wallet): bittensor wallet instance. netuids (list[int]): The list of subnet uids. weights (list[float]): The list of weights to be set. From a4b21282c5666fabdcb78359a6304e442fb75874 Mon Sep 17 00:00:00 2001 From: Roman Date: Mon, 4 Nov 2024 15:02:13 -0800 Subject: [PATCH 27/27] fix --- bittensor/core/async_subtensor.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/bittensor/core/async_subtensor.py b/bittensor/core/async_subtensor.py index 66b008882e..508ae02439 100644 --- a/bittensor/core/async_subtensor.py +++ b/bittensor/core/async_subtensor.py @@ -159,7 +159,7 @@ async def encode_params( return param_data.to_hex() - async def get_current_block(self): + async def get_current_block(self) -> int: """ Returns the current block number on the Bittensor blockchain. This function provides the latest block number, indicating the most recent state of the blockchain. @@ -182,9 +182,10 @@ async def get_block_hash(self, block_id: Optional[int] = None): The block hash is a fundamental aspect of blockchain technology, providing a secure reference to each block's data. It is crucial for verifying transactions, ensuring data consistency, and maintaining the trustworthiness of the blockchain. 
""" - return await self.substrate.get_block_hash( - block_id if block_id else await self.get_current_block() - ) + if block_id: + return await self.substrate.get_block_hash(block_id) + else: + return await self.substrate.get_chain_head() async def is_hotkey_registered_any( self, hotkey_ss58: str, block_hash: Optional[str] = None @@ -245,8 +246,6 @@ async def get_total_subnets( storage_function="TotalNetworks", params=[], block_hash=block_hash - if block_hash is not None - else await self.get_block_hash(await self.get_current_block()), ) return result @@ -1300,8 +1299,6 @@ async def get_uid_for_hotkey_on_subnet( storage_function="Uids", params=[netuid, hotkey_ss58], block_hash=block_hash - if block_hash is not None - else await self.get_block_hash(), ) # extrinsics