Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 33 additions & 2 deletions bittensor/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,8 +111,39 @@ def debug(on: bool = True):
}

# --- Type Registry ---
__type_registry__ = {"types": {"Balance": "u64"}} # Need to override default u128

# Registry of custom SCALE types and runtime API call signatures handed to
# the substrate interface when decoding chain data.
__type_registry__ = {
    "types": {
        "Balance": "u64",  # Need to override default u128
    },
    # Runtime API definitions: each method's SCALE-typed parameters and the
    # SCALE type of the value the call returns.
    "runtime_api": {
        "NeuronInfoRuntimeApi": {
            "methods": {
                "get_neuron_lite": {
                    "params": [
                        {
                            "name": "netuid",  # subnet id
                            "type": "u16",
                        },
                        {
                            "name": "uid",  # neuron uid within the subnet
                            "type": "u16",
                        },
                    ],
                    # SCALE-encoded payload; decoded downstream via
                    # NeuronInfoLite.from_vec_u8.
                    "type": "Vec<u8>",
                },
                "get_neurons_lite": {
                    "params": [
                        {
                            "name": "netuid",  # subnet id
                            "type": "u16",
                        },
                    ],
                    # SCALE-encoded payload; decoded downstream via
                    # NeuronInfoLite.list_from_vec_u8.
                    "type": "Vec<u8>",
                },
            }
        },
    },
}

from .errors import *

Expand Down
19 changes: 15 additions & 4 deletions bittensor/chain_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
from enum import Enum
from dataclasses import dataclass
from scalecodec.types import GenericCall
from typing import List, Tuple, Dict, Optional, Any, TypedDict
from typing import List, Tuple, Dict, Optional, Any, TypedDict, Union
from scalecodec.base import RuntimeConfiguration, ScaleBytes
from scalecodec.type_registry import load_type_registry_preset
from scalecodec.utils.ss58 import ss58_encode
Expand Down Expand Up @@ -227,13 +227,24 @@ class ChainDataType(Enum):


def from_scale_encoding(
vec_u8: List[int],
input: Union[List[int], bytes, ScaleBytes],
type_name: ChainDataType,
is_vec: bool = False,
is_option: bool = False,
) -> Optional[Dict]:
as_bytes = bytes(vec_u8)
as_scale_bytes = ScaleBytes(as_bytes)
if isinstance(input, ScaleBytes):
as_scale_bytes = input
else:
if isinstance(input, list) and all([isinstance(i, int) for i in input]):
vec_u8 = input
as_bytes = bytes(vec_u8)
elif isinstance(input, bytes):
as_bytes = input
else:
raise TypeError("input must be a List[int], bytes, or ScaleBytes")

as_scale_bytes = ScaleBytes(as_bytes)

rpc_runtime_config = RuntimeConfiguration()
rpc_runtime_config.update_type_registry(load_type_registry_preset("legacy"))
rpc_runtime_config.update_type_registry(custom_rpc_type_registry)
Expand Down
147 changes: 113 additions & 34 deletions bittensor/subtensor.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,10 @@

from retry import retry
from loguru import logger
from typing import List, Dict, Union, Optional, Tuple
from typing import List, Dict, Union, Optional, Tuple, TypedDict, Any
from substrateinterface.base import QueryMapResult, SubstrateInterface
from scalecodec.base import RuntimeConfiguration
from scalecodec.type_registry import load_type_registry_preset

# Local imports.
from .chain_data import (
Expand All @@ -38,6 +40,7 @@
AxonInfo,
ProposalVoteData,
ProposalCallData,
custom_rpc_type_registry,
)
from .errors import *
from .extrinsics.network import register_subnetwork_extrinsic
Expand Down Expand Up @@ -70,6 +73,11 @@
logger = logger.opt(colors=True)


# A single runtime-API call parameter: its name and its SCALE type string.
ParamWithTypes = TypedDict("ParamWithTypes", {"name": str, "type": str})


class subtensor:
"""Factory Class for bittensor.subtensor

Expand Down Expand Up @@ -1190,6 +1198,81 @@ def make_substrate_call_with_retry():

return make_substrate_call_with_retry()

def state_call(
    self,
    method: str,
    data: str,
    block: Optional[int] = None,
) -> Optional[object]:
    """Makes a raw ``state_call`` RPC request against the chain.

    Args:
        method (str): Fully qualified runtime API method name,
            e.g. ``"NeuronInfoRuntimeApi_get_neuron_lite"``.
        data (str): Hex string of the SCALE-encoded call parameters.
        block (Optional[int]): Block number to query at, or ``None`` for
            the current chain head.

    Returns:
        The JSON-RPC response from the substrate node.
    """

    @retry(delay=2, tries=3, backoff=2, max_delay=4)
    def make_substrate_call_with_retry():
        with self.substrate as substrate:
            # Pin the query to a specific block only when one was requested.
            block_hash = None if block is None else substrate.get_block_hash(block)
            params = [method, data]
            if block_hash:
                params = params + [block_hash]
            return substrate.rpc_request(method="state_call", params=params)

    return make_substrate_call_with_retry()

def query_runtime_api(
    self,
    runtime_api: str,
    method: str,
    params: Optional[List[ParamWithTypes]],
    block: Optional[int] = None,
) -> Optional[str]:
    """Calls a runtime API method via ``state_call`` and decodes the result.

    Args:
        runtime_api (str): Runtime API name registered under
            ``bittensor.__type_registry__["runtime_api"]``.
        method (str): Method name within that runtime API.
        params: Call arguments, positional (list) or keyed by parameter
            name (dict); ``None`` when the method takes no parameters.
        block (Optional[int]): Block number to query at, or ``None`` for
            the current chain head.

    Returns:
        The decoded return value -- for ``Vec<u8>`` methods this is a hex
        string that callers convert with ``bytes.fromhex`` -- or ``None``
        if the RPC request returned nothing.
    """
    call_definition = bittensor.__type_registry__["runtime_api"][runtime_api][
        "methods"
    ][method]

    json_result = self.state_call(
        method=f"{runtime_api}_{method}",
        # No params means an empty SCALE payload.
        data="0x"
        if params is None
        else self._encode_params(call_definition=call_definition, params=params),
        block=block,
    )

    if json_result is None:
        return None

    return_type = call_definition["type"]

    # NOTE(review): relies on a module-level ``import scalecodec`` -- confirm
    # it is present at the top of this file.
    as_scale_bytes = scalecodec.ScaleBytes(json_result["result"])

    # Build a fresh runtime config with the legacy preset plus our custom
    # types so the SCALE object can be decoded.
    rpc_runtime_config = RuntimeConfiguration()
    rpc_runtime_config.update_type_registry(load_type_registry_preset("legacy"))
    rpc_runtime_config.update_type_registry(custom_rpc_type_registry)

    obj = rpc_runtime_config.create_scale_object(return_type)

    return obj.decode(as_scale_bytes)

def _encode_params(
    self,
    call_definition: Dict[str, Any],
    params: Union[List[Any], Dict[str, str]],
) -> str:
    """SCALE-encodes ``params`` per the call definition; returns a hex string.

    Args:
        call_definition: Runtime API method definition; its ``"params"``
            entry lists each parameter's ``name`` and SCALE ``type``.
        params: Values given positionally (list) or keyed by parameter
            name (dict).

    Returns:
        str: Hex string (``0x...``) of the concatenated SCALE encoding.

    Raises:
        ValueError: If a dict ``params`` is missing a named parameter.
    """
    param_data = scalecodec.ScaleBytes(b"")

    for i, param in enumerate(call_definition["params"]):
        scale_obj = self.substrate.create_scale_object(param["type"])
        if isinstance(params, list):
            param_data += scale_obj.encode(params[i])
        else:
            if param["name"] not in params:
                raise ValueError(f"Missing param {param['name']} in params dict.")

            param_data += scale_obj.encode(params[param["name"]])

    return param_data.to_hex()

#####################################
#### Hyper parameter calls. ####
#####################################
Expand Down Expand Up @@ -1859,25 +1942,25 @@ def neuron_for_uid_lite(
if uid == None:
return NeuronInfoLite._null_neuron()

@retry(delay=2, tries=3, backoff=2, max_delay=4)
def make_substrate_call_with_retry():
with self.substrate as substrate:
block_hash = None if block == None else substrate.get_block_hash(block)
params = [netuid, uid]
if block_hash:
params = params + [block_hash]
return substrate.rpc_request(
method="neuronInfo_getNeuronLite", # custom rpc method
params=params,
)

json_body = make_substrate_call_with_retry()
result = json_body["result"]
hex_bytes_result = self.query_runtime_api(
runtime_api="NeuronInfoRuntimeApi",
method="get_neuron_lite",
params={
"netuid": netuid,
"uid": uid,
},
block=block,
)

if result in (None, []):
if hex_bytes_result == None:
return NeuronInfoLite._null_neuron()

return NeuronInfoLite.from_vec_u8(result)
if hex_bytes_result.startswith("0x"):
bytes_result = bytes.fromhex(hex_bytes_result[2:])
else:
bytes_result = bytes.fromhex(hex_bytes_result)

return NeuronInfoLite.from_vec_u8(bytes_result)

def neurons_lite(
self, netuid: int, block: Optional[int] = None
Expand All @@ -1892,26 +1975,22 @@ def neurons_lite(
neuron (List[NeuronInfoLite]):
List of neuron lite metadata objects.
"""
hex_bytes_result = self.query_runtime_api(
runtime_api="NeuronInfoRuntimeApi",
method="get_neurons_lite",
params=[netuid],
block=block,
)

@retry(delay=2, tries=3, backoff=2, max_delay=4)
def make_substrate_call_with_retry():
with self.substrate as substrate:
block_hash = None if block == None else substrate.get_block_hash(block)
params = [netuid]
if block_hash:
params = params + [block_hash]
return substrate.rpc_request(
method="neuronInfo_getNeuronsLite", # custom rpc method
params=params,
)

json_body = make_substrate_call_with_retry()
result = json_body["result"]
if hex_bytes_result == None:
return None

if result in (None, []):
return []
if hex_bytes_result.startswith("0x"):
bytes_result = bytes.fromhex(hex_bytes_result[2:])
else:
bytes_result = bytes.fromhex(hex_bytes_result)

return NeuronInfoLite.list_from_vec_u8(result)
return NeuronInfoLite.list_from_vec_u8(bytes_result)

def metagraph(
self, netuid: int, lite: bool = True, block: Optional[int] = None
Expand Down
76 changes: 5 additions & 71 deletions tests/unit_tests/utils/test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,72 +46,6 @@
from tests.helpers import _get_mock_wallet as _generate_wallet, _get_mock_keypair


@fixture(scope="function")
def setup_chain():
    """Spawn a throwaway local node-subtensor dev chain for one test.

    Yields the selected websocket port, then kills the node on teardown.
    """
    operating_system = "OSX" if platform == "darwin" else "Linux"
    path = "./bin/chain/{}/node-subtensor".format(operating_system)
    logger.info(path)
    # NOTE(review): ``path`` is a non-empty literal, so this branch can never
    # fire -- the intent was probably ``if not os.path.exists(path):``.
    if not path:
        logger.error(
            "make sure the NODE_SUBTENSOR_BIN env var is set and points to the node-subtensor binary"
        )
        sys.exit()

    # Select a port
    port = select_port()

    # Delete existing wallets
    # subprocess.Popen(["rm", '-r', '~/.bittensor/wallets/*testwallet'], close_fds=True, shell=False)

    # Purge chain first
    subprocess.Popen([path, "purge-chain", "--dev", "-y"], close_fds=True, shell=False)
    # Launch the dev node; p2p on port+1, websocket on port, rpc on port+2.
    proc = subprocess.Popen(
        [
            path,
            "--dev",
            "--port",
            str(port + 1),
            "--ws-port",
            str(port),
            "--rpc-port",
            str(port + 2),
            "--tmp",
        ],
        close_fds=True,
        shell=False,
    )

    # Wait 4 seconds for the node to come up
    time.sleep(4)

    yield port

    # Wait 4 seconds for the node to come up
    time.sleep(4)

    # Kill process
    os.system("kill %i" % proc.pid)


@pytest.fixture(scope="session", autouse=True)
def initialize_tests():
    """Session-wide setup: kill any stray node-subtensor before tests run."""
    # Kill any running process before running tests
    os.system("pkill node-subtensor")


def select_port() -> int:
    """Pick a pseudo-random port for a throwaway local test node.

    Returns:
        int: A port in the unprivileged range [1024, 65536).
    """
    # Start at 1024 so we never pick a privileged port (<1024), which the
    # test node could not bind without root.
    return random.randrange(1024, 65536, 5)


def setup_subtensor(port: int):
    """Build a bittensor.subtensor client wired to a local node on *port*."""
    chain_endpoint = "localhost:{}".format(port)
    return bittensor.subtensor(chain_endpoint=chain_endpoint), port


def construct_config():
parser = bittensor.cli.__create_parser__()
defaults = bittensor.config(parser=parser, args=[])
Expand Down Expand Up @@ -191,7 +125,7 @@ def test_solve_for_difficulty_fast(self):

subtensor.difficulty = MagicMock(return_value=10)
solution = bittensor.utils.registration._solve_for_difficulty_fast(
subtensor, wallet, netuid=-1, num_processes=num_proc
subtensor, wallet, netuid=-2, num_processes=num_proc
)
seal = solution.seal
assert bittensor.utils.registration._seal_meets_difficulty(seal, 10, limit)
Expand All @@ -213,7 +147,7 @@ def test_solve_for_difficulty_fast_registered_already(self):
subtensor = MagicMock()
subtensor.get_current_block = MagicMock(return_value=1)
subtensor.difficulty = MagicMock(
return_value=int(1e10)
return_value=int(1e20)
) # set high to make solving take a long time
subtensor.substrate = MagicMock()
subtensor.get_block_hash = MagicMock(return_value=block_hash)
Expand All @@ -226,7 +160,7 @@ def test_solve_for_difficulty_fast_registered_already(self):

# all arugments should return None to indicate an early return
solution = bittensor.utils.registration._solve_for_difficulty_fast(
subtensor, wallet, netuid=-1, num_processes=1, update_interval=1000
subtensor, wallet, netuid=-2, num_processes=1, update_interval=1000
)

assert solution is None
Expand Down Expand Up @@ -261,7 +195,7 @@ def test_solve_for_difficulty_fast_missing_hash(self):
assert bittensor.utils.registration._seal_meets_difficulty(seal, 1, limit)
subtensor.difficulty = MagicMock(return_value=10)
solution = bittensor.utils.registration._solve_for_difficulty_fast(
subtensor, wallet, netuid=-1, num_processes=num_proc
subtensor, wallet, netuid=-2, num_processes=num_proc
)
seal = solution.seal
assert bittensor.utils.registration._seal_meets_difficulty(seal, 10, limit)
Expand Down Expand Up @@ -494,7 +428,7 @@ def test_check_for_newest_block_and_update_new_block(self):
self.assertEqual(
bittensor.utils.registration._check_for_newest_block_and_update(
subtensor,
-1, # netuid
-2, # netuid
MagicMock(),
mock_hotkey_bytes,
MagicMock(),
Expand Down