diff --git a/.circleci/config.yml b/.circleci/config.yml index 6bd5e47978..a4d07de74b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -6,7 +6,7 @@ orbs: # coveralls: coveralls/coveralls@1.0.6 jobs: - black: + ruff: resource_class: small parameters: python-version: @@ -18,29 +18,29 @@ jobs: - checkout - restore_cache: - name: Restore cached black venv + name: Restore cached ruff venv keys: - - v2-pypi-py-black-<< parameters.python-version >> + - v2-pypi-py-ruff-<< parameters.python-version >> - run: - name: Update & Activate black venv + name: Update & Activate ruff venv command: | - python -m venv env/ - . env/bin/activate + python -m venv .venv + . .venv/bin/activate python -m pip install --upgrade pip - pip install black==23.7.0 + pip install ruff -c requirements/dev.txt - save_cache: - name: Save cached black venv + name: Save cached ruff venv paths: - - "env/" - key: v2-pypi-py-black-<< parameters.python-version >> + - ".venv/" + key: v2-pypi-py-ruff-<< parameters.python-version >> - run: - name: Black format check + name: Ruff format check command: | - . env/bin/activate - python -m black --exclude '(env|venv|.eggs)' --check . + . .venv/bin/activate + ruff format --diff . check_compatibility: parameters: @@ -85,8 +85,8 @@ jobs: - run: name: Update & Activate venv command: | - python -m venv env/ - . env/bin/activate + python -m venv .venv + . .venv/bin/activate python -m pip install --upgrade pip python -m pip install '.[dev]' @@ -99,20 +99,20 @@ jobs: - run: name: Install Bittensor command: | - . env/bin/activate + . .venv/bin/activate pip install -e '.[dev]' - run: name: Instantiate Mock Wallet command: | - . env/bin/activate + . .venv/bin/activate ./scripts/create_wallet.sh - # TODO: Update test durations on different runs - run: name: Unit Tests + no_output_timeout: 20m command: | - . env/bin/activate + . 
.venv/bin/activate export PYTHONUNBUFFERED=1 pytest -n2 --reruns 3 --durations=0 --verbose --junitxml=test-results/unit_tests.xml \ --cov=. --cov-append --cov-config .coveragerc \ @@ -122,8 +122,9 @@ jobs: - run: name: Integration Tests + no_output_timeout: 30m command: | - . env/bin/activate + . .venv/bin/activate export PYTHONUNBUFFERED=1 pytest -n2 --reruns 3 --reruns-delay 15 --durations=0 --verbose --junitxml=test-results/integration_tests.xml \ --cov=. --cov-append --cov-config .coveragerc \ @@ -143,7 +144,7 @@ jobs: #- run: #name: Upload Coverage #command: | - #. env/bin/activate && coveralls + #. .venv/bin/activate && coveralls #env: #CI_NAME: circleci #CI_BUILD_NUMBER: $CIRCLE_BUILD_NUM @@ -173,8 +174,8 @@ jobs: - run: name: Update & Activate venv command: | - python -m venv env/ - . env/bin/activate + python -m venv .venv + . .venv/bin/activate python -m pip install --upgrade pip python -m pip install '.[dev]' pip install flake8 @@ -188,19 +189,19 @@ jobs: - run: name: Install Bittensor command: | - . env/bin/activate + . .venv/bin/activate pip install -e '.[dev]' - run: name: Lint with flake8 command: | - . env/bin/activate + . .venv/bin/activate python -m flake8 bittensor/ --count - run: name: Type check with mypy command: | - . env/bin/activate + . 
.venv/bin/activate python -m mypy --ignore-missing-imports bittensor/ unit-tests-all-python-versions: @@ -290,7 +291,7 @@ workflows: pr-requirements: jobs: - - black: + - ruff: python-version: "3.9.13" - build-and-test: matrix: diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..adff4d0aab --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,8 @@ +version: 2 +updates: + - package-ecosystem: "pip" + directory: "/" + file: "requirements/prod.txt" + schedule: + interval: "daily" + open-pull-requests-limit: 0 # Only security updates will be opened as PRs diff --git a/.github/workflows/e2e-subtensor-tests.yaml b/.github/workflows/e2e-subtensor-tests.yaml new file mode 100644 index 0000000000..1d3d6bb5ce --- /dev/null +++ b/.github/workflows/e2e-subtensor-tests.yaml @@ -0,0 +1,84 @@ +name: E2E Subtensor Tests + +concurrency: + group: e2e-subtensor-${{ github.ref }} + cancel-in-progress: true + +on: + ## Run automatically for all PRs against main, regardless of what the changes are + ## to be safe and so we can more easily force re-run the CI when github is being + ## weird by using a blank commit + push: + branches: [main, development, staging] + + ## + # Run automatically for PRs against default/main branch if Rust files change + pull_request: + branches: [main, development, staging] + + ## Allow running workflow manually from the Actions tab + workflow_dispatch: + inputs: + verbose: + description: "Output more information when triggered manually" + required: false + default: "" + +env: + CARGO_TERM_COLOR: always + VERBOSE: ${{ github.event.inputs.verbose }} + +jobs: + run: + runs-on: SubtensorCI + strategy: + matrix: + rust-branch: + - nightly-2024-03-05 + rust-target: + - x86_64-unknown-linux-gnu + # - x86_64-apple-darwin + os: + - ubuntu-latest + # - macos-latest + include: + - os: ubuntu-latest + # - os: macos-latest + env: + RELEASE_NAME: development + RUSTV: ${{ matrix.rust-branch }} + RUST_BACKTRACE: full + 
RUST_BIN_DIR: target/${{ matrix.rust-target }} + TARGET: ${{ matrix.rust-target }} + steps: + - name: Check-out repository under $GITHUB_WORKSPACE + uses: actions/checkout@v2 + + - name: Install dependencies + run: | + sudo apt-get update && + sudo apt-get install -y clang curl libssl-dev llvm libudev-dev protobuf-compiler + + - name: Install Rust ${{ matrix.rust-branch }} + uses: actions-rs/toolchain@v1.0.6 + with: + toolchain: ${{ matrix.rust-branch }} + components: rustfmt + profile: minimal + + - name: Add wasm32-unknown-unknown target + run: | + rustup target add wasm32-unknown-unknown --toolchain stable-x86_64-unknown-linux-gnu + rustup component add rust-src --toolchain stable-x86_64-unknown-linux-gnu + + - name: Clone subtensor repo + run: git clone https://github.com/opentensor/subtensor.git + + - name: Setup subtensor repo + working-directory: ${{ github.workspace }}/subtensor + run: git checkout testnet + + - name: Run tests + run: | + python3 -m pip install -e .[dev] pytest + LOCALNET_SH_PATH="${{ github.workspace }}/subtensor/scripts/localnet.sh" pytest tests/e2e_tests/ -s diff --git a/CHANGELOG.md b/CHANGELOG.md index 21e90b2a96..7cb2964f6d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,30 @@ # Changelog +## 7.2.0 / 2024-06-12 + +## What's Changed +* less verbose handled synapse exceptions by @mjurbanski-reef in https://github.com/opentensor/bittensor/pull/1928 +* Clean up the imports in commands/stake.py by @thewhaleking in https://github.com/opentensor/bittensor/pull/1951 +* Fix E2E test for Commit/Reveal with Salt flag by @opendansor in https://github.com/opentensor/bittensor/pull/1952 +* `bittensor.chain_data.py` module refactoring. 
by @RomanCh-OT in https://github.com/opentensor/bittensor/pull/1955 +* ci: e2e tests by @orriin in https://github.com/opentensor/bittensor/pull/1915 +* Dependency cleanup by @mjurbanski-reef in https://github.com/opentensor/bittensor/pull/1967 +* replace `black` with `ruff` by @mjurbanski-reef in https://github.com/opentensor/bittensor/pull/1968 +* post-black to ruff migration cleanup by @mjurbanski-reef in https://github.com/opentensor/bittensor/pull/1979 +* Revert Axon IP decoding changes by @camfairchild in https://github.com/opentensor/bittensor/pull/1981 +* A wrapper for presenting extrinsics errors in a human-readable form. by @RomanCh-OT in https://github.com/opentensor/bittensor/pull/1980 +* Feat: Added normalized hyperparams by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/1891 +* deprecate nest_asyncio use by @mjurbanski-reef in https://github.com/opentensor/bittensor/pull/1974 +* Add e2e test for axon by @opendansor in https://github.com/opentensor/bittensor/pull/1984 +* Dendrite E2E test by @opendansor in https://github.com/opentensor/bittensor/pull/1988 +* fix __version_as_int__ for >10 minor/patch release vers (resolves #1982) by @mjurbanski-reef in https://github.com/opentensor/bittensor/pull/1993 +* Test Incentive E2E by @opendansor in https://github.com/opentensor/bittensor/pull/2002 +* Add E2E faucet test by @opendansor in https://github.com/opentensor/bittensor/pull/1987 +* Allow unstake below network min by @camfairchild in https://github.com/opentensor/bittensor/pull/2016 + +**Full Changelog**: https://github.com/opentensor/bittensor/compare/v7.1.1...v7.2.0 + + ## 7.1.1 / 2024-06-11 ## What's Changed @@ -25,16 +50,6 @@ ## New Contributors * @renesweet24 made their first contribution in https://github.com/opentensor/bittensor/pull/1960 -**Full Changelog**: https://github.com/opentensor/bittensor/compare/v7.0.0...v7.0.1 - -## 7.0.0 / 2024-05-17 - -## What's Changed -* Release/7.0.0 by @gus-opentensor in 
https://github.com/opentensor/bittensor/pull/1899 -* Fix return of ip version. by @opendansor in https://github.com/opentensor/bittensor/pull/1961 -* Fix trigger use_torch() by @renesweet24 https://github.com/opentensor/bittensor/pull/1960 - - **Full Changelog**: https://github.com/opentensor/bittensor/compare/v7.0.0...v7.0.1 @@ -948,4 +963,3 @@ This release refactors the registration code for CPU registration to improve sol ## - diff --git a/VERSION b/VERSION index 21c8c7b46b..4b49d9bb63 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -7.1.1 +7.2.0 \ No newline at end of file diff --git a/bittensor/__init__.py b/bittensor/__init__.py index 20693f11be..7cb37ef0d0 100644 --- a/bittensor/__init__.py +++ b/bittensor/__init__.py @@ -16,25 +16,41 @@ # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. +import os +import warnings from rich.console import Console from rich.traceback import install -# Install and apply nest asyncio to allow the async functions -# to run in a .ipynb -import nest_asyncio -nest_asyncio.apply() +if (NEST_ASYNCIO_ENV := os.getenv("NEST_ASYNCIO")) in ("1", None): + if NEST_ASYNCIO_ENV is None: + warnings.warn( + "NEST_ASYNCIO implicitly set to '1'. In the future, the default value will be '0'." + "If you use `nest_asyncio` make sure to add it explicitly to your project dependencies," + "as it will be removed from `bittensor` package dependencies in the future." + "To silence this warning, explicitly set the environment variable, e.g. `export NEST_ASYNCIO=0`.", + DeprecationWarning, + ) + # Install and apply nest asyncio to allow the async functions + # to run in a .ipynb + import nest_asyncio + + nest_asyncio.apply() + # Bittensor code and protocol version. 
-__version__ = "7.1.1" +__version__ = "7.2.0" -version_split = __version__.split(".") -__version_as_int__: int = ( - (100 * int(version_split[0])) - + (10 * int(version_split[1])) - + (1 * int(version_split[2])) +_version_split = __version__.split(".") +__version_info__ = tuple(int(part) for part in _version_split) +_version_int_base = 1000 +assert max(__version_info__) < _version_int_base + +__version_as_int__: int = sum( + e * (_version_int_base**i) for i, e in enumerate(reversed(__version_info__)) ) +assert __version_as_int__ < 2**31 # fits in int32 __new_signature_version__ = 360 # Rich console. @@ -45,6 +61,16 @@ install(show_locals=False) +def __getattr__(name): + if name == "version_split": + warnings.warn( + "version_split is deprecated and will be removed in future versions. Use __version__ instead.", + DeprecationWarning, + ) + return _version_split + raise AttributeError(f"module {__name__} has no attribute {name}") + + def turn_console_off(): global __use_console__ global __console__ @@ -79,7 +105,7 @@ def debug(on: bool = True): # Pip address for versioning __pipaddress__ = "https://pypi.org/pypi/bittensor/json" -# Raw github url for delegates registry file +# Raw GitHub url for delegates registry file __delegates_details_url__: str = "https://raw.githubusercontent.com/opentensor/bittensor-delegates/main/public/delegates.json" # Substrate ss58_format @@ -106,7 +132,7 @@ def debug(on: bool = True): __rao_symbol__: str = chr(0x03C1) # Block Explorers map network to explorer url -## Must all be polkadotjs explorer urls +# Must all be polkadotjs explorer urls __network_explorer_map__ = { "opentensor": { "local": "https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Fentrypoint-finney.opentensor.ai%3A443#/explorer", @@ -233,7 +259,7 @@ def debug(on: bool = True): UnstakeError, ) -from substrateinterface import Keypair as Keypair +from substrateinterface import Keypair # noqa: F401 from .config import InvalidConfigFile, DefaultConfig, config, T from .keyfile import 
( serialized_keypair_to_keyfile_data, @@ -286,8 +312,14 @@ def debug(on: bool = True): ProposalVoteData, ) +# Allows avoiding name spacing conflicts and continue access to the `subtensor` module with `subtensor_module` name from . import subtensor as subtensor_module -from .subtensor import subtensor as subtensor + +# Double import allows using class `Subtensor` by referencing `bittensor.Subtensor` and `bittensor.subtensor`. +# This will be available for a while until we remove reference `bittensor.subtensor` +from .subtensor import Subtensor +from .subtensor import Subtensor as subtensor + from .cli import cli as cli, COMMANDS as ALL_COMMANDS from .btlogging import logging from .metagraph import metagraph as metagraph diff --git a/bittensor/axon.py b/bittensor/axon.py index b5ec9a4d01..ca06335307 100644 --- a/bittensor/axon.py +++ b/bittensor/axon.py @@ -1,5 +1,4 @@ -""" Create and initialize Axon, which services the forward and backward requests from other neurons. -""" +"""Create and initialize Axon, which services the forward and backward requests from other neurons.""" # The MIT License (MIT) # Copyright © 2021 Yuma Rao @@ -30,13 +29,15 @@ import threading import time import traceback +import typing import uuid from inspect import signature, Signature, Parameter -from typing import List, Optional, Tuple, Callable, Any, Dict +from typing import List, Optional, Tuple, Callable, Any, Dict, Awaitable import uvicorn from fastapi import FastAPI, APIRouter, Depends from fastapi.responses import JSONResponse +from fastapi.routing import serialize_response from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint from starlette.requests import Request from starlette.responses import Response @@ -51,10 +52,10 @@ NotVerifiedException, BlacklistedException, PriorityException, - RunException, PostProcessException, - InternalServerError, + SynapseException, ) +from bittensor.constants import ALLOWED_DELTA, V_7_2_0 from bittensor.threadpool import 
PriorityThreadPoolExecutor from bittensor.utils import networking @@ -342,12 +343,12 @@ def __init__( self.port = self.config.axon.port self.external_ip = ( self.config.axon.external_ip - if self.config.axon.external_ip != None + if self.config.axon.external_ip is not None else bittensor.utils.networking.get_external_ip() ) self.external_port = ( self.config.axon.external_port - if self.config.axon.external_port != None + if self.config.axon.external_port is not None else self.config.axon.port ) self.full_address = str(self.config.axon.ip) + ":" + str(self.config.axon.port) @@ -365,7 +366,6 @@ def __init__( self.priority_fns: Dict[str, Optional[Callable]] = {} self.forward_fns: Dict[str, Optional[Callable]] = {} self.verify_fns: Dict[str, Optional[Callable]] = {} - self.required_hash_fields: Dict[str, str] = {} # Instantiate FastAPI self.app = FastAPI() @@ -378,7 +378,8 @@ def __init__( self.app.include_router(self.router) # Build ourselves as the middleware. - self.app.add_middleware(AxonMiddleware, axon=self) + self.middleware_cls = AxonMiddleware + self.app.add_middleware(self.middleware_cls, axon=self) # Attach default forward. def ping(r: bittensor.Synapse) -> bittensor.Synapse: @@ -467,89 +468,72 @@ def verify_custom(synapse: MyCustomSynapse): offered by this method allows developers to tailor the Axon's behavior to specific requirements and use cases. 
""" - - # Assert 'forward_fn' has exactly one argument forward_sig = signature(forward_fn) - assert ( - len(list(forward_sig.parameters)) == 1 - ), "The passed function must have exactly one argument" - - # Obtain the class of the first argument of 'forward_fn' - request_class = forward_sig.parameters[ - list(forward_sig.parameters)[0] - ].annotation + try: + first_param = next(iter(forward_sig.parameters.values())) + except StopIteration: + raise ValueError( + "The forward_fn first argument must be a subclass of bittensor.Synapse, but it has no arguments" + ) - # Assert that the first argument of 'forward_fn' is a subclass of 'bittensor.Synapse' + param_class = first_param.annotation assert issubclass( - request_class, bittensor.Synapse - ), "The argument of forward_fn must inherit from bittensor.Synapse" + param_class, bittensor.Synapse + ), "The first argument of forward_fn must inherit from bittensor.Synapse" + request_name = param_class.__name__ + + async def endpoint(*args, **kwargs): + start_time = time.time() + response_synapse = forward_fn(*args, **kwargs) + if isinstance(response_synapse, Awaitable): + response_synapse = await response_synapse + return await self.middleware_cls.synapse_to_response( + synapse=response_synapse, start_time=start_time + ) - # Obtain the class name of the first argument of 'forward_fn' - request_name = forward_sig.parameters[ - list(forward_sig.parameters)[0] - ].annotation.__name__ + # replace the endpoint signature, but set return annotation to JSONResponse + endpoint.__signature__ = Signature( # type: ignore + parameters=list(forward_sig.parameters.values()), + return_annotation=JSONResponse, + ) # Add the endpoint to the router, making it available on both GET and POST methods self.router.add_api_route( f"/{request_name}", - forward_fn, + endpoint, methods=["GET", "POST"], dependencies=[Depends(self.verify_body_integrity)], ) self.app.include_router(self.router) - # Expected signatures for 'blacklist_fn', 'priority_fn' and 
'verify_fn' - blacklist_sig = Signature( - [ - Parameter( - "synapse", - Parameter.POSITIONAL_OR_KEYWORD, - annotation=forward_sig.parameters[ - list(forward_sig.parameters)[0] - ].annotation, - ) - ], - return_annotation=Tuple[bool, str], - ) - priority_sig = Signature( - [ - Parameter( - "synapse", - Parameter.POSITIONAL_OR_KEYWORD, - annotation=forward_sig.parameters[ - list(forward_sig.parameters)[0] - ].annotation, - ) - ], - return_annotation=float, - ) - verify_sig = Signature( - [ - Parameter( - "synapse", - Parameter.POSITIONAL_OR_KEYWORD, - annotation=forward_sig.parameters[ - list(forward_sig.parameters)[0] - ].annotation, - ) - ], - return_annotation=None, - ) - # Check the signature of blacklist_fn, priority_fn and verify_fn if they are provided + expected_params = [ + Parameter( + "synapse", + Parameter.POSITIONAL_OR_KEYWORD, + annotation=forward_sig.parameters[ + list(forward_sig.parameters)[0] + ].annotation, + ) + ] if blacklist_fn: + blacklist_sig = Signature( + expected_params, return_annotation=Tuple[bool, str] + ) assert ( signature(blacklist_fn) == blacklist_sig ), "The blacklist_fn function must have the signature: blacklist( synapse: {} ) -> Tuple[bool, str]".format( request_name ) if priority_fn: + priority_sig = Signature(expected_params, return_annotation=float) assert ( signature(priority_fn) == priority_sig ), "The priority_fn function must have the signature: priority( synapse: {} ) -> float".format( request_name ) if verify_fn: + verify_sig = Signature(expected_params, return_annotation=None) assert ( signature(verify_fn) == verify_sig ), "The verify_fn function must have the signature: verify( synapse: {} ) -> None".format( @@ -557,9 +541,7 @@ def verify_custom(synapse: MyCustomSynapse): ) # Store functions in appropriate attribute dictionaries - self.forward_class_types[request_name] = forward_sig.parameters[ - list(forward_sig.parameters)[0] - ].annotation + self.forward_class_types[request_name] = param_class 
self.blacklist_fns[request_name] = blacklist_fn self.priority_fns[request_name] = priority_fn self.verify_fns[request_name] = ( @@ -567,12 +549,6 @@ def verify_custom(synapse: MyCustomSynapse): ) # Use 'default_verify' if 'verify_fn' is None self.forward_fns[request_name] = forward_fn - # Parse required hash fields from the forward function protocol defaults - required_hash_fields = request_class.__dict__["model_fields"][ - "required_hash_fields" - ].default - self.required_hash_fields[request_name] = required_hash_fields - return self @classmethod @@ -697,9 +673,7 @@ async def verify_body_integrity(self, request: Request): body = await request.body() request_body = body.decode() if isinstance(body, bytes) else body - # Gather the required field names from the axon's required_hash_fields dict request_name = request.url.path.split("/")[1] - required_hash_fields = self.required_hash_fields[request_name] # Load the body dict and check if all required field hashes match body_dict = json.loads(request_body) @@ -915,14 +889,37 @@ async def default_verify(self, synapse: bittensor.Synapse): # Build the unique endpoint key. endpoint_key = f"{synapse.dendrite.hotkey}:{synapse.dendrite.uuid}" - # Check the nonce from the endpoint key. + # Requests must have nonces to be safe from replays + if synapse.dendrite.nonce is None: + raise Exception("Missing Nonce") + + # If we don't have a nonce stored, ensure that the nonce falls within + # a reasonable delta. + if ( - endpoint_key in self.nonces.keys() - and self.nonces[endpoint_key] is not None - and synapse.dendrite.nonce is not None - and synapse.dendrite.nonce <= self.nonces[endpoint_key] + synapse.dendrite.version is not None + and synapse.dendrite.version >= V_7_2_0 ): - raise Exception("Nonce is too small") + # If we don't have a nonce stored, ensure that the nonce falls within + # a reasonable delta. 
+ if ( + self.nonces.get(endpoint_key) is None + and synapse.dendrite.nonce + <= time.time_ns() - ALLOWED_DELTA - (synapse.timeout or 0) + ): + raise Exception("Nonce is too old") + if ( + self.nonces.get(endpoint_key) is not None + and synapse.dendrite.nonce <= self.nonces[endpoint_key] + ): + raise Exception("Nonce is too old") + else: + if ( + endpoint_key in self.nonces.keys() + and self.nonces[endpoint_key] is not None + and synapse.dendrite.nonce <= self.nonces[endpoint_key] + ): + raise Exception("Nonce is too small") if not keypair.verify(message, synapse.dendrite.signature): raise Exception( @@ -932,7 +929,7 @@ async def default_verify(self, synapse: bittensor.Synapse): # Success self.nonces[endpoint_key] = synapse.dendrite.nonce # type: ignore else: - raise SynapseDendriteNoneException() + raise SynapseDendriteNoneException(synapse=synapse) def create_error_response(synapse: bittensor.Synapse): @@ -953,28 +950,55 @@ def create_error_response(synapse: bittensor.Synapse): def log_and_handle_error( synapse: bittensor.Synapse, exception: Exception, - status_code: int, - start_time: float, + status_code: typing.Optional[int] = None, + start_time: typing.Optional[float] = None, ): - # Display the traceback for user clarity. - bittensor.logging.trace(f"Forward exception: {traceback.format_exc()}") + if isinstance(exception, SynapseException): + synapse = exception.synapse or synapse + + bittensor.logging.trace(f"Forward handled exception: {exception}") + else: + bittensor.logging.trace(f"Forward exception: {traceback.format_exc()}") + + if synapse.axon is None: + synapse.axon = bittensor.TerminalInfo() # Set the status code of the synapse to the given status code. 
+ error_id = str(uuid.uuid4()) error_type = exception.__class__.__name__ - error_message = str(exception) - detailed_error_message = f"{error_type}: {error_message}" # Log the detailed error message for internal use - bittensor.logging.error(detailed_error_message) + bittensor.logging.error(f"{error_type}#{error_id}: {exception}") + + if not status_code and synapse.axon.status_code != 100: + status_code = synapse.axon.status_code + status_message = synapse.axon.status_message + if isinstance(exception, SynapseException): + if not status_code: + if isinstance(exception, PriorityException): + status_code = 503 + elif isinstance(exception, UnknownSynapseError): + status_code = 404 + elif isinstance(exception, BlacklistedException): + status_code = 403 + elif isinstance(exception, NotVerifiedException): + status_code = 401 + elif isinstance(exception, (InvalidRequestNameError, SynapseParsingError)): + status_code = 400 + else: + status_code = 500 + status_message = status_message or str(exception) + else: + status_code = status_code or 500 + status_message = status_message or f"Internal Server Error #{error_id}" - if synapse.axon is None: - raise SynapseParsingError(detailed_error_message) # Set a user-friendly error message synapse.axon.status_code = status_code - synapse.axon.status_message = error_message + synapse.axon.status_message = status_message - # Calculate the processing time by subtracting the start time from the current time. - synapse.axon.process_time = str(time.time() - start_time) # type: ignore + if start_time: + # Calculate the processing time by subtracting the start time from the current time. + synapse.axon.process_time = str(time.time() - start_time) # type: ignore return synapse @@ -1044,7 +1068,14 @@ async def dispatch( try: # Set up the synapse from its headers. 
- synapse: bittensor.Synapse = await self.preprocess(request) + try: + synapse: bittensor.Synapse = await self.preprocess(request) + except Exception as exc: + if isinstance(exc, SynapseException) and exc.synapse is not None: + synapse = exc.synapse + else: + synapse = bittensor.Synapse() + raise # Logs the start of the request processing if synapse.dendrite is not None: @@ -1068,56 +1099,22 @@ async def dispatch( # Call the run function response = await self.run(synapse, call_next, request) - # Call the postprocess function - response = await self.postprocess(synapse, response, start_time) - # Handle errors related to preprocess. except InvalidRequestNameError as e: - if "synapse" not in locals(): - synapse: bittensor.Synapse = bittensor.Synapse() # type: ignore - log_and_handle_error(synapse, e, 400, start_time) + if synapse.axon is None: + synapse.axon = bittensor.TerminalInfo() + synapse.axon.status_code = 400 + synapse.axon.status_message = str(e) + synapse = log_and_handle_error(synapse, e, start_time=start_time) response = create_error_response(synapse) - - except SynapseParsingError as e: - if "synapse" not in locals(): - synapse = bittensor.Synapse() - log_and_handle_error(synapse, e, 400, start_time) - response = create_error_response(synapse) - - except UnknownSynapseError as e: - if "synapse" not in locals(): - synapse = bittensor.Synapse() - log_and_handle_error(synapse, e, 404, start_time) - response = create_error_response(synapse) - - # Handle errors related to verify. - except NotVerifiedException as e: - log_and_handle_error(synapse, e, 401, start_time) - response = create_error_response(synapse) - - # Handle errors related to blacklist. - except BlacklistedException as e: - log_and_handle_error(synapse, e, 403, start_time) - response = create_error_response(synapse) - - # Handle errors related to priority. 
- except PriorityException as e: - log_and_handle_error(synapse, e, 503, start_time) - response = create_error_response(synapse) - - # Handle errors related to run. - except RunException as e: - log_and_handle_error(synapse, e, 500, start_time) - response = create_error_response(synapse) - - # Handle errors related to postprocess. - except PostProcessException as e: - log_and_handle_error(synapse, e, 500, start_time) + except SynapseException as e: + synapse = e.synapse or synapse + synapse = log_and_handle_error(synapse, e, start_time=start_time) response = create_error_response(synapse) # Handle all other errors. except Exception as e: - log_and_handle_error(synapse, InternalServerError(str(e)), 500, start_time) + synapse = log_and_handle_error(synapse, e, start_time=start_time) response = create_error_response(synapse) # Logs the end of request processing and returns the response @@ -1165,7 +1162,7 @@ async def preprocess(self, request: Request) -> bittensor.Synapse: # Extracts the request name from the URL path. try: request_name = request.url.path.split("/")[1] - except: + except Exception: raise InvalidRequestNameError( f"Improperly formatted request. Could not parser request {request.url.path}." ) @@ -1180,7 +1177,7 @@ async def preprocess(self, request: Request) -> bittensor.Synapse: try: synapse = request_synapse.from_headers(request.headers) # type: ignore - except Exception as e: + except Exception: raise SynapseParsingError( f"Improperly formatted request. Could not parse headers {request.headers} into synapse of type {request_name}." 
) @@ -1191,9 +1188,8 @@ async def preprocess(self, request: Request) -> bittensor.Synapse: { "version": str(bittensor.__version_as_int__), "uuid": str(self.axon.uuid), - "nonce": f"{time.monotonic_ns()}", - "status_message": "Success", - "status_code": "100", + "nonce": time.time_ns(), + "status_code": 100, } ) @@ -1262,7 +1258,9 @@ async def verify(self, synapse: bittensor.Synapse): # We raise an exception to stop the process and return the error to the requester. # The error message includes the original exception message. - raise NotVerifiedException(f"Not Verified with error: {str(e)}") + raise NotVerifiedException( + f"Not Verified with error: {str(e)}", synapse=synapse + ) async def blacklist(self, synapse: bittensor.Synapse): """ @@ -1316,7 +1314,9 @@ async def blacklist(self, synapse: bittensor.Synapse): raise Exception("Synapse.axon object is None") # We raise an exception to halt the process and return the error message to the requester. - raise BlacklistedException(f"Forbidden. Key is blacklisted: {reason}.") + raise BlacklistedException( + f"Forbidden. Key is blacklisted: {reason}.", synapse=synapse + ) async def priority(self, synapse: bittensor.Synapse): """ @@ -1379,7 +1379,9 @@ async def submit_task( synapse.axon.status_code = 408 # Raise an exception to stop the process and return an appropriate error message to the requester. - raise PriorityException(f"Response timeout after: {synapse.timeout}s") + raise PriorityException( + f"Response timeout after: {synapse.timeout}s", synapse=synapse + ) async def run( self, @@ -1409,32 +1411,22 @@ async def run( response = await call_next(request) except Exception as e: - # If an exception occurs during the execution of the requested function, - # it is caught and handled here. - # Log the exception for debugging purposes. bittensor.logging.trace(f"Run exception: {str(e)}") - - # Set the status code of the synapse to "500" which indicates an internal server error. 
- if synapse.axon is not None: - synapse.axon.status_code = 500 - - # Raise an exception to stop the process and return an appropriate error message to the requester. - raise RunException(f"Internal server error with error: {str(e)}") + raise # Return the starlet response return response - async def postprocess( - self, synapse: bittensor.Synapse, response: Response, start_time: float - ) -> Response: + @classmethod + async def synapse_to_response( + cls, synapse: bittensor.Synapse, start_time: float + ) -> JSONResponse: """ - Performs the final processing on the response before sending it back to the client. This method - updates the response headers and logs the end of the request processing. + Converts the Synapse object into a JSON response with HTTP headers. Args: synapse (bittensor.Synapse): The Synapse object representing the request. - response (Response): The response generated by processing the request. start_time (float): The timestamp when the request processing started. Returns: @@ -1443,24 +1435,37 @@ async def postprocess( Postprocessing is the last step in the request handling process, ensuring that the response is properly formatted and contains all necessary information. """ - # Set the status code of the synapse to "200" which indicates a successful response. - if synapse.axon is not None: + if synapse.axon is None: + synapse.axon = bittensor.TerminalInfo() + + if synapse.axon.status_code is None: synapse.axon.status_code = 200 - # Set the status message of the synapse to "Success". + if synapse.axon.status_code == 200 and not synapse.axon.status_message: synapse.axon.status_message = "Success" + synapse.axon.process_time = time.time() - start_time + + serialized_synapse = await serialize_response(response_content=synapse) + response = JSONResponse( + status_code=synapse.axon.status_code, + content=serialized_synapse, + ) + try: - # Update the response headers with the headers from the synapse. 
updated_headers = synapse.to_headers() - response.headers.update(updated_headers) except Exception as e: - # If there is an exception during the response header update, we log the exception. raise PostProcessException( - f"Error while parsing or updating response headers. Postprocess exception: {str(e)}." - ) + f"Error while parsing response headers. Postprocess exception: {str(e)}.", + synapse=synapse, + ) from e - # Calculate the processing time by subtracting the start time from the current time. - synapse.axon.process_time = str(time.time() - start_time) # type: ignore + try: + response.headers.update(updated_headers) + except Exception as e: + raise PostProcessException( + f"Error while updating response headers. Postprocess exception: {str(e)}.", + synapse=synapse, + ) from e return response diff --git a/bittensor/chain_data.py b/bittensor/chain_data.py index 8a11d209f0..4a9f98244c 100644 --- a/bittensor/chain_data.py +++ b/bittensor/chain_data.py @@ -14,17 +14,24 @@ # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. -import bittensor + +""" +This module provides data structures and functions for working with the Bittensor network, +including neuron and subnet information, SCALE encoding/decoding, and custom RPC type registry. 
+""" + import json -from enum import Enum from dataclasses import dataclass, asdict -from scalecodec.types import GenericCall +from enum import Enum from typing import List, Tuple, Dict, Optional, Any, TypedDict, Union + from scalecodec.base import RuntimeConfiguration, ScaleBytes from scalecodec.type_registry import load_type_registry_preset +from scalecodec.types import GenericCall from scalecodec.utils.ss58 import ss58_encode -from .utils import networking as net, U16_MAX, U16_NORMALIZED_FLOAT +import bittensor +from .utils import networking as net, RAOPERTAO, U16_NORMALIZED_FLOAT from .utils.balance import Balance from .utils.registration import torch, use_torch @@ -200,18 +207,16 @@ class AxonInfo: @property def is_serving(self) -> bool: """True if the endpoint is serving.""" - if self.ip == "0.0.0.0": - return False - else: - return True + return self.ip != "0.0.0.0" def ip_str(self) -> str: """Return the whole IP as string""" return net.ip__str__(self.ip_type, self.ip, self.port) def __eq__(self, other: "AxonInfo"): - if other == None: + if other is None: return False + if ( self.version == other.version and self.ip == other.ip @@ -221,8 +226,8 @@ def __eq__(self, other: "AxonInfo"): and self.hotkey == other.hotkey ): return True - else: - return False + + return False def __str__(self): return "AxonInfo( {}, {}, {}, {} )".format( @@ -241,10 +246,23 @@ def to_string(self) -> str: return AxonInfo(0, "", 0, 0, "", "").to_string() @classmethod - def from_string(cls, s: str) -> "AxonInfo": - """Creates an AxonInfo object from its string representation using JSON.""" + def from_string(cls, json_string: str) -> "AxonInfo": + """ + Creates an AxonInfo object from its string representation using JSON. + + Args: + json_string (str): The JSON string representation of the AxonInfo object. + + Returns: + AxonInfo: An instance of AxonInfo created from the JSON string. If decoding fails, returns a default AxonInfo object with default values. 
+ + Raises: + json.JSONDecodeError: If there is an error in decoding the JSON string. + TypeError: If there is a type error when creating the AxonInfo object. + ValueError: If there is a value error when creating the AxonInfo object. + """ try: - data = json.loads(s) + data = json.loads(json_string) return cls(**data) except json.JSONDecodeError as e: bittensor.logging.error(f"Error decoding JSON: {e}") @@ -274,33 +292,14 @@ def from_neuron_info(cls, neuron_info: dict) -> "AxonInfo": coldkey=neuron_info["coldkey"], ) - def _to_parameter_dict( - self, return_type: str - ) -> Union[dict[str, Union[int, str]], "torch.nn.ParameterDict"]: - if return_type == "torch": - return torch.nn.ParameterDict(self.__dict__) - else: - return self.__dict__ - def to_parameter_dict( self, ) -> Union[dict[str, Union[int, str]], "torch.nn.ParameterDict"]: - """Returns a torch tensor or dict of the subnet info, depending on the USE_TORCH flag set""" + """Returns a torch tensor or dict of the subnet info, depending on the USE_TORCH flag set.""" if use_torch(): - return self._to_parameter_dict("torch") - else: - return self._to_parameter_dict("numpy") - - @classmethod - def _from_parameter_dict( - cls, - parameter_dict: Union[dict[str, Any], "torch.nn.ParameterDict"], - return_type: str, - ) -> "AxonInfo": - if return_type == "torch": - return cls(**dict(parameter_dict)) + return torch.nn.ParameterDict(self.__dict__) else: - return cls(**parameter_dict) + return self.__dict__ @classmethod def from_parameter_dict( @@ -308,9 +307,9 @@ def from_parameter_dict( ) -> "AxonInfo": """Returns an axon_info object from a torch parameter_dict or a parameter dict.""" if use_torch(): - return cls._from_parameter_dict(parameter_dict, "torch") + return cls(**dict(parameter_dict)) else: - return cls._from_parameter_dict(parameter_dict, "numpy") + return cls(**parameter_dict) class ChainDataType(Enum): @@ -324,18 +323,24 @@ class ChainDataType(Enum): SubnetHyperparameters = 8 -# Constants -RAOPERTAO = 1e9 
-U16_MAX = 65535 -U64_MAX = 18446744073709551615 - - def from_scale_encoding( - input: Union[List[int], bytes, ScaleBytes], + input_: Union[List[int], bytes, ScaleBytes], type_name: ChainDataType, is_vec: bool = False, is_option: bool = False, ) -> Optional[Dict]: + """ + Decodes input_ data from SCALE encoding based on the specified type name and modifiers. + + Args: + input_ (Union[List[int], bytes, ScaleBytes]): The input_ data to decode. + type_name (ChainDataType): The type of data being decoded. + is_vec (bool, optional): Whether the data is a vector of the specified type. Default is ``False``. + is_option (bool, optional): Whether the data is an optional value of the specified type. Default is ``False``. + + Returns: + Optional[Dict]: The decoded data as a dictionary, or ``None`` if the decoding fails. + """ type_string = type_name.name if type_name == ChainDataType.DelegatedInfo: # DelegatedInfo is a tuple of (DelegateInfo, Compact) @@ -345,22 +350,22 @@ def from_scale_encoding( if is_vec: type_string = f"Vec<{type_string}>" - return from_scale_encoding_using_type_string(input, type_string) + return from_scale_encoding_using_type_string(input_, type_string) def from_scale_encoding_using_type_string( - input: Union[List[int], bytes, ScaleBytes], type_string: str + input_: Union[List[int], bytes, ScaleBytes], type_string: str ) -> Optional[Dict]: - if isinstance(input, ScaleBytes): - as_scale_bytes = input + if isinstance(input_, ScaleBytes): + as_scale_bytes = input_ else: - if isinstance(input, list) and all([isinstance(i, int) for i in input]): - vec_u8 = input + if isinstance(input_, list) and all([isinstance(i, int) for i in input_]): + vec_u8 = input_ as_bytes = bytes(vec_u8) - elif isinstance(input, bytes): - as_bytes = input + elif isinstance(input_, bytes): + as_bytes = input_ else: - raise TypeError("input must be a List[int], bytes, or ScaleBytes") + raise TypeError("input_ must be a List[int], bytes, or ScaleBytes") as_scale_bytes = 
ScaleBytes(as_bytes) @@ -376,9 +381,7 @@ def from_scale_encoding_using_type_string( # Dataclasses for chain data. @dataclass class NeuronInfo: - r""" - Dataclass for neuron metadata. - """ + """Dataclass for neuron metadata.""" hotkey: str coldkey: str @@ -407,7 +410,7 @@ class NeuronInfo: @classmethod def fix_decoded_values(cls, neuron_info_decoded: Any) -> "NeuronInfo": - r"""Fixes the values of the NeuronInfo object.""" + """Fixes the values of the NeuronInfo object.""" neuron_info_decoded["hotkey"] = ss58_encode( neuron_info_decoded["hotkey"], bittensor.__ss58_format__ ) @@ -453,26 +456,23 @@ def fix_decoded_values(cls, neuron_info_decoded: Any) -> "NeuronInfo": neuron_info_decoded["axon_info"] = AxonInfo.from_neuron_info( neuron_info_decoded ) - return cls(**neuron_info_decoded) @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> "NeuronInfo": - r"""Returns a NeuronInfo object from a ``vec_u8``.""" + """Returns a NeuronInfo object from a ``vec_u8``.""" if len(vec_u8) == 0: - return NeuronInfo._null_neuron() + return NeuronInfo.get_null_neuron() decoded = from_scale_encoding(vec_u8, ChainDataType.NeuronInfo) if decoded is None: - return NeuronInfo._null_neuron() - - decoded = NeuronInfo.fix_decoded_values(decoded) + return NeuronInfo.get_null_neuron() - return decoded + return NeuronInfo.fix_decoded_values(decoded) @classmethod def list_from_vec_u8(cls, vec_u8: List[int]) -> List["NeuronInfo"]: - r"""Returns a list of NeuronInfo objects from a ``vec_u8``.""" + """Returns a list of NeuronInfo objects from a ``vec_u8``""" decoded_list = from_scale_encoding( vec_u8, ChainDataType.NeuronInfo, is_vec=True @@ -486,7 +486,7 @@ def list_from_vec_u8(cls, vec_u8: List[int]) -> List["NeuronInfo"]: return decoded_list @staticmethod - def _null_neuron() -> "NeuronInfo": + def get_null_neuron() -> "NeuronInfo": neuron = NeuronInfo( uid=0, netuid=0, @@ -527,34 +527,10 @@ def from_weights_bonds_and_neuron_lite( return cls(**n_dict) - @staticmethod - def 
_neuron_dict_to_namespace(neuron_dict) -> "NeuronInfo": - # TODO: Legacy: remove? - if neuron_dict["hotkey"] == "5C4hrfjw9DjXZTzV3MwzrrAr9P1MJhSrvWGWqi1eSuyUpnhM": - return NeuronInfo._null_neuron() - else: - neuron = NeuronInfo(**neuron_dict) - neuron.stake_dict = { - hk: Balance.from_rao(stake) for hk, stake in neuron.stake.items() - } - neuron.stake = Balance.from_rao(neuron.total_stake) - neuron.total_stake = neuron.stake - neuron.rank = neuron.rank / U16_MAX - neuron.trust = neuron.trust / U16_MAX - neuron.consensus = neuron.consensus / U16_MAX - neuron.validator_trust = neuron.validator_trust / U16_MAX - neuron.incentive = neuron.incentive / U16_MAX - neuron.dividends = neuron.dividends / U16_MAX - neuron.emission = neuron.emission / RAOPERTAO - - return neuron - @dataclass class NeuronInfoLite: - r""" - Dataclass for neuron metadata, but without the weights and bonds. - """ + """Dataclass for neuron metadata, but without the weights and bonds.""" hotkey: str coldkey: str @@ -574,16 +550,14 @@ class NeuronInfoLite: dividends: float last_update: int validator_permit: bool - # weights: List[List[int]] - # bonds: List[List[int]] No weights or bonds in lite version - prometheus_info: "PrometheusInfo" + prometheus_info: Optional["PrometheusInfo"] axon_info: "axon_info" pruning_score: int is_null: bool = False @classmethod def fix_decoded_values(cls, neuron_info_decoded: Any) -> "NeuronInfoLite": - r"""Fixes the values of the NeuronInfoLite object.""" + """Fixes the values of the NeuronInfoLite object.""" neuron_info_decoded["hotkey"] = ss58_encode( neuron_info_decoded["hotkey"], bittensor.__ss58_format__ ) @@ -599,9 +573,6 @@ def fix_decoded_values(cls, neuron_info_decoded: Any) -> "NeuronInfoLite": neuron_info_decoded["stake_dict"] = stake_dict neuron_info_decoded["stake"] = sum(stake_dict.values()) neuron_info_decoded["total_stake"] = neuron_info_decoded["stake"] - # Don't need weights and bonds in lite version - # neuron_info_decoded['weights'] = 
[[int(weight[0]), int(weight[1])] for weight in neuron_info_decoded['weights']] - # neuron_info_decoded['bonds'] = [[int(bond[0]), int(bond[1])] for bond in neuron_info_decoded['bonds']] neuron_info_decoded["rank"] = U16_NORMALIZED_FLOAT(neuron_info_decoded["rank"]) neuron_info_decoded["emission"] = neuron_info_decoded["emission"] / RAOPERTAO neuron_info_decoded["incentive"] = U16_NORMALIZED_FLOAT( @@ -629,21 +600,19 @@ def fix_decoded_values(cls, neuron_info_decoded: Any) -> "NeuronInfoLite": @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> "NeuronInfoLite": - r"""Returns a NeuronInfoLite object from a ``vec_u8``.""" + """Returns a NeuronInfoLite object from a ``vec_u8``.""" if len(vec_u8) == 0: - return NeuronInfoLite._null_neuron() + return NeuronInfoLite.get_null_neuron() decoded = from_scale_encoding(vec_u8, ChainDataType.NeuronInfoLite) if decoded is None: - return NeuronInfoLite._null_neuron() + return NeuronInfoLite.get_null_neuron() - decoded = NeuronInfoLite.fix_decoded_values(decoded) - - return decoded + return NeuronInfoLite.fix_decoded_values(decoded) @classmethod def list_from_vec_u8(cls, vec_u8: List[int]) -> List["NeuronInfoLite"]: - r"""Returns a list of NeuronInfoLite objects from a ``vec_u8``.""" + """Returns a list of NeuronInfoLite objects from a ``vec_u8``.""" decoded_list = from_scale_encoding( vec_u8, ChainDataType.NeuronInfoLite, is_vec=True @@ -657,7 +626,7 @@ def list_from_vec_u8(cls, vec_u8: List[int]) -> List["NeuronInfoLite"]: return decoded_list @staticmethod - def _null_neuron() -> "NeuronInfoLite": + def get_null_neuron() -> "NeuronInfoLite": neuron = NeuronInfoLite( uid=0, netuid=0, @@ -674,8 +643,6 @@ def _null_neuron() -> "NeuronInfoLite": dividends=0, last_update=0, validator_permit=False, - # weights = [], // No weights or bonds in lite version - # bonds = [], prometheus_info=None, axon_info=None, is_null=True, @@ -685,34 +652,10 @@ def _null_neuron() -> "NeuronInfoLite": ) return neuron - @staticmethod - def 
_neuron_dict_to_namespace(neuron_dict) -> "NeuronInfoLite": - # TODO: Legacy: remove? - if neuron_dict["hotkey"] == "5C4hrfjw9DjXZTzV3MwzrrAr9P1MJhSrvWGWqi1eSuyUpnhM": - return NeuronInfoLite._null_neuron() - else: - neuron = NeuronInfoLite(**neuron_dict) - neuron.stake = Balance.from_rao(neuron.total_stake) - neuron.stake_dict = { - hk: Balance.from_rao(stake) for hk, stake in neuron.stake.items() - } - neuron.total_stake = neuron.stake - neuron.rank = neuron.rank / U16_MAX - neuron.trust = neuron.trust / U16_MAX - neuron.consensus = neuron.consensus / U16_MAX - neuron.validator_trust = neuron.validator_trust / U16_MAX - neuron.incentive = neuron.incentive / U16_MAX - neuron.dividends = neuron.dividends / U16_MAX - neuron.emission = neuron.emission / RAOPERTAO - - return neuron - @dataclass class PrometheusInfo: - r""" - Dataclass for prometheus info. - """ + """Dataclass for prometheus info.""" block: int version: int @@ -722,7 +665,7 @@ class PrometheusInfo: @classmethod def fix_decoded_values(cls, prometheus_info_decoded: Dict) -> "PrometheusInfo": - r"""Returns a PrometheusInfo object from a prometheus_info_decoded dictionary.""" + """Returns a PrometheusInfo object from a prometheus_info_decoded dictionary.""" prometheus_info_decoded["ip"] = net.int_to_ip( int(prometheus_info_decoded["ip"]) ) @@ -730,10 +673,38 @@ def fix_decoded_values(cls, prometheus_info_decoded: Dict) -> "PrometheusInfo": return cls(**prometheus_info_decoded) +@dataclass +class DelegateInfoLite: + """ + Dataclass for DelegateLiteInfo. This is a lighter version of :func:`DelegateInfo`. + + Args: + delegate_ss58 (str): Hotkey of the delegate for which the information is being fetched. + take (float): Take of the delegate as a percentage. + nominators (int): Count of the nominators of the delegate. + owner_ss58 (str): Coldkey of the owner. + registrations (list[int]): List of subnets that the delegate is registered on. 
+ validator_permits (list[int]): List of subnets that the delegate is allowed to validate on. + return_per_1000 (int): Return per 1000 TAO, for the delegate over a day. + total_daily_return (int): Total daily return of the delegate. + """ + + delegate_ss58: str # Hotkey of delegate + take: float # Take of the delegate as a percentage + nominators: int # Count of the nominators of the delegate. + owner_ss58: str # Coldkey of owner + registrations: list[int] # List of subnets that the delegate is registered on + validator_permits: list[ + int + ] # List of subnets that the delegate is allowed to validate on + return_per_1000: int # Return per 1000 tao for the delegate over a day + total_daily_return: int # Total daily return of the delegate + + @dataclass class DelegateInfo: - r""" - Dataclass for delegate information. + """ + Dataclass for delegate information. For a lighter version of this class, see :func:`DelegateInfoLite`. Args: hotkey_ss58 (str): Hotkey of the delegate for which the information is being fetched. 
@@ -764,7 +735,7 @@ class DelegateInfo: @classmethod def fix_decoded_values(cls, decoded: Any) -> "DelegateInfo": - r"""Fixes the decoded values.""" + """Fixes the decoded values.""" return cls( hotkey_ss58=ss58_encode( @@ -790,57 +761,47 @@ def fix_decoded_values(cls, decoded: Any) -> "DelegateInfo": @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> Optional["DelegateInfo"]: - r"""Returns a DelegateInfo object from a ``vec_u8``.""" + """Returns a DelegateInfo object from a ``vec_u8``.""" if len(vec_u8) == 0: return None decoded = from_scale_encoding(vec_u8, ChainDataType.DelegateInfo) - if decoded is None: return None - decoded = DelegateInfo.fix_decoded_values(decoded) - - return decoded + return DelegateInfo.fix_decoded_values(decoded) @classmethod def list_from_vec_u8(cls, vec_u8: List[int]) -> List["DelegateInfo"]: - r"""Returns a list of DelegateInfo objects from a ``vec_u8``.""" + """Returns a list of DelegateInfo objects from a ``vec_u8``.""" decoded = from_scale_encoding(vec_u8, ChainDataType.DelegateInfo, is_vec=True) if decoded is None: return [] - decoded = [DelegateInfo.fix_decoded_values(d) for d in decoded] - - return decoded + return [DelegateInfo.fix_decoded_values(d) for d in decoded] @classmethod def delegated_list_from_vec_u8( cls, vec_u8: List[int] ) -> List[Tuple["DelegateInfo", Balance]]: - r"""Returns a list of Tuples of DelegateInfo objects, and Balance, from a ``vec_u8``. + """Returns a list of Tuples of DelegateInfo objects, and Balance, from a ``vec_u8``. This is the list of delegates that the user has delegated to, and the amount of stake delegated. """ decoded = from_scale_encoding(vec_u8, ChainDataType.DelegatedInfo, is_vec=True) - if decoded is None: return [] - decoded = [ + return [ (DelegateInfo.fix_decoded_values(d), Balance.from_rao(s)) for d, s in decoded ] - return decoded - @dataclass class StakeInfo: - r""" - Dataclass for stake info. 
- """ + """Dataclass for stake info.""" hotkey_ss58: str # Hotkey address coldkey_ss58: str # Coldkey address @@ -848,8 +809,7 @@ class StakeInfo: @classmethod def fix_decoded_values(cls, decoded: Any) -> "StakeInfo": - r"""Fixes the decoded values.""" - + """Fixes the decoded values.""" return cls( hotkey_ss58=ss58_encode(decoded["hotkey"], bittensor.__ss58_format__), coldkey_ss58=ss58_encode(decoded["coldkey"], bittensor.__ss58_format__), @@ -858,60 +818,50 @@ def fix_decoded_values(cls, decoded: Any) -> "StakeInfo": @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> Optional["StakeInfo"]: - r"""Returns a StakeInfo object from a ``vec_u8``.""" + """Returns a StakeInfo object from a ``vec_u8``.""" if len(vec_u8) == 0: return None decoded = from_scale_encoding(vec_u8, ChainDataType.StakeInfo) - if decoded is None: return None - decoded = StakeInfo.fix_decoded_values(decoded) - - return decoded + return StakeInfo.fix_decoded_values(decoded) @classmethod def list_of_tuple_from_vec_u8( cls, vec_u8: List[int] ) -> Dict[str, List["StakeInfo"]]: - r"""Returns a list of StakeInfo objects from a ``vec_u8``.""" - decoded: Optional[ - list[tuple[str, list[object]]] - ] = from_scale_encoding_using_type_string( - input=vec_u8, type_string="Vec<(AccountId, Vec)>" + """Returns a list of StakeInfo objects from a ``vec_u8``.""" + decoded: Optional[list[tuple[str, list[object]]]] = ( + from_scale_encoding_using_type_string( + input_=vec_u8, type_string="Vec<(AccountId, Vec)>" + ) ) if decoded is None: return {} - stake_map = { + return { ss58_encode(address=account_id, ss58_format=bittensor.__ss58_format__): [ StakeInfo.fix_decoded_values(d) for d in stake_info ] for account_id, stake_info in decoded } - return stake_map - @classmethod def list_from_vec_u8(cls, vec_u8: List[int]) -> List["StakeInfo"]: - r"""Returns a list of StakeInfo objects from a ``vec_u8``.""" + """Returns a list of StakeInfo objects from a ``vec_u8``.""" decoded = from_scale_encoding(vec_u8, 
ChainDataType.StakeInfo, is_vec=True) - if decoded is None: return [] - decoded = [StakeInfo.fix_decoded_values(d) for d in decoded] - - return decoded + return [StakeInfo.fix_decoded_values(d) for d in decoded] @dataclass class SubnetInfo: - r""" - Dataclass for subnet info. - """ + """Dataclass for subnet info.""" netuid: int rho: int @@ -932,16 +882,14 @@ class SubnetInfo: emission_value: float burn: Balance owner_ss58: str - # adjustment_alpha: int @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> Optional["SubnetInfo"]: - r"""Returns a SubnetInfo object from a ``vec_u8``.""" + """Returns a SubnetInfo object from a ``vec_u8``.""" if len(vec_u8) == 0: return None decoded = from_scale_encoding(vec_u8, ChainDataType.SubnetInfo) - if decoded is None: return None @@ -957,13 +905,11 @@ def list_from_vec_u8(cls, vec_u8: List[int]) -> List["SubnetInfo"]: if decoded is None: return [] - decoded = [SubnetInfo.fix_decoded_values(d) for d in decoded] - - return decoded + return [SubnetInfo.fix_decoded_values(d) for d in decoded] @classmethod def fix_decoded_values(cls, decoded: Dict) -> "SubnetInfo": - r"""Returns a SubnetInfo object from a decoded SubnetInfo dictionary.""" + """Returns a SubnetInfo object from a decoded SubnetInfo dictionary.""" return SubnetInfo( netuid=decoded["netuid"], rho=decoded["rho"], @@ -990,48 +936,26 @@ def fix_decoded_values(cls, decoded: Dict) -> "SubnetInfo": owner_ss58=ss58_encode(decoded["owner"], bittensor.__ss58_format__), ) - def _to_parameter_dict( - self, return_type: str - ) -> Union[dict[str, Any], "torch.nn.ParameterDict"]: - if return_type == "torch": - return torch.nn.ParameterDict(self.__dict__) - else: - return self.__dict__ - def to_parameter_dict(self) -> Union[dict[str, Any], "torch.nn.ParameterDict"]: """Returns a torch tensor or dict of the subnet info.""" if use_torch(): - return self._to_parameter_dict("torch") + return torch.nn.ParameterDict(self.__dict__) else: - return self._to_parameter_dict("numpy") - - 
@classmethod - def _from_parameter_dict_torch( - cls, parameter_dict: "torch.nn.ParameterDict" - ) -> "SubnetInfo": - """Returns a SubnetInfo object from a torch parameter_dict.""" - return cls(**dict(parameter_dict)) - - @classmethod - def _from_parameter_dict_numpy(cls, parameter_dict: dict[str, Any]) -> "SubnetInfo": - r"""Returns a SubnetInfo object from a parameter_dict.""" - return cls(**parameter_dict) + return self.__dict__ @classmethod def from_parameter_dict( cls, parameter_dict: Union[dict[str, Any], "torch.nn.ParameterDict"] ) -> "SubnetInfo": if use_torch(): - return cls._from_parameter_dict_torch(parameter_dict) + return cls(**dict(parameter_dict)) else: - return cls._from_parameter_dict_numpy(parameter_dict) + return cls(**parameter_dict) @dataclass class SubnetHyperparameters: - r""" - Dataclass for subnet hyperparameters. - """ + """Dataclass for subnet hyperparameters.""" rho: int kappa: int @@ -1060,12 +984,11 @@ class SubnetHyperparameters: @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> Optional["SubnetHyperparameters"]: - r"""Returns a SubnetHyperparameters object from a ``vec_u8``.""" + """Returns a SubnetHyperparameters object from a ``vec_u8``.""" if len(vec_u8) == 0: return None decoded = from_scale_encoding(vec_u8, ChainDataType.SubnetHyperparameters) - if decoded is None: return None @@ -1073,21 +996,18 @@ def from_vec_u8(cls, vec_u8: List[int]) -> Optional["SubnetHyperparameters"]: @classmethod def list_from_vec_u8(cls, vec_u8: List[int]) -> List["SubnetHyperparameters"]: - r"""Returns a list of SubnetHyperparameters objects from a ``vec_u8``.""" + """Returns a list of SubnetHyperparameters objects from a ``vec_u8``.""" decoded = from_scale_encoding( vec_u8, ChainDataType.SubnetHyperparameters, is_vec=True, is_option=True ) - if decoded is None: return [] - decoded = [SubnetHyperparameters.fix_decoded_values(d) for d in decoded] - - return decoded + return [SubnetHyperparameters.fix_decoded_values(d) for d in decoded] 
@classmethod def fix_decoded_values(cls, decoded: Dict) -> "SubnetHyperparameters": - r"""Returns a SubnetInfo object from a decoded SubnetInfo dictionary.""" + """Returns a SubnetInfo object from a decoded SubnetInfo dictionary.""" return SubnetHyperparameters( rho=decoded["rho"], kappa=decoded["kappa"], @@ -1115,59 +1035,35 @@ def fix_decoded_values(cls, decoded: Dict) -> "SubnetHyperparameters": commit_reveal_weights_enabled=decoded["commit_reveal_weights_enabled"], ) - def _to_parameter_dict_torch( - self, return_type: str - ) -> Union[dict[str, Union[int, float, bool]], "torch.nn.ParameterDict"]: - if return_type == "torch": - return torch.nn.ParameterDict(self.__dict__) - else: - return self.__dict__ - def to_parameter_dict( self, ) -> Union[dict[str, Union[int, float, bool]], "torch.nn.ParameterDict"]: """Returns a torch tensor or dict of the subnet hyperparameters.""" if use_torch(): - return self._to_parameter_dict_torch("torch") + return torch.nn.ParameterDict(self.__dict__) else: - return self._to_parameter_dict_torch("numpy") - - @classmethod - def _from_parameter_dict_torch( - cls, parameter_dict: "torch.nn.ParameterDict" - ) -> "SubnetHyperparameters": - """Returns a SubnetHyperparameters object from a torch parameter_dict.""" - return cls(**dict(parameter_dict)) - - @classmethod - def _from_parameter_dict_numpy( - cls, parameter_dict: dict[str, Any] - ) -> "SubnetHyperparameters": - """Returns a SubnetHyperparameters object from a parameter_dict.""" - return cls(**parameter_dict) + return self.__dict__ @classmethod def from_parameter_dict( cls, parameter_dict: Union[dict[str, Any], "torch.nn.ParameterDict"] ) -> "SubnetHyperparameters": if use_torch(): - return cls._from_parameter_dict_torch(parameter_dict) + return cls(**dict(parameter_dict)) else: - return cls._from_parameter_dict_numpy(parameter_dict) + return cls(**parameter_dict) @dataclass class IPInfo: - r""" - Dataclass for associated IP Info. 
- """ + """Dataclass for associated IP Info.""" ip: str ip_type: int protocol: int def encode(self) -> Dict[str, Any]: - r"""Returns a dictionary of the IPInfo object that can be encoded.""" + """Returns a dictionary of the IPInfo object that can be encoded.""" return { "ip": net.ip_to_int( self.ip @@ -1177,12 +1073,11 @@ def encode(self) -> Dict[str, Any]: @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> Optional["IPInfo"]: - r"""Returns a IPInfo object from a ``vec_u8``.""" + """Returns a IPInfo object from a ``vec_u8``.""" if len(vec_u8) == 0: return None decoded = from_scale_encoding(vec_u8, ChainDataType.IPInfo) - if decoded is None: return None @@ -1196,62 +1091,37 @@ def list_from_vec_u8(cls, vec_u8: List[int]) -> List["IPInfo"]: if decoded is None: return [] - decoded = [IPInfo.fix_decoded_values(d) for d in decoded] - - return decoded + return [IPInfo.fix_decoded_values(d) for d in decoded] @classmethod def fix_decoded_values(cls, decoded: Dict) -> "IPInfo": - r"""Returns a SubnetInfo object from a decoded IPInfo dictionary.""" + """Returns a SubnetInfo object from a decoded IPInfo dictionary.""" return IPInfo( - ip=bittensor.utils.networking.int_to_ip(decoded["ip"]), + ip=net.int_to_ip(decoded["ip"]), ip_type=decoded["ip_type_and_protocol"] >> 4, protocol=decoded["ip_type_and_protocol"] & 0xF, ) - def _to_parameter_dict( - self, return_type: str - ) -> Union[dict[str, Union[str, int]], "torch.nn.ParameterDict"]: - """Returns a torch tensor of the subnet info.""" - if return_type == "torch": - return torch.nn.ParameterDict(self.__dict__) - else: - return self.__dict__ - def to_parameter_dict( self, ) -> Union[dict[str, Union[str, int]], "torch.nn.ParameterDict"]: """Returns a torch tensor or dict of the subnet IP info.""" if use_torch(): - return self._to_parameter_dict("torch") + return torch.nn.ParameterDict(self.__dict__) else: - return self._to_parameter_dict("numpy") - - @classmethod - def _from_parameter_dict_torch( - cls, parameter_dict: 
"torch.nn.ParameterDict" - ) -> "IPInfo": - """Returns a IPInfo object from a torch parameter_dict.""" - return cls(**dict(parameter_dict)) - - @classmethod - def _from_parameter_dict_numpy(cls, parameter_dict: dict[str, Any]) -> "IPInfo": - """Returns a IPInfo object from a parameter_dict.""" - return cls(**parameter_dict) + return self.__dict__ @classmethod def from_parameter_dict( cls, parameter_dict: Union[dict[str, Any], "torch.nn.ParameterDict"] ) -> "IPInfo": if use_torch(): - return cls._from_parameter_dict_torch(parameter_dict) + return cls(**dict(parameter_dict)) else: - return cls._from_parameter_dict_numpy(parameter_dict) + return cls(**parameter_dict) # Senate / Proposal data - - class ProposalVoteData(TypedDict): index: int threshold: int diff --git a/bittensor/commands/delegates.py b/bittensor/commands/delegates.py index 3cad50ecd8..4d03b289e4 100644 --- a/bittensor/commands/delegates.py +++ b/bittensor/commands/delegates.py @@ -45,6 +45,112 @@ def _get_coldkey_wallets_for_path(path: str) -> List["bittensor.wallet"]: console = bittensor.__console__ +def show_delegates_lite( + delegates_lite: List["bittensor.DelegateInfoLite"], width: Optional[int] = None +): + """ + This method is a lite version of the :func:`show_delegates`. This method displays a formatted table of Bittensor network delegates with detailed statistics to the console. + + The table is sorted by total stake in descending order and provides + a snapshot of delegate performance and status, helping users make informed decisions for staking or nominating. + + This helper function is not intended to be used directly in user code unless specifically required. + + Args: + delegates_lite (List[bittensor.DelegateInfoLite]): A list of delegate information objects to be displayed. + width (Optional[int]): The width of the console output table. Defaults to ``None``, which will make the table expand to the maximum width of the console. + + The output table contains the following columns. 
To display more columns, use the :func:`show_delegates` function. + + - INDEX: The numerical index of the delegate. + - DELEGATE: The name of the delegate. + - SS58: The truncated SS58 address of the delegate. + - NOMINATORS: The number of nominators supporting the delegate. + - VPERMIT: Validator permits held by the delegate for the subnets. + - TAKE: The percentage of the delegate's earnings taken by the network. + - DELEGATE/(24h): The earnings of the delegate in the last 24 hours. + - Desc: A brief description provided by the delegate. + + Usage: + This function is typically used within the Bittensor CLI to show current delegate options to users who are considering where to stake their tokens. + + Example usage:: + + show_delegates_lite(delegates_lite, width=80) + + Note: + This function is primarily for display purposes within a command-line interface and does not return any values. It relies on the `rich `_ Python library to render + the table in the console. + """ + + registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( + get_delegates_details(url=bittensor.__delegates_details_url__) + ) + if registered_delegate_info is None: + bittensor.__console__.print( + ":warning:[yellow]Could not get delegate info from chain.[/yellow]" + ) + registered_delegate_info = {} + + table = Table(show_footer=True, width=width, pad_edge=False, box=None, expand=True) + table.add_column( + "[overline white]INDEX", + str(len(delegates_lite)), + footer_style="overline white", + style="bold white", + ) + table.add_column( + "[overline white]DELEGATE", + style="rgb(50,163,219)", + no_wrap=True, + justify="left", + ) + table.add_column( + "[overline white]SS58", + str(len(delegates_lite)), + footer_style="overline white", + style="bold yellow", + ) + table.add_column( + "[overline white]NOMINATORS", justify="center", style="green", no_wrap=True + ) + table.add_column("[overline white]VPERMIT", justify="right", no_wrap=False) + table.add_column("[overline white]TAKE", 
style="white", no_wrap=True) + table.add_column("[overline white]DELEGATE/(24h)", style="green", justify="center") + table.add_column("[overline white]Desc", style="rgb(50,163,219)") + + for i, d in enumerate(delegates_lite): + if d.delegate_ss58 in registered_delegate_info: + delegate_name = registered_delegate_info[d.delegate_ss58].name + delegate_url = registered_delegate_info[d.delegate_ss58].url + delegate_description = registered_delegate_info[d.delegate_ss58].description + else: + delegate_name = "" + delegate_url = "" + delegate_description = "" + + table.add_row( + # `INDEX` column + str(i), + # `DELEGATE` column + Text(delegate_name, style=f"link {delegate_url}"), + # `SS58` column + f"{d.delegate_ss58:8.8}...", + # `NOMINATORS` column + str(d.nominators), + # `VPERMIT` column + str(d.registrations), + # `TAKE` column + f"{d.take * 100:.1f}%", + # `DELEGATE/(24h)` column + f"τ{bittensor.Balance.from_tao(d.total_daily_return * 0.18) !s:6.6}", + # `Desc` column + str(delegate_description), + end_section=True, + ) + bittensor.__console__.print(table) + + # Uses rich console to pretty print a table of delegates. 
def show_delegates( delegates: List["bittensor.DelegateInfo"], @@ -100,9 +206,9 @@ def show_delegates( for prev_delegate in prev_delegates: prev_delegates_dict[prev_delegate.hotkey_ss58] = prev_delegate - registered_delegate_info: Optional[ - Dict[str, DelegatesDetails] - ] = get_delegates_details(url=bittensor.__delegates_details_url__) + registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( + get_delegates_details(url=bittensor.__delegates_details_url__) + ) if registered_delegate_info is None: bittensor.__console__.print( ":warning:[yellow]Could not get delegate info from chain.[/yellow]" @@ -132,10 +238,10 @@ def show_delegates( "[overline white]NOMINATORS", justify="center", style="green", no_wrap=True ) table.add_column( - "[overline white]DELEGATE STAKE(\u03C4)", justify="right", no_wrap=True + "[overline white]DELEGATE STAKE(\u03c4)", justify="right", no_wrap=True ) table.add_column( - "[overline white]TOTAL STAKE(\u03C4)", + "[overline white]TOTAL STAKE(\u03c4)", justify="right", style="green", no_wrap=True, @@ -144,7 +250,7 @@ def show_delegates( table.add_column("[overline white]VPERMIT", justify="right", no_wrap=False) table.add_column("[overline white]TAKE", style="white", no_wrap=True) table.add_column( - "[overline white]NOMINATOR/(24h)/k\u03C4", style="green", justify="center" + "[overline white]NOMINATOR/(24h)/k\u03c4", style="green", justify="center" ) table.add_column("[overline white]DELEGATE/(24h)", style="green", justify="center") table.add_column("[overline white]Desc", style="rgb(50,163,219)") @@ -799,7 +905,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): style="bold green", ) table.add_column( - "[overline green]\u03C4/24h", + "[overline green]\u03c4/24h", footer_style="overline green", style="bold green", ) @@ -807,10 +913,10 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): "[overline white]NOMS", justify="center", style="green", no_wrap=True ) table.add_column( - "[overline 
white]OWNER STAKE(\u03C4)", justify="right", no_wrap=True + "[overline white]OWNER STAKE(\u03c4)", justify="right", no_wrap=True ) table.add_column( - "[overline white]TOTAL STAKE(\u03C4)", + "[overline white]TOTAL STAKE(\u03c4)", justify="right", style="green", no_wrap=True, @@ -819,7 +925,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): "[overline white]SUBNETS", justify="right", style="white", no_wrap=True ) table.add_column("[overline white]VPERMIT", justify="right", no_wrap=True) - table.add_column("[overline white]24h/k\u03C4", style="green", justify="center") + table.add_column("[overline white]24h/k\u03c4", style="green", justify="center") table.add_column("[overline white]Desc", style="rgb(50,163,219)") total_delegated = 0 @@ -842,9 +948,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): delegates.sort(key=lambda delegate: delegate[0].total_stake, reverse=True) total_delegated += sum(my_delegates.values()) - registered_delegate_info: Optional[ - DelegatesDetails - ] = get_delegates_details(url=bittensor.__delegates_details_url__) + registered_delegate_info: Optional[DelegatesDetails] = ( + get_delegates_details(url=bittensor.__delegates_details_url__) + ) if registered_delegate_info is None: bittensor.__console__.print( ":warning:[yellow]Could not get delegate info from chain.[/yellow]" diff --git a/bittensor/commands/inspect.py b/bittensor/commands/inspect.py index 76b015b774..4ef0e84c4e 100644 --- a/bittensor/commands/inspect.py +++ b/bittensor/commands/inspect.py @@ -138,9 +138,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): ) bittensor.logging.debug(f"Netuids to check: {netuids}") - registered_delegate_info: Optional[ - Dict[str, DelegatesDetails] - ] = get_delegates_details(url=bittensor.__delegates_details_url__) + registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( + get_delegates_details(url=bittensor.__delegates_details_url__) + ) if registered_delegate_info is None: 
bittensor.__console__.print( ":warning:[yellow]Could not get delegate info from chain.[/yellow]" @@ -181,9 +181,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): "[overline white]Emission", footer_style="overline white", style="green" ) for wallet in tqdm(wallets): - delegates: List[ - Tuple[bittensor.DelegateInfo, bittensor.Balance] - ] = subtensor.get_delegated(coldkey_ss58=wallet.coldkeypub.ss58_address) + delegates: List[Tuple[bittensor.DelegateInfo, bittensor.Balance]] = ( + subtensor.get_delegated(coldkey_ss58=wallet.coldkeypub.ss58_address) + ) if not wallet.coldkeypub_file.exists_on_device(): continue cold_balance = subtensor.get_balance(wallet.coldkeypub.ss58_address) diff --git a/bittensor/commands/metagraph.py b/bittensor/commands/metagraph.py index b6999fe553..1075f50d31 100644 --- a/bittensor/commands/metagraph.py +++ b/bittensor/commands/metagraph.py @@ -159,8 +159,8 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): style="yellow", ) table.add_column( - "[overline white]STAKE(\u03C4)", - "\u03C4{:.5f}".format(total_stake), + "[overline white]STAKE(\u03c4)", + "\u03c4{:.5f}".format(total_stake), footer_style="overline white", justify="right", style="green", @@ -207,8 +207,8 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): no_wrap=True, ) table.add_column( - "[overline white]EMISSION(\u03C1)", - "\u03C1{}".format(int(total_emission)), + "[overline white]EMISSION(\u03c1)", + "\u03c1{}".format(int(total_emission)), footer_style="overline white", justify="right", style="green", diff --git a/bittensor/commands/network.py b/bittensor/commands/network.py index 5c60d8c2c7..0843b71c70 100644 --- a/bittensor/commands/network.py +++ b/bittensor/commands/network.py @@ -21,7 +21,12 @@ from rich.prompt import Prompt from rich.table import Table from typing import List, Optional, Dict -from .utils import get_delegates_details, DelegatesDetails, check_netuid_set +from .utils import ( + get_delegates_details, 
+ DelegatesDetails, + check_netuid_set, + normalize_hyperparameters, +) from .identity import SetIdentityCommand console = bittensor.__console__ @@ -494,11 +499,14 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): table.title = "[white]Subnet Hyperparameters - NETUID: {} - {}".format( cli.config.netuid, subtensor.network ) - table.add_column("[overline white]HYPERPARAMETER", style="bold white") + table.add_column("[overline white]HYPERPARAMETER", style="white") table.add_column("[overline white]VALUE", style="green") + table.add_column("[overline white]NORMALIZED", style="cyan") + + normalized_values = normalize_hyperparameters(subnet) - for param in subnet.__dict__: - table.add_row(" " + param, str(subnet.__dict__[param])) + for param, value, norm_value in normalized_values: + table.add_row(" " + param, value, norm_value) bittensor.__console__.print(table) @@ -600,9 +608,12 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): ) table.add_column("[overline white]HYPERPARAMETER", style="white") table.add_column("[overline white]VALUE", style="green") + table.add_column("[overline white]NORMALIZED", style="cyan") + + normalized_values = normalize_hyperparameters(subnet) - for param in subnet.__dict__: - table.add_row(param, str(subnet.__dict__[param])) + for param, value, norm_value in normalized_values: + table.add_row(" " + param, value, norm_value) bittensor.__console__.print(table) diff --git a/bittensor/commands/overview.py b/bittensor/commands/overview.py index 477ad9f01a..b572847e49 100644 --- a/bittensor/commands/overview.py +++ b/bittensor/commands/overview.py @@ -258,9 +258,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): for neuron in neuron_list: if neuron.hotkey in checked_hotkeys: continue - total_coldkey_stake_from_metagraph[ - neuron.coldkey - ] += neuron.stake_dict[neuron.coldkey] + total_coldkey_stake_from_metagraph[neuron.coldkey] += ( + neuron.stake_dict[neuron.coldkey] + ) 
checked_hotkeys.add(neuron.hotkey) alerts_table = Table(show_header=True, header_style="bold magenta") @@ -317,7 +317,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): de_registered_neurons = [] for hotkey_addr, our_stake in de_registered_stake: # Make a neuron info lite for this hotkey and coldkey. - de_registered_neuron = bittensor.NeuronInfoLite._null_neuron() + de_registered_neuron = bittensor.NeuronInfoLite.get_null_neuron() de_registered_neuron.hotkey = hotkey_addr de_registered_neuron.coldkey = ( coldkey_wallet.coldkeypub.ss58_address @@ -483,8 +483,8 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): ) if last_subnet: table.add_column( - "[overline white]STAKE(\u03C4)", - "\u03C4{:.5f}".format(total_stake), + "[overline white]STAKE(\u03c4)", + "\u03c4{:.5f}".format(total_stake), footer_style="overline white", justify="right", style="green", @@ -493,7 +493,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): else: # No footer for non-last subnet. 
table.add_column( - "[overline white]STAKE(\u03C4)", + "[overline white]STAKE(\u03c4)", justify="right", style="green", no_wrap=True, @@ -539,8 +539,8 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): no_wrap=True, ) table.add_column( - "[overline white]EMISSION(\u03C1)", - "\u03C1{:_}".format(total_emission), + "[overline white]EMISSION(\u03c1)", + "\u03c1{:_}".format(total_emission), footer_style="overline white", justify="right", style="green", @@ -603,7 +603,7 @@ def overview_sort_function(row): console.clear() - caption = "[italic][dim][white]Wallet balance: [green]\u03C4" + str( + caption = "[italic][dim][white]Wallet balance: [green]\u03c4" + str( total_balance.tao ) grid.add_row(Align(caption, vertical="middle", align="center")) @@ -613,7 +613,7 @@ def overview_sort_function(row): @staticmethod def _get_neurons_for_netuid( - args_tuple: Tuple["bittensor.Config", int, List[str]] + args_tuple: Tuple["bittensor.Config", int, List[str]], ) -> Tuple[int, List["bittensor.NeuronInfoLite"], Optional[str]]: subtensor_config, netuid, hot_wallets = args_tuple diff --git a/bittensor/commands/root.py b/bittensor/commands/root.py index a3658d03ea..5607921b19 100644 --- a/bittensor/commands/root.py +++ b/bittensor/commands/root.py @@ -173,7 +173,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): no_wrap=True, ) table.add_column( - "[overline white]STAKE(\u03C4)", + "[overline white]STAKE(\u03c4)", footer_style="overline white", justify="right", style="green", diff --git a/bittensor/commands/senate.py b/bittensor/commands/senate.py index c92290af89..03a73cde5b 100644 --- a/bittensor/commands/senate.py +++ b/bittensor/commands/senate.py @@ -211,9 +211,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): senate_members = subtensor.get_senate_members() proposals = subtensor.get_proposals() - registered_delegate_info: Optional[ - Dict[str, DelegatesDetails] - ] = 
get_delegates_details(url=bittensor.__delegates_details_url__) + registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( + get_delegates_details(url=bittensor.__delegates_details_url__) + ) table = Table(show_footer=False) table.title = ( @@ -342,9 +342,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): console.print(":cross_mark: [red]Failed[/red]: Proposal not found.") return - registered_delegate_info: Optional[ - Dict[str, DelegatesDetails] - ] = get_delegates_details(url=bittensor.__delegates_details_url__) + registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( + get_delegates_details(url=bittensor.__delegates_details_url__) + ) table = Table(show_footer=False) table.title = "[white]Votes for Proposal {}".format(proposal_hash) diff --git a/bittensor/commands/stake.py b/bittensor/commands/stake.py index 9a28cd4f13..1bc2cf2786 100644 --- a/bittensor/commands/stake.py +++ b/bittensor/commands/stake.py @@ -15,14 +15,22 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. -import sys import argparse -import bittensor -from tqdm import tqdm +import os +import sys +from typing import List, Union, Optional, Dict, Tuple + from rich.prompt import Confirm, Prompt +from rich.table import Table +from tqdm import tqdm + +import bittensor from bittensor.utils.balance import Balance -from typing import List, Union, Optional, Dict, Tuple -from .utils import get_hotkey_wallets_for_wallet +from .utils import ( + get_hotkey_wallets_for_wallet, + get_delegates_details, + DelegatesDetails, +) from . import defaults console = bittensor.__console__ @@ -291,23 +299,6 @@ def add_args(cls, parser: argparse.ArgumentParser): bittensor.subtensor.add_args(stake_parser) -### Stake list. 
-import argparse -import bittensor -from tqdm import tqdm -from rich.table import Table -from rich.prompt import Prompt -from typing import Dict, Union, List, Tuple -from .utils import get_delegates_details, DelegatesDetails -from . import defaults - -console = bittensor.__console__ - -import os -import bittensor -from typing import List, Tuple, Optional, Dict - - def _get_coldkey_wallets_for_path(path: str) -> List["bittensor.wallet"]: try: wallet_names = next(os.walk(os.path.expanduser(path)))[1] @@ -390,9 +381,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): wallets = _get_coldkey_wallets_for_path(cli.config.wallet.path) else: wallets = [bittensor.wallet(config=cli.config)] - registered_delegate_info: Optional[ - Dict[str, DelegatesDetails] - ] = get_delegates_details(url=bittensor.__delegates_details_url__) + registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( + get_delegates_details(url=bittensor.__delegates_details_url__) + ) def get_stake_accounts( wallet, subtensor @@ -524,7 +515,7 @@ def get_all_wallet_accounts( ) table.add_column( "[overline white]Balance", - "\u03C4{:.5f}".format(total_balance), + "\u03c4{:.5f}".format(total_balance), footer_style="overline white", style="green", ) @@ -533,13 +524,13 @@ def get_all_wallet_accounts( ) table.add_column( "[overline white]Stake", - "\u03C4{:.5f}".format(total_stake), + "\u03c4{:.5f}".format(total_stake), footer_style="overline white", style="green", ) table.add_column( "[overline white]Rate", - "\u03C4{:.5f}/d".format(total_rate), + "\u03c4{:.5f}/d".format(total_rate), footer_style="overline white", style="green", ) diff --git a/bittensor/commands/utils.py b/bittensor/commands/utils.py index 4ea8fa3dd1..1694d3bc5e 100644 --- a/bittensor/commands/utils.py +++ b/bittensor/commands/utils.py @@ -20,7 +20,9 @@ import bittensor import requests from bittensor.utils.registration import torch -from typing import List, Dict, Any, Optional +from bittensor.utils.balance import 
Balance +from bittensor.utils import U64_NORMALIZED_FLOAT, U16_NORMALIZED_FLOAT +from typing import List, Dict, Any, Optional, Tuple from rich.prompt import Confirm, PromptBase from dataclasses import dataclass from . import defaults @@ -194,6 +196,50 @@ def filter_netuids_by_registered_hotkeys( return list(set(netuids)) +def normalize_hyperparameters( + subnet: bittensor.SubnetHyperparameters, +) -> List[Tuple[str, str, str]]: + """ + Normalizes the hyperparameters of a subnet. + + Args: + subnet: The subnet hyperparameters object. + + Returns: + A list of tuples containing the parameter name, value, and normalized value. + """ + param_mappings = { + "adjustment_alpha": U64_NORMALIZED_FLOAT, + "min_difficulty": U64_NORMALIZED_FLOAT, + "max_difficulty": U64_NORMALIZED_FLOAT, + "difficulty": U64_NORMALIZED_FLOAT, + "bonds_moving_avg": U64_NORMALIZED_FLOAT, + "max_weight_limit": U16_NORMALIZED_FLOAT, + "kappa": U16_NORMALIZED_FLOAT, + "min_burn": Balance.from_rao, + "max_burn": Balance.from_rao, + } + + normalized_values: List[Tuple[str, str, str]] = [] + subnet_dict = subnet.__dict__ + + for param, value in subnet_dict.items(): + try: + if param in param_mappings: + norm_value = param_mappings[param](value) + if isinstance(norm_value, float): + norm_value = f"{norm_value:.{10}g}" + else: + norm_value = value + except Exception as e: + bittensor.logging.warning(f"Error normalizing parameter '{param}': {e}") + norm_value = "-" + + normalized_values.append((param, str(value), str(norm_value))) + + return normalized_values + + @dataclass class DelegatesDetails: name: str diff --git a/bittensor/commands/weights.py b/bittensor/commands/weights.py index 19989c94f3..ac4d9dfc36 100644 --- a/bittensor/commands/weights.py +++ b/bittensor/commands/weights.py @@ -19,7 +19,6 @@ """Module that encapsulates the CommitWeightCommand and the RevealWeightCommand. 
Used to commit and reveal weights for a specific subnet on the Bittensor Network.""" - import argparse import os import re diff --git a/bittensor/constants.py b/bittensor/constants.py new file mode 100644 index 0000000000..2b52cfd4bd --- /dev/null +++ b/bittensor/constants.py @@ -0,0 +1,20 @@ +# The MIT License (MIT) +# Copyright © 2023 OpenTensor Foundation + +# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated +# documentation files (the “Software”), to deal in the Software without restriction, including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, +# and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all copies or substantial portions of +# the Software. + +# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO +# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +# DEALINGS IN THE SOFTWARE. 
+ + +ALLOWED_DELTA = 4000000000 # Delta of 4 seconds for nonce validation +V_7_2_0 = 7002000 diff --git a/bittensor/dendrite.py b/bittensor/dendrite.py index 22e4ad93f4..dca513e0b2 100644 --- a/bittensor/dendrite.py +++ b/bittensor/dendrite.py @@ -23,6 +23,7 @@ import uuid import time import aiohttp + import bittensor from typing import Optional, List, Union, AsyncGenerator, Any from bittensor.utils.registration import torch, use_torch @@ -311,7 +312,7 @@ def query( try: loop = asyncio.get_event_loop() result = loop.run_until_complete(self.forward(*args, **kwargs)) - except: + except Exception: new_loop = asyncio.new_event_loop() asyncio.set_event_loop(new_loop) result = loop.run_until_complete(self.forward(*args, **kwargs)) @@ -653,12 +654,10 @@ def preprocess_synapse_for_request( """ # Set the timeout for the synapse synapse.timeout = timeout - - # Build the Dendrite headers using the local system's details synapse.dendrite = bittensor.TerminalInfo( ip=self.external_ip, version=bittensor.__version_as_int__, - nonce=time.monotonic_ns(), + nonce=time.time_ns(), uuid=self.uuid, hotkey=self.keypair.ss58_address, ) @@ -705,9 +704,18 @@ def process_server_response( # Set the attribute in the local synapse from the corresponding # attribute in the server synapse setattr(local_synapse, key, getattr(server_synapse, key)) - except: + except Exception as e: + bittensor.logging.info( + f"Ignoring error when setting attribute: {e}" + ) # Ignore errors during attribute setting pass + else: + # If the server responded with an error, update the local synapse state + if local_synapse.axon is None: + local_synapse.axon = bittensor.TerminalInfo() + local_synapse.axon.status_code = server_response.status + local_synapse.axon.status_message = json_response.get("message") # Extract server headers and overwrite None values in local synapse headers server_headers = bittensor.Synapse.from_headers(server_response.headers) # type: ignore diff --git a/bittensor/errors.py 
b/bittensor/errors.py index de51b5d48a..b8366ee681 100644 --- a/bittensor/errors.py +++ b/bittensor/errors.py @@ -14,6 +14,12 @@ # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. +from __future__ import annotations + +import typing + +if typing.TYPE_CHECKING: + import bittensor class ChainError(BaseException): @@ -112,7 +118,16 @@ class InvalidRequestNameError(Exception): pass -class UnknownSynapseError(Exception): +class SynapseException(Exception): + def __init__( + self, message="Synapse Exception", synapse: "bittensor.Synapse" | None = None + ): + self.message = message + self.synapse = synapse + super().__init__(self.message) + + +class UnknownSynapseError(SynapseException): r"""This exception is raised when the request name is not found in the Axon's forward_fns dictionary.""" pass @@ -124,43 +139,47 @@ class SynapseParsingError(Exception): pass -class NotVerifiedException(Exception): +class NotVerifiedException(SynapseException): r"""This exception is raised when the request is not verified.""" pass -class BlacklistedException(Exception): +class BlacklistedException(SynapseException): r"""This exception is raised when the request is blacklisted.""" pass -class PriorityException(Exception): +class PriorityException(SynapseException): r"""This exception is raised when the request priority is not met.""" pass -class PostProcessException(Exception): +class PostProcessException(SynapseException): r"""This exception is raised when the response headers cannot be updated.""" pass -class RunException(Exception): +class RunException(SynapseException): r"""This exception is raised when the requested function cannot be executed. 
Indicates a server error.""" pass -class InternalServerError(Exception): +class InternalServerError(SynapseException): r"""This exception is raised when the requested function fails on the server. Indicates a server error.""" pass -class SynapseDendriteNoneException(Exception): - def __init__(self, message="Synapse Dendrite is None"): +class SynapseDendriteNoneException(SynapseException): + def __init__( + self, + message="Synapse Dendrite is None", + synapse: "bittensor.Synapse" | None = None, + ): self.message = message - super().__init__(self.message) + super().__init__(self.message, synapse) diff --git a/bittensor/extrinsics/commit_weights.py b/bittensor/extrinsics/commit_weights.py index a27e1941ba..a9192952ef 100644 --- a/bittensor/extrinsics/commit_weights.py +++ b/bittensor/extrinsics/commit_weights.py @@ -16,7 +16,7 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. -""" Module commit weights and reveal weights extrinsic. """ +"""Module commit weights and reveal weights extrinsic.""" from typing import Tuple, List @@ -24,6 +24,8 @@ import bittensor +from bittensor.utils import format_error_message + def commit_weights_extrinsic( subtensor: "bittensor.subtensor", @@ -67,7 +69,7 @@ def commit_weights_extrinsic( return True, "Successfully committed weights." 
else: bittensor.logging.error(f"Failed to commit weights: {error_message}") - return False, error_message + return False, format_error_message(error_message) def reveal_weights_extrinsic( diff --git a/bittensor/extrinsics/delegation.py b/bittensor/extrinsics/delegation.py index 66accfc983..54bdb5273c 100644 --- a/bittensor/extrinsics/delegation.py +++ b/bittensor/extrinsics/delegation.py @@ -69,7 +69,7 @@ def nominate_extrinsic( wait_for_finalization=wait_for_finalization, ) - if success == True: + if success is True: bittensor.__console__.print( ":white_heavy_check_mark: [green]Finalized[/green]" ) @@ -138,7 +138,7 @@ def delegate_extrinsic( ) # Convert to bittensor.Balance - if amount == None: + if amount is None: # Stake it all. staking_balance = bittensor.Balance.from_tao(my_prev_coldkey_balance.tao) elif not isinstance(amount, bittensor.Balance): @@ -184,7 +184,7 @@ def delegate_extrinsic( wait_for_finalization=wait_for_finalization, ) - if staking_response == True: # If we successfully staked. + if staking_response is True: # If we successfully staked. # We only wait here if we expect finalization. if not wait_for_finalization and not wait_for_inclusion: return True @@ -273,7 +273,7 @@ def undelegate_extrinsic( ) # Convert to bittensor.Balance - if amount == None: + if amount is None: # Stake it all. unstaking_balance = bittensor.Balance.from_tao(my_prev_delegated_stake.tao) @@ -315,7 +315,7 @@ def undelegate_extrinsic( wait_for_finalization=wait_for_finalization, ) - if staking_response == True: # If we successfully staked. + if staking_response is True: # If we successfully staked. # We only wait here if we expect finalization. 
if not wait_for_finalization and not wait_for_inclusion: return True @@ -403,13 +403,13 @@ def decrease_take_extrinsic( wait_for_finalization=wait_for_finalization, ) - if success == True: + if success is True: bittensor.__console__.print( ":white_heavy_check_mark: [green]Finalized[/green]" ) bittensor.logging.success( prefix="Decrease Delegate Take", - sufix="Finalized: " + str(success), + suffix="Finalized: " + str(success), ) return success @@ -419,7 +419,7 @@ def decrease_take_extrinsic( ":cross_mark: [red]Failed[/red]: error:{}".format(e) ) bittensor.logging.warning( - prefix="Set weights", sufix="Failed: " + str(e) + prefix="Set weights", suffix="Failed: " + str(e) ) return False @@ -463,13 +463,13 @@ def increase_take_extrinsic( wait_for_finalization=wait_for_finalization, ) - if success == True: + if success is True: bittensor.__console__.print( ":white_heavy_check_mark: [green]Finalized[/green]" ) bittensor.logging.success( prefix="Increase Delegate Take", - sufix="Finalized: " + str(success), + suffix="Finalized: " + str(success), ) return success @@ -479,14 +479,14 @@ def increase_take_extrinsic( ":cross_mark: [red]Failed[/red]: error:{}".format(e) ) bittensor.logging.warning( - prefix="Set weights", sufix="Failed: " + str(e) + prefix="Set weights", suffix="Failed: " + str(e) ) except TakeError as e: bittensor.__console__.print( ":cross_mark: [red]Failed[/red]: error:{}".format(e) ) bittensor.logging.warning( - prefix="Set weights", sufix="Failed: " + str(e) + prefix="Set weights", suffix="Failed: " + str(e) ) return False diff --git a/bittensor/extrinsics/network.py b/bittensor/extrinsics/network.py index 3e0c3d8661..c03e5cf77b 100644 --- a/bittensor/extrinsics/network.py +++ b/bittensor/extrinsics/network.py @@ -15,11 +15,39 @@ # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # 
DEALINGS IN THE SOFTWARE. + import time -import bittensor +import substrateinterface from rich.prompt import Confirm +import bittensor +from bittensor.utils import format_error_message +from ..commands.network import HYPERPARAMS + + +def _find_event_attributes_in_extrinsic_receipt( + response: "substrateinterface.base.ExtrinsicReceipt", event_name: str +) -> list: + """ + Searches for the attributes of a specified event within an extrinsic receipt. + + Args: + response (substrateinterface.base.ExtrinsicReceipt): The receipt of the extrinsic to be searched. + event_name (str): The name of the event to search for. + + Returns: + list: A list of attributes for the specified event. Returns [-1] if the event is not found. + """ + for event in response.triggered_events: + # Access the event details + event_details = event.value["event"] + # Check if the event_id is 'NetworkAdded' + if event_details["event_id"] == event_name: + # Once found, you can access the attributes of the event_name + return event_details["attributes"] + return [-1] + def register_subnetwork_extrinsic( subtensor: "bittensor.subtensor", @@ -86,15 +114,13 @@ def register_subnetwork_extrinsic( response.process_events() if not response.is_success: bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format( - response.error_message - ) + f":cross_mark: [red]Failed[/red]: {format_error_message(response.error_message)}" ) time.sleep(0.5) # Successful registration, final check for membership else: - attributes = find_event_attributes_in_extrinsic_receipt( + attributes = _find_event_attributes_in_extrinsic_receipt( response, "NetworkAdded" ) bittensor.__console__.print( @@ -103,20 +129,6 @@ def register_subnetwork_extrinsic( return True -def find_event_attributes_in_extrinsic_receipt(response, event_name) -> list: - for event in response.triggered_events: - # Access the event details - event_details = event.value["event"] - # Check if the event_id is 'NetworkAdded' - if 
event_details["event_id"] == event_name: - # Once found, you can access the attributes of the event_name - return event_details["attributes"] - return [-1] - - -from ..commands.network import HYPERPARAMS - - def set_hyperparameter_extrinsic( subtensor: "bittensor.subtensor", wallet: "bittensor.wallet", @@ -158,7 +170,7 @@ def set_hyperparameter_extrinsic( wallet.coldkey # unlock coldkey extrinsic = HYPERPARAMS.get(parameter) - if extrinsic == None: + if extrinsic is None: bittensor.__console__.print( ":cross_mark: [red]Invalid hyperparameter specified.[/red]" ) @@ -198,9 +210,7 @@ def set_hyperparameter_extrinsic( response.process_events() if not response.is_success: bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format( - response.error_message - ) + f":cross_mark: [red]Failed[/red]: {format_error_message(response.error_message)}" ) time.sleep(0.5) diff --git a/bittensor/extrinsics/prometheus.py b/bittensor/extrinsics/prometheus.py index 350817e11f..97f7c17714 100644 --- a/bittensor/extrinsics/prometheus.py +++ b/bittensor/extrinsics/prometheus.py @@ -15,6 +15,7 @@ # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. 
+ import bittensor import json @@ -54,7 +55,7 @@ def prometheus_extrinsic( """ # ---- Get external ip ---- - if ip == None: + if ip is None: try: external_ip = net.get_external_ip() bittensor.__console__.print( @@ -125,7 +126,7 @@ def prometheus_extrinsic( ) if wait_for_inclusion or wait_for_finalization: - if success == True: + if success is True: bittensor.__console__.print( ":white_heavy_check_mark: [green]Served prometheus[/green]\n [bold white]{}[/bold white]".format( json.dumps(call_params, indent=4, sort_keys=True) @@ -133,11 +134,7 @@ def prometheus_extrinsic( ) return True else: - bittensor.__console__.print( - ":cross_mark: [green]Failed to serve prometheus[/green] error: {}".format( - err - ) - ) + bittensor.__console__.print(f":cross_mark: [red]Failed[/red]: {err}") return False else: return True diff --git a/bittensor/extrinsics/registration.py b/bittensor/extrinsics/registration.py index 879214ad92..e82add8383 100644 --- a/bittensor/extrinsics/registration.py +++ b/bittensor/extrinsics/registration.py @@ -16,10 +16,14 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. -import bittensor import time -from rich.prompt import Confirm from typing import List, Union, Optional, Tuple + +from rich.prompt import Confirm + +import bittensor +from bittensor.utils import format_error_message + from bittensor.utils.registration import ( POWSolution, create_pow, @@ -171,16 +175,17 @@ def register_extrinsic( ) success, err_msg = result - if success != True or success == False: - if "key is already registered" in err_msg: - # Error meant that the key is already registered. 
+ if not success: + # Look error here + # https://github.com/opentensor/subtensor/blob/development/pallets/subtensor/src/errors.rs + if "HotKeyAlreadyRegisteredInSubNet" in err_msg: bittensor.__console__.print( f":white_heavy_check_mark: [green]Already Registered on [bold]subnet:{netuid}[/bold][/green]" ) return True bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format(err_msg) + f":cross_mark: [red]Failed[/red]: {err_msg}" ) time.sleep(0.5) @@ -290,10 +295,8 @@ def burned_register_extrinsic( wait_for_finalization=wait_for_finalization, ) - if success != True or success == False: - bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format(err_msg) - ) + if not success: + bittensor.__console__.print(f":cross_mark: [red]Failed[/red]: {err_msg}") time.sleep(0.5) return False # Successful registration, final check for neuron and pubkey @@ -454,11 +457,13 @@ def run_faucet_extrinsic( response.process_events() if not response.is_success: bittensor.__console__.print( - f":cross_mark: [red]Failed[/red]: Error: {response.error_message}" + f":cross_mark: [red]Failed[/red]: {format_error_message(response.error_message)}" ) if attempts == max_allowed_attempts: raise MaxAttemptsException attempts += 1 + # Wait a bit before trying again + time.sleep(1) # Successful registration else: @@ -470,6 +475,8 @@ def run_faucet_extrinsic( if successes == 3: raise MaxSuccessException + + attempts = 1 # Reset attempts on success successes += 1 except KeyboardInterrupt: @@ -506,10 +513,8 @@ def swap_hotkey_extrinsic( wait_for_finalization=wait_for_finalization, ) - if success != True or success == False: - bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format(err_msg) - ) + if not success: + bittensor.__console__.print(f":cross_mark: [red]Failed[/red]: {err_msg}") time.sleep(0.5) return False diff --git a/bittensor/extrinsics/root.py b/bittensor/extrinsics/root.py index b2d5aef91a..8a7e9e3863 100644 --- 
a/bittensor/extrinsics/root.py +++ b/bittensor/extrinsics/root.py @@ -77,10 +77,8 @@ def root_register_extrinsic( wait_for_finalization=wait_for_finalization, ) - if success != True or success == False: - bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format(err_msg) - ) + if not success: + bittensor.__console__.print(f":cross_mark: [red]Failed[/red]: {err_msg}") time.sleep(0.5) # Successful registration, final check for neuron and pubkey @@ -208,7 +206,7 @@ def set_root_weights_extrinsic( return True else: bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format(error_message) + f":cross_mark: [red]Failed[/red]: {error_message}" ) bittensor.logging.warning( prefix="Set weights", diff --git a/bittensor/extrinsics/senate.py b/bittensor/extrinsics/senate.py index 233a78d614..043233996c 100644 --- a/bittensor/extrinsics/senate.py +++ b/bittensor/extrinsics/senate.py @@ -16,12 +16,13 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. 
-# Imports -import bittensor - import time + from rich.prompt import Confirm +import bittensor +from bittensor.utils import format_error_message + def register_senate_extrinsic( subtensor: "bittensor.subtensor", @@ -78,9 +79,7 @@ def register_senate_extrinsic( response.process_events() if not response.is_success: bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format( - response.error_message - ) + f":cross_mark: [red]Failed[/red]:{format_error_message(response.error_message)}" ) time.sleep(0.5) @@ -155,9 +154,7 @@ def leave_senate_extrinsic( response.process_events() if not response.is_success: bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format( - response.error_message - ) + f":cross_mark: [red]Failed[/red]: {format_error_message(response.error_message)}" ) time.sleep(0.5) @@ -240,9 +237,7 @@ def vote_senate_extrinsic( response.process_events() if not response.is_success: bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format( - response.error_message - ) + f":cross_mark: [red]Failed[/red]: {format_error_message(response.error_message)}" ) time.sleep(0.5) diff --git a/bittensor/extrinsics/serving.py b/bittensor/extrinsics/serving.py index 05bf4c4369..bba5367de1 100644 --- a/bittensor/extrinsics/serving.py +++ b/bittensor/extrinsics/serving.py @@ -15,10 +15,16 @@ # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. 
+ import json +from typing import Optional + +from retry import retry +from rich.prompt import Confirm + import bittensor import bittensor.utils.networking as net -from rich.prompt import Confirm +from bittensor.utils import format_error_message from ..errors import MetadataError @@ -123,15 +129,13 @@ def serve_extrinsic( ) if wait_for_inclusion or wait_for_finalization: - if success == True: + if success is True: bittensor.logging.debug( f"Axon served with: AxonInfo({wallet.hotkey.ss58_address},{ip}:{port}) on {subtensor.network}:{netuid} " ) return True else: - bittensor.logging.debug( - f"Axon failed to served with error: {error_message} " - ) + bittensor.logging.error(f"Failed: {error_message}") return False else: return True @@ -167,7 +171,7 @@ def serve_axon_extrinsic( external_port = axon.external_port # ---- Get external ip ---- - if axon.external_ip == None: + if axon.external_ip is None: try: external_ip = net.get_external_ip() bittensor.__console__.print( @@ -196,7 +200,6 @@ def serve_axon_extrinsic( protocol=4, wait_for_inclusion=wait_for_inclusion, wait_for_finalization=wait_for_finalization, - prompt=prompt, ) return serve_success @@ -205,7 +208,7 @@ def publish_metadata( subtensor: "bittensor.subtensor", wallet: "bittensor.wallet", netuid: int, - type: str, + data_type: str, data: bytes, wait_for_inclusion: bool = False, wait_for_finalization: bool = True, @@ -220,7 +223,7 @@ def publish_metadata( The wallet object used for authentication in the transaction. netuid (int): Network UID on which the metadata is to be published. - type (str): + data_type (str): The data type of the information being submitted. It should be one of the following: ``'Sha256'``, ``'Blake256'``, ``'Keccak256'``, or ``'Raw0-128'``. This specifies the format or hashing algorithm used for the data. data (str): The actual metadata content to be published. This should be formatted or hashed according to the ``type`` specified. 
(Note: max ``str`` length is 128 bytes) @@ -244,7 +247,10 @@ def publish_metadata( call = substrate.compose_call( call_module="Commitments", call_function="set_commitment", - call_params={"netuid": netuid, "info": {"fields": [[{f"{type}": data}]]}}, + call_params={ + "netuid": netuid, + "info": {"fields": [[{f"{data_type}": data}]]}, + }, ) extrinsic = substrate.create_signed_extrinsic(call=call, keypair=wallet.hotkey) @@ -260,11 +266,7 @@ def publish_metadata( if response.is_success: return True else: - raise MetadataError(response.error_message) - - -from retry import retry -from typing import Optional + raise MetadataError(format_error_message(response.error_message)) def get_metadata(self, netuid: int, hotkey: str, block: Optional[int] = None) -> str: @@ -275,7 +277,7 @@ def make_substrate_call_with_retry(): module="Commitments", storage_function="CommitmentOf", params=[netuid, hotkey], - block_hash=None if block == None else substrate.get_block_hash(block), + block_hash=None if block is None else substrate.get_block_hash(block), ) commit_data = make_substrate_call_with_retry() diff --git a/bittensor/extrinsics/set_weights.py b/bittensor/extrinsics/set_weights.py index 5db0a1a7a9..dc3052d0a0 100644 --- a/bittensor/extrinsics/set_weights.py +++ b/bittensor/extrinsics/set_weights.py @@ -44,7 +44,7 @@ def set_weights_extrinsic( r"""Sets the given weights and values on chain for wallet hotkey account. Args: - subtensor_endpoint (bittensor.subtensor): + subtensor (bittensor.subtensor): Subtensor endpoint to use. wallet (bittensor.wallet): Bittensor wallet object. @@ -109,7 +109,7 @@ def set_weights_extrinsic( if not wait_for_finalization and not wait_for_inclusion: return True, "Not waiting for finalization or inclusion." - if success == True: + if success is True: bittensor.__console__.print( ":white_heavy_check_mark: [green]Finalized[/green]" ) @@ -119,12 +119,10 @@ def set_weights_extrinsic( ) return True, "Successfully set weights and Finalized." 
else: - bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format(error_message) - ) - bittensor.logging.warning( + bittensor.logging.error( + msg=error_message, prefix="Set weights", - suffix="Failed: " + str(error_message), + suffix="Failed: ", ) return False, error_message diff --git a/bittensor/extrinsics/staking.py b/bittensor/extrinsics/staking.py index f3249a8b1c..298bb1f0d3 100644 --- a/bittensor/extrinsics/staking.py +++ b/bittensor/extrinsics/staking.py @@ -19,10 +19,34 @@ import bittensor from rich.prompt import Confirm from time import sleep -from typing import List, Union, Optional +from typing import List, Union, Optional, Tuple from bittensor.utils.balance import Balance +def _check_threshold_amount( + subtensor: "bittensor.subtensor", stake_balance: Balance +) -> Tuple[bool, Balance]: + """ + Checks if the new stake balance will be above the minimum required stake threshold. + + Args: + stake_balance (Balance): + the balance to check for threshold limits. + + Returns: + success, threshold (bool, Balance): + ``true`` if the staking balance is above the threshold, or ``false`` if the + staking balance is below the threshold. + The threshold balance required to stake. + """ + min_req_stake: Balance = subtensor.get_minimum_required_stake() + + if min_req_stake > stake_balance: + return False, min_req_stake + else: + return True, min_req_stake + + def add_stake_extrinsic( subtensor: "bittensor.subtensor", wallet: "bittensor.wallet", @@ -91,8 +115,11 @@ def add_stake_extrinsic( coldkey_ss58=wallet.coldkeypub.ss58_address, hotkey_ss58=hotkey_ss58 ) + # Grab the existential deposit. + existential_deposit = subtensor.get_existential_deposit() + # Convert to bittensor.Balance - if amount == None: + if amount is None: # Stake it all. 
staking_balance = bittensor.Balance.from_tao(old_balance.tao) elif not isinstance(amount, bittensor.Balance): @@ -100,9 +127,10 @@ def add_stake_extrinsic( else: staking_balance = amount - # Remove existential balance to keep key alive. - if staking_balance > bittensor.Balance.from_rao(1000): - staking_balance = staking_balance - bittensor.Balance.from_rao(1000) + # Leave existential balance to keep key alive. + if staking_balance > old_balance - existential_deposit: + # If we are staking all, we need to leave at least the existential deposit. + staking_balance = old_balance - existential_deposit else: staking_balance = staking_balance @@ -115,6 +143,18 @@ def add_stake_extrinsic( ) return False + # If nominating, we need to check if the new stake balance will be above the minimum required stake threshold. + if not own_hotkey: + new_stake_balance = old_stake + staking_balance + is_above_threshold, threshold = _check_threshold_amount( + subtensor, new_stake_balance + ) + if not is_above_threshold: + bittensor.__console__.print( + f":cross_mark: [red]New stake balance of {new_stake_balance} is below the minimum required nomination stake threshold {threshold}.[/red]" + ) + return False + # Ask before moving on. if prompt: if not own_hotkey: @@ -148,7 +188,7 @@ def add_stake_extrinsic( wait_for_finalization=wait_for_finalization, ) - if staking_response == True: # If we successfully staked. + if staking_response is True: # If we successfully staked. # We only wait here if we expect finalization. 
if not wait_for_finalization and not wait_for_inclusion: return True @@ -167,7 +207,7 @@ def add_stake_extrinsic( block = subtensor.get_current_block() new_stake = subtensor.get_stake_for_coldkey_and_hotkey( coldkey_ss58=wallet.coldkeypub.ss58_address, - hotkey_ss58=wallet.hotkey.ss58_address, + hotkey_ss58=hotkey_ss58, block=block, ) # Get current stake diff --git a/bittensor/extrinsics/transfer.py b/bittensor/extrinsics/transfer.py index ae09803199..91ef3237eb 100644 --- a/bittensor/extrinsics/transfer.py +++ b/bittensor/extrinsics/transfer.py @@ -130,7 +130,7 @@ def transfer_extrinsic( explorer_urls = bittensor.utils.get_explorer_url_for_network( subtensor.network, block_hash, bittensor.__network_explorer_map__ ) - if explorer_urls != {}: + if explorer_urls != {} and explorer_urls: bittensor.__console__.print( "[green]Opentensor Explorer Link: {}[/green]".format( explorer_urls.get("opentensor") @@ -142,9 +142,7 @@ def transfer_extrinsic( ) ) else: - bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format(err_msg) - ) + bittensor.__console__.print(f":cross_mark: [red]Failed[/red]: {err_msg}") if success: with bittensor.__console__.status(":satellite: Checking Balance..."): diff --git a/bittensor/extrinsics/unstaking.py b/bittensor/extrinsics/unstaking.py index 6046124f40..105bb145b9 100644 --- a/bittensor/extrinsics/unstaking.py +++ b/bittensor/extrinsics/unstaking.py @@ -72,13 +72,13 @@ def __do_remove_stake_single( def check_threshold_amount( - subtensor: "bittensor.subtensor", unstaking_balance: Balance + subtensor: "bittensor.subtensor", stake_balance: Balance ) -> bool: """ - Checks if the unstaking amount is above the threshold or 0 + Checks if the remaining stake balance is above the minimum required stake threshold. Args: - unstaking_balance (Balance): + stake_balance (Balance): the balance to check for threshold limits. 
Returns: @@ -88,9 +88,9 @@ def check_threshold_amount( """ min_req_stake: Balance = subtensor.get_minimum_required_stake() - if min_req_stake > unstaking_balance > 0: + if min_req_stake > stake_balance > 0: bittensor.__console__.print( - f":cross_mark: [red]Unstaking balance of {unstaking_balance} less than minimum of {min_req_stake} TAO[/red]" + f":cross_mark: [yellow]Remaining stake balance of {stake_balance} less than minimum of {min_req_stake} TAO[/yellow]" ) return False else: @@ -141,8 +141,11 @@ def unstake_extrinsic( coldkey_ss58=wallet.coldkeypub.ss58_address, hotkey_ss58=hotkey_ss58 ) + hotkey_owner = subtensor.get_hotkey_owner(hotkey_ss58) + own_hotkey: bool = wallet.coldkeypub.ss58_address == hotkey_owner + # Convert to bittensor.Balance - if amount == None: + if amount is None: # Unstake it all. unstaking_balance = old_stake elif not isinstance(amount, bittensor.Balance): @@ -160,10 +163,14 @@ def unstake_extrinsic( ) return False - if not check_threshold_amount( - subtensor=subtensor, unstaking_balance=unstaking_balance + # If nomination stake, check threshold. + if not own_hotkey and not check_threshold_amount( + subtensor=subtensor, stake_balance=(stake_on_uid - unstaking_balance) ): - return False + bittensor.__console__.print( + f":warning: [yellow]This action will unstake the entire staked balance![/yellow]" + ) + unstaking_balance = stake_on_uid # Ask before moving on. if prompt: @@ -189,7 +196,7 @@ def unstake_extrinsic( wait_for_finalization=wait_for_finalization, ) - if staking_response == True: # If we successfully unstaked. + if staking_response is True: # If we successfully unstaked. # We only wait here if we expect finalization. if not wait_for_finalization and not wait_for_inclusion: return True @@ -221,7 +228,7 @@ def unstake_extrinsic( return True else: bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: Error unknown." + ":cross_mark: [red]Failed[/red]: Unknown Error." 
) return False @@ -300,6 +307,7 @@ def unstake_multiple_extrinsic( wallet.coldkey old_stakes = [] + own_hotkeys = [] with bittensor.__console__.status( ":satellite: Syncing with chain: [white]{}[/white] ...".format( subtensor.network @@ -313,12 +321,15 @@ def unstake_multiple_extrinsic( ) # Get stake on hotkey. old_stakes.append(old_stake) # None if not registered. + hotkey_owner = subtensor.get_hotkey_owner(hotkey_ss58) + own_hotkeys.append(wallet.coldkeypub.ss58_address == hotkey_owner) + successful_unstakes = 0 - for idx, (hotkey_ss58, amount, old_stake) in enumerate( - zip(hotkey_ss58s, amounts, old_stakes) + for idx, (hotkey_ss58, amount, old_stake, own_hotkey) in enumerate( + zip(hotkey_ss58s, amounts, old_stakes, own_hotkeys) ): # Covert to bittensor.Balance - if amount == None: + if amount is None: # Unstake it all. unstaking_balance = old_stake elif not isinstance(amount, bittensor.Balance): @@ -336,10 +347,14 @@ def unstake_multiple_extrinsic( ) continue - if not check_threshold_amount( - subtensor=subtensor, unstaking_balance=unstaking_balance + # If nomination stake, check threshold. + if not own_hotkey and not check_threshold_amount( + subtensor=subtensor, stake_balance=(stake_on_uid - unstaking_balance) ): - return False + bittensor.__console__.print( + f":warning: [yellow]This action will unstake the entire staked balance![/yellow]" + ) + unstaking_balance = stake_on_uid # Ask before moving on. if prompt: @@ -365,7 +380,7 @@ def unstake_multiple_extrinsic( wait_for_finalization=wait_for_finalization, ) - if staking_response == True: # If we successfully unstaked. + if staking_response is True: # If we successfully unstaked. # We only wait here if we expect finalization. if idx < len(hotkey_ss58s) - 1: @@ -405,7 +420,7 @@ def unstake_multiple_extrinsic( successful_unstakes += 1 else: bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: Error unknown." + ":cross_mark: [red]Failed[/red]: Unknown Error." 
) continue diff --git a/bittensor/metagraph.py b/bittensor/metagraph.py index 8bad4d6c78..8d7e97bcc0 100644 --- a/bittensor/metagraph.py +++ b/bittensor/metagraph.py @@ -618,7 +618,9 @@ def _set_weights_and_bonds(self, subtensor: Optional[bittensor.subtensor] = None # TODO: Check and test the computation of weights and bonds if self.netuid == 0: self.weights = self._process_root_weights( - [neuron.weights for neuron in self.neurons], "weights", subtensor # type: ignore + [neuron.weights for neuron in self.neurons], + "weights", + subtensor, # type: ignore ) else: self.weights = self._process_weights_or_bonds( @@ -659,16 +661,16 @@ def _process_weights_or_bonds( if attribute == "weights": data_array.append( bittensor.utils.weight_utils.convert_weight_uids_and_vals_to_tensor( - len(self.neurons), list(uids), list(values) # type: ignore + len(self.neurons), + list(uids), + list(values), # type: ignore ) ) else: data_array.append( bittensor.utils.weight_utils.convert_bond_uids_and_vals_to_tensor( # type: ignore len(self.neurons), list(uids), list(values) - ).astype( - np.float32 - ) + ).astype(np.float32) ) tensor_param: Union["torch.nn.Parameter", NDArray] = ( ( diff --git a/bittensor/mock/subtensor_mock.py b/bittensor/mock/subtensor_mock.py index 4ca08cfb22..30d58f22e0 100644 --- a/bittensor/mock/subtensor_mock.py +++ b/bittensor/mock/subtensor_mock.py @@ -35,7 +35,7 @@ AxonInfo, ) from ..errors import ChainQueryError -from ..subtensor import subtensor +from ..subtensor import Subtensor from ..utils import RAOPERTAO, U16_NORMALIZED_FLOAT from ..utils.balance import Balance from ..utils.registration import POWSolution @@ -196,7 +196,7 @@ class MockChainState(TypedDict): SubtensorModule: MockSubtensorState -class MockSubtensor(subtensor): +class MockSubtensor(Subtensor): """ A Mock Subtensor class for running tests. This should mock only methods that make queries to the chain. 
@@ -430,9 +430,9 @@ def _register_neuron(self, netuid: int, hotkey: str, coldkey: str) -> int: subtensor_state["Active"][netuid][uid][self.block_number] = True subtensor_state["LastUpdate"][netuid][uid] = {} - subtensor_state["LastUpdate"][netuid][uid][ + subtensor_state["LastUpdate"][netuid][uid][self.block_number] = ( self.block_number - ] = self.block_number + ) subtensor_state["Rank"][netuid][uid] = {} subtensor_state["Rank"][netuid][uid][self.block_number] = 0.0 @@ -756,7 +756,7 @@ def neuron_for_uid( self, uid: int, netuid: int, block: Optional[int] = None ) -> Optional[NeuronInfo]: if uid is None: - return NeuronInfo._null_neuron() + return NeuronInfo.get_null_neuron() if block: if self.block_number < block: @@ -1064,9 +1064,9 @@ def _do_nominate( else: subtensor_state["Delegates"][hotkey_ss58] = {} - subtensor_state["Delegates"][hotkey_ss58][ - self.block_number - ] = 0.18 # Constant for now + subtensor_state["Delegates"][hotkey_ss58][self.block_number] = ( + 0.18 # Constant for now + ) return True @@ -1189,9 +1189,9 @@ def _do_stake( if not wallet.coldkeypub.ss58_address in stake_state[hotkey_ss58]: stake_state[hotkey_ss58][wallet.coldkeypub.ss58_address] = {} - stake_state[hotkey_ss58][wallet.coldkeypub.ss58_address][ - self.block_number - ] = amount.rao + stake_state[hotkey_ss58][wallet.coldkeypub.ss58_address][self.block_number] = ( + amount.rao + ) # Add to total_stake storage subtensor_state["TotalStake"][self.block_number] = ( @@ -1275,9 +1275,9 @@ def _do_unstake( total_hotkey_stake_state = subtensor_state["TotalHotkeyStake"] if not hotkey_ss58 in total_hotkey_stake_state: total_hotkey_stake_state[hotkey_ss58] = {} - total_hotkey_stake_state[hotkey_ss58][ - self.block_number - ] = 0 # Shouldn't happen + total_hotkey_stake_state[hotkey_ss58][self.block_number] = ( + 0 # Shouldn't happen + ) total_coldkey_stake_state = subtensor_state["TotalColdkeyStake"] if not wallet.coldkeypub.ss58_address in total_coldkey_stake_state: diff --git 
a/bittensor/subnets.py b/bittensor/subnets.py index c10d4716a4..836a20dcb7 100644 --- a/bittensor/subnets.py +++ b/bittensor/subnets.py @@ -49,8 +49,6 @@ async def query_api( axons: Union[bt.axon, List[bt.axon]], deserialize: Optional[bool] = False, timeout: Optional[int] = 12, - n: Optional[float] = 0.1, - uid: Optional[int] = None, **kwargs: Optional[Any], ) -> Any: """ @@ -60,15 +58,13 @@ async def query_api( axons (Union[bt.axon, List[bt.axon]]): The list of axon(s) to query. deserialize (bool, optional): Whether to deserialize the responses. Defaults to False. timeout (int, optional): The timeout in seconds for the query. Defaults to 12. - n (float, optional): The fraction of top nodes to consider based on stake. Defaults to 0.1. - uid (int, optional): The specific UID of the API node to query. Defaults to None. **kwargs: Keyword arguments for the prepare_synapse_fn. Returns: Any: The result of the process_responses_fn. """ synapse = self.prepare_synapse(**kwargs) - bt.logging.debug(f"Quering valdidator axons with synapse {synapse.name}...") + bt.logging.debug(f"Querying validator axons with synapse {synapse.name}...") responses = await self.dendrite( axons=axons, synapse=synapse, diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 4c351d42b0..0fffa1cc7e 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -20,12 +20,13 @@ The ``bittensor.subtensor`` module in Bittensor serves as a crucial interface for interacting with the Bittensor blockchain, facilitating a range of operations essential for the decentralized machine learning network. 
""" + import argparse import copy import functools import socket import time -from typing import List, Dict, Union, Optional, Tuple, TypedDict, Any, TypeVar +from typing import List, Dict, Union, Optional, Tuple, TypedDict, Any import numpy as np import scalecodec @@ -34,15 +35,15 @@ from scalecodec.base import RuntimeConfiguration from scalecodec.exceptions import RemainingScaleBytesNotEmptyException from scalecodec.type_registry import load_type_registry_preset -from scalecodec.types import GenericCall +from scalecodec.types import GenericCall, ScaleType from substrateinterface.base import QueryMapResult, SubstrateInterface, ExtrinsicReceipt from substrateinterface.exceptions import SubstrateRequestException - import bittensor from bittensor.btlogging import logging as _logger -from bittensor.utils import torch, weight_utils +from bittensor.utils import torch, weight_utils, format_error_message from .chain_data import ( + DelegateInfoLite, NeuronInfo, DelegateInfo, PrometheusInfo, @@ -106,11 +107,8 @@ from .utils.registration import legacy_torch_api_compat from .utils.subtensor import get_subtensor_errors - KEY_NONCE: Dict[str, int] = {} -T = TypeVar("T") - ####### # Monkey patch in caching the convert_type_string method ####### @@ -118,7 +116,7 @@ original_convert_type_string = RuntimeConfiguration.convert_type_string @functools.lru_cache(maxsize=None) - def convert_type_string(cls, name): + def convert_type_string(_, name): return original_convert_type_string(name) RuntimeConfiguration.convert_type_string = convert_type_string @@ -130,7 +128,7 @@ class ParamWithTypes(TypedDict): type: str # ScaleType string of the parameter. -class subtensor: +class Subtensor: """ The Subtensor class in Bittensor serves as a crucial interface for interacting with the Bittensor blockchain, facilitating a range of operations essential for the decentralized machine learning network. 
@@ -153,12 +151,6 @@ class subtensor: with market-based speculation, incentivizing neurons to make judicious decisions in their inter-neuronal investments. - Args: - network (str): The name of the Bittensor network (e.g., 'finney', 'test', 'archive', 'local') the instance is - connected to, determining the blockchain interaction context. - chain_endpoint (str): The blockchain node endpoint URL, enabling direct communication with the Bittensor - blockchain for transaction processing and data retrieval. - Example Usage:: # Connect to the main Bittensor network (Finney). @@ -236,11 +228,11 @@ def __init__( network = None if config is None: - config = subtensor.config() + config = Subtensor.config() self.config = copy.deepcopy(config) # type: ignore # Setup config.subtensor.network and config.subtensor.chain_endpoint - self.chain_endpoint, self.network = subtensor.setup_config(network, config) # type: ignore + self.chain_endpoint, self.network = Subtensor.setup_config(network, config) # type: ignore if ( self.network == "finney" @@ -267,7 +259,7 @@ def __init__( url=self.chain_endpoint, type_registry=bittensor.__type_registry__, ) - except ConnectionRefusedError as e: + except ConnectionRefusedError: _logger.error( f"Could not connect to {self.network} network with {self.chain_endpoint} chain endpoint. Exiting...", ) @@ -308,7 +300,7 @@ def __repr__(self) -> str: return self.__str__() @staticmethod - def config() -> bittensor.config: + def config() -> "bittensor.config": """ Creates and returns a Bittensor configuration object. @@ -317,7 +309,7 @@ def config() -> bittensor.config: `subtensor.add_args` method. 
""" parser = argparse.ArgumentParser() - subtensor.add_args(parser) + Subtensor.add_args(parser) return bittensor.config(parser, args=[]) @classmethod @@ -329,7 +321,7 @@ def help(cls): parser.print_help() @classmethod - def add_args(cls, parser: argparse.ArgumentParser, prefix: Optional[str] = None): + def add_args(cls, parser: "argparse.ArgumentParser", prefix: Optional[str] = None): """ Adds command-line arguments to the provided ArgumentParser for configuring the Subtensor settings. @@ -428,7 +420,7 @@ def determine_chain_endpoint_and_network(network: str): return "unknown", network @staticmethod - def setup_config(network: str, config: bittensor.config): + def setup_config(network: str, config: "bittensor.config"): """ Sets up and returns the configuration for the Subtensor network and endpoint. @@ -452,13 +444,13 @@ def setup_config(network: str, config: bittensor.config): ( evaluated_network, evaluated_endpoint, - ) = subtensor.determine_chain_endpoint_and_network(network) + ) = Subtensor.determine_chain_endpoint_and_network(network) else: if config.get("__is_set", {}).get("subtensor.chain_endpoint"): ( evaluated_network, evaluated_endpoint, - ) = subtensor.determine_chain_endpoint_and_network( + ) = Subtensor.determine_chain_endpoint_and_network( config.subtensor.chain_endpoint ) @@ -466,7 +458,7 @@ def setup_config(network: str, config: bittensor.config): ( evaluated_network, evaluated_endpoint, - ) = subtensor.determine_chain_endpoint_and_network( + ) = Subtensor.determine_chain_endpoint_and_network( config.subtensor.network ) @@ -474,7 +466,7 @@ def setup_config(network: str, config: bittensor.config): ( evaluated_network, evaluated_endpoint, - ) = subtensor.determine_chain_endpoint_and_network( + ) = Subtensor.determine_chain_endpoint_and_network( config.subtensor.chain_endpoint ) @@ -482,7 +474,7 @@ def setup_config(network: str, config: bittensor.config): ( evaluated_network, evaluated_endpoint, - ) = subtensor.determine_chain_endpoint_and_network( + 
) = Subtensor.determine_chain_endpoint_and_network( config.subtensor.network ) @@ -490,7 +482,7 @@ def setup_config(network: str, config: bittensor.config): ( evaluated_network, evaluated_endpoint, - ) = subtensor.determine_chain_endpoint_and_network( + ) = Subtensor.determine_chain_endpoint_and_network( bittensor.defaults.subtensor.network ) @@ -508,7 +500,7 @@ def close(self): ############## def nominate( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", wait_for_finalization: bool = False, wait_for_inclusion: bool = True, ) -> bool: @@ -538,7 +530,7 @@ def nominate( def delegate( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", delegate_ss58: Optional[str] = None, amount: Optional[Union[Balance, float]] = None, wait_for_inclusion: bool = True, @@ -577,7 +569,7 @@ def delegate( def undelegate( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", delegate_ss58: Optional[str] = None, amount: Optional[Union[Balance, float]] = None, wait_for_inclusion: bool = True, @@ -614,7 +606,7 @@ def undelegate( def set_take( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", delegate_ss58: Optional[str] = None, take: float = 0.0, wait_for_inclusion: bool = True, @@ -679,7 +671,7 @@ def set_take( def send_extrinsic( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", module: str, function: str, params: dict, @@ -781,9 +773,10 @@ def send_extrinsic( ############### # Set Weights # ############### + # TODO: still needed? Can't find any usage of this method. def set_weights( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", netuid: int, uids: Union[NDArray[np.int64], "torch.LongTensor", list], weights: Union[NDArray[np.float32], "torch.FloatTensor", list], @@ -801,8 +794,10 @@ def set_weights( Args: wallet (bittensor.wallet): The wallet associated with the neuron setting the weights. netuid (int): The unique identifier of the subnet. 
- uids (Union[NDArray[np.int64], torch.LongTensor, list]): The list of neuron UIDs that the weights are being set for. - weights (Union[NDArray[np.float32], torch.FloatTensor, list]): The corresponding weights to be set for each UID. + uids (Union[NDArray[np.int64], torch.LongTensor, list]): The list of neuron UIDs that the weights are being + set for. + weights (Union[NDArray[np.float32], torch.FloatTensor, list]): The corresponding weights to be set for each + UID. version_key (int, optional): Version key for compatibility with the network. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. @@ -845,7 +840,7 @@ def set_weights( def _do_set_weights( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", uids: List[int], vals: List[int], netuid: int, @@ -905,7 +900,7 @@ def make_substrate_call_with_retry(): if response.is_success: return True, "Successfully set weights." else: - return False, response.error_message + return False, format_error_message(response.error_message) return make_substrate_call_with_retry() @@ -948,7 +943,6 @@ def commit_weights( This function allows neurons to create a tamper-proof record of their weight distribution at a specific point in time, enhancing transparency and accountability within the Bittensor network. """ - uid = self.get_uid_for_hotkey_on_subnet(wallet.hotkey.ss58_address, netuid) retries = 0 success = False message = "No attempt made. Perhaps it is too soon to commit weights!" @@ -1087,7 +1081,7 @@ def reveal_weights( This function allows neurons to reveal their previously committed weight distribution, ensuring transparency and accountability within the Bittensor network. """ - uid = self.get_uid_for_hotkey_on_subnet(wallet.hotkey.ss58_address, netuid) + retries = 0 success = False message = "No attempt made. Perhaps it is too soon to reveal weights!" 
@@ -1177,7 +1171,7 @@ def make_substrate_call_with_retry(): if response.is_success: return True, None else: - return False, response.error_message + return False, format_error_message(response.error_message) return make_substrate_call_with_retry() @@ -1186,7 +1180,7 @@ def make_substrate_call_with_retry(): ################ def register( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", netuid: int, wait_for_inclusion: bool = False, wait_for_finalization: bool = True, @@ -1248,8 +1242,8 @@ def register( def swap_hotkey( self, - wallet: bittensor.wallet, - new_wallet: bittensor.wallet, + wallet: "bittensor.wallet", + new_wallet: "bittensor.wallet", wait_for_inclusion: bool = False, wait_for_finalization: bool = True, prompt: bool = False, @@ -1282,7 +1276,7 @@ def swap_hotkey( def run_faucet( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", wait_for_inclusion: bool = False, wait_for_finalization: bool = True, prompt: bool = False, @@ -1346,7 +1340,7 @@ def run_faucet( def burned_register( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", netuid: int, wait_for_inclusion: bool = False, wait_for_finalization: bool = True, @@ -1380,7 +1374,7 @@ def burned_register( def _do_pow_register( self, netuid: int, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", pow_result: POWSolution, wait_for_inclusion: bool = False, wait_for_finalization: bool = True, @@ -1432,7 +1426,7 @@ def make_substrate_call_with_retry(): # process if registration successful, try again if pow is still valid response.process_events() if not response.is_success: - return False, response.error_message + return False, format_error_message(response.error_message) # Successful registration else: return True, None @@ -1442,7 +1436,7 @@ def make_substrate_call_with_retry(): def _do_burned_register( self, netuid: int, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", wait_for_inclusion: bool = False, wait_for_finalization: bool = True, ) -> Tuple[bool, 
Optional[str]]: @@ -1459,8 +1453,7 @@ def _do_burned_register( wait_for_finalization (bool): Whether to wait for the transaction to be finalized. Default is True. Returns: - Tuple[bool, Optional[str]]: A tuple containing a boolean indicating success or failure, and an optional - error message. + Tuple[bool, Optional[str]]: A tuple containing a boolean indicating success or failure, and an optional error message. """ @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) @@ -1490,7 +1483,7 @@ def make_substrate_call_with_retry(): # process if registration successful, try again if pow is still valid response.process_events() if not response.is_success: - return False, response.error_message + return False, format_error_message(response.error_message) # Successful registration else: return True, None @@ -1499,8 +1492,8 @@ def make_substrate_call_with_retry(): def _do_swap_hotkey( self, - wallet: bittensor.wallet, - new_wallet: bittensor.wallet, + wallet: "bittensor.wallet", + new_wallet: "bittensor.wallet", wait_for_inclusion: bool = False, wait_for_finalization: bool = True, ) -> Tuple[bool, Optional[str]]: @@ -1546,7 +1539,7 @@ def make_substrate_call_with_retry(): # process if registration successful, try again if pow is still valid response.process_events() if not response.is_success: - return False, response.error_message + return False, format_error_message(response.error_message) # Successful registration else: return True, None @@ -1558,7 +1551,7 @@ def make_substrate_call_with_retry(): ############ def transfer( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", dest: str, amount: Union[Balance, float], wait_for_inclusion: bool = True, @@ -1595,8 +1588,8 @@ def transfer( ) def get_transfer_fee( - self, wallet: bittensor.wallet, dest: str, value: Union[Balance, float, int] - ) -> Balance: + self, wallet: "bittensor.wallet", dest: str, value: Union["Balance", float, int] + ) -> "Balance": """ Calculates the transaction fee for 
transferring tokens from a wallet to a specified destination address. This function simulates the transfer to estimate the associated cost, taking into account the current @@ -1655,7 +1648,7 @@ def _do_transfer( self, wallet: "bittensor.wallet", dest: str, - transfer_balance: Balance, + transfer_balance: "Balance", wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> Tuple[bool, Optional[str], Optional[str]]: @@ -1669,11 +1662,12 @@ def _do_transfer( wait_for_finalization (bool): If ``true``, waits for finalization. Returns: success (bool): ``True`` if transfer was successful. - block_hash (str): Block hash of the transfer. On success and if wait_for_ finalization/inclusion is ``True``. + block_hash (str): Block hash of the transfer. On success and if wait_for_ finalization/inclusion is + ``True``. error (str): Error message if transfer failed. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( call_module="Balances", @@ -1698,18 +1692,21 @@ def make_substrate_call_with_retry(): block_hash = response.block_hash return True, block_hash, None else: - return False, None, response.error_message + return False, None, format_error_message(response.error_message) return make_substrate_call_with_retry() - def get_existential_deposit(self, block: Optional[int] = None) -> Optional[Balance]: + def get_existential_deposit( + self, block: Optional[int] = None + ) -> Optional["Balance"]: """ Retrieves the existential deposit amount for the Bittensor blockchain. The existential deposit is the minimum amount of TAO required for an account to exist on the blockchain. Accounts with balances below this threshold can be reaped to conserve network resources. Args: - block (Optional[int], optional): Block number at which to query the deposit amount. If ``None``, the current block is used. 
+ block (Optional[int]): Block number at which to query the deposit amount. If ``None``, the current block is + used. Returns: Optional[Balance]: The existential deposit amount, or ``None`` if the query fails. @@ -1726,9 +1723,9 @@ def get_existential_deposit(self, block: Optional[int] = None) -> Optional[Balan return Balance.from_rao(result.value) - ################# - #### Network #### - ################# + ########### + # Network # + ########### def register_subnetwork( self, wallet: "bittensor.wallet", @@ -1802,9 +1799,9 @@ def set_hyperparameter( prompt=prompt, ) - ################# - #### Serving #### - ################# + ########### + # Serving # + ########### def serve( self, wallet: "bittensor.wallet", @@ -1816,7 +1813,6 @@ def serve( placeholder2: int = 0, wait_for_inclusion: bool = False, wait_for_finalization=True, - prompt: bool = False, ) -> bool: """ Registers a neuron's serving endpoint on the Bittensor network. This function announces the @@ -1829,10 +1825,12 @@ def serve( port (int): The port number on which the neuron is serving. protocol (int): The protocol type used by the neuron (e.g., GRPC, HTTP). netuid (int): The unique identifier of the subnetwork. - Other arguments: Placeholder parameters for future extensions. - wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. - wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. - prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. + placeholder1 (int, optional): Placeholder parameter for future extensions. Default is ``0``. + placeholder2 (int, optional): Placeholder parameter for future extensions. Default is ``0``. + wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. Default is + ``False``. + wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. Default + is ``True``. 
Returns: bool: ``True`` if the serve registration is successful, False otherwise. @@ -1859,7 +1857,6 @@ def serve_axon( axon: "bittensor.axon", wait_for_inclusion: bool = False, wait_for_finalization: bool = True, - prompt: bool = False, ) -> bool: """ Registers an Axon serving endpoint on the Bittensor network for a specific neuron. This function @@ -1871,7 +1868,6 @@ def serve_axon( axon (bittensor.Axon): The Axon instance to be registered for serving. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. - prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. Returns: bool: ``True`` if the Axon serve registration is successful, False otherwise. @@ -1907,7 +1903,7 @@ def _do_serve_axon( enhancing the decentralized computation capabilities of Bittensor. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( call_module="SubtensorModule", @@ -1927,7 +1923,7 @@ def make_substrate_call_with_retry(): if response.is_success: return True, None else: - return False, response.error_message + return False, format_error_message(response.error_message) else: return True, None @@ -1969,7 +1965,7 @@ def _do_serve_prometheus( error (:func:`Optional[str]`): Error message if serve prometheus failed, ``None`` otherwise. 
""" - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( call_module="SubtensorModule", @@ -1989,7 +1985,7 @@ def make_substrate_call_with_retry(): if response.is_success: return True, None else: - return False, response.error_message + return False, format_error_message(response.error_message) else: return True, None @@ -1998,7 +1994,7 @@ def make_substrate_call_with_retry(): def _do_associate_ips( self, wallet: "bittensor.wallet", - ip_info_list: List[IPInfo], + ip_info_list: List["IPInfo"], netuid: int, wait_for_inclusion: bool = False, wait_for_finalization: bool = True, @@ -2018,7 +2014,7 @@ def _do_associate_ips( error (:func:`Optional[str]`): Error message if associate IPs failed, None otherwise. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( call_module="SubtensorModule", @@ -2047,14 +2043,14 @@ def make_substrate_call_with_retry(): return make_substrate_call_with_retry() - ################# - #### Staking #### - ################# + ########### + # Staking # + ########### def add_stake( self, wallet: "bittensor.wallet", hotkey_ss58: Optional[str] = None, - amount: Optional[Union[Balance, float]] = None, + amount: Optional[Union["Balance", float]] = None, wait_for_inclusion: bool = True, wait_for_finalization: bool = False, prompt: bool = False, @@ -2092,7 +2088,7 @@ def add_stake_multiple( self, wallet: "bittensor.wallet", hotkey_ss58s: List[str], - amounts: Optional[List[Union[Balance, float]]] = None, + amounts: Optional[List[Union["Balance", float]]] = None, wait_for_inclusion: bool = True, wait_for_finalization: bool = False, prompt: bool = False, @@ -2129,7 +2125,7 @@ def _do_stake( self, wallet: "bittensor.wallet", hotkey_ss58: str, - 
amount: Balance, + amount: "Balance", wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: @@ -2147,7 +2143,7 @@ def _do_stake( StakeError: If the extrinsic failed. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( call_module="SubtensorModule", @@ -2170,18 +2166,18 @@ def make_substrate_call_with_retry(): if response.is_success: return True else: - raise StakeError(response.error_message) + raise StakeError(format_error_message(response.error_message)) return make_substrate_call_with_retry() - ################### - #### Unstaking #### - ################### + ############# + # Unstaking # + ############# def unstake_multiple( self, wallet: "bittensor.wallet", hotkey_ss58s: List[str], - amounts: Optional[List[Union[Balance, float]]] = None, + amounts: Optional[List[Union["Balance", float]]] = None, wait_for_inclusion: bool = True, wait_for_finalization: bool = False, prompt: bool = False, @@ -2193,7 +2189,8 @@ def unstake_multiple( Args: wallet (bittensor.wallet): The wallet linked to the coldkey from which the stakes are being withdrawn. hotkey_ss58s (List[str]): A list of hotkey ``SS58`` addresses to unstake from. - amounts (List[Union[Balance, float]], optional): The amounts of TAO to unstake from each hotkey. If not provided, unstakes all available stakes. + amounts (List[Union[Balance, float]], optional): The amounts of TAO to unstake from each hotkey. If not + provided, unstakes all available stakes. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. 
@@ -2218,7 +2215,7 @@ def unstake( self, wallet: "bittensor.wallet", hotkey_ss58: Optional[str] = None, - amount: Optional[Union[Balance, float]] = None, + amount: Optional[Union["Balance", float]] = None, wait_for_inclusion: bool = True, wait_for_finalization: bool = False, prompt: bool = False, @@ -2255,7 +2252,7 @@ def _do_unstake( self, wallet: "bittensor.wallet", hotkey_ss58: str, - amount: Balance, + amount: "Balance", wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: @@ -2273,7 +2270,7 @@ def _do_unstake( StakeError: If the extrinsic failed. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( call_module="SubtensorModule", @@ -2296,13 +2293,13 @@ def make_substrate_call_with_retry(): if response.is_success: return True else: - raise StakeError(response.error_message) + raise StakeError(format_error_message(response.error_message)) return make_substrate_call_with_retry() - ################ - #### Senate #### - ################ + ########## + # Senate # + ########## def register_senate( self, @@ -2317,8 +2314,6 @@ def register_senate( Args: wallet (bittensor.wallet): The wallet associated with the neuron from which the stake is being removed. - hotkey_ss58 (Optional[str]): The ``SS58`` address of the hotkey account to unstake from. - amount (Union[Balance, float], optional): The amount of TAO to unstake. If not specified, unstakes all. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. @@ -2346,8 +2341,6 @@ def leave_senate( Args: wallet (bittensor.wallet): The wallet associated with the neuron from which the stake is being removed. 
- hotkey_ss58 (Optional[str]): The ``SS58`` address of the hotkey account to unstake from. - amount (Union[Balance, float], optional): The amount of TAO to unstake. If not specified, unstakes all. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. @@ -2378,8 +2371,9 @@ def vote_senate( Args: wallet (bittensor.wallet): The wallet associated with the neuron from which the stake is being removed. - hotkey_ss58 (Optional[str]): The ``SS58`` address of the hotkey account to unstake from. - amount (Union[Balance, float], optional): The amount of TAO to unstake. If not specified, unstakes all. + proposal_hash (str): The hash of the proposal being voted on. + proposal_idx (int): The index of the proposal being voted on. + vote (bool): The vote to be cast (True for yes, False for no). wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. @@ -2409,7 +2403,7 @@ def is_senate_member(self, hotkey_ss58: str, block: Optional[int] = None) -> boo Args: hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - block (Optional[int], optional): The blockchain block number at which to check senate membership. + block (Optional[int]): The blockchain block number at which to check senate membership. Returns: bool: ``True`` if the neuron is a senate member at the given block, False otherwise. @@ -2438,7 +2432,7 @@ def get_vote_data( Args: proposal_hash (str): The hash of the proposal for which voting data is requested. - block (Optional[int], optional): The blockchain block number to query the voting data. 
+ block (Optional[int]): The blockchain block number to query the voting data. Returns: Optional[ProposalVoteData]: An object containing the proposal's voting data, or ``None`` if not found. @@ -2451,7 +2445,7 @@ def get_vote_data( ) if not hasattr(vote_data, "serialize"): return None - return vote_data.serialize() if vote_data != None else None + return vote_data.serialize() if vote_data is not None else None get_proposal_vote_data = get_vote_data @@ -2461,7 +2455,7 @@ def get_senate_members(self, block: Optional[int] = None) -> Optional[List[str]] responsible for governance and decision-making within the network. Args: - block (Optional[int], optional): The blockchain block number at which to retrieve the senate members. + block (Optional[int]): The blockchain block number at which to retrieve the senate members. Returns: Optional[List[str]]: A list of ``SS58`` addresses of current senate members, or ``None`` if not available. @@ -2472,21 +2466,21 @@ def get_senate_members(self, block: Optional[int] = None) -> Optional[List[str]] senate_members = self.query_module("SenateMembers", "Members", block=block) if not hasattr(senate_members, "serialize"): return None - return senate_members.serialize() if senate_members != None else None + return senate_members.serialize() if senate_members is not None else None def get_proposal_call_data( self, proposal_hash: str, block: Optional[int] = None - ) -> Optional[GenericCall]: + ) -> Optional["GenericCall"]: """ Retrieves the call data of a specific proposal on the Bittensor blockchain. This data provides detailed information about the proposal, including its purpose and specifications. Args: proposal_hash (str): The hash of the proposal. - block (Optional[int], optional): The blockchain block number at which to query the proposal call data. + block (Optional[int]): The blockchain block number at which to query the proposal call data. 
Returns: - Optional[bittensor.ProposalCallData]: An object containing the proposal's call data, or ``None`` if not found. + Optional[GenericCall]: An object containing the proposal's call data, or ``None`` if not found. This function is crucial for analyzing the types of proposals made within the network and the specific changes or actions they intend to implement or address. @@ -2497,7 +2491,7 @@ def get_proposal_call_data( if not hasattr(proposal_data, "serialize"): return None - return proposal_data.serialize() if proposal_data != None else None + return proposal_data.serialize() if proposal_data is not None else None def get_proposal_hashes(self, block: Optional[int] = None) -> Optional[List[str]]: """ @@ -2505,7 +2499,7 @@ def get_proposal_hashes(self, block: Optional[int] = None) -> Optional[List[str] uniquely identifies a proposal made within the network. Args: - block (Optional[int], optional): The blockchain block number to query the proposal hashes. + block (Optional[int]): The blockchain block number to query the proposal hashes. Returns: Optional[List[str]]: A list of proposal hashes, or ``None`` if not available. @@ -2519,22 +2513,22 @@ def get_proposal_hashes(self, block: Optional[int] = None) -> Optional[List[str] if not hasattr(proposal_hashes, "serialize"): return None - return proposal_hashes.serialize() if proposal_hashes != None else None + return proposal_hashes.serialize() if proposal_hashes is not None else None def get_proposals( self, block: Optional[int] = None - ) -> Optional[Dict[str, Tuple[GenericCall, ProposalVoteData]]]: + ) -> Optional[Dict[str, Tuple["GenericCall", "ProposalVoteData"]]]: """ Retrieves all active proposals on the Bittensor blockchain, along with their call and voting data. This comprehensive view allows for a thorough understanding of the proposals and their reception by the senate. Args: - block (Optional[int], optional): The blockchain block number to query the proposals. 
+ block (Optional[int]): The blockchain block number to query the proposals. Returns: - Optional[Dict[str, Tuple[bittensor.ProposalCallData, bittensor.ProposalVoteData]]]: - A dictionary mapping proposal hashes to their corresponding call and vote data, or ``None`` if not available. + Optional[Dict[str, Tuple[bittensor.ProposalCallData, bittensor.ProposalVoteData]]]: A dictionary mapping + proposal hashes to their corresponding call and vote data, or ``None`` if not available. This function is integral for analyzing the governance activity on the Bittensor network, providing a holistic view of the proposals and their impact or potential changes within the network. @@ -2550,9 +2544,9 @@ def get_proposals( for proposal_hash in proposal_hashes } - ############## - #### Root #### - ############## + ######## + # Root # + ######## def root_register( self, @@ -2591,7 +2585,7 @@ def _do_root_register( wait_for_inclusion: bool = False, wait_for_finalization: bool = True, ) -> Tuple[bool, Optional[str]]: - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): # create extrinsic call call = self.substrate.compose_call( @@ -2615,7 +2609,7 @@ def make_substrate_call_with_retry(): # process if registration successful, try again if pow is still valid response.process_events() if not response.is_success: - return False, response.error_message + return False, format_error_message(response.error_message) # Successful registration else: return True, None @@ -2639,8 +2633,10 @@ def root_set_weights( Args: wallet (bittensor.wallet): The wallet associated with the neuron setting the weights. - netuids (Union[NDArray[np.int64], torch.LongTensor, list]): The list of neuron UIDs for which weights are being set. - weights (Union[NDArray[np.float32], torch.FloatTensor, list]): The corresponding weights to be set for each UID. 
+ netuids (Union[NDArray[np.int64], torch.LongTensor, list]): The list of neuron UIDs for which weights are + being set. + weights (Union[NDArray[np.float32], torch.FloatTensor, list]): The corresponding weights to be set for each + UID. version_key (int, optional): Version key for compatibility with the network. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. @@ -2730,38 +2726,38 @@ def make_substrate_call_with_retry(): return make_substrate_call_with_retry() - ######################## - #### Registry Calls #### - ######################## - - """ Queries subtensor registry named storage with params and block. """ + ################## + # Registry Calls # + ################## + # Queries subtensor registry named storage with params and block. def query_identity( self, key: str, block: Optional[int] = None, - ) -> Optional[object]: + ) -> dict: """ Queries the identity of a neuron on the Bittensor blockchain using the given key. This function retrieves detailed identity information about a specific neuron, which is a crucial aspect of the network's decentralized identity and governance system. NOTE: - See the `Bittensor CLI documentation `_ for supported identity parameters. + See the `Bittensor CLI documentation `_ for supported identity + parameters. Args: key (str): The key used to query the neuron's identity, typically the neuron's ``SS58`` address. - block (Optional[int], optional): The blockchain block number at which to perform the query. + block (Optional[int]): The blockchain block number at which to perform the query. Returns: - Optional[object]: An object containing the identity information of the neuron if found, ``None`` otherwise. + result (dict): An object containing the identity information of the neuron if found, ``None`` otherwise. 
The identity information can include various attributes such as the neuron's stake, rank, and other network-specific details, providing insights into the neuron's role and status within the Bittensor network. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) - def make_substrate_call_with_retry(): + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) + def make_substrate_call_with_retry() -> "ScaleType": return self.substrate.query( module="Registry", storage_function="IdentityOf", @@ -2772,6 +2768,7 @@ def make_substrate_call_with_retry(): ) identity_info = make_substrate_call_with_retry() + return bittensor.utils.wallet_utils.decode_hex_identity_dict( identity_info.value["info"] ) @@ -2780,7 +2777,7 @@ def update_identity( self, wallet: "bittensor.wallet", identified: Optional[str] = None, - params: dict = {}, + params: Optional[dict] = None, wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: @@ -2789,11 +2786,13 @@ def update_identity( identity attributes, reflecting changes in their roles, stakes, or other network-specific parameters. NOTE: - See the `Bittensor CLI documentation `_ for supported identity parameters. + See the `Bittensor CLI documentation `_ for supported identity + parameters. Args: wallet (bittensor.wallet): The wallet associated with the neuron whose identity is being updated. - identified (str, optional): The identified ``SS58`` address of the neuron. Defaults to the wallet's coldkey address. + identified (str, optional): The identified ``SS58`` address of the neuron. Defaults to the wallet's coldkey + address. params (dict, optional): A dictionary of parameters to update in the neuron's identity. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. 
@@ -2807,11 +2806,13 @@ def update_identity( if identified is None: identified = wallet.coldkey.ss58_address + params = {} if params is None else params + call_params = bittensor.utils.wallet_utils.create_identity_dict(**params) call_params["identified"] = identified - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) - def make_substrate_call_with_retry(): + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) + def make_substrate_call_with_retry() -> bool: call = self.substrate.compose_call( call_module="Registry", call_function="set_identity", @@ -2836,12 +2837,31 @@ def make_substrate_call_with_retry(): return make_substrate_call_with_retry() - """ Make some commitment on-chain about arbitary data """ - + # Make some commitment on-chain about arbitrary data. def commit(self, wallet, netuid: int, data: str): + """ + Commits arbitrary data to the Bittensor network by publishing metadata. + + Args: + wallet (bittensor.wallet): The wallet associated with the neuron committing the data. + netuid (int): The unique identifier of the subnetwork. + data (str): The data to be committed to the network. + """ publish_metadata(self, wallet, netuid, f"Raw{len(data)}", data.encode()) def get_commitment(self, netuid: int, uid: int, block: Optional[int] = None) -> str: + """ + Retrieves the on-chain commitment for a specific neuron in the Bittensor network. + + Args: + netuid (int): The unique identifier of the subnetwork. + uid (int): The unique identifier of the neuron. + block (Optional[int]): The block number to retrieve the commitment from. If None, the latest block + is used. Default is ``None``. + + Returns: + str: The commitment data as a string. 
+ """ metagraph = self.metagraph(netuid) hotkey = metagraph.hotkeys[uid] # type: ignore @@ -2851,36 +2871,35 @@ def get_commitment(self, netuid: int, uid: int, block: Optional[int] = None) -> return bytes.fromhex(hex_data).decode() - ######################## - #### Standard Calls #### - ######################## - - """ Queries subtensor named storage with params and block. """ + ################## + # Standard Calls # + ################## + # Queries subtensor named storage with params and block. def query_subtensor( self, name: str, block: Optional[int] = None, - params: Optional[List[object]] = [], - ) -> Optional[T]: + params: Optional[list] = None, + ) -> "ScaleType": """ Queries named storage from the Subtensor module on the Bittensor blockchain. This function is used to retrieve specific data or parameters from the blockchain, such as stake, rank, or other neuron-specific attributes. Args: name (str): The name of the storage function to query. - block (Optional[int], optional): The blockchain block number at which to perform the query. + block (Optional[int]): The blockchain block number at which to perform the query. params (Optional[List[object]], optional): A list of parameters to pass to the query function. Returns: - Optional[object]: An object containing the requested data if found, ``None`` otherwise. + query_response (ScaleType): An object containing the requested data. This query function is essential for accessing detailed information about the network and its neurons, providing valuable insights into the state and dynamics of the Bittensor ecosystem. 
""" - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) - def make_substrate_call_with_retry(): + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) + def make_substrate_call_with_retry() -> "ScaleType": return self.substrate.query( module="SubtensorModule", storage_function=name, @@ -2892,21 +2911,21 @@ def make_substrate_call_with_retry(): return make_substrate_call_with_retry() - """ Queries subtensor map storage with params and block. """ - + # Queries subtensor map storage with params and block. def query_map_subtensor( self, name: str, block: Optional[int] = None, - params: Optional[List[object]] = [], - ) -> QueryMapResult: + params: Optional[list] = None, + ) -> "QueryMapResult": """ Queries map storage from the Subtensor module on the Bittensor blockchain. This function is designed to - retrieve a map-like data structure, which can include various neuron-specific details or network-wide attributes. + retrieve a map-like data structure, which can include various neuron-specific details or network-wide + attributes. Args: name (str): The name of the map storage function to query. - block (Optional[int], optional): The blockchain block number at which to perform the query. + block (Optional[int]): The blockchain block number at which to perform the query. params (Optional[List[object]], optional): A list of parameters to pass to the query function. Returns: @@ -2916,7 +2935,7 @@ def query_map_subtensor( relationships within the Bittensor ecosystem, such as inter-neuronal connections and stake distributions. 
""" - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): return self.substrate.query_map( module="SubtensorModule", @@ -2931,7 +2950,7 @@ def make_substrate_call_with_retry(): def query_constant( self, module_name: str, constant_name: str, block: Optional[int] = None - ) -> Optional[object]: + ) -> Optional["ScaleType"]: """ Retrieves a constant from the specified module on the Bittensor blockchain. This function is used to access fixed parameters or values defined within the blockchain's modules, which are essential for @@ -2940,17 +2959,17 @@ def query_constant( Args: module_name (str): The name of the module containing the constant. constant_name (str): The name of the constant to retrieve. - block (Optional[int], optional): The blockchain block number at which to query the constant. + block (Optional[int]): The blockchain block number at which to query the constant. Returns: - Optional[object]: The value of the constant if found, ``None`` otherwise. + Optional[ScaleType]: The value of the constant if found, ``None`` otherwise. Constants queried through this function can include critical network parameters such as inflation rates, consensus rules, or validation thresholds, providing a deeper understanding of the Bittensor network's operational parameters. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): return self.substrate.get_constant( module_name=module_name, @@ -2962,15 +2981,14 @@ def make_substrate_call_with_retry(): return make_substrate_call_with_retry() - """ Queries any module storage with params and block. """ - + # Queries any module storage with params and block. 
def query_module( self, module: str, name: str, block: Optional[int] = None, - params: Optional[List[object]] = [], - ) -> Optional[object]: + params: Optional[list] = None, + ) -> "ScaleType": """ Queries any module storage on the Bittensor blockchain with the specified parameters and block number. This function is a generic query interface that allows for flexible and diverse data retrieval from @@ -2979,18 +2997,18 @@ def query_module( Args: module (str): The name of the module from which to query data. name (str): The name of the storage function within the module. - block (Optional[int], optional): The blockchain block number at which to perform the query. + block (Optional[int]): The blockchain block number at which to perform the query. params (Optional[List[object]], optional): A list of parameters to pass to the query function. Returns: - Optional[object]: An object containing the requested data if found, ``None`` otherwise. + Optional[ScaleType]: An object containing the requested data if found, ``None`` otherwise. This versatile query function is key to accessing a wide range of data and insights from different parts of the Bittensor blockchain, enhancing the understanding and analysis of the network's state and dynamics. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) - def make_substrate_call_with_retry(): + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) + def make_substrate_call_with_retry() -> "ScaleType": return self.substrate.query( module=module, storage_function=name, @@ -3002,34 +3020,34 @@ def make_substrate_call_with_retry(): return make_substrate_call_with_retry() - """ Queries any module map storage with params and block. """ - + # Queries any module map storage with params and block. 
def query_map( self, module: str, name: str, block: Optional[int] = None, - params: Optional[List[object]] = [], - ) -> Optional[object]: + params: Optional[list] = None, + ) -> QueryMapResult: """ Queries map storage from any module on the Bittensor blockchain. This function retrieves data structures - that represent key-value mappings, essential for accessing complex and structured data within the blockchain modules. + that represent key-value mappings, essential for accessing complex and structured data within the blockchain + modules. Args: module (str): The name of the module from which to query the map storage. name (str): The specific storage function within the module to query. - block (Optional[int], optional): The blockchain block number at which to perform the query. + block (Optional[int]): The blockchain block number at which to perform the query. params (Optional[List[object]], optional): Parameters to be passed to the query. Returns: - Optional[object]: A data structure representing the map storage if found, ``None`` otherwise. + result (QueryMapResult): A data structure representing the map storage if found, ``None`` otherwise. This function is particularly useful for retrieving detailed and structured data from various blockchain modules, offering insights into the network's state and the relationships between its different components. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) - def make_substrate_call_with_retry(): + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) + def make_substrate_call_with_retry() -> "QueryMapResult": return self.substrate.query_map( module=module, storage_function=name, @@ -3046,7 +3064,7 @@ def state_call( method: str, data: str, block: Optional[int] = None, - ) -> Optional[object]: + ) -> Dict[Any, Any]: """ Makes a state call to the Bittensor blockchain, allowing for direct queries of the blockchain's state. 
This function is typically used for advanced queries that require specific method calls and data inputs. @@ -3054,22 +3072,23 @@ def state_call( Args: method (str): The method name for the state call. data (str): The data to be passed to the method. - block (Optional[int], optional): The blockchain block number at which to perform the state call. + block (Optional[int]): The blockchain block number at which to perform the state call. Returns: - Optional[object]: The result of the state call if successful, ``None`` otherwise. + result (Dict[Any, Any]): The result of the rpc call. The state call function provides a more direct and flexible way of querying blockchain data, useful for specific use cases where standard queries are insufficient. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) - def make_substrate_call_with_retry(): + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) + def make_substrate_call_with_retry() -> Dict[Any, Any]: block_hash = None if block is None else self.substrate.get_block_hash(block) - params = [method, data] - if block_hash: - params = params + [block_hash] - return self.substrate.rpc_request(method="state_call", params=params) + + return self.substrate.rpc_request( + method="state_call", + params=[method, data, block_hash] if block_hash else [method, data], + ) return make_substrate_call_with_retry() @@ -3089,7 +3108,7 @@ def query_runtime_api( runtime_api (str): The name of the runtime API to query. method (str): The specific method within the runtime API to call. params (Optional[List[ParamWithTypes]], optional): The parameters to pass to the method call. - block (Optional[int], optional): The blockchain block number at which to perform the query. + block (Optional[int]): The blockchain block number at which to perform the query. Returns: Optional[bytes]: The Scale Bytes encoded result from the runtime API call, or ``None`` if the call fails. 
@@ -3099,9 +3118,7 @@ def query_runtime_api( """ call_definition = bittensor.__type_registry__["runtime_api"][runtime_api][ # type: ignore "methods" # type: ignore - ][ - method - ] # type: ignore + ][method] # type: ignore json_result = self.state_call( method=f"{runtime_api}_{method}", @@ -3132,12 +3149,10 @@ def query_runtime_api( def _encode_params( self, - call_definition: List[ParamWithTypes], + call_definition: List["ParamWithTypes"], params: Union[List[Any], Dict[str, Any]], ) -> str: - """ - Returns a hex encoded string of the params using their types. - """ + """Returns a hex encoded string of the params using their types.""" param_data = scalecodec.ScaleBytes(b"") for i, param in enumerate(call_definition["params"]): # type: ignore @@ -3152,21 +3167,45 @@ def _encode_params( return param_data.to_hex() - ##################################### - #### Hyper parameter calls. #### - ##################################### + ########################## + # Hyper parameter calls. # + ########################## + + def _get_hyperparameter( + self, param_name: str, netuid: int, block: Optional[int] = None + ) -> Optional[Any]: + """ + Retrieves a specified hyperparameter for a specific subnet. + + Args: + param_name (str): The name of the hyperparameter to retrieve. + netuid (int): The unique identifier of the subnet. + block (Optional[int]): The blockchain block number for the query. + + Returns: + Optional[Union[int, float]]: The value of the specified hyperparameter if the subnet exists, ``None`` + otherwise. + """ + if not self.subnet_exists(netuid, block): + return None + + result = self.query_subtensor(param_name, block, [netuid]) + if result is None or not hasattr(result, "value"): + return None + + return result.value def rho(self, netuid: int, block: Optional[int] = None) -> Optional[int]: """ - Retrieves the 'Rho' hyperparameter for a specified subnet within the Bittensor network. 
'Rho' represents the global inflation rate, which directly influences the network's - token emission rate and economic model. + Retrieves the 'Rho' hyperparameter for a specified subnet within the Bittensor network. 'Rho' represents the + global inflation rate, which directly influences the network's token emission rate and economic model. Note: This is currently fixed such that the Bittensor blockchain emmits 7200 Tao per day. Args: netuid (int): The unique identifier of the subnet. - block (Optional[int], optional): The blockchain block number at which to query the parameter. + block (Optional[int]): The blockchain block number at which to query the parameter. Returns: Optional[int]: The value of the 'Rho' hyperparameter if the subnet exists, ``None`` otherwise. @@ -3181,12 +3220,8 @@ def rho(self, netuid: int, block: Optional[int] = None) -> Optional[int]: 'Rho' is essential for understanding the network's economic dynamics, affecting the reward distribution and incentive structures across the network's neurons. """ - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("Rho", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + call = self._get_hyperparameter(param_name="Rho", netuid=netuid, block=block) + return None if call is None else int(call) def kappa(self, netuid: int, block: Optional[int] = None) -> Optional[float]: """ @@ -3196,7 +3231,7 @@ def kappa(self, netuid: int, block: Optional[int] = None) -> Optional[float]: Args: netuid (int): The unique identifier of the subnet. - block (Optional[int], optional): The blockchain block number for the query. + block (Optional[int]): The blockchain block number for the query. Returns: Optional[float]: The value of the 'Kappa' hyperparameter if the subnet exists, None otherwise. 
@@ -3211,13 +3246,8 @@ def kappa(self, netuid: int, block: Optional[int] = None) -> Optional[float]: Understanding 'Kappa' is crucial for analyzing stake dynamics and the consensus mechanism within the network, as it plays a significant role in neuron ranking and incentive allocation processes. """ - if not self.subnet_exists(netuid, block): - return None - - _result = self.query_subtensor("Kappa", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return U16_NORMALIZED_FLOAT(_result.value) + call = self._get_hyperparameter(param_name="Kappa", netuid=netuid, block=block) + return None if call is None else U16_NORMALIZED_FLOAT(int(call)) def difficulty(self, netuid: int, block: Optional[int] = None) -> Optional[int]: """ @@ -3227,29 +3257,30 @@ def difficulty(self, netuid: int, block: Optional[int] = None) -> Optional[int]: Args: netuid (int): The unique identifier of the subnet. - block (Optional[int], optional): The blockchain block number for the query. + block (Optional[int]): The blockchain block number for the query. Returns: Optional[int]: The value of the 'Difficulty' hyperparameter if the subnet exists, ``None`` otherwise. The 'Difficulty' parameter directly impacts the network's security and integrity by setting the - computational effort required for validating transactions and participating in the network's consensus mechanism. + computational effort required for validating transactions and participating in the network's consensus + mechanism. 
""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("Difficulty", block, [netuid]) - if not hasattr(_result, "value") or _result is None: + call = self._get_hyperparameter( + param_name="Difficulty", netuid=netuid, block=block + ) + if call is None: return None - return _result.value + return int(call) - def recycle(self, netuid: int, block: Optional[int] = None) -> Optional[Balance]: + def recycle(self, netuid: int, block: Optional[int] = None) -> Optional["Balance"]: """ Retrieves the 'Burn' hyperparameter for a specified subnet. The 'Burn' parameter represents the amount of Tao that is effectively recycled within the Bittensor network. Args: netuid (int): The unique identifier of the subnet. - block (Optional[int], optional): The blockchain block number for the query. + block (Optional[int]): The blockchain block number for the query. Returns: Optional[Balance]: The value of the 'Burn' hyperparameter if the subnet exists, None otherwise. @@ -3257,15 +3288,10 @@ def recycle(self, netuid: int, block: Optional[int] = None) -> Optional[Balance] Understanding the 'Burn' rate is essential for analyzing the network registration usage, particularly how it is correlated with user activity and the overall cost of participation in a given subnet. """ - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("Burn", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return Balance.from_rao(_result.value) - - """ Returns network ImmunityPeriod hyper parameter """ + call = self._get_hyperparameter(param_name="Burn", netuid=netuid, block=block) + return None if call is None else Balance.from_rao(int(call)) + # Returns network ImmunityPeriod hyper parameter. def immunity_period( self, netuid: int, block: Optional[int] = None ) -> Optional[int]: @@ -3275,7 +3301,7 @@ def immunity_period( Args: netuid (int): The unique identifier of the subnet. 
- block (Optional[int], optional): The blockchain block number for the query. + block (Optional[int]): The blockchain block number for the query. Returns: Optional[int]: The value of the 'ImmunityPeriod' hyperparameter if the subnet exists, ``None`` otherwise. @@ -3284,285 +3310,529 @@ def immunity_period( participants have a grace period to establish themselves and contribute to the network without facing immediate punitive actions. """ - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("ImmunityPeriod", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + call = self._get_hyperparameter( + param_name="ImmunityPeriod", netuid=netuid, block=block + ) + return None if call is None else int(call) def validator_batch_size( self, netuid: int, block: Optional[int] = None ) -> Optional[int]: - """Returns network ValidatorBatchSize hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("ValidatorBatchSize", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + """ + Returns network ValidatorBatchSize hyper parameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int]): The block number to retrieve the parameter from. If None, the latest block + is used. Default is ``None``. + + Returns: + Optional[int]: The value of the ValidatorBatchSize hyperparameter, or None if the subnetwork does not exist + or the parameter is not found. 
+ """ + call = self._get_hyperparameter( + param_name="ValidatorBatchSize", netuid=netuid, block=block + ) + return None if call is None else int(call) def validator_prune_len( self, netuid: int, block: Optional[int] = None ) -> Optional[int]: - """Returns network ValidatorPruneLen hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("ValidatorPruneLen", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + """ + Returns network ValidatorPruneLen hyper parameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int]): The block number to retrieve the parameter from. If None, the latest block + is used. Default is ``None``. + + Returns: + Optional[int]: The value of the ValidatorPruneLen hyperparameter, or None if the subnetwork does not exist + or the parameter is not found. + """ + call = self._get_hyperparameter( + param_name="ValidatorPruneLen", netuid=netuid, block=block + ) + return None if call is None else int(call) def validator_logits_divergence( self, netuid: int, block: Optional[int] = None ) -> Optional[float]: - """Returns network ValidatorLogitsDivergence hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("ValidatorLogitsDivergence", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return U16_NORMALIZED_FLOAT(_result.value) + """ + Returns network ValidatorLogitsDivergence hyper parameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int]): The block number to retrieve the parameter from. If None, the latest block + is used. Default is ``None``. + + Returns: + Optional[float]: The value of the ValidatorLogitsDivergence hyperparameter, or None if the subnetwork does + not exist or the parameter is not found. 
+ """ + call = self._get_hyperparameter( + param_name="ValidatorLogitsDivergence", netuid=netuid, block=block + ) + return None if call is None else U16_NORMALIZED_FLOAT(int(call)) def validator_sequence_length( self, netuid: int, block: Optional[int] = None ) -> Optional[int]: - """Returns network ValidatorSequenceLength hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("ValidatorSequenceLength", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + """ + Returns network ValidatorSequenceLength hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[int]: The value of the ValidatorSequenceLength hyperparameter, or ``None`` if the subnetwork does + not exist or the parameter is not found. + """ + call = self._get_hyperparameter( + param_name="ValidatorSequenceLength", netuid=netuid, block=block + ) + return None if call is None else int(call) def validator_epochs_per_reset( self, netuid: int, block: Optional[int] = None ) -> Optional[int]: - """Returns network ValidatorEpochsPerReset hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("ValidatorEpochsPerReset", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + """ + Returns network ValidatorEpochsPerReset hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. 
+ + Returns: + Optional[int]: The value of the ValidatorEpochsPerReset hyperparameter, or ``None`` if the subnetwork does + not exist or the parameter is not found. + """ + call = self._get_hyperparameter( + param_name="ValidatorEpochsPerReset", netuid=netuid, block=block + ) + return None if call is None else int(call) def validator_epoch_length( self, netuid: int, block: Optional[int] = None ) -> Optional[int]: - """Returns network ValidatorEpochLen hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("ValidatorEpochLen", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None + """ + Returns network ValidatorEpochLen hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. - return _result.value + Returns: + Optional[int]: The value of the ValidatorEpochLen hyperparameter, or ``None`` if the subnetwork does not + exist or the parameter is not found. + """ + call = self._get_hyperparameter( + param_name="ValidatorEpochLen", netuid=netuid, block=block + ) + return None if call is None else int(call) def validator_exclude_quantile( self, netuid: int, block: Optional[int] = None ) -> Optional[float]: - """Returns network ValidatorEpochLen hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("ValidatorExcludeQuantile", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return U16_NORMALIZED_FLOAT(_result.value) + """ + Returns network ValidatorExcludeQuantile hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest block is used. Default is ``None``. 
+
+        Returns:
+            Optional[float]: The value of the ValidatorExcludeQuantile hyperparameter, or ``None`` if the subnetwork does not exist or the parameter is not found.
+        """
+        call = self._get_hyperparameter(
+            param_name="ValidatorExcludeQuantile", netuid=netuid, block=block
+        )
+        return None if call is None else U16_NORMALIZED_FLOAT(int(call))
 
     def max_allowed_validators(
         self, netuid: int, block: Optional[int] = None
     ) -> Optional[int]:
-        """Returns network MaxAllowedValidators hyper parameter"""
-        if not self.subnet_exists(netuid, block):
-            return None
-        _result = self.query_subtensor("MaxAllowedValidators", block, [netuid])
-        if not hasattr(_result, "value") or _result is None:
-            return None
-        return _result.value
+        """
+        Returns network MaxAllowedValidators hyperparameter.
 
-    def min_allowed_weights(
+        Args:
+            netuid (int): The unique identifier of the subnetwork.
+            block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest
+                block is used. Default is ``None``.
+
+        Returns:
+            Optional[int]: The value of the MaxAllowedValidators hyperparameter, or ``None`` if the subnetwork
+                does not exist or the parameter is not found.
+        """
+        call = self._get_hyperparameter(
+            param_name="MaxAllowedValidators", netuid=netuid, block=block
+        )
+        return None if call is None else int(call)
+
+    def min_allowed_weights(
         self, netuid: int, block: Optional[int] = None
     ) -> Optional[int]:
-        """Returns network MinAllowedWeights hyper parameter"""
-        if not self.subnet_exists(netuid, block):
-            return None
-        _result = self.query_subtensor("MinAllowedWeights", block, [netuid])
-        if not hasattr(_result, "value") or _result is None:
-            return None
-        return _result.value
+        """
+        Returns network MinAllowedWeights hyperparameter.
+
+        Args:
+            netuid (int): The unique identifier of the subnetwork.
+            block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest
+                block is used. Default is ``None``.
+ + Returns: + Optional[int]: The value of the MinAllowedWeights hyperparameter, or ``None`` if the subnetwork does not + exist or the parameter is not found. + """ + call = self._get_hyperparameter( + param_name="MinAllowedWeights", block=block, netuid=netuid + ) + return None if call is None else int(call) def max_weight_limit( self, netuid: int, block: Optional[int] = None ) -> Optional[float]: - """Returns network MaxWeightsLimit hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("MaxWeightsLimit", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return U16_NORMALIZED_FLOAT(_result.value) + """ + Returns network MaxWeightsLimit hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[float]: The value of the MaxWeightsLimit hyperparameter, or ``None`` if the subnetwork does not + exist or the parameter is not found. + """ + call = self._get_hyperparameter( + param_name="MaxWeightsLimit", block=block, netuid=netuid + ) + return None if call is None else U16_NORMALIZED_FLOAT(int(call)) def adjustment_alpha( self, netuid: int, block: Optional[int] = None ) -> Optional[float]: - """Returns network AdjustmentAlpha hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("AdjustmentAlpha", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return U64_NORMALIZED_FLOAT(_result.value) + """ + Returns network AdjustmentAlpha hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. 
+ + Returns: + Optional[float]: The value of the AdjustmentAlpha hyperparameter, or ``None`` if the subnetwork does not + exist or the parameter is not found. + """ + call = self._get_hyperparameter( + param_name="AdjustmentAlpha", block=block, netuid=netuid + ) + return None if call is None else U64_NORMALIZED_FLOAT(int(call)) def bonds_moving_avg( self, netuid: int, block: Optional[int] = None ) -> Optional[float]: - """Returns network BondsMovingAverage hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("BondsMovingAverage", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return U64_NORMALIZED_FLOAT(_result.value) + """ + Returns network BondsMovingAverage hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[float]: The value of the BondsMovingAverage hyperparameter, or ``None`` if the subnetwork does not + exist or the parameter is not found. 
+        """
+        call = self._get_hyperparameter(
+            param_name="BondsMovingAverage", netuid=netuid, block=block
+        )
+        return None if call is None else U64_NORMALIZED_FLOAT(int(call))
 
     def scaling_law_power(
         self, netuid: int, block: Optional[int] = None
     ) -> Optional[float]:
         """Returns network ScalingLawPower hyper parameter"""
-        if not self.subnet_exists(netuid, block):
-            return None
-        _result = self.query_subtensor("ScalingLawPower", block, [netuid])
-        if not hasattr(_result, "value") or _result is None:
-            return None
-        return _result.value / 100.0
+        call = self._get_hyperparameter(
+            param_name="ScalingLawPower", netuid=netuid, block=block
+        )
+        return None if call is None else int(call) / 100.0
 
     def synergy_scaling_law_power(
         self, netuid: int, block: Optional[int] = None
     ) -> Optional[float]:
-        """Returns network SynergyScalingLawPower hyper parameter"""
-        if not self.subnet_exists(netuid, block):
-            return None
-        _result = self.query_subtensor("SynergyScalingLawPower", block, [netuid])
-        if not hasattr(_result, "value") or _result is None:
-            return None
-        return _result.value / 100.0
+        """
+        Returns network SynergyScalingLawPower hyperparameter.
+
+        Args:
+            netuid (int): The unique identifier of the subnetwork.
+            block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest
+                block is used. Default is ``None``.
+
+        Returns:
+            Optional[float]: The value of the SynergyScalingLawPower hyperparameter, or ``None`` if the subnetwork does not
+                exist or the parameter is not found.
+ """ + call = self._get_hyperparameter( + param_name="SynergyScalingLawPower", netuid=netuid, block=block + ) + return None if call is None else int(call) / 100.0 def subnetwork_n(self, netuid: int, block: Optional[int] = None) -> Optional[int]: - """Returns network SubnetworkN hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("SubnetworkN", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + """ + Returns network SubnetworkN hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[int]: The value of the SubnetworkN hyperparameter, or ``None`` if the subnetwork does not + exist or the parameter is not found. + """ + call = self._get_hyperparameter( + param_name="SubnetworkN", netuid=netuid, block=block + ) + return None if call is None else int(call) def max_n(self, netuid: int, block: Optional[int] = None) -> Optional[int]: - """Returns network MaxAllowedUids hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("MaxAllowedUids", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + """ + Returns network MaxAllowedUids hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[int]: The value of the MaxAllowedUids hyperparameter, or ``None`` if the subnetwork does not + exist or the parameter is not found. 
+ """ + call = self._get_hyperparameter( + param_name="MaxAllowedUids", netuid=netuid, block=block + ) + return None if call is None else int(call) def blocks_since_epoch( self, netuid: int, block: Optional[int] = None ) -> Optional[int]: - """Returns network BlocksSinceLastStep hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("BlocksSinceEpoch", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + """ + Returns network BlocksSinceEpoch hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[int]: The value of the BlocksSinceEpoch hyperparameter, or ``None`` if the subnetwork does not + exist or the parameter is not found. + """ + call = self._get_hyperparameter( + param_name="BlocksSinceEpoch", netuid=netuid, block=block + ) + return None if call is None else int(call) def blocks_since_last_update(self, netuid: int, uid: int) -> Optional[int]: - if not self.subnet_exists(netuid): - return None - _result = self.query_subtensor("LastUpdate", None, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None + """ + Returns the number of blocks since the last update for a specific UID in the subnetwork. - return self.get_current_block() - _result.value[uid] + Args: + netuid (int): The unique identifier of the subnetwork. + uid (int): The unique identifier of the neuron. + + Returns: + Optional[int]: The number of blocks since the last update, or ``None`` if the subnetwork or UID does not + exist. 
+ """ + call = self._get_hyperparameter(param_name="LastUpdate", netuid=netuid) + return None if call is None else self.get_current_block() - int(call[uid]) def weights_rate_limit(self, netuid: int) -> Optional[int]: - if not self.subnet_exists(netuid): - return None - _result = self.query_subtensor("WeightsSetRateLimit", None, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + """ + Returns network WeightsSetRateLimit hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + + Returns: + Optional[int]: The value of the WeightsSetRateLimit hyperparameter, or ``None`` if the subnetwork does not + exist or the parameter is not found. + """ + call = self._get_hyperparameter(param_name="WeightsSetRateLimit", netuid=netuid) + return None if call is None else int(call) def tempo(self, netuid: int, block: Optional[int] = None) -> Optional[int]: - """Returns network Tempo hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("Tempo", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + """ + Returns network Tempo hyperparameter. - ########################## - #### Account functions ### - ########################## + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[int]: The value of the Tempo hyperparameter, or ``None`` if the subnetwork does not + exist or the parameter is not found. 
+ """ + call = self._get_hyperparameter(param_name="Tempo", netuid=netuid, block=block) + return None if call is None else int(call) + + ##################### + # Account functions # + ##################### def get_total_stake_for_hotkey( self, ss58_address: str, block: Optional[int] = None ) -> Optional["Balance"]: - """Returns the total stake held on a hotkey including delegative""" + """ + Returns the total stake held on a hotkey including delegative. + + Args: + ss58_address (str): The SS58 address of the hotkey. + block (Optional[int], optional): The block number to retrieve the stake from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[Balance]: The total stake held on the hotkey, or ``None`` if the hotkey does not + exist or the stake is not found. + """ _result = self.query_subtensor("TotalHotkeyStake", block, [ss58_address]) - if not hasattr(_result, "value") or _result is None: - return None - return Balance.from_rao(_result.value) + return ( + None + if getattr(_result, "value", None) is None + else Balance.from_rao(_result.value) + ) def get_total_stake_for_coldkey( self, ss58_address: str, block: Optional[int] = None ) -> Optional["Balance"]: - """Returns the total stake held on a coldkey across all hotkeys including delegates""" + """ + Returns the total stake held on a coldkey. + + Args: + ss58_address (str): The SS58 address of the coldkey. + block (Optional[int], optional): The block number to retrieve the stake from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[Balance]: The total stake held on the coldkey, or ``None`` if the coldkey does not + exist or the stake is not found. 
+ """ _result = self.query_subtensor("TotalColdkeyStake", block, [ss58_address]) - if not hasattr(_result, "value") or _result is None: - return None - return Balance.from_rao(_result.value) + return ( + None + if getattr(_result, "value", None) is None + else Balance.from_rao(_result.value) + ) def get_stake_for_coldkey_and_hotkey( self, hotkey_ss58: str, coldkey_ss58: str, block: Optional[int] = None ) -> Optional["Balance"]: - """Returns the stake under a coldkey - hotkey pairing""" + """ + Returns the stake under a coldkey - hotkey pairing. + + Args: + hotkey_ss58 (str): The SS58 address of the hotkey. + coldkey_ss58 (str): The SS58 address of the coldkey. + block (Optional[int], optional): The block number to retrieve the stake from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[Balance]: The stake under the coldkey - hotkey pairing, or ``None`` if the pairing does not + exist or the stake is not found. + """ _result = self.query_subtensor("Stake", block, [hotkey_ss58, coldkey_ss58]) - if not hasattr(_result, "value") or _result is None: - return None - return Balance.from_rao(_result.value) + return ( + None + if getattr(_result, "value", None) is None + else Balance.from_rao(_result.value) + ) def get_stake( self, hotkey_ss58: str, block: Optional[int] = None ) -> List[Tuple[str, "Balance"]]: - """Returns a list of stake tuples (coldkey, balance) for each delegating coldkey including the owner""" + """ + Returns a list of stake tuples (coldkey, balance) for each delegating coldkey including the owner. + + Args: + hotkey_ss58 (str): The SS58 address of the hotkey. + block (Optional[int], optional): The block number to retrieve the stakes from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + List[Tuple[str, Balance]]: A list of tuples, each containing a coldkey SS58 address and the corresponding + balance staked by that coldkey. 
+ """ return [ (r[0].value, Balance.from_rao(r[1].value)) for r in self.query_map_subtensor("Stake", block, [hotkey_ss58]) ] def does_hotkey_exist(self, hotkey_ss58: str, block: Optional[int] = None) -> bool: - """Returns true if the hotkey is known by the chain and there are accounts.""" - _result = self.query_subtensor("Owner", block, [hotkey_ss58]) - if not hasattr(_result, "value") or _result is None: - return False + """ + Returns true if the hotkey is known by the chain and there are accounts. - return _result.value != "5C4hrfjw9DjXZTzV3MwzrrAr9P1MJhSrvWGWqi1eSuyUpnhM" + Args: + hotkey_ss58 (str): The SS58 address of the hotkey. + block (Optional[int], optional): The block number to check the hotkey against. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + bool: ``True`` if the hotkey is known by the chain and there are accounts, ``False`` otherwise. + """ + _result = self.query_subtensor("Owner", block, [hotkey_ss58]) + return ( + False + if getattr(_result, "value", None) is None + else _result.value != "5C4hrfjw9DjXZTzV3MwzrrAr9P1MJhSrvWGWqi1eSuyUpnhM" + ) def get_hotkey_owner( self, hotkey_ss58: str, block: Optional[int] = None ) -> Optional[str]: - """Returns the coldkey owner of the passed hotkey""" + """ + Returns the coldkey owner of the passed hotkey. + + Args: + hotkey_ss58 (str): The SS58 address of the hotkey. + block (Optional[int], optional): The block number to check the hotkey owner against. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[str]: The SS58 address of the coldkey owner, or ``None`` if the hotkey does not exist or the owner + is not found. 
+ """ _result = self.query_subtensor("Owner", block, [hotkey_ss58]) - if not hasattr(_result, "value") or _result is None: - return None - if self.does_hotkey_exist(hotkey_ss58, block): - return _result.value + return ( + None + if getattr(_result, "value", None) is None + or not self.does_hotkey_exist(hotkey_ss58, block) + else _result.value + ) + # TODO: check if someone still use this method. bittensor not. def get_axon_info( self, netuid: int, hotkey_ss58: str, block: Optional[int] = None ) -> Optional[AxonInfo]: - """Returns the axon information for this hotkey account""" + """ + Returns the axon information for this hotkey account. + + Args: + netuid (int): The unique identifier of the subnetwork. + hotkey_ss58 (str): The SS58 address of the hotkey. + block (Optional[int], optional): The block number to retrieve the axon information from. If ``None``, the + latest block is used. Default is ``None``. + + Returns: + Optional[AxonInfo]: An AxonInfo object containing the axon information, or ``None`` if the axon information + is not found. + """ result = self.query_subtensor("Axons", block, [netuid, hotkey_ss58]) if result is not None and hasattr(result, "value"): return AxonInfo( @@ -3576,15 +3846,27 @@ def get_axon_info( hotkey=hotkey_ss58, coldkey="", ) - return None + # It is used in subtensor in neuron_info, and serving def get_prometheus_info( self, netuid: int, hotkey_ss58: str, block: Optional[int] = None ) -> Optional[PrometheusInfo]: - """Returns the prometheus information for this hotkey account""" + """ + Returns the prometheus information for this hotkey account. + + Args: + netuid (int): The unique identifier of the subnetwork. + hotkey_ss58 (str): The SS58 address of the hotkey. + block (Optional[int], optional): The block number to retrieve the prometheus information from. If ``None``, + the latest block is used. Default is ``None``. 
+ + Returns: + Optional[PrometheusInfo]: A PrometheusInfo object containing the prometheus information, or ``None`` if the + prometheus information is not found. + """ result = self.query_subtensor("Prometheus", block, [netuid, hotkey_ss58]) - if result is not None: + if result is not None and hasattr(result, "value"): return PrometheusInfo( ip=networking.int_to_ip(result.value["ip"]), ip_type=result.value["ip_type"], @@ -3592,8 +3874,7 @@ def get_prometheus_info( version=result.value["version"], block=result.value["block"], ) - else: - return None + return None ##################### # Global Parameters # @@ -3623,9 +3904,11 @@ def total_issuance(self, block: Optional[int] = None) -> Optional[Balance]: of the currency and providing insights into the network's economic health and inflationary trends. """ _result = self.query_subtensor("TotalIssuance", block) - if not hasattr(_result, "value") or _result is None: - return None - return Balance.from_rao(_result.value) + return ( + None + if getattr(_result, "value", None) is None + else Balance.from_rao(_result.value) + ) def total_stake(self, block: Optional[int] = None) -> Optional[Balance]: """ @@ -3644,16 +3927,20 @@ def total_stake(self, block: Optional[int] = None) -> Optional[Balance]: consensus and incentive mechanisms. """ _result = self.query_subtensor("TotalStake", block) - if not hasattr(_result, "value") or _result is None: - return None - return Balance.from_rao(_result.value) + return ( + None + if getattr(_result, "value", None) is None + else Balance.from_rao(_result.value) + ) def serving_rate_limit( self, netuid: int, block: Optional[int] = None ) -> Optional[int]: """ Retrieves the serving rate limit for a specific subnet within the Bittensor network. - This rate limit determines how often you can change your node's IP address on the blockchain. Expressed in number of blocks. Applies to both subnet validator and subnet miner nodes. Used when you move your node to a new machine. 
+ This rate limit determines how often you can change your node's IP address on the blockchain. Expressed in + number of blocks. Applies to both subnet validator and subnet miner nodes. Used when you move your node to a new + machine. Args: netuid (int): The unique identifier of the subnet. @@ -3666,12 +3953,10 @@ def serving_rate_limit( overuse of resources by individual neurons. It helps ensure a balanced distribution of service requests across the network. """ - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("ServingRateLimit", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + call = self._get_hyperparameter( + param_name="ServingRateLimit", netuid=netuid, block=block + ) + return None if call is None else int(call) def tx_rate_limit(self, block: Optional[int] = None) -> Optional[int]: """ @@ -3689,13 +3974,11 @@ def tx_rate_limit(self, block: Optional[int] = None) -> Optional[int]: maintaining efficient and timely transaction processing. """ _result = self.query_subtensor("TxRateLimit", block) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + return getattr(_result, "value", None) - ##################################### - #### Network Parameters #### - ##################################### + ###################### + # Network Parameters # + ###################### def subnet_exists(self, netuid: int, block: Optional[int] = None) -> bool: """ @@ -3712,9 +3995,7 @@ def subnet_exists(self, netuid: int, block: Optional[int] = None) -> bool: enabling a deeper understanding of the network's structure and composition. 
""" _result = self.query_subtensor("NetworksAdded", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return False - return _result.value + return getattr(_result, "value", False) def get_all_subnet_netuids(self, block: Optional[int] = None) -> List[int]: """ @@ -3729,14 +4010,12 @@ def get_all_subnet_netuids(self, block: Optional[int] = None) -> List[int]: This function provides a comprehensive view of the subnets within the Bittensor network, offering insights into its diversity and scale. """ - subnet_netuids = [] result = self.query_map_subtensor("NetworksAdded", block) - if result.records: - for netuid, exists in result: - if exists: - subnet_netuids.append(netuid.value) - - return subnet_netuids + return ( + [] + if result is None or not hasattr(result, "records") + else [netuid.value for netuid, exists in result if exists] + ) def get_total_subnets(self, block: Optional[int] = None) -> Optional[int]: """ @@ -3752,25 +4031,29 @@ def get_total_subnets(self, block: Optional[int] = None) -> Optional[int]: the extent of its decentralized infrastructure. """ _result = self.query_subtensor("TotalNetworks", block) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + return getattr(_result, "value", None) def get_subnet_modality( self, netuid: int, block: Optional[int] = None ) -> Optional[int]: + """ + Returns the NetworkModality hyperparameter for a specific subnetwork. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest block is used. Default is ``None``. + + Returns: + Optional[int]: The value of the NetworkModality hyperparameter, or ``None`` if the subnetwork does not exist or the parameter is not found. 
+ """ _result = self.query_subtensor("NetworkModality", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + return getattr(_result, "value", None) def get_subnet_connection_requirement( self, netuid_0: int, netuid_1: int, block: Optional[int] = None ) -> Optional[int]: _result = self.query_subtensor("NetworkConnect", block, [netuid_0, netuid_1]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + return getattr(_result, "value", None) def get_emission_value_by_subnet( self, netuid: int, block: Optional[int] = None @@ -3790,9 +4073,11 @@ def get_emission_value_by_subnet( reward mechanisms within the subnet. """ _result = self.query_subtensor("EmissionValues", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return Balance.from_rao(_result.value) + return ( + None + if getattr(_result, "value", None) is None + else Balance.from_rao(_result.value) + ) def get_subnet_connection_requirements( self, netuid: int, block: Optional[int] = None @@ -3812,10 +4097,11 @@ def get_subnet_connection_requirements( with specific subnets, ensuring compliance with their connection standards. """ result = self.query_map_subtensor("NetworkConnect", block, [netuid]) - if result.records: - return {str(tuple[0].value): tuple[1].value for tuple in result.records} - else: - return {} + return ( + {str(netuid.value): exists.value for netuid, exists in result.records} + if result and hasattr(result, "records") + else {} + ) def get_subnets(self, block: Optional[int] = None) -> List[int]: """ @@ -3831,14 +4117,12 @@ def get_subnets(self, block: Optional[int] = None) -> List[int]: This function is valuable for understanding the network's structure and the diversity of subnets available for neuron participation and collaboration. 
""" - subnets = [] result = self.query_map_subtensor("NetworksAdded", block) - if result.records: - for network in result.records: - subnets.append(network[0].value) - return subnets - else: - return [] + return ( + [network[0].value for network in result.records] + if result and hasattr(result, "records") + else [] + ) def get_all_subnets_info(self, block: Optional[int] = None) -> List[SubnetInfo]: """ @@ -3855,21 +4139,18 @@ def get_all_subnets_info(self, block: Optional[int] = None) -> List[SubnetInfo]: the roles of different subnets, and their unique features. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): block_hash = None if block is None else self.substrate.get_block_hash(block) - params = [] - if block_hash: - params = params + [block_hash] + return self.substrate.rpc_request( method="subnetInfo_getSubnetsInfo", # custom rpc method - params=params, + params=[block_hash] if block_hash else [], ) json_body = make_substrate_call_with_retry() - result = json_body["result"] - if result in (None, []): + if not (result := json_body.get("result", None)): return [] return SubnetInfo.list_from_vec_u8(result) @@ -3892,21 +4173,18 @@ def get_subnet_info( subnet, including its governance, performance, and role within the broader network. 
""" - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): block_hash = None if block is None else self.substrate.get_block_hash(block) - params = [netuid] - if block_hash: - params = params + [block_hash] + return self.substrate.rpc_request( method="subnetInfo_getSubnetInfo", # custom rpc method - params=params, + params=[netuid, block_hash] if block_hash else [netuid], ) json_body = make_substrate_call_with_retry() - result = json_body["result"] - if result in (None, []): + if not (result := json_body.get("result", None)): return None return SubnetInfo.from_vec_u8(result) @@ -3931,17 +4209,17 @@ def get_subnet_hyperparameters( hex_bytes_result = self.query_runtime_api( runtime_api="SubnetInfoRuntimeApi", method="get_subnet_hyperparams", - params=[netuid], # type: ignore + params=[netuid], block=block, ) if hex_bytes_result is None: return [] - if hex_bytes_result.startswith("0x"): # type: ignore - bytes_result = bytes.fromhex(hex_bytes_result[2:]) # type: ignore + if hex_bytes_result.startswith("0x"): + bytes_result = bytes.fromhex(hex_bytes_result[2:]) else: - bytes_result = bytes.fromhex(hex_bytes_result) # type: ignore + bytes_result = bytes.fromhex(hex_bytes_result) return SubnetHyperparameters.from_vec_u8(bytes_result) # type: ignore @@ -3963,13 +4241,11 @@ def get_subnet_owner( which can be important for decision-making and collaboration within the network. 
""" _result = self.query_subtensor("SubnetOwner", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + return getattr(_result, "value", None) - #################### - #### Nomination #### - #################### + ############## + # Nomination # + ############## def is_hotkey_delegate(self, hotkey_ss58: str, block: Optional[int] = None) -> bool: """ Determines whether a given hotkey (public key) is a delegate on the Bittensor network. This function @@ -4007,9 +4283,11 @@ def get_delegate_take( the distribution of rewards among neurons and their nominators. """ _result = self.query_subtensor("Delegates", block, [hotkey_ss58]) - if not hasattr(_result, "value") or _result is None: - return None - return U16_NORMALIZED_FLOAT(_result.value) + return ( + None + if getattr(_result, "value", None) is None + else U16_NORMALIZED_FLOAT(_result.value) + ) def get_nominators_for_hotkey( self, hotkey_ss58: str, block: Optional[int] = None @@ -4029,10 +4307,11 @@ def get_nominators_for_hotkey( indicating its trust and collaboration relationships. """ result = self.query_map_subtensor("Stake", block, [hotkey_ss58]) - if result.records: - return [(record[0].value, record[1].value) for record in result.records] - else: - return 0 + return ( + [(record[0].value, record[1].value) for record in result.records] + if result and hasattr(result, "records") + else 0 + ) def get_delegate_by_hotkey( self, hotkey_ss58: str, block: Optional[int] = None @@ -4052,31 +4331,66 @@ def get_delegate_by_hotkey( the Bittensor network's consensus and governance structures. 
""" - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) - def make_substrate_call_with_retry(encoded_hotkey: List[int]): - block_hash = None if block == None else self.substrate.get_block_hash(block) - params = [encoded_hotkey] - if block_hash: - params = params + [block_hash] + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) + def make_substrate_call_with_retry(encoded_hotkey_: List[int]): + block_hash = None if block is None else self.substrate.get_block_hash(block) + return self.substrate.rpc_request( method="delegateInfo_getDelegate", # custom rpc method - params=params, + params=( + [encoded_hotkey_, block_hash] if block_hash else [encoded_hotkey_] + ), ) encoded_hotkey = ss58_to_vec_u8(hotkey_ss58) json_body = make_substrate_call_with_retry(encoded_hotkey) - result = json_body["result"] - if result in (None, []): + if not (result := json_body.get("result", None)): return None return DelegateInfo.from_vec_u8(result) + def get_delegates_lite(self, block: Optional[int] = None) -> List[DelegateInfoLite]: + """ + Retrieves a lighter list of all delegate neurons within the Bittensor network. This function provides an + overview of the neurons that are actively involved in the network's delegation system. + + Analyzing the delegate population offers insights into the network's governance dynamics and the distribution + of trust and responsibility among participating neurons. + + This is a lighter version of :func:`get_delegates`. + + Args: + block (Optional[int], optional): The blockchain block number for the query. + + Returns: + List[DelegateInfoLite]: A list of ``DelegateInfoLite`` objects detailing each delegate's characteristics. 
+ + """ + + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) + def make_substrate_call_with_retry(): + block_hash = None if block is None else self.substrate.get_block_hash(block) + + return self.substrate.rpc_request( + method="delegateInfo_getDelegatesLite", # custom rpc method + params=[block_hash] if block_hash else [], + ) + + json_body = make_substrate_call_with_retry() + + if not (result := json_body.get("result", None)): + return [] + + return [DelegateInfoLite(**d) for d in result] + def get_delegates(self, block: Optional[int] = None) -> List[DelegateInfo]: """ - Retrieves a list of all delegate neurons within the Bittensor network. This function provides an overview of the neurons that are actively involved in the network's delegation system. + Retrieves a list of all delegate neurons within the Bittensor network. This function provides an overview of the + neurons that are actively involved in the network's delegation system. - Analyzing the delegate population offers insights into the network's governance dynamics and the distribution of trust and responsibility among participating neurons. + Analyzing the delegate population offers insights into the network's governance dynamics and the distribution of + trust and responsibility among participating neurons. Args: block (Optional[int], optional): The blockchain block number for the query. 
@@ -4089,18 +4403,15 @@ def get_delegates(self, block: Optional[int] = None) -> List[DelegateInfo]: @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): block_hash = None if block is None else self.substrate.get_block_hash(block) - params = [] - if block_hash: - params.extend([block_hash]) + return self.substrate.rpc_request( method="delegateInfo_getDelegates", # custom rpc method - params=params, + params=[block_hash] if block_hash else [], ) json_body = make_substrate_call_with_retry() - result = json_body["result"] - if result in (None, []): + if not (result := json_body.get("result", None)): return [] return DelegateInfo.list_from_vec_u8(result) @@ -4117,35 +4428,35 @@ def get_delegated( block (Optional[int], optional): The blockchain block number for the query. Returns: - List[Tuple[DelegateInfo, Balance]]: A list of tuples, each containing a delegate's information and staked amount. + List[Tuple[DelegateInfo, Balance]]: A list of tuples, each containing a delegate's information and staked + amount. This function is important for account holders to understand their stake allocations and their involvement in the network's delegation and consensus mechanisms. 
""" - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) - def make_substrate_call_with_retry(encoded_coldkey: List[int]): - block_hash = None if block == None else self.substrate.get_block_hash(block) - params = [encoded_coldkey] - if block_hash: - params = params + [block_hash] + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) + def make_substrate_call_with_retry(encoded_coldkey_: List[int]): + block_hash = None if block is None else self.substrate.get_block_hash(block) + return self.substrate.rpc_request( - method="delegateInfo_getDelegated", # custom rpc method - params=params, + method="delegateInfo_getDelegated", + params=( + [block_hash, encoded_coldkey_] if block_hash else [encoded_coldkey_] + ), ) encoded_coldkey = ss58_to_vec_u8(coldkey_ss58) json_body = make_substrate_call_with_retry(encoded_coldkey) - result = json_body["result"] - if result in (None, []): + if not (result := json_body.get("result", None)): return [] return DelegateInfo.delegated_list_from_vec_u8(result) - ########################### - #### Stake Information #### - ########################### + ##################### + # Stake Information # + ##################### def get_stake_info_for_coldkey( self, coldkey_ss58: str, block: Optional[int] = None @@ -4224,8 +4535,20 @@ def get_stake_info_for_coldkeys( def get_minimum_required_stake( self, - ): - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + ) -> Balance: + """ + Returns the minimum required stake for nominators in the Subtensor network. + + This method retries the substrate call up to three times with exponential backoff in case of failures. + + Returns: + Balance: The minimum required stake as a Balance object. + + Raises: + Exception: If the substrate call fails after the maximum number of retries. 
+ """ + + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): return self.substrate.query( module="SubtensorModule", storage_function="NominatorMinRequiredStake" @@ -4234,9 +4557,9 @@ def make_substrate_call_with_retry(): result = make_substrate_call_with_retry() return Balance.from_rao(result.decode()) - ######################################## - #### Neuron information per subnet #### - ######################################## + ################################# + # Neuron information per subnet # + ################################# def is_hotkey_registered_any( self, hotkey_ss58: str, block: Optional[int] = None @@ -4246,7 +4569,7 @@ def is_hotkey_registered_any( Args: hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - block (Optional[int], optional): The blockchain block number at which to perform the check. + block (Optional[int]): The blockchain block number at which to perform the check. Returns: bool: ``True`` if the hotkey is registered on any subnet, False otherwise. @@ -4264,7 +4587,7 @@ def is_hotkey_registered_on_subnet( Args: hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. netuid (int): The unique identifier of the subnet. - block (Optional[int], optional): The blockchain block number at which to perform the check. + block (Optional[int]): The blockchain block number at which to perform the check. Returns: bool: ``True`` if the hotkey is registered on the specified subnet, False otherwise. @@ -4272,7 +4595,7 @@ def is_hotkey_registered_on_subnet( This function helps in assessing the participation of a neuron in a particular subnet, indicating its specific area of operation or influence within the network. 
""" - return self.get_uid_for_hotkey_on_subnet(hotkey_ss58, netuid, block) != None + return self.get_uid_for_hotkey_on_subnet(hotkey_ss58, netuid, block) is not None def is_hotkey_registered( self, @@ -4288,11 +4611,13 @@ def is_hotkey_registered( Args: hotkey_ss58 (str): The SS58 address of the neuron's hotkey. - netuid (Optional[int], optional): The unique identifier of the subnet to check the registration. If ``None``, the registration is checked across all subnets. - block (Optional[int], optional): The blockchain block number at which to perform the query. + netuid (Optional[int]): The unique identifier of the subnet to check the registration. If ``None``, the + registration is checked across all subnets. + block (Optional[int]): The blockchain block number at which to perform the query. Returns: - bool: ``True`` if the hotkey is registered in the specified context (either any subnet or a specific subnet), ``False`` otherwise. + bool: ``True`` if the hotkey is registered in the specified context (either any subnet or a specific + subnet), ``False`` otherwise. This function is important for verifying the active status of neurons in the Bittensor network. It aids in understanding whether a neuron is eligible to participate in network processes such as consensus, @@ -4312,7 +4637,7 @@ def get_uid_for_hotkey_on_subnet( Args: hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. netuid (int): The unique identifier of the subnet. - block (Optional[int], optional): The blockchain block number for the query. + block (Optional[int]): The blockchain block number for the query. Returns: Optional[int]: The UID of the neuron if it is registered on the subnet, ``None`` otherwise. @@ -4321,9 +4646,7 @@ def get_uid_for_hotkey_on_subnet( operational and governance activities on a particular subnet. 
""" _result = self.query_subtensor("Uids", block, [netuid, hotkey_ss58]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + return getattr(_result, "value", None) def get_all_uids_for_hotkey( self, hotkey_ss58: str, block: Optional[int] = None @@ -4335,7 +4658,7 @@ def get_all_uids_for_hotkey( Args: hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - block (Optional[int], optional): The blockchain block number at which to perform the query. + block (Optional[int]): The blockchain block number at which to perform the query. Returns: List[int]: A list of UIDs associated with the given hotkey across various subnets. @@ -4358,13 +4681,17 @@ def get_netuids_for_hotkey( Args: hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - block (Optional[int], optional): The blockchain block number at which to perform the query. + block (Optional[int]): The blockchain block number at which to perform the query. Returns: List[int]: A list of netuids where the neuron is a member. """ result = self.query_map_subtensor("IsNetworkMember", block, [hotkey_ss58]) - return [record[0].value for record in result.records if record[1]] + return ( + [record[0].value for record in result.records if record[1]] + if result and hasattr(result, "records") + else [] + ) def get_neuron_for_pubkey_and_subnet( self, hotkey_ss58: str, netuid: int, block: Optional[int] = None @@ -4377,7 +4704,7 @@ def get_neuron_for_pubkey_and_subnet( Args: hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. netuid (int): The unique identifier of the subnet. - block (Optional[int], optional): The blockchain block number at which to perform the query. + block (Optional[int]): The blockchain block number at which to perform the query. Returns: Optional[NeuronInfo]: Detailed information about the neuron if found, ``None`` otherwise. 
@@ -4393,7 +4720,7 @@ def get_neuron_for_pubkey_and_subnet( def get_all_neurons_for_pubkey( self, hotkey_ss58: str, block: Optional[int] = None - ) -> Optional[List[NeuronInfo]]: + ) -> List[NeuronInfo]: """ Retrieves information about all neuron instances associated with a given public key (hotkey ``SS58`` address) across different subnets of the Bittensor network. This function aggregates neuron data @@ -4401,7 +4728,7 @@ def get_all_neurons_for_pubkey( Args: hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - block (Optional[int], optional): The blockchain block number for the query. + block (Optional[int]): The blockchain block number for the query. Returns: List[NeuronInfo]: A list of NeuronInfo objects detailing the neuron's presence across various subnets. @@ -4411,7 +4738,7 @@ def get_all_neurons_for_pubkey( """ netuids = self.get_netuids_for_hotkey(hotkey_ss58, block) uids = [self.get_uid_for_hotkey_on_subnet(hotkey_ss58, net) for net in netuids] - return [self.neuron_for_uid(uid, net) for uid, net in list(zip(uids, netuids))] # type: ignore + return [self.neuron_for_uid(uid, net) for uid, net in list(zip(uids, netuids))] def neuron_has_validator_permit( self, uid: int, netuid: int, block: Optional[int] = None @@ -4424,7 +4751,7 @@ def neuron_has_validator_permit( Args: uid (int): The unique identifier of the neuron. netuid (int): The unique identifier of the subnet. - block (Optional[int], optional): The blockchain block number for the query. + block (Optional[int]): The blockchain block number for the query. Returns: Optional[bool]: ``True`` if the neuron has a validator permit, False otherwise. @@ -4433,9 +4760,7 @@ def neuron_has_validator_permit( subnet, particularly regarding its involvement in network validation and governance. 
""" _result = self.query_subtensor("ValidatorPermit", block, [netuid, uid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + return getattr(_result, "value", None) def neuron_for_wallet( self, wallet: "bittensor.wallet", netuid: int, block: Optional[int] = None @@ -4448,7 +4773,7 @@ def neuron_for_wallet( Args: wallet (bittensor.wallet): The wallet associated with the neuron. netuid (int): The unique identifier of the subnet. - block (Optional[int], optional): The blockchain block number at which to perform the query. + block (Optional[int]): The blockchain block number at which to perform the query. Returns: Optional[NeuronInfo]: Detailed information about the neuron if found, ``None`` otherwise. @@ -4462,7 +4787,7 @@ def neuron_for_wallet( def neuron_for_uid( self, uid: Optional[int], netuid: int, block: Optional[int] = None - ) -> Optional[NeuronInfo]: + ) -> NeuronInfo: """ Retrieves detailed information about a specific neuron identified by its unique identifier (UID) within a specified subnet (netuid) of the Bittensor network. This function provides a comprehensive @@ -4474,29 +4799,29 @@ def neuron_for_uid( block (Optional[int], optional): The blockchain block number for the query. Returns: - Optional[NeuronInfo]: Detailed information about the neuron if found, ``None`` otherwise. + NeuronInfo: Detailed information about the neuron if found, ``None`` otherwise. This function is crucial for analyzing individual neurons' contributions and status within a specific subnet, offering insights into their roles in the network's consensus and validation mechanisms. 
""" if uid is None: - return NeuronInfo._null_neuron() + return NeuronInfo.get_null_neuron() - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): - block_hash = None if block == None else self.substrate.get_block_hash(block) + block_hash = None if block is None else self.substrate.get_block_hash(block) params = [netuid, uid] if block_hash: params = params + [block_hash] return self.substrate.rpc_request( - method="neuronInfo_getNeuron", params=params # custom rpc method + method="neuronInfo_getNeuron", + params=params, # custom rpc method ) json_body = make_substrate_call_with_retry() - result = json_body["result"] - if result in (None, []): - return NeuronInfo._null_neuron() + if not (result := json_body.get("result", None)): + return NeuronInfo.get_null_neuron() return NeuronInfo.from_vec_u8(result) @@ -4551,7 +4876,7 @@ def neuron_for_uid_lite( subnet without the need for comprehensive data retrieval. """ if uid is None: - return NeuronInfoLite._null_neuron() + return NeuronInfoLite.get_null_neuron() hex_bytes_result = self.query_runtime_api( runtime_api="NeuronInfoRuntimeApi", @@ -4564,7 +4889,7 @@ def neuron_for_uid_lite( ) if hex_bytes_result is None: - return NeuronInfoLite._null_neuron() + return NeuronInfoLite.get_null_neuron() if hex_bytes_result.startswith("0x"): bytes_result = bytes.fromhex(hex_bytes_result[2:]) @@ -4627,7 +4952,8 @@ def metagraph( bittensor.Metagraph: The metagraph representing the subnet's structure and neuron relationships. The metagraph is an essential tool for understanding the topology and dynamics of the Bittensor - network's decentralized architecture, particularly in relation to neuron interconnectivity and consensus processes. + network's decentralized architecture, particularly in relation to neuron interconnectivity and consensus + processes. 
""" metagraph_ = bittensor.metagraph( network=self.network, netuid=netuid, lite=lite, sync=False @@ -4704,7 +5030,8 @@ def bonds( block (Optional[int]): The blockchain block number for the query. Returns: - List[Tuple[int, List[Tuple[int, int]]]]: A list of tuples mapping each neuron's UID to its bonds with other neurons. + List[Tuple[int, List[Tuple[int, int]]]]: A list of tuples mapping each neuron's UID to its bonds with other + neurons. Understanding bond distributions is crucial for analyzing the trust dynamics and market behavior within the subnet. It reflects how neurons recognize and invest in each other's intelligence and @@ -4722,7 +5049,7 @@ def bonds( def associated_validator_ip_info( self, netuid: int, block: Optional[int] = None - ) -> Optional[List[IPInfo]]: + ) -> Optional[List["IPInfo"]]: """ Retrieves the list of all validator IP addresses associated with a specific subnet in the Bittensor network. This information is crucial for network communication and the identification of validator nodes. @@ -4732,7 +5059,8 @@ def associated_validator_ip_info( block (Optional[int]): The blockchain block number for the query. Returns: - Optional[List[IPInfo]]: A list of IPInfo objects for validator nodes in the subnet, or ``None`` if no validators are associated. + Optional[List[IPInfo]]: A list of IPInfo objects for validator nodes in the subnet, or ``None`` if no + validators are associated. Validator IP information is key for establishing secure and reliable connections within the network, facilitating consensus and validation processes critical for the network's integrity and performance. 
@@ -4780,9 +5108,9 @@ def get_subnet_burn_cost(self, block: Optional[int] = None) -> Optional[str]: return lock_cost - ################ - ## Extrinsics ## - ################ + ############## + # Extrinsics # + ############## def _do_delegation( self, @@ -4792,7 +5120,24 @@ def _do_delegation( wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + """ + Delegates a specified amount of stake to a delegate's hotkey. + + This method sends a transaction to add stake to a delegate's hotkey and retries the call up to three times + with exponential backoff in case of failures. + + Args: + wallet (bittensor.wallet): The wallet from which the stake will be delegated. + delegate_ss58 (str): The SS58 address of the delegate's hotkey. + amount (Balance): The amount of stake to be delegated. + wait_for_inclusion (bool, optional): Whether to wait for the transaction to be included in a block. Default is ``True``. + wait_for_finalization (bool, optional): Whether to wait for the transaction to be finalized. Default is ``False``. + + Returns: + bool: ``True`` if the delegation is successful, ``False`` otherwise. + """ + + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( call_module="SubtensorModule", @@ -4814,7 +5159,7 @@ def make_substrate_call_with_retry(): if response.is_success: return True else: - raise StakeError(response.error_message) + raise StakeError(format_error_message(response.error_message)) return make_substrate_call_with_retry() @@ -4826,7 +5171,24 @@ def _do_undelegation( wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + """ + Removes a specified amount of stake from a delegate's hotkey. 
+ + This method sends a transaction to remove stake from a delegate's hotkey and retries the call up to three times + with exponential backoff in case of failures. + + Args: + wallet (bittensor.wallet): The wallet from which the stake will be removed. + delegate_ss58 (str): The SS58 address of the delegate's hotkey. + amount (Balance): The amount of stake to be removed. + wait_for_inclusion (bool, optional): Whether to wait for the transaction to be included in a block. Default is ``True``. + wait_for_finalization (bool, optional): Whether to wait for the transaction to be finalized. Default is ``False``. + + Returns: + bool: ``True`` if the undelegation is successful, ``False`` otherwise. + """ + + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( call_module="SubtensorModule", @@ -4851,7 +5213,7 @@ def make_substrate_call_with_retry(): if response.is_success: return True else: - raise StakeError(response.error_message) + raise StakeError(format_error_message(response.error_message)) return make_substrate_call_with_retry() @@ -4861,7 +5223,22 @@ def _do_nominate( wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + """ + Nominates the wallet's hotkey to become a delegate. + + This method sends a transaction to nominate the wallet's hotkey to become a delegate and retries the call up to + three times with exponential backoff in case of failures. + + Args: + wallet (bittensor.wallet): The wallet whose hotkey will be nominated. + wait_for_inclusion (bool, optional): Whether to wait for the transaction to be included in a block. Default is ``True``. + wait_for_finalization (bool, optional): Whether to wait for the transaction to be finalized. Default is ``False``. + + Returns: + bool: ``True`` if the nomination is successful, ``False`` otherwise. 
+ """ + + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( call_module="SubtensorModule", @@ -4883,7 +5260,7 @@ def make_substrate_call_with_retry(): if response.is_success: return True else: - raise NominationError(response.error_message) + raise NominationError(format_error_message(response.error_message)) return make_substrate_call_with_retry() @@ -4895,7 +5272,24 @@ def _do_increase_take( wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: - @retry(delay=2, tries=3, backoff=2, max_delay=4) + """ + Increases the take rate for a delegate's hotkey. + + This method sends a transaction to increase the take rate for a delegate's hotkey and retries the call up to + three times with exponential backoff in case of failures. + + Args: + wallet (bittensor.wallet): The wallet from which the transaction will be signed. + hotkey_ss58 (str): The SS58 address of the delegate's hotkey. + take (int): The new take rate to be set. + wait_for_inclusion (bool, optional): Whether to wait for the transaction to be included in a block. Default is ``True``. + wait_for_finalization (bool, optional): Whether to wait for the transaction to be finalized. Default is ``False``. + + Returns: + bool: ``True`` if the take rate increase is successful, ``False`` otherwise. 
+ """ + + @retry(delay=1, tries=3, backoff=2, max_delay=4) def make_substrate_call_with_retry(): with self.substrate as substrate: call = substrate.compose_call( @@ -4921,7 +5315,7 @@ def make_substrate_call_with_retry(): if response.is_success: return True else: - raise TakeError(response.error_message) + raise TakeError(format_error_message(response.error_message)) return make_substrate_call_with_retry() @@ -4933,7 +5327,24 @@ def _do_decrease_take( wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: - @retry(delay=2, tries=3, backoff=2, max_delay=4) + """ + Decreases the take rate for a delegate's hotkey. + + This method sends a transaction to decrease the take rate for a delegate's hotkey and retries the call up to + three times with exponential backoff in case of failures. + + Args: + wallet (bittensor.wallet): The wallet from which the transaction will be signed. + hotkey_ss58 (str): The SS58 address of the delegate's hotkey. + take (int): The new take rate to be set. + wait_for_inclusion (bool, optional): Whether to wait for the transaction to be included in a block. Default is ``True``. + wait_for_finalization (bool, optional): Whether to wait for the transaction to be finalized. Default is ``False``. + + Returns: + bool: ``True`` if the take rate decrease is successful, ``False`` otherwise. 
+ """ + + @retry(delay=1, tries=3, backoff=2, max_delay=4) def make_substrate_call_with_retry(): with self.substrate as substrate: call = substrate.compose_call( @@ -4959,13 +5370,13 @@ def make_substrate_call_with_retry(): if response.is_success: return True else: - raise TakeError(response.error_message) + raise TakeError(format_error_message(response.error_message)) return make_substrate_call_with_retry() - ################ - #### Legacy #### - ################ + ########## + # Legacy # + ########## def get_balance(self, address: str, block: Optional[int] = None) -> Balance: """ @@ -4984,7 +5395,7 @@ def get_balance(self, address: str, block: Optional[int] = None) -> Balance: """ try: - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): return self.substrate.query( module="System", @@ -5015,9 +5426,9 @@ def get_current_block(self) -> int: operations on the blockchain. It serves as a reference point for network activities and data synchronization. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): - return self.substrate.get_block_number(None) + return self.substrate.get_block_number(None) # type: ignore return make_substrate_call_with_retry() @@ -5036,7 +5447,7 @@ def get_balances(self, block: Optional[int] = None) -> Dict[str, Balance]: including the distribution of financial resources and the financial status of network participants. 
""" - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): return self.substrate.query_map( module="System", @@ -5053,9 +5464,10 @@ def make_substrate_call_with_retry(): return_dict[r[0].value] = bal return return_dict + # TODO: check with the team if this is used anywhere externally. not in bittensor @staticmethod def _null_neuron() -> NeuronInfo: - neuron = NeuronInfo( # type: ignore + neuron = NeuronInfo( uid=0, netuid=0, active=0, @@ -5076,7 +5488,7 @@ def _null_neuron() -> NeuronInfo: is_null=True, coldkey="000000000000000000000000000000000000000000000000", hotkey="000000000000000000000000000000000000000000000000", - ) + ) # type: ignore return neuron def get_block_hash(self, block_id: int) -> str: @@ -5098,8 +5510,15 @@ def get_block_hash(self, block_id: int) -> str: return self.substrate.get_block_hash(block_id=block_id) def get_error_info_by_index(self, error_index: int) -> Tuple[str, str]: - """Returns the error name and description from the Subtensor error list.""" + """ + Returns the error name and description from the Subtensor error list. + Args: + error_index (int): The index of the error to retrieve. + + Returns: + Tuple[str, str]: A tuple containing the error name and description from substrate metadata. If the error index is not found, returns ("Unknown Error", "") and logs a warning. 
+ """ unknown_error = ("Unknown Error", "") if not self._subtensor_errors: @@ -5113,3 +5532,7 @@ def get_error_info_by_index(self, error_index: int) -> Tuple[str, str]: ) return name, description + + +# TODO: remove this after fully migrate `bittensor.subtensor` to `bittensor.Subtensor` in `bittensor/__init__.py` +subtensor = Subtensor diff --git a/bittensor/synapse.py b/bittensor/synapse.py index 805e8bc617..80053f7065 100644 --- a/bittensor/synapse.py +++ b/bittensor/synapse.py @@ -20,6 +20,7 @@ import base64 import json import sys +import warnings from pydantic import ( BaseModel, @@ -29,7 +30,7 @@ model_validator, ) import bittensor -from typing import Optional, List, Any, Dict +from typing import Optional, Any, Dict, ClassVar, Tuple def get_size(obj, seen=None) -> int: @@ -78,7 +79,7 @@ def cast_int(raw: str) -> int: int or None: The converted integer, or ``None`` if the input was ``None``. """ - return int(raw) if raw != None else raw # type: ignore + return int(raw) if raw is not None else raw # type: ignore def cast_float(raw: str) -> float: @@ -94,7 +95,7 @@ def cast_float(raw: str) -> float: float or None: The converted float, or ``None`` if the input was ``None``. """ - return float(raw) if raw != None else raw # type: ignore + return float(raw) if raw is not None else raw # type: ignore class TerminalInfo(BaseModel): @@ -301,6 +302,8 @@ class Synapse(BaseModel): 5. Body Hash Computation (``computed_body_hash``, ``required_hash_fields``): Ensures data integrity and security by computing hashes of transmitted data. Provides users with a mechanism to verify data integrity and detect any tampering during transmission. + It is recommended that names of fields in `required_hash_fields` are listed in the order they are + defined in the class. 6. Serialization and Deserialization Methods: Facilitates the conversion of Synapse objects to and from a format suitable for network transmission. 
@@ -478,14 +481,7 @@ def set_name_type(cls, values) -> dict: repr=False, ) - required_hash_fields: Optional[List[str]] = Field( - title="required_hash_fields", - description="The list of required fields to compute the body hash.", - examples=["roles", "messages"], - default=[], - frozen=True, - repr=False, - ) + required_hash_fields: ClassVar[Tuple[str, ...]] = () _extract_total_size = field_validator("total_size", mode="before")(cast_int) @@ -679,7 +675,7 @@ def body_hash(self) -> str: Process: - 1. Iterates over each required field as specified in ``required_fields_hash``. + 1. Iterates over each required field as specified in ``required_hash_fields``. 2. Concatenates the string representation of these fields. 3. Applies SHA3-256 hashing to the concatenated string to produce a unique fingerprint of the data. @@ -692,21 +688,37 @@ def body_hash(self) -> str: Returns: str: The SHA3-256 hash as a hexadecimal string, providing a fingerprint of the Synapse instance's data for integrity checks. """ - # Hash the body for verification hashes = [] - # Getting the fields of the instance - instance_fields = self.model_dump() + hash_fields_field = self.model_fields.get("required_hash_fields") + instance_fields = None + if hash_fields_field: + warnings.warn( + "The 'required_hash_fields' field handling deprecated and will be removed. 
" + "Please update Synapse class definition to use 'required_hash_fields' class variable instead.", + DeprecationWarning, + ) + required_hash_fields = hash_fields_field.default + + if required_hash_fields: + instance_fields = self.model_dump() + # Preserve backward compatibility in which fields will added in .model_dump() order + # instead of the order one from `self.required_hash_fields` + required_hash_fields = [ + field for field in instance_fields if field in required_hash_fields + ] + + # Hack to cache the required hash fields names + if len(required_hash_fields) == len(required_hash_fields): + self.__class__.required_hash_fields = tuple(required_hash_fields) + else: + required_hash_fields = self.__class__.required_hash_fields + + if required_hash_fields: + instance_fields = instance_fields or self.model_dump() + for field in required_hash_fields: + hashes.append(bittensor.utils.hash(str(instance_fields[field]))) - for field, value in instance_fields.items(): - # If the field is required in the subclass schema, hash and add it. 
- if ( - self.required_hash_fields is not None - and field in self.required_hash_fields - ): - hashes.append(bittensor.utils.hash(str(value))) - - # Hash and return the hashes that have been concatenated return bittensor.utils.hash("".join(hashes)) @classmethod diff --git a/bittensor/utils/__init__.py b/bittensor/utils/__init__.py index 72d053ea7a..700a656131 100644 --- a/bittensor/utils/__init__.py +++ b/bittensor/utils/__init__.py @@ -1,7 +1,6 @@ # The MIT License (MIT) # Copyright © 2022 Opentensor Foundation # Copyright © 2023 Opentensor Technologies Inc -import os # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated # documentation files (the “Software”), to deal in the Software without restriction, including without limitation @@ -17,17 +16,16 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. +import hashlib from typing import Callable, List, Dict, Literal, Tuple -import bittensor -import hashlib -import requests -import scalecodec import numpy as np +import scalecodec -from .wallet_utils import * # noqa F401 -from .version import version_checking, check_version, VersionCheckError +import bittensor from .registration import torch, use_torch +from .version import version_checking, check_version, VersionCheckError +from .wallet_utils import * # noqa F401 RAOPERTAO = 1e9 U16_MAX = 65535 @@ -207,9 +205,9 @@ def get_explorer_url_for_network( explorer_urls: Optional[Dict[str, str]] = {} # Will be None if the network is not known. i.e. not in network_map - explorer_root_urls: Optional[ - Dict[str, str] - ] = get_explorer_root_url_by_network_from_map(network, network_map) + explorer_root_urls: Optional[Dict[str, str]] = ( + get_explorer_root_url_by_network_from_map(network, network_map) + ) if explorer_root_urls != {}: # We are on a known network. 
@@ -260,3 +258,25 @@ def hash(content, encoding="utf-8"): # Produce the hash return sha3.hexdigest() + + +def format_error_message(error_message: dict) -> str: + """ + Formats an error message from the Subtensor error information to using in extrinsics. + + Args: + error_message (dict): A dictionary containing the error information from Subtensor. + + Returns: + str: A formatted error message string. + """ + err_type = "UnknownType" + err_name = "UnknownError" + err_description = "Unknown Description" + + if isinstance(error_message, dict): + err_type = error_message.get("type", err_type) + err_name = error_message.get("name", err_name) + err_docs = error_message.get("docs", []) + err_description = err_docs[0] if len(err_docs) > 0 else err_description + return f"Subtensor returned `{err_name} ({err_type})` error. This means: `{err_description}`" diff --git a/bittensor/utils/networking.py b/bittensor/utils/networking.py index 9f1450af81..4d1af585c3 100644 --- a/bittensor/utils/networking.py +++ b/bittensor/utils/networking.py @@ -1,5 +1,4 @@ -""" Utils for handling local network with ip and ports. -""" +"""Utils for handling local network with ip and ports.""" # The MIT License (MIT) # Copyright © 2021-2022 Yuma Rao @@ -20,10 +19,13 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. +# Standard Lib import os import urllib import json import netaddr + +# 3rd party import requests diff --git a/bittensor/utils/wallet_utils.py b/bittensor/utils/wallet_utils.py index 78a7ed065c..39218c33f0 100644 --- a/bittensor/utils/wallet_utils.py +++ b/bittensor/utils/wallet_utils.py @@ -119,18 +119,18 @@ def create_identity_dict( Creates a dictionary with structure for identity extrinsic. Must fit within 64 bits. Args: - display (str): String to be converted and stored under 'display'. - legal (str): String to be converted and stored under 'legal'. 
- web (str): String to be converted and stored under 'web'. - riot (str): String to be converted and stored under 'riot'. - email (str): String to be converted and stored under 'email'. - pgp_fingerprint (str): String to be converted and stored under 'pgp_fingerprint'. - image (str): String to be converted and stored under 'image'. - info (str): String to be converted and stored under 'info'. - twitter (str): String to be converted and stored under 'twitter'. + display (str): String to be converted and stored under 'display'. + legal (str): String to be converted and stored under 'legal'. + web (str): String to be converted and stored under 'web'. + riot (str): String to be converted and stored under 'riot'. + email (str): String to be converted and stored under 'email'. + pgp_fingerprint (str): String to be converted and stored under 'pgp_fingerprint'. + image (str): String to be converted and stored under 'image'. + info (str): String to be converted and stored under 'info'. + twitter (str): String to be converted and stored under 'twitter'. Returns: - dict: A dictionary with the specified structure and byte string conversions. + dict: A dictionary with the specified structure and byte string conversions. Raises: ValueError: If pgp_fingerprint is not exactly 20 bytes long when encoded. diff --git a/bittensor/utils/weight_utils.py b/bittensor/utils/weight_utils.py index 0d1d6734f9..de26d98c02 100644 --- a/bittensor/utils/weight_utils.py +++ b/bittensor/utils/weight_utils.py @@ -20,6 +20,7 @@ # DEALINGS IN THE SOFTWARE. 
import hashlib +import logging from typing import Tuple, List, Union import numpy as np @@ -30,8 +31,6 @@ import bittensor from bittensor.utils.registration import torch, use_torch, legacy_torch_api_compat -from bittensor.btlogging import logging - U32_MAX = 4294967295 U16_MAX = 65535 diff --git a/bittensor/wallet.py b/bittensor/wallet.py index 6ac808b12a..be6aa08c93 100644 --- a/bittensor/wallet.py +++ b/bittensor/wallet.py @@ -1,5 +1,4 @@ -""" Implementation of the wallet class, which manages balances with staking and transfer. Also manages hotkey and coldkey. -""" +"""Implementation of the wallet class, which manages balances with staking and transfer. Also manages hotkey and coldkey.""" # The MIT License (MIT) # Copyright © 2021 Yuma Rao @@ -676,8 +675,7 @@ def regenerate_coldkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": - ... + ) -> "wallet": ... @overload def regenerate_coldkey( @@ -686,8 +684,7 @@ def regenerate_coldkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": - ... + ) -> "wallet": ... @overload def regenerate_coldkey( @@ -696,8 +693,7 @@ def regenerate_coldkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": - ... + ) -> "wallet": ... def regenerate_coldkey( self, @@ -786,8 +782,7 @@ def regenerate_hotkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": - ... + ) -> "wallet": ... @overload def regenerate_hotkey( @@ -796,8 +791,7 @@ def regenerate_hotkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": - ... + ) -> "wallet": ... @overload def regenerate_hotkey( @@ -806,8 +800,7 @@ def regenerate_hotkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": - ... + ) -> "wallet": ... 
def regenerate_hotkey( self, diff --git a/contrib/CONTRIBUTING.md b/contrib/CONTRIBUTING.md index e1413d1099..f9f4ed5f34 100644 --- a/contrib/CONTRIBUTING.md +++ b/contrib/CONTRIBUTING.md @@ -76,7 +76,7 @@ You can contribute to Bittensor in one of two main ways (as well as many others) Here is a high-level summary: - Code consistency is crucial; adhere to established programming language conventions. -- Use `black` to format your Python code; it ensures readability and consistency. +- Use `ruff format .` to format your Python code; it ensures readability and consistency. - Write concise Git commit messages; summarize changes in ~50 characters. - Follow these six commit rules: - Atomic Commits: Focus on one task or fix per commit. diff --git a/contrib/STYLE.md b/contrib/STYLE.md index b7ac755fc0..7804359d22 100644 --- a/contrib/STYLE.md +++ b/contrib/STYLE.md @@ -58,15 +58,17 @@ Python's official style guide is PEP 8, which provides conventions for writing c #### More details -Use `black` to format your python code before commiting for consistency across such a large pool of contributors. Black's code [style](https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#code-style) ensures consistent and opinionated code formatting. It automatically formats your Python code according to the Black style guide, enhancing code readability and maintainability. +Use [`ruff` to format](https://docs.astral.sh/ruff/formatter/#the-ruff-formatter) your python code before committing for consistency across such a large pool of contributors. +Black code [style](https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#code-style) ensures consistent and opinionated code formatting. +Ruff automatically formats your Python code according to the Black style guide, enhancing code readability and maintainability. 
-Key Features of Black: +Key Features of ruff & Black code style: - Consistency: Black enforces a single, consistent coding style across your project, eliminating style debates and allowing developers to focus on code logic. + Consistency: ruff enforces a single, consistent coding style across your project, eliminating style debates and allowing developers to focus on code logic. Readability: By applying a standard formatting style, Black improves code readability, making it easier to understand and collaborate on projects. - Automation: Black automates the code formatting process, saving time and effort. It eliminates the need for manual formatting and reduces the likelihood of inconsistencies. + Automation: ruff automates the code formatting process, saving time and effort. It eliminates the need for manual formatting and reduces the likelihood of inconsistencies. ### Naming Conventions diff --git a/requirements/dev.txt b/requirements/dev.txt index 45c90a5b66..6cc94e2679 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -1,5 +1,6 @@ -aioresponses==0.7.6 +black==23.7.0 pytest==7.2.0 +pytest-asyncio==0.23.7 pytest-mock==3.12.0 pytest-split==0.8.0 pytest-xdist==3.0.2 @@ -13,4 +14,7 @@ mypy==1.8.0 types-retry==0.9.9.4 freezegun==1.5.0 torch>=1.13.1 +httpx==0.27.0 +ruff==0.4.7 +aioresponses==0.7.6 factory-boy==3.3.0 diff --git a/requirements/prod.txt b/requirements/prod.txt index d5bbf44b87..2d9ecabab5 100644 --- a/requirements/prod.txt +++ b/requirements/prod.txt @@ -1,19 +1,18 @@ -aiohttp==3.9.0b0 -ansible==6.7.0 -ansible_vault==2.1.0 +aiohttp~=3.9 +ansible~=6.7 +ansible_vault~=2.1 backoff -black==23.7.0 -certifi==2024.2.2 -colorama==0.4.6 -cryptography==42.0.5 -ddt==1.6.0 +certifi~=2024.2.2 +colorama~=0.4.6 +cryptography~=42.0.5 +ddt~=1.6.0 eth-utils<2.3.0 fuzzywuzzy>=0.18.0 -fastapi==0.110.1 -munch==2.5.0 +fastapi~=0.110.1 +munch~=2.5.0 netaddr numpy -msgpack-numpy-opentensor==0.5.0 +msgpack-numpy-opentensor~=0.5.0 nest_asyncio packaging 
pycryptodome>=3.18.0,<4.0.0 @@ -21,17 +20,15 @@ pyyaml password_strength pydantic>=2.3, <3 PyNaCl>=1.3.0,<=1.5.0 -pytest-asyncio python-Levenshtein -python-statemachine==2.1.2 -pytest +python-statemachine~=2.1.2 retry requests rich scalecodec==1.2.7 # scalecodec should not be changed unless first verifying compatibility with the subtensor's monkeypatching of scalecodec.RuntimeConfiguration.get_decoder_class -shtab==1.6.5 -substrate-interface==1.7.5 +shtab~=1.6.5 +substrate-interface~=1.7.5 termcolor tqdm -uvicorn==0.22.0 +uvicorn<=0.30 wheel diff --git a/scripts/check_compatibility.sh b/scripts/check_compatibility.sh index 5f48f4cbb0..b9c89c24dd 100755 --- a/scripts/check_compatibility.sh +++ b/scripts/check_compatibility.sh @@ -22,7 +22,7 @@ check_compatibility() { continue fi - package_name=$(echo "$requirement" | awk -F'[!=<>]' '{print $1}' | awk -F'[' '{print $1}') # Strip off brackets + package_name=$(echo "$requirement" | awk -F'[!=<>~]' '{print $1}' | awk -F'[' '{print $1}') # Strip off brackets echo -n "Checking $package_name... " url="https://pypi.org/pypi/$package_name/json" diff --git a/scripts/check_pre_submit.sh b/scripts/check_pre_submit.sh index 7ea7c37f41..4dbe7747f6 100755 --- a/scripts/check_pre_submit.sh +++ b/scripts/check_pre_submit.sh @@ -1,8 +1,8 @@ #!/bin/bash -# black checks formating -echo ">>> Run the pre-submit format check with \`black .\`." -python3 -m black --exclude '(env|venv|.eggs|.git)' . +# ruff checks formatting +echo ">>> Run the pre-submit format check with \`ruff format .\`." +ruff format . echo ">>> Run the pre-submit format check with \`mypy\`." 
diff --git a/scripts/environments/apple_m1_environment.yml b/scripts/environments/apple_m1_environment.yml index c18da7236a..a3712e267c 100644 --- a/scripts/environments/apple_m1_environment.yml +++ b/scripts/environments/apple_m1_environment.yml @@ -13,7 +13,6 @@ dependencies: - backcall=0.2.0=pyh9f0ad1d_0 - backports=1.0=pyhd8ed1ab_3 - backports.functools_lru_cache=1.6.4=pyhd8ed1ab_0 - - black=23.7.0=py310hbe9552e_1 - beautifulsoup4=4.12.2=pyha770c72_0 - bleach=6.0.0=pyhd8ed1ab_0 - brotli=1.0.9=h1a8c8d9_8 diff --git a/tests/e2e_tests/conftest.py b/tests/e2e_tests/conftest.py index 2300eafc77..7afb6b448f 100644 --- a/tests/e2e_tests/conftest.py +++ b/tests/e2e_tests/conftest.py @@ -1,19 +1,28 @@ +import logging import os +import re +import shlex import signal -from substrateinterface import SubstrateInterface -import pytest import subprocess -import logging -import shlex -import re import time +import pytest +from substrateinterface import SubstrateInterface + +from tests.e2e_tests.utils import ( + clone_or_update_templates, + install_templates, + uninstall_templates, + template_path, +) + logging.basicConfig(level=logging.INFO) # Fixture for setting up and tearing down a localnet.sh chain between tests @pytest.fixture(scope="function") -def local_chain(): +def local_chain(request): + param = request.param if hasattr(request, "param") else None # Get the environment variable for the script path script_path = os.getenv("LOCALNET_SH_PATH") @@ -22,8 +31,12 @@ def local_chain(): logging.warning("LOCALNET_SH_PATH env variable is not set, e2e test skipped.") pytest.skip("LOCALNET_SH_PATH environment variable is not set.") + # Check if param is None, and handle it accordingly + args = "" if param is None else f"fast_blocks={param}" + + # compile commands to send to process + cmds = shlex.split(f"{script_path} {args}") # Start new node process - cmds = shlex.split(script_path) process = subprocess.Popen( cmds, stdout=subprocess.PIPE, text=True, preexec_fn=os.setsid ) @@ 
-31,6 +44,11 @@ def local_chain(): # Pattern match indicates node is compiled and ready pattern = re.compile(r"Successfully ran block step\.") + # install neuron templates + logging.info("downloading and installing neuron templates from github") + templates_dir = clone_or_update_templates() + install_templates(templates_dir) + def wait_for_node_start(process, pattern): for line in process.stdout: print(line.strip()) @@ -55,3 +73,7 @@ def wait_for_node_start(process, pattern): # Ensure the process has terminated process.wait() + + # uninstall templates + logging.info("uninstalling neuron templates") + uninstall_templates(template_path) diff --git a/tests/e2e_tests/multistep/test_axon.py b/tests/e2e_tests/multistep/test_axon.py new file mode 100644 index 0000000000..f23fb4da7b --- /dev/null +++ b/tests/e2e_tests/multistep/test_axon.py @@ -0,0 +1,116 @@ +import asyncio +import sys + +import pytest + +import bittensor +from bittensor.utils import networking +from bittensor.commands import ( + RegisterCommand, + RegisterSubnetworkCommand, +) +from tests.e2e_tests.utils import ( + setup_wallet, + template_path, + repo_name, +) + +""" +Test the axon mechanism. 
+ +Verify that: +* axon is registered on network as a miner +* ip +* type +* port + +are set correctly, and that the miner is currently running + +""" + + +@pytest.mark.asyncio +async def test_axon(local_chain): + # Register root as Alice + alice_keypair, exec_command, wallet_path = setup_wallet("//Alice") + exec_command(RegisterSubnetworkCommand, ["s", "create"]) + + # Verify subnet 1 created successfully + assert local_chain.query("SubtensorModule", "NetworksAdded", [1]).serialize() + + # Register a neuron to the subnet + exec_command( + RegisterCommand, + [ + "s", + "register", + "--netuid", + "1", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--wallet.path", + wallet_path, + "--subtensor.network", + "local", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + "--no_prompt", + ], + ) + + metagraph = bittensor.metagraph(netuid=1, network="ws://localhost:9945") + + # validate one miner with ip of none + old_axon = metagraph.axons[0] + + assert len(metagraph.axons) == 1 + assert old_axon.hotkey == alice_keypair.ss58_address + assert old_axon.coldkey == alice_keypair.ss58_address + assert old_axon.ip == "0.0.0.0" + assert old_axon.port == 0 + assert old_axon.ip_type == 0 + + # register miner + # "python neurons/miner.py --netuid 1 --subtensor.chain_endpoint ws://localhost:9945 --wallet.name wallet.name --wallet.hotkey wallet.hotkey.ss58_address" + cmd = " ".join( + [ + f"{sys.executable}", + f'"{template_path}{repo_name}/neurons/miner.py"', + "--no_prompt", + "--netuid", + "1", + "--subtensor.network", + "local", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + "--wallet.path", + wallet_path, + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + ] + ) + + await asyncio.create_subprocess_shell( + cmd, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + await asyncio.sleep( + 5 + ) # wait for 5 seconds for the metagraph to refresh with latest data + + # refresh metagraph + metagraph = 
bittensor.metagraph(netuid=1, network="ws://localhost:9945") + updated_axon = metagraph.axons[0] + external_ip = networking.get_external_ip() + + assert len(metagraph.axons) == 1 + assert updated_axon.ip == external_ip + assert updated_axon.ip_type == networking.ip_version(external_ip) + assert updated_axon.port == 8091 + assert updated_axon.hotkey == alice_keypair.ss58_address + assert updated_axon.coldkey == alice_keypair.ss58_address diff --git a/tests/e2e_tests/multistep/test_dendrite.py b/tests/e2e_tests/multistep/test_dendrite.py new file mode 100644 index 0000000000..6abde7464d --- /dev/null +++ b/tests/e2e_tests/multistep/test_dendrite.py @@ -0,0 +1,191 @@ +import asyncio +import logging +import sys +import time + +import pytest + +import bittensor +from bittensor.commands import ( + RegisterCommand, + RegisterSubnetworkCommand, + StakeCommand, + RootRegisterCommand, + RootSetBoostCommand, +) +from tests.e2e_tests.utils import ( + setup_wallet, + template_path, + repo_name, +) + +logging.basicConfig(level=logging.INFO) + +""" +Test the dendrites mechanism. 
+ +Verify that: +* dendrite is registered on network as a validator +* stake successfully +* validator permit is set + +""" + + +@pytest.mark.asyncio +async def test_dendrite(local_chain): + # Register root as Alice - the subnet owner + alice_keypair, exec_command, wallet_path = setup_wallet("//Alice") + exec_command(RegisterSubnetworkCommand, ["s", "create"]) + + # Verify subnet 1 created successfully + assert local_chain.query("SubtensorModule", "NetworksAdded", [1]).serialize() + + bob_keypair, exec_command, wallet_path = setup_wallet("//Bob") + + # Register a neuron to the subnet + exec_command( + RegisterCommand, + [ + "s", + "register", + "--netuid", + "1", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--subtensor.network", + "local", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + "--no_prompt", + ], + ) + + metagraph = bittensor.metagraph(netuid=1, network="ws://localhost:9945") + subtensor = bittensor.subtensor(network="ws://localhost:9945") + + # assert one neuron is Bob + assert len(subtensor.neurons(netuid=1)) == 1 + neuron = metagraph.neurons[0] + assert neuron.hotkey == bob_keypair.ss58_address + assert neuron.coldkey == bob_keypair.ss58_address + + # assert stake is 0 + assert neuron.stake.tao == 0 + + # Stake to become to top neuron after the first epoch + exec_command( + StakeCommand, + [ + "stake", + "add", + "--amount", + "10000", + ], + ) + + # refresh metagraph + metagraph = bittensor.metagraph(netuid=1, network="ws://localhost:9945") + neuron = metagraph.neurons[0] + # assert stake is 10000 + assert neuron.stake.tao == 10_000.0 + + # assert neuron is not validator + assert neuron.active is True + assert neuron.validator_permit is False + assert neuron.validator_trust == 0.0 + assert neuron.pruning_score == 0 + + # register validator from template + cmd = " ".join( + [ + f"{sys.executable}", + f'"{template_path}{repo_name}/neurons/validator.py"', + "--no_prompt", + "--netuid", + "1", + "--subtensor.network", + 
"local", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + "--wallet.path", + wallet_path, + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + ] + ) + + # run validator in the background + await asyncio.create_subprocess_shell( + cmd, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + await asyncio.sleep( + 5 + ) # wait for 5 seconds for the metagraph and subtensor to refresh with latest data + + # register validator with root network + exec_command( + RootRegisterCommand, + [ + "root", + "register", + "--netuid", + "1", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + ], + ) + + exec_command( + RootSetBoostCommand, + [ + "root", + "boost", + "--netuid", + "1", + "--increase", + "1", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + ], + ) + # get current block, wait until 360 blocks pass (subnet tempo) + interval = 360 + current_block = subtensor.get_current_block() + next_tempo_block_start = (current_block - (current_block % interval)) + interval + while current_block < next_tempo_block_start: + time.sleep(1) # Wait for 1 second before checking the block number again + current_block = subtensor.get_current_block() + if current_block % 10 == 0: + print( + f"Current Block: {current_block} Next tempo at: {next_tempo_block_start}" + ) + logging.info( + f"Current Block: {current_block} Next tempo at: {next_tempo_block_start}" + ) + + # refresh metagraph + metagraph = bittensor.metagraph(netuid=1, network="ws://localhost:9945") + + # refresh validator neuron + neuron = metagraph.neurons[0] + + assert len(metagraph.neurons) == 1 + assert neuron.active is True + assert neuron.validator_permit is True + assert neuron.hotkey == bob_keypair.ss58_address + assert neuron.coldkey == bob_keypair.ss58_address diff --git a/tests/e2e_tests/multistep/test_incentive.py 
b/tests/e2e_tests/multistep/test_incentive.py new file mode 100644 index 0000000000..ea5809dd7f --- /dev/null +++ b/tests/e2e_tests/multistep/test_incentive.py @@ -0,0 +1,306 @@ +import asyncio +import logging +import sys +import time + +import pytest + +import bittensor +from bittensor.commands import ( + RegisterCommand, + RegisterSubnetworkCommand, + StakeCommand, + RootRegisterCommand, + RootSetBoostCommand, +) +from tests.e2e_tests.utils import ( + setup_wallet, + template_path, + repo_name, +) + +logging.basicConfig(level=logging.INFO) + +""" +Test the incentive mechanism. + +Verify that for the miner: +* trust +* rank +* consensus +* incentive +are updated with proper values after an epoch has passed. + +For the validator verify that: +* validator_permit +* validator_trust +* dividends +* stake +are updated with proper values after an epoch has passed. + +""" + + +@pytest.mark.asyncio +async def test_incentive(local_chain): + # Register root as Alice - the subnet owner and validator + alice_keypair, alice_exec_command, alice_wallet_path = setup_wallet("//Alice") + alice_exec_command(RegisterSubnetworkCommand, ["s", "create"]) + # Verify subnet 1 created successfully + assert local_chain.query("SubtensorModule", "NetworksAdded", [1]).serialize() + + # Register Bob as miner + bob_keypair, bob_exec_command, bob_wallet_path = setup_wallet("//Bob") + + # Register Alice as neuron to the subnet + alice_exec_command( + RegisterCommand, + [ + "s", + "register", + "--netuid", + "1", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--wallet.path", + alice_wallet_path, + "--subtensor.network", + "local", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + "--no_prompt", + ], + ) + + # Register Bob as neuron to the subnet + bob_exec_command( + RegisterCommand, + [ + "s", + "register", + "--netuid", + "1", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--subtensor.network", + "local", + "--subtensor.chain_endpoint", + 
"ws://localhost:9945", + "--no_prompt", + ], + ) + + subtensor = bittensor.subtensor(network="ws://localhost:9945") + # assert two neurons are in network + assert len(subtensor.neurons(netuid=1)) == 2 + + # Alice to stake to become to top neuron after the first epoch + alice_exec_command( + StakeCommand, + [ + "stake", + "add", + "--amount", + "10000", + ], + ) + + # register Bob as miner + cmd = " ".join( + [ + f"{sys.executable}", + f'"{template_path}{repo_name}/neurons/miner.py"', + "--no_prompt", + "--netuid", + "1", + "--subtensor.network", + "local", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + "--wallet.path", + bob_wallet_path, + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--logging.trace", + ] + ) + + miner_process = await asyncio.create_subprocess_shell( + cmd, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + + # Function to write output to the log file + async def miner_write_output(stream): + log_file = "miner.log" + with open(log_file, "a") as f: + while True: + line = await stream.readline() + if not line: + break + f.write(line.decode()) + f.flush() + + # Create tasks to read stdout and stderr concurrently + asyncio.create_task(miner_write_output(miner_process.stdout)) + asyncio.create_task(miner_write_output(miner_process.stderr)) + + await asyncio.sleep( + 5 + ) # wait for 5 seconds for the metagraph to refresh with latest data + + # register Alice as validator + cmd = " ".join( + [ + f"{sys.executable}", + f'"{template_path}{repo_name}/neurons/validator.py"', + "--no_prompt", + "--netuid", + "1", + "--subtensor.network", + "local", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + "--wallet.path", + alice_wallet_path, + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--logging.trace", + ] + ) + # run validator in the background + + validator_process = await asyncio.create_subprocess_shell( + cmd, + stdout=asyncio.subprocess.PIPE, + 
stderr=asyncio.subprocess.PIPE, + ) + + # Function to write output to the log file + async def validator_write_output(stream): + log_file = "validator.log" + with open(log_file, "a") as f: + while True: + line = await stream.readline() + if not line: + break + f.write(line.decode()) + f.flush() + + # Create tasks to read stdout and stderr concurrently + asyncio.create_task(validator_write_output(validator_process.stdout)) + asyncio.create_task(validator_write_output(validator_process.stderr)) + + await asyncio.sleep( + 5 + ) # wait for 5 seconds for the metagraph and subtensor to refresh with latest data + + # register validator with root network + alice_exec_command( + RootRegisterCommand, + [ + "root", + "register", + "--netuid", + "1", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + ], + ) + + alice_exec_command( + RootSetBoostCommand, + [ + "root", + "boost", + "--netuid", + "1", + "--increase", + "100", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + ], + ) + + # get latest metagraph + metagraph = bittensor.metagraph(netuid=1, network="ws://localhost:9945") + + # get current emissions + bob_neuron = metagraph.neurons[1] + assert bob_neuron.incentive == 0 + assert bob_neuron.consensus == 0 + assert bob_neuron.rank == 0 + assert bob_neuron.trust == 0 + + alice_neuron = metagraph.neurons[0] + assert alice_neuron.validator_permit is False + assert alice_neuron.dividends == 0 + assert alice_neuron.stake.tao == 10_000.0 + assert alice_neuron.validator_trust == 0 + + # wait until 360 blocks pass (subnet tempo) + wait_epoch(360, subtensor) + + # for some reason the weights do not get set through the template. Set weight manually. 
+ alice_wallet = bittensor.wallet() + alice_wallet._hotkey = alice_keypair + subtensor._do_set_weights( + wallet=alice_wallet, + uids=[1], + vals=[65535], + netuid=1, + version_key=0, + wait_for_inclusion=True, + wait_for_finalization=True, + ) + + # wait epoch until weight go into effect + wait_epoch(360, subtensor) + + # refresh metagraph + metagraph = bittensor.metagraph(netuid=1, network="ws://localhost:9945") + + # get current emissions and validate that Alice has gotten tao + bob_neuron = metagraph.neurons[1] + assert bob_neuron.incentive == 1 + assert bob_neuron.consensus == 1 + assert bob_neuron.rank == 1 + assert bob_neuron.trust == 1 + + alice_neuron = metagraph.neurons[0] + assert alice_neuron.validator_permit is True + assert alice_neuron.dividends == 1 + assert alice_neuron.stake.tao == 10_000.0 + assert alice_neuron.validator_trust == 1 + + +def wait_epoch(interval, subtensor): + current_block = subtensor.get_current_block() + next_tempo_block_start = (current_block - (current_block % interval)) + interval + while current_block < next_tempo_block_start: + time.sleep(1) # Wait for 1 second before checking the block number again + current_block = subtensor.get_current_block() + if current_block % 10 == 0: + print( + f"Current Block: {current_block} Next tempo at: {next_tempo_block_start}" + ) + logging.info( + f"Current Block: {current_block} Next tempo at: {next_tempo_block_start}" + ) diff --git a/tests/e2e_tests/multistep/test_last_tx_block.py b/tests/e2e_tests/multistep/test_last_tx_block.py index 0d1796f5d8..b97d54f8fa 100644 --- a/tests/e2e_tests/multistep/test_last_tx_block.py +++ b/tests/e2e_tests/multistep/test_last_tx_block.py @@ -9,7 +9,7 @@ # https://discord.com/channels/799672011265015819/1176889736636407808/1236057424134144152 def test_takes(local_chain): # Register root as Alice - (keypair, exec_command) = setup_wallet("//Alice") + keypair, exec_command, wallet_path = setup_wallet("//Alice") exec_command(RootRegisterCommand, ["root", 
"register"]) # Create subnet 1 and verify created successfully @@ -21,7 +21,7 @@ def test_takes(local_chain): assert local_chain.query("SubtensorModule", "NetworksAdded", [1]).serialize() # Register and nominate Bob - (keypair, exec_command) = setup_wallet("//Bob") + keypair, exec_command, wallet_path = setup_wallet("//Bob") assert ( local_chain.query( "SubtensorModule", "LastTxBlock", [keypair.ss58_address] @@ -35,7 +35,7 @@ def test_takes(local_chain): ).serialize() == 0 ) - exec_command(RegisterCommand, ["s", "register", "--neduid", "1"]) + exec_command(RegisterCommand, ["s", "register", "--netuid", "1"]) exec_command(NominateCommand, ["root", "nominate"]) assert ( local_chain.query( diff --git a/tests/e2e_tests/subcommands/delegation/test_set_delegate_take.py b/tests/e2e_tests/subcommands/delegation/test_set_delegate_take.py index 0453576332..cc7b1b5744 100644 --- a/tests/e2e_tests/subcommands/delegation/test_set_delegate_take.py +++ b/tests/e2e_tests/subcommands/delegation/test_set_delegate_take.py @@ -8,7 +8,7 @@ def test_set_delegate_increase_take(local_chain): # Register root as Alice - (keypair, exec_command) = setup_wallet("//Alice") + keypair, exec_command, wallet_path = setup_wallet("//Alice") exec_command(RootRegisterCommand, ["root", "register"]) # Create subnet 1 and verify created successfully @@ -20,7 +20,7 @@ def test_set_delegate_increase_take(local_chain): assert local_chain.query("SubtensorModule", "NetworksAdded", [1]).serialize() # Register and nominate Bob - (keypair, exec_command) = setup_wallet("//Bob") + keypair, exec_command, wallet_path = setup_wallet("//Bob") assert ( local_chain.query( "SubtensorModule", "LastTxBlock", [keypair.ss58_address] diff --git a/tests/e2e_tests/subcommands/wallet/test_faucet.py b/tests/e2e_tests/subcommands/wallet/test_faucet.py new file mode 100644 index 0000000000..0e647387b6 --- /dev/null +++ b/tests/e2e_tests/subcommands/wallet/test_faucet.py @@ -0,0 +1,86 @@ +import pytest + +import bittensor +from 
bittensor import logging +from bittensor.commands import ( + RegisterCommand, + RegisterSubnetworkCommand, + RunFaucetCommand, +) +from tests.e2e_tests.utils import ( + setup_wallet, +) + + +@pytest.mark.parametrize("local_chain", [False], indirect=True) +def test_faucet(local_chain): + # Register root as Alice + keypair, exec_command, wallet_path = setup_wallet("//Alice") + exec_command(RegisterSubnetworkCommand, ["s", "create"]) + + # Verify subnet 1 created successfully + assert local_chain.query("SubtensorModule", "NetworksAdded", [1]).serialize() + + # Register a neuron to the subnet + exec_command( + RegisterCommand, + [ + "s", + "register", + "--netuid", + "1", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--subtensor.network", + "local", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + "--no_prompt", + ], + ) + + subtensor = bittensor.subtensor(network="ws://localhost:9945") + + # verify current balance + wallet_balance = subtensor.get_balance(keypair.ss58_address) + assert wallet_balance.tao == 998999.0 + + # run faucet 3 times + for i in range(3): + logging.info(f"faucet run #:{i+1}") + try: + exec_command( + RunFaucetCommand, + [ + "wallet", + "faucet", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + ], + ) + logging.info( + f"wallet balance is {subtensor.get_balance(keypair.ss58_address).tao} tao" + ) + except SystemExit as e: + logging.warning( + "Block not generated fast enough to be within 3 block seconds window." 
+ ) + # Handle the SystemExit exception + assert e.code == 1 # Assert that the exit code is 1 + except Exception as e: + logging.warning(f"Unexpected exception occurred on faucet: {e}") + + subtensor = bittensor.subtensor(network="ws://localhost:9945") + + new_wallet_balance = subtensor.get_balance(keypair.ss58_address) + # verify balance increase + assert wallet_balance.tao < new_wallet_balance.tao + assert ( + new_wallet_balance.tao == 999899.0 + ) # after 3 runs we should see an increase of 900 tao diff --git a/tests/e2e_tests/subcommands/wallet/test_transfer.py b/tests/e2e_tests/subcommands/wallet/test_transfer.py index de8052e027..5b491b3f0d 100644 --- a/tests/e2e_tests/subcommands/wallet/test_transfer.py +++ b/tests/e2e_tests/subcommands/wallet/test_transfer.py @@ -5,7 +5,7 @@ # Example test using the local_chain fixture def test_transfer(local_chain): - (keypair, exec_command) = setup_wallet("//Alice") + keypair, exec_command, wallet_path = setup_wallet("//Alice") acc_before = local_chain.query("System", "Account", [keypair.ss58_address]) exec_command( diff --git a/tests/e2e_tests/subcommands/weights/test_commit_weights.py b/tests/e2e_tests/subcommands/weights/test_commit_weights.py index d22efde267..4c719b0ebd 100644 --- a/tests/e2e_tests/subcommands/weights/test_commit_weights.py +++ b/tests/e2e_tests/subcommands/weights/test_commit_weights.py @@ -15,9 +15,23 @@ from tests.e2e_tests.utils import setup_wallet +""" +Test the Commit/Reveal weights mechanism. 
+ +Verify that: +* Weights are commited +* weights are hashed with salt +--- after an epoch --- +* weights are un-hashed with salt +* weights are properly revealed + +""" + + def test_commit_and_reveal_weights(local_chain): # Register root as Alice - (alice_keypair, exec_command) = setup_wallet("//Alice") + keypair, exec_command, wallet_path = setup_wallet("//Alice") + exec_command(RegisterSubnetworkCommand, ["s", "create"]) # define values @@ -36,9 +50,9 @@ def test_commit_and_reveal_weights(local_chain): # Create a test wallet and set the coldkey, coldkeypub, and hotkey wallet = bittensor.wallet(path="/tmp/btcli-wallet") - wallet.set_coldkey(keypair=alice_keypair, encrypt=False, overwrite=True) - wallet.set_coldkeypub(keypair=alice_keypair, encrypt=False, overwrite=True) - wallet.set_hotkey(keypair=alice_keypair, encrypt=False, overwrite=True) + wallet.set_coldkey(keypair=keypair, encrypt=False, overwrite=True) + wallet.set_coldkeypub(keypair=keypair, encrypt=False, overwrite=True) + wallet.set_hotkey(keypair=keypair, encrypt=False, overwrite=True) # Stake to become to top neuron after the first epoch exec_command( @@ -49,7 +63,7 @@ def test_commit_and_reveal_weights(local_chain): "--wallet.path", "/tmp/btcli-wallet2", "--amount", - "999998998", + "100000", ], ) @@ -168,7 +182,9 @@ def test_commit_and_reveal_weights(local_chain): # Query the Weights storage map revealed_weights = subtensor.query_module( - module="SubtensorModule", name="Weights", params=[1, uid] # netuid and uid + module="SubtensorModule", + name="Weights", + params=[1, uid], # netuid and uid ) # Assert that the revealed weights are set correctly diff --git a/tests/e2e_tests/utils.py b/tests/e2e_tests/utils.py index 3ad789dd6d..4b485e3bd9 100644 --- a/tests/e2e_tests/utils.py +++ b/tests/e2e_tests/utils.py @@ -1,7 +1,17 @@ -from substrateinterface import Keypair +import os +import shutil +import subprocess +import sys + from typing import List + +from bittensor import Keypair + import bittensor 
+template_path = os.getcwd() + "/neurons/" +repo_name = "templates repository" + def setup_wallet(uri: str): keypair = Keypair.create_from_uri(uri) @@ -29,4 +39,38 @@ def exec_command(command, extra_args: List[str]): cli_instance = bittensor.cli(config) command.run(cli_instance) - return (keypair, exec_command) + return keypair, exec_command, wallet_path + + +def clone_or_update_templates(): + install_dir = template_path + repo_mapping = { + repo_name: "https://github.com/opentensor/bittensor-subnet-template.git", + } + os.makedirs(install_dir, exist_ok=True) + os.chdir(install_dir) + + for repo, git_link in repo_mapping.items(): + if not os.path.exists(repo): + print(f"\033[94mCloning {repo}...\033[0m") + subprocess.run(["git", "clone", git_link, repo], check=True) + else: + print(f"\033[94mUpdating {repo}...\033[0m") + os.chdir(repo) + subprocess.run(["git", "pull"], check=True) + os.chdir("..") + + return install_dir + repo_name + "/" + + +def install_templates(install_dir): + subprocess.check_call([sys.executable, "-m", "pip", "install", install_dir]) + + +def uninstall_templates(install_dir): + # uninstall templates + subprocess.check_call( + [sys.executable, "-m", "pip", "uninstall", "bittensor_subnet_template", "-y"] + ) + # delete everything in directory + shutil.rmtree(install_dir) diff --git a/tests/integration_tests/test_cli.py b/tests/integration_tests/test_cli.py index c20c905549..6fe1acf3bc 100644 --- a/tests/integration_tests/test_cli.py +++ b/tests/integration_tests/test_cli.py @@ -782,72 +782,123 @@ def test_unstake_with_thresholds(self, _): config.no_prompt = True # as the minimum required stake may change, this method allows us to dynamically # update the amount in the mock without updating the tests - config.amount = Balance.from_rao(_subtensor_mock.min_required_stake() - 1) - config.wallet.name = "fake_wallet" - config.hotkeys = ["hk0", "hk1", "hk2"] + min_stake: Balance = _subtensor_mock.get_minimum_required_stake() + # Must be a float + 
config.amount = min_stake.tao # Unstake below the minimum required stake + wallet_names = ["w0", "w1", "w2"] config.all_hotkeys = False # Notice no max_stake specified mock_stakes: Dict[str, Balance] = { - "hk0": Balance.from_float(10.0), - "hk1": Balance.from_float(11.1), - "hk2": Balance.from_float(12.2), + "w0": 2 * min_stake - 1, # remaining stake will be below the threshold + "w1": 2 * min_stake - 2, + "w2": 2 * min_stake - 5, } - mock_coldkey_kp = _get_mock_keypair(0, self.id()) - mock_wallets = [ SimpleNamespace( - name=config.wallet.name, - coldkey=mock_coldkey_kp, - coldkeypub=mock_coldkey_kp, - hotkey_str=hk, - hotkey=_get_mock_keypair(idx + 100, self.id()), + name=wallet_name, + coldkey=_get_mock_keypair(idx, self.id()), + coldkeypub=_get_mock_keypair(idx, self.id()), + hotkey_str="hk{}".format(idx), # doesn't matter + hotkey=_get_mock_keypair(idx + 100, self.id()), # doesn't matter ) - for idx, hk in enumerate(config.hotkeys) + for idx, wallet_name in enumerate(wallet_names) ] - # Register mock wallets and give them stakes + delegate_hotkey = mock_wallets[0].hotkey.ss58_address - for wallet in mock_wallets: - _ = _subtensor_mock.force_register_neuron( - netuid=1, - hotkey=wallet.hotkey.ss58_address, - coldkey=wallet.coldkey.ss58_address, - stake=mock_stakes[wallet.hotkey_str].rao, - ) + # Register mock neuron, only for w0 + _ = _subtensor_mock.force_register_neuron( + netuid=1, + hotkey=delegate_hotkey, + coldkey=mock_wallets[0].coldkey.ss58_address, + stake=mock_stakes["w0"], + ) - cli = bittensor.cli(config) + # Become a delegate + _ = _subtensor_mock.nominate( + wallet=mock_wallets[0], + ) + + # Stake to the delegate with the other coldkeys + for wallet in mock_wallets[1:]: + # Give balance + _ = _subtensor_mock.force_set_balance( + ss58_address=wallet.coldkeypub.ss58_address, + balance=( + mock_stakes[wallet.name] + _subtensor_mock.get_existential_deposit() + ).tao + + 1.0, + ) + _ = _subtensor_mock.add_stake( + wallet=wallet, + 
hotkey_ss58=delegate_hotkey, + amount=mock_stakes[wallet.name], + ) def mock_get_wallet(*args, **kwargs): - if kwargs.get("hotkey"): + if kwargs.get("config") and kwargs["config"].get("wallet"): for wallet in mock_wallets: - if wallet.hotkey_str == kwargs.get("hotkey"): + if wallet.name == kwargs["config"].wallet.name: return wallet - else: - return mock_wallets[0] with patch("bittensor.wallet") as mock_create_wallet: mock_create_wallet.side_effect = mock_get_wallet - # Check stakes before unstaking for wallet in mock_wallets: + # Check stakes before unstaking stake = _subtensor_mock.get_stake_for_coldkey_and_hotkey( - hotkey_ss58=wallet.hotkey.ss58_address, + hotkey_ss58=delegate_hotkey, coldkey_ss58=wallet.coldkey.ss58_address, ) - self.assertEqual(stake.rao, mock_stakes[wallet.hotkey_str].rao) + self.assertEqual(stake.rao, mock_stakes[wallet.name].rao) - cli.run() + config.wallet.name = wallet.name + config.hotkey_ss58address = delegate_hotkey # Single unstake - # Check stakes after unstaking - for wallet in mock_wallets: - stake = _subtensor_mock.get_stake_for_coldkey_and_hotkey( - hotkey_ss58=wallet.hotkey.ss58_address, - coldkey_ss58=wallet.coldkey.ss58_address, - ) - # because the amount is less than the threshold, none of these should unstake - self.assertEqual(stake.tao, mock_stakes[wallet.hotkey_str].tao) + cli = bittensor.cli(config) + with patch.object(_subtensor_mock, "_do_unstake") as mock_unstake: + with patch( + "bittensor.__console__.print" + ) as mock_print: # Catch console print + cli.run() + + # Filter for console print calls + console_prints = [ + call[0][0] for call in mock_print.call_args_list + ] + minimum_print = filter( + lambda x: "less than minimum of" in x, console_prints + ) + + unstake_calls = mock_unstake.call_args_list + self.assertEqual(len(unstake_calls), 1) # Only one unstake call + + _, kwargs = unstake_calls[0] + # Verify delegate was unstaked + self.assertEqual(kwargs["hotkey_ss58"], delegate_hotkey) + 
self.assertEqual(kwargs["wallet"].name, wallet.name) + + if wallet.name == "w0": + # This wallet owns the delegate + # Should unstake specified amount + self.assertEqual( + kwargs["amount"], bittensor.Balance(config.amount) + ) + # No warning for w0 + self.assertRaises( + StopIteration, next, minimum_print + ) # No warning for w0 + else: + # Should unstake *all* the stake + staked = mock_stakes[wallet.name] + self.assertEqual(kwargs["amount"], staked) + + # Check warning was printed + _ = next( + minimum_print + ) # Doesn't raise, so the warning was printed def test_unstake_all(self, _): config = self.config @@ -1671,6 +1722,129 @@ def mock_get_wallet(*args, **kwargs): ) self.assertAlmostEqual(balance.tao, mock_balance.tao, places=4) + def test_stake_with_thresholds(self, _): + config = self.config + config.command = "stake" + config.subcommand = "add" + config.no_prompt = True + + min_stake: Balance = _subtensor_mock.get_minimum_required_stake() + # Must be a float + wallet_names = ["w0", "w1", "w2"] + config.all_hotkeys = False + # Notice no max_stake specified + + mock_stakes: Dict[str, Balance] = { + "w0": min_stake - 1, # new stake will be below the threshold + "w1": min_stake - 2, + "w2": min_stake - 5, + } + + mock_wallets = [ + SimpleNamespace( + name=wallet_name, + coldkey=_get_mock_keypair(idx, self.id()), + coldkeypub=_get_mock_keypair(idx, self.id()), + hotkey_str="hk{}".format(idx), # doesn't matter + hotkey=_get_mock_keypair(idx + 100, self.id()), # doesn't matter + ) + for idx, wallet_name in enumerate(wallet_names) + ] + + delegate_hotkey = mock_wallets[0].hotkey.ss58_address + + # Register mock neuron, only for w0 + _ = _subtensor_mock.force_register_neuron( + netuid=1, + hotkey=delegate_hotkey, + coldkey=mock_wallets[0].coldkey.ss58_address, + balance=(mock_stakes["w0"] + _subtensor_mock.get_existential_deposit()).tao + + 1.0, + ) # No stake, but enough balance + + # Become a delegate + _ = _subtensor_mock.nominate( + wallet=mock_wallets[0], + ) + 
+ # Give enough balance + for wallet in mock_wallets[1:]: + # Give balance + _ = _subtensor_mock.force_set_balance( + ss58_address=wallet.coldkeypub.ss58_address, + balance=( + mock_stakes[wallet.name] + _subtensor_mock.get_existential_deposit() + ).tao + + 1.0, + ) + + def mock_get_wallet(*args, **kwargs): + if kwargs.get("config") and kwargs["config"].get("wallet"): + for wallet in mock_wallets: + if wallet.name == kwargs["config"].wallet.name: + return wallet + + with patch("bittensor.wallet") as mock_create_wallet: + mock_create_wallet.side_effect = mock_get_wallet + + for wallet in mock_wallets: + # Check balances and stakes before staking + stake = _subtensor_mock.get_stake_for_coldkey_and_hotkey( + hotkey_ss58=delegate_hotkey, + coldkey_ss58=wallet.coldkey.ss58_address, + ) + self.assertEqual(stake.rao, 0) # No stake + + balance = _subtensor_mock.get_balance( + address=wallet.coldkeypub.ss58_address + ) + self.assertGreaterEqual( + balance, mock_stakes[wallet.name] + ) # Enough balance + + config.wallet.name = wallet.name + config.wallet.hotkey = delegate_hotkey # Single stake + config.amount = mock_stakes[ + wallet.name + ].tao # Stake an amount below the threshold + + cli = bittensor.cli(config) + with patch.object(_subtensor_mock, "_do_stake") as mock_stake: + with patch( + "bittensor.__console__.print" + ) as mock_print: # Catch console print + cli.run() + + # Filter for console print calls + console_prints = [ + call[0][0] for call in mock_print.call_args_list + ] + minimum_print = filter( + lambda x: "below the minimum required" in x, console_prints + ) + + if wallet.name == "w0": + # This wallet owns the delegate + stake_calls = mock_stake.call_args_list + # Can stake below the threshold + self.assertEqual(len(stake_calls), 1) + + _, kwargs = stake_calls[0] + + # Should stake specified amount + self.assertEqual( + kwargs["amount"], bittensor.Balance(config.amount) + ) + # No error for w0 + self.assertRaises( + StopIteration, next, minimum_print + ) # 
No warning for w0 + else: + # Should not call stake + self.assertEqual(len(mock_stake.call_args_list), 0) + # Should print error + self.assertIsNotNone(next(minimum_print)) + def test_nominate(self, _): config = self.config config.command = "root" @@ -2519,9 +2693,7 @@ def test_set_identity_command( "bittensor.wallet", return_value=mock_wallet ), patch("bittensor.__console__", MagicMock()), patch( "rich.prompt.Prompt.ask", side_effect=["y", "y"] - ), patch( - "sys.exit" - ) as mock_exit: + ), patch("sys.exit") as mock_exit: # Act if expected_exception: with pytest.raises(expected_exception) as exc_info: diff --git a/tests/integration_tests/test_cli_no_network.py b/tests/integration_tests/test_cli_no_network.py index 975de7dc9b..cd9f89ee6a 100644 --- a/tests/integration_tests/test_cli_no_network.py +++ b/tests/integration_tests/test_cli_no_network.py @@ -1101,7 +1101,7 @@ def test_delegate_prompt_hotkey(self, _): delegate_ss58 = _get_mock_coldkey(0) with patch("bittensor.commands.delegates.show_delegates"): with patch( - "bittensor.subtensor.subtensor.get_delegates", + "bittensor.subtensor.Subtensor.get_delegates", return_value=[ bittensor.DelegateInfo( hotkey_ss58=delegate_ss58, # return delegate with mock coldkey @@ -1188,7 +1188,7 @@ def test_undelegate_prompt_hotkey(self, _): delegate_ss58 = _get_mock_coldkey(0) with patch("bittensor.commands.delegates.show_delegates"): with patch( - "bittensor.subtensor.subtensor.get_delegates", + "bittensor.subtensor.Subtensor.get_delegates", return_value=[ bittensor.DelegateInfo( hotkey_ss58=delegate_ss58, # return delegate with mock coldkey @@ -1273,9 +1273,9 @@ def test_vote_command_prompt_proposal_hash(self, _): mock_proposal_hash = "mock_proposal_hash" - with patch("bittensor.subtensor.subtensor.is_senate_member", return_value=True): + with patch("bittensor.subtensor.Subtensor.is_senate_member", return_value=True): with patch( - "bittensor.subtensor.subtensor.get_vote_data", + 
"bittensor.subtensor.Subtensor.get_vote_data", return_value={"index": 1}, ): # Patch command to exit early diff --git a/tests/integration_tests/test_subtensor_integration.py b/tests/integration_tests/test_subtensor_integration.py index 6100eeee10..e3661210bc 100644 --- a/tests/integration_tests/test_subtensor_integration.py +++ b/tests/integration_tests/test_subtensor_integration.py @@ -17,12 +17,10 @@ # DEALINGS IN THE SOFTWARE. import random -import socket -import os import unittest from queue import Empty as QueueEmpty from unittest.mock import MagicMock, patch -from types import SimpleNamespace + import numpy as np import pytest from substrateinterface import Keypair @@ -31,9 +29,7 @@ from bittensor.mock import MockSubtensor from bittensor.utils import weight_utils from bittensor.utils.balance import Balance -from substrateinterface import Keypair from tests.helpers import ( - _get_mock_hotkey, _get_mock_coldkey, MockConsole, _get_mock_keypair, @@ -710,11 +706,8 @@ def test_registration_multiprocessed_already_registered(self): mock_set_status.__exit__ = MagicMock(return_value=True) # should return True - assert ( - self.subtensor.register( - wallet=wallet, netuid=3, num_processes=3, update_interval=5 - ) - == True + assert self.subtensor.register( + wallet=wallet, netuid=3, num_processes=3, update_interval=5 ) # calls until True and once again before exiting subtensor class @@ -741,7 +734,7 @@ def is_registered_side_effect(*args, **kwargs): ) self.subtensor.get_neuron_for_pubkey_and_subnet = MagicMock( - return_value=bittensor.NeuronInfo._null_neuron() + return_value=bittensor.NeuronInfo.get_null_neuron() ) self.subtensor.is_hotkey_registered = MagicMock( side_effect=is_registered_side_effect @@ -823,7 +816,7 @@ class ExitEarly(Exception): # then should create a new pow and check if it is stale # then should enter substrate and exit early because of test self.subtensor.get_neuron_for_pubkey_and_subnet = MagicMock( - 
return_value=bittensor.NeuronInfo._null_neuron() + return_value=bittensor.NeuronInfo.get_null_neuron() ) with pytest.raises(ExitEarly): bittensor.subtensor.register(mock_subtensor_self, mock_wallet, netuid=3) diff --git a/tests/unit_tests/extrinsics/test_delegation.py b/tests/unit_tests/extrinsics/test_delegation.py index 9a321dbe64..42dcf4e706 100644 --- a/tests/unit_tests/extrinsics/test_delegation.py +++ b/tests/unit_tests/extrinsics/test_delegation.py @@ -1,6 +1,6 @@ import pytest from unittest.mock import MagicMock, patch -from bittensor.subtensor import subtensor as Subtensor +from bittensor.subtensor import Subtensor from bittensor.wallet import wallet as Wallet from bittensor.utils.balance import Balance from bittensor.extrinsics.delegation import ( diff --git a/tests/unit_tests/extrinsics/test_init.py b/tests/unit_tests/extrinsics/test_init.py new file mode 100644 index 0000000000..8e3caaf900 --- /dev/null +++ b/tests/unit_tests/extrinsics/test_init.py @@ -0,0 +1,49 @@ +"""Tests for bittensor/extrinsics/__ini__ module.""" + +from bittensor.utils import format_error_message + + +def test_format_error_message_with_right_error_message(): + # Prep + fake_error_message = { + "type": "SomeType", + "name": "SomeErrorName", + "docs": ["Some error description."], + } + + # Call + result = format_error_message(fake_error_message) + + # Assertions + + assert "SomeType" in result + assert "SomeErrorName" in result + assert "Some error description." 
in result + + +def test_format_error_message_with_empty_error_message(): + # Prep + fake_error_message = {} + + # Call + result = format_error_message(fake_error_message) + + # Assertions + + assert "UnknownType" in result + assert "UnknownError" in result + assert "Unknown Description" in result + + +def test_format_error_message_with_wrong_type_error_message(): + # Prep + fake_error_message = None + + # Call + result = format_error_message(fake_error_message) + + # Assertions + + assert "UnknownType" in result + assert "UnknownError" in result + assert "Unknown Description" in result diff --git a/tests/unit_tests/extrinsics/test_network.py b/tests/unit_tests/extrinsics/test_network.py index a11f53111f..67df030ffe 100644 --- a/tests/unit_tests/extrinsics/test_network.py +++ b/tests/unit_tests/extrinsics/test_network.py @@ -1,6 +1,6 @@ import pytest from unittest.mock import MagicMock, patch -from bittensor.subtensor import subtensor as Subtensor +from bittensor.subtensor import Subtensor from bittensor.wallet import wallet as Wallet from bittensor.extrinsics.network import ( set_hyperparameter_extrinsic, diff --git a/tests/unit_tests/extrinsics/test_prometheus.py b/tests/unit_tests/extrinsics/test_prometheus.py index 0458206701..7d9c975fbc 100644 --- a/tests/unit_tests/extrinsics/test_prometheus.py +++ b/tests/unit_tests/extrinsics/test_prometheus.py @@ -1,7 +1,7 @@ import pytest from unittest.mock import MagicMock, patch import bittensor -from bittensor.subtensor import subtensor as Subtensor +from bittensor.subtensor import Subtensor from bittensor.wallet import wallet as Wallet from bittensor.extrinsics.prometheus import prometheus_extrinsic diff --git a/tests/unit_tests/extrinsics/test_registration.py b/tests/unit_tests/extrinsics/test_registration.py index bad8552b17..49805f0cf4 100644 --- a/tests/unit_tests/extrinsics/test_registration.py +++ b/tests/unit_tests/extrinsics/test_registration.py @@ -1,6 +1,6 @@ import pytest from unittest.mock import MagicMock, 
patch -from bittensor.subtensor import subtensor as Subtensor +from bittensor.subtensor import Subtensor from bittensor.wallet import wallet as Wallet from bittensor.utils.registration import POWSolution from bittensor.extrinsics.registration import ( @@ -269,9 +269,7 @@ def test_burned_register_extrinsic( return_value=(recycle_success, "Mock error message"), ), patch.object( mock_subtensor, "is_hotkey_registered", return_value=is_registered - ), patch( - "rich.prompt.Confirm.ask", return_value=prompt_response - ) as mock_confirm: + ), patch("rich.prompt.Confirm.ask", return_value=prompt_response) as mock_confirm: # Act result = burned_register_extrinsic( subtensor=mock_subtensor, wallet=mock_wallet, netuid=123, prompt=True @@ -313,9 +311,7 @@ def test_register_extrinsic_without_pow( mock_subtensor, "get_neuron_for_pubkey_and_subnet", return_value=MagicMock(is_null=neuron_is_null), - ), patch( - "rich.prompt.Confirm.ask", return_value=prompt_response - ), patch( + ), patch("rich.prompt.Confirm.ask", return_value=prompt_response), patch( "torch.cuda.is_available", return_value=cuda_available ): # Act @@ -373,10 +369,8 @@ def test_register_extrinsic_with_pow( ), patch.object( mock_subtensor, "_do_pow_register", - return_value=(registration_success, "key is already registered"), - ), patch( - "torch.cuda.is_available", return_value=cuda - ): + return_value=(registration_success, "HotKeyAlreadyRegisteredInSubNet"), + ), patch("torch.cuda.is_available", return_value=cuda): # Act if pow_success: mock_pow_solution.is_stale.return_value = pow_stale diff --git a/tests/unit_tests/extrinsics/test_root.py b/tests/unit_tests/extrinsics/test_root.py index 5036fdcbbd..b801f7b4e1 100644 --- a/tests/unit_tests/extrinsics/test_root.py +++ b/tests/unit_tests/extrinsics/test_root.py @@ -1,6 +1,6 @@ import pytest from unittest.mock import MagicMock, patch -from bittensor.subtensor import subtensor as Subtensor +from bittensor.subtensor import Subtensor from bittensor.extrinsics.root 
import ( root_register_extrinsic, set_root_weights_extrinsic, @@ -191,9 +191,7 @@ def test_set_root_weights_extrinsic( return_value=(expected_success, "Mock error"), ), patch.object( mock_subtensor, "min_allowed_weights", return_value=0 - ), patch.object( - mock_subtensor, "max_weight_limit", return_value=1 - ), patch( + ), patch.object(mock_subtensor, "max_weight_limit", return_value=1), patch( "rich.prompt.Confirm.ask", return_value=user_response ) as mock_confirm: # Act diff --git a/tests/unit_tests/extrinsics/test_senate.py b/tests/unit_tests/extrinsics/test_senate.py index 8310fc38fa..66849efc5c 100644 --- a/tests/unit_tests/extrinsics/test_senate.py +++ b/tests/unit_tests/extrinsics/test_senate.py @@ -58,9 +58,7 @@ def test_register_senate_extrinsic( "bittensor.extrinsics.senate.Confirm.ask", return_value=not prompt ), patch("bittensor.extrinsics.senate.time.sleep"), patch.object( mock_subtensor.substrate, "compose_call" - ), patch.object( - mock_subtensor.substrate, "create_signed_extrinsic" - ), patch.object( + ), patch.object(mock_subtensor.substrate, "create_signed_extrinsic"), patch.object( mock_subtensor.substrate, "submit_extrinsic", return_value=MagicMock( @@ -153,9 +151,7 @@ def test_vote_senate_extrinsic( "bittensor.extrinsics.senate.Confirm.ask", return_value=not prompt ), patch("bittensor.extrinsics.senate.time.sleep"), patch.object( mock_subtensor.substrate, "compose_call" - ), patch.object( - mock_subtensor.substrate, "create_signed_extrinsic" - ), patch.object( + ), patch.object(mock_subtensor.substrate, "create_signed_extrinsic"), patch.object( mock_subtensor.substrate, "submit_extrinsic", return_value=MagicMock( @@ -219,9 +215,7 @@ def test_leave_senate_extrinsic( "bittensor.extrinsics.senate.Confirm.ask", return_value=not prompt ), patch("bittensor.extrinsics.senate.time.sleep"), patch.object( mock_subtensor.substrate, "compose_call" - ), patch.object( - mock_subtensor.substrate, "create_signed_extrinsic" - ), patch.object( + ), 
patch.object(mock_subtensor.substrate, "create_signed_extrinsic"), patch.object( mock_subtensor.substrate, "submit_extrinsic", return_value=MagicMock( @@ -229,9 +223,7 @@ def test_leave_senate_extrinsic( process_events=MagicMock(), error_message="error", ), - ), patch.object( - mock_wallet, "is_senate_member", return_value=is_registered - ): + ), patch.object(mock_wallet, "is_senate_member", return_value=is_registered): # Act result = leave_senate_extrinsic( subtensor=mock_subtensor, diff --git a/tests/unit_tests/extrinsics/test_serving.py b/tests/unit_tests/extrinsics/test_serving.py index 513fa5df52..7aa3ebf5b4 100644 --- a/tests/unit_tests/extrinsics/test_serving.py +++ b/tests/unit_tests/extrinsics/test_serving.py @@ -1,7 +1,7 @@ import pytest from unittest.mock import MagicMock, patch -from bittensor.subtensor import subtensor as Subtensor +from bittensor.subtensor import Subtensor from bittensor.wallet import wallet as Wallet from bittensor.axon import axon as Axon from bittensor.extrinsics.serving import ( @@ -365,7 +365,7 @@ def test_publish_metadata( subtensor=mock_subtensor, wallet=mock_wallet, netuid=net_uid, - type=type_u, + data_type=type_u, data=data, wait_for_inclusion=wait_for_inclusion, wait_for_finalization=wait_for_finalization, diff --git a/tests/unit_tests/extrinsics/test_staking.py b/tests/unit_tests/extrinsics/test_staking.py index 0a77ceb2c7..c3b888520b 100644 --- a/tests/unit_tests/extrinsics/test_staking.py +++ b/tests/unit_tests/extrinsics/test_staking.py @@ -114,9 +114,6 @@ def test_add_stake_extrinsic( else amount ) - if staking_balance > bittensor.Balance.from_rao(1000): - staking_balance = staking_balance - bittensor.Balance.from_rao(1000) - with patch.object( mock_subtensor, "_do_stake", return_value=expected_success ) as mock_add_stake, patch.object( @@ -133,11 +130,22 @@ def test_add_stake_extrinsic( else mock_other_owner_wallet.coldkeypub.ss58_address, ), patch.object( mock_subtensor, "is_hotkey_delegate", 
return_value=hotkey_delegate - ), patch.object( - mock_subtensor, "get_delegate_take", return_value=0.01 - ), patch( + ), patch.object(mock_subtensor, "get_delegate_take", return_value=0.01), patch( "rich.prompt.Confirm.ask", return_value=user_accepts - ) as mock_confirm: + ) as mock_confirm, patch.object( + mock_subtensor, + "get_minimum_required_stake", + return_value=bittensor.Balance.from_tao(0.01), + ), patch.object( + mock_subtensor, + "get_existential_deposit", + return_value=bittensor.Balance.from_rao(100_000), + ): + mock_balance = mock_subtensor.get_balance() + existential_deposit = mock_subtensor.get_existential_deposit() + if staking_balance > mock_balance - existential_deposit: + staking_balance = mock_balance - existential_deposit + # Act if not hotkey_owner and not hotkey_delegate: with pytest.raises(exception): @@ -506,9 +514,7 @@ def stake_side_effect(hotkey_ss58, *args, **kwargs): mock_subtensor, "_do_stake", side_effect=stake_side_effect ) as mock_do_stake, patch.object( mock_subtensor, "tx_rate_limit", return_value=0 - ), patch( - "rich.prompt.Confirm.ask", return_value=prompt_response - ) as mock_confirm: + ), patch("rich.prompt.Confirm.ask", return_value=prompt_response) as mock_confirm: # Act if exception: with pytest.raises(exception) as exc_info: diff --git a/tests/unit_tests/extrinsics/test_unstaking.py b/tests/unit_tests/extrinsics/test_unstaking.py index d6ff094dd8..0fa6ba84c4 100644 --- a/tests/unit_tests/extrinsics/test_unstaking.py +++ b/tests/unit_tests/extrinsics/test_unstaking.py @@ -39,8 +39,8 @@ def mock_get_minimum_required_stake(): ("5FHneW46...", 10.0, True, True, True, False, False, False), # Not enough stake to unstake ("5FHneW46...", 1000.0, True, True, False, None, False, False), - # Unsuccessful - unstake threshold not reached - (None, 0.01, True, True, False, None, False, False), + # Successful - unstake threshold not reached + (None, 0.01, True, True, False, None, True, True), # Successful unstaking all (None, None, 
False, False, False, None, True, True), # Failure - unstaking failed @@ -51,7 +51,7 @@ def mock_get_minimum_required_stake(): "successful-with-prompt", "failure-prompt-declined", "failure-not-enough-stake", - "failure-threshold-not-reached", + "success-threshold-not-reached", "success-unstake-all", "failure-unstake-failed", ], @@ -83,9 +83,7 @@ def test_unstake_extrinsic( mock_subtensor, "get_stake_for_coldkey_and_hotkey", return_value=mock_current_stake, - ), patch( - "rich.prompt.Confirm.ask", return_value=user_accepts - ) as mock_confirm: + ), patch("rich.prompt.Confirm.ask", return_value=user_accepts) as mock_confirm: result = unstake_extrinsic( subtensor=mock_subtensor, wallet=mock_wallet, @@ -168,18 +166,20 @@ def test_unstake_extrinsic( None, None, ), - # Unsuccessful unstake - threshold not reached + # Successful unstake - new stake below threshold ( ["5FHneW46..."], - [0.01], + [ + 100 - mock_get_minimum_required_stake() + 0.01 + ], # New stake just below threshold 100, True, True, False, True, - [None], - False, - 0, + [True], + True, # Sucessful unstake + 1, None, None, ), @@ -249,7 +249,7 @@ def test_unstake_extrinsic( "partial-success-one-fail", "success-no-hotkey", "failure-not-enough-stake", - "failure-threshold-not-reached", + "success-threshold-not-reached", "failure-prompt-declined", "failure-type-error-hotkeys", "failure-value-error-amounts", @@ -291,15 +291,11 @@ def unstake_side_effect(hotkey_ss58, *args, **kwargs): side_effect=mock_get_minimum_required_stake, ), patch.object( mock_subtensor, "get_balance", return_value=Balance.from_tao(wallet_balance) - ), patch.object( - mock_subtensor, "tx_rate_limit", return_value=0 - ), patch.object( + ), patch.object(mock_subtensor, "tx_rate_limit", return_value=0), patch.object( mock_subtensor, "get_stake_for_coldkey_and_hotkey", return_value=mock_current_stake, - ), patch( - "rich.prompt.Confirm.ask", return_value=prompt_response - ) as mock_confirm: + ), patch("rich.prompt.Confirm.ask", 
return_value=prompt_response) as mock_confirm: # Act if exception: with pytest.raises(exception) as exc_info: diff --git a/tests/unit_tests/test_axon.py b/tests/unit_tests/test_axon.py index 5b82148494..cfb46c32c2 100644 --- a/tests/unit_tests/test_axon.py +++ b/tests/unit_tests/test_axon.py @@ -17,7 +17,11 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. -import unittest + +# Standard Lib +import re +from dataclasses import dataclass + from typing import Any from unittest import IsolatedAsyncioTestCase from unittest.mock import AsyncMock, MagicMock, patch @@ -25,12 +29,13 @@ # Third Party import netaddr -# Standard Lib import pytest from starlette.requests import Request +from fastapi.testclient import TestClient # Bittensor import bittensor +from bittensor import Synapse, RunException from bittensor.axon import AxonMiddleware from bittensor.axon import axon as Axon @@ -119,7 +124,7 @@ def test_log_and_handle_error(): synapse = log_and_handle_error(synapse, Exception("Error"), 500, 100) assert synapse.axon.status_code == 500 - assert synapse.axon.status_message == "Error" + assert re.match(r"Internal Server Error #[\da-f\-]+", synapse.axon.status_message) assert synapse.axon.process_time is not None @@ -163,15 +168,20 @@ def axon_instance(): # Mocks +@dataclass class MockWallet: - def __init__(self, hotkey): - self.hotkey = hotkey + hotkey: Any + coldkey: Any = None + coldkeypub: Any = None class MockHotkey: def __init__(self, ss58_address): self.ss58_address = ss58_address + def sign(self, *args, **kwargs): + return f"Signed: {args!r} {kwargs!r}".encode() + class MockInfo: def to_string(self): @@ -479,8 +489,8 @@ async def test_preprocess(self): assert synapse.axon.version == str(bittensor.__version_as_int__) assert synapse.axon.uuid == "1234" assert synapse.axon.nonce is not None - assert synapse.axon.status_message == "Success" - assert synapse.axon.status_code == "100" 
+ assert synapse.axon.status_message is None + assert synapse.axon.status_code == 100 assert synapse.axon.signature == "0xaabbccdd" # Check if the preprocess function fills the dendrite information into the synapse @@ -491,5 +501,115 @@ async def test_preprocess(self): assert synapse.name == "request_name" -if __name__ == "__main__": - unittest.main() +class SynapseHTTPClient(TestClient): + def post_synapse(self, synapse: Synapse): + return self.post( + f"/{synapse.__class__.__name__}", + json=synapse.model_dump(), + headers={"computed_body_hash": synapse.body_hash}, + ) + + +@pytest.mark.asyncio +class TestAxonHTTPAPIResponses: + @pytest.fixture + def axon(self): + return Axon( + ip="192.0.2.1", + external_ip="192.0.2.1", + wallet=MockWallet(MockHotkey("A"), MockHotkey("B"), MockHotkey("PUB")), + ) + + @pytest.fixture + def no_verify_axon(self, axon): + axon.default_verify = self.no_verify_fn + return axon + + @pytest.fixture + def http_client(self, axon): + return SynapseHTTPClient(axon.app) + + async def no_verify_fn(self, synapse): + return + + async def test_unknown_path(self, http_client): + response = http_client.get("/no_such_path") + assert (response.status_code, response.json()) == ( + 404, + { + "message": "Synapse name 'no_such_path' not found. 
Available synapses ['Synapse']" + }, + ) + + async def test_ping__no_dendrite(self, http_client): + response = http_client.post_synapse(bittensor.Synapse()) + assert (response.status_code, response.json()) == ( + 401, + { + "message": "Not Verified with error: No SS58 formatted address or public key provided" + }, + ) + + async def test_ping__without_verification(self, http_client, axon): + axon.verify_fns["Synapse"] = self.no_verify_fn + request_synapse = Synapse() + response = http_client.post_synapse(request_synapse) + assert response.status_code == 200 + response_synapse = Synapse(**response.json()) + assert response_synapse.axon.status_code == 200 + + @pytest.fixture + def custom_synapse_cls(self): + class CustomSynapse(Synapse): + pass + + return CustomSynapse + + async def test_synapse__explicitly_set_status_code( + self, http_client, axon, custom_synapse_cls, no_verify_axon + ): + error_message = "Essential resource for CustomSynapse not found" + + async def forward_fn(synapse: custom_synapse_cls): + synapse.axon.status_code = 404 + synapse.axon.status_message = error_message + return synapse + + axon.attach(forward_fn) + + response = http_client.post_synapse(custom_synapse_cls()) + assert response.status_code == 404 + response_synapse = custom_synapse_cls(**response.json()) + assert ( + response_synapse.axon.status_code, + response_synapse.axon.status_message, + ) == (404, error_message) + + async def test_synapse__exception_with_set_status_code( + self, http_client, axon, custom_synapse_cls, no_verify_axon + ): + error_message = "Conflicting request" + + async def forward_fn(synapse: custom_synapse_cls): + synapse.axon.status_code = 409 + raise RunException(message=error_message, synapse=synapse) + + axon.attach(forward_fn) + + response = http_client.post_synapse(custom_synapse_cls()) + assert response.status_code == 409 + assert response.json() == {"message": error_message} + + async def test_synapse__internal_error( + self, http_client, axon, 
custom_synapse_cls, no_verify_axon + ): + async def forward_fn(synapse: custom_synapse_cls): + raise ValueError("error with potentially sensitive information") + + axon.attach(forward_fn) + + response = http_client.post_synapse(custom_synapse_cls()) + assert response.status_code == 500 + response_data = response.json() + assert sorted(response_data.keys()) == ["message"] + assert re.match(r"Internal Server Error #[\da-f\-]+", response_data["message"]) diff --git a/tests/unit_tests/test_dendrite.py b/tests/unit_tests/test_dendrite.py index 9b0b6d7ddf..0505247728 100644 --- a/tests/unit_tests/test_dendrite.py +++ b/tests/unit_tests/test_dendrite.py @@ -46,6 +46,23 @@ def setup_dendrite(): return dendrite_obj +@pytest.fixture +def dendrite_obj(setup_dendrite): + return setup_dendrite + + +@pytest.fixture +def axon_info(): + return bittensor.AxonInfo( + version=1, + ip="127.0.0.1", + port=666, + ip_type=4, + hotkey="hot", + coldkey="cold", + ) + + @pytest.fixture(scope="session") def setup_axon(): axon = bittensor.axon() @@ -61,36 +78,32 @@ def test_init(setup_dendrite): assert dendrite_obj.keypair == setup_dendrite.keypair -def test_str(setup_dendrite): - dendrite_obj = setup_dendrite - expected_string = "dendrite({})".format(setup_dendrite.keypair.ss58_address) +def test_str(dendrite_obj): + expected_string = "dendrite({})".format(dendrite_obj.keypair.ss58_address) assert str(dendrite_obj) == expected_string -def test_repr(setup_dendrite): - dendrite_obj = setup_dendrite - expected_string = "dendrite({})".format(setup_dendrite.keypair.ss58_address) +def test_repr(dendrite_obj): + expected_string = "dendrite({})".format(dendrite_obj.keypair.ss58_address) assert repr(dendrite_obj) == expected_string -def test_close(setup_dendrite, setup_axon): +def test_close(dendrite_obj, setup_axon): axon = setup_axon - dendrite_obj = setup_dendrite # Query the axon to open a session dendrite_obj.query(axon, SynapseDummy(input=1)) # Session should be automatically closed after query 
- assert dendrite_obj._session == None + assert dendrite_obj._session is None @pytest.mark.asyncio -async def test_aclose(setup_dendrite, setup_axon): +async def test_aclose(dendrite_obj, setup_axon): axon = setup_axon - dendrite_obj = setup_dendrite # Use context manager to open an async session async with dendrite_obj: resp = await dendrite_obj([axon], SynapseDummy(input=1), deserialize=False) # Close should automatically be called on the session after context manager scope - assert dendrite_obj._session == None + assert dendrite_obj._session is None class AsyncMock(Mock): @@ -272,3 +285,52 @@ def test_terminal_info_error_cases( version=version, nonce=nonce, ) + + +@pytest.mark.asyncio +async def test_dendrite__call__success_response( + axon_info, dendrite_obj, mock_aioresponse +): + input_synapse = SynapseDummy(input=1) + expected_synapse = SynapseDummy( + **( + input_synapse.model_dump() + | dict( + output=2, + axon=TerminalInfo( + status_code=200, + status_message="Success", + process_time=0.1, + ), + ) + ) + ) + mock_aioresponse.post( + f"http://127.0.0.1:666/SynapseDummy", + body=expected_synapse.json(), + ) + synapse = await dendrite_obj.call(axon_info, synapse=input_synapse) + + assert synapse.input == 1 + assert synapse.output == 2 + assert synapse.dendrite.status_code == 200 + assert synapse.dendrite.status_message == "Success" + assert synapse.dendrite.process_time >= 0 + + +@pytest.mark.asyncio +async def test_dendrite__call__handles_http_error_response( + axon_info, dendrite_obj, mock_aioresponse +): + status_code = 414 + message = "Custom Error" + + mock_aioresponse.post( + f"http://127.0.0.1:666/SynapseDummy", + status=status_code, + payload={"message": message}, + ) + synapse = await dendrite_obj.call(axon_info, synapse=SynapseDummy(input=1)) + + assert synapse.axon.status_code == synapse.dendrite.status_code == status_code + assert synapse.axon.status_message == synapse.dendrite.status_message == message diff --git 
a/tests/unit_tests/test_subtensor.py b/tests/unit_tests/test_subtensor.py index 4024a27f79..c3a295d078 100644 --- a/tests/unit_tests/test_subtensor.py +++ b/tests/unit_tests/test_subtensor.py @@ -25,8 +25,18 @@ # Application import bittensor +from bittensor.subtensor import ( + Subtensor, + _logger, + Balance, +) +from bittensor.chain_data import SubnetHyperparameters +from bittensor.commands.utils import normalize_hyperparameters from bittensor import subtensor_module -from bittensor.subtensor import subtensor as Subtensor, _logger +from bittensor.utils.balance import Balance + +U16_MAX = 65535 +U64_MAX = 18446744073709551615 def test_serve_axon_with_external_ip_set(): @@ -275,6 +285,7 @@ def test_determine_chain_endpoint_and_network( assert result_endpoint == expected_endpoint +# Subtensor().get_error_info_by_index tests @pytest.fixture def substrate(): class MockSubstrate: @@ -316,3 +327,1953 @@ def test_get_error_info_by_index_unknown_error(subtensor, mock_logger): mock_logger.assert_called_once_with( f"Subtensor returned an error with an unknown index: {fake_index}" ) + + +# Subtensor()._get_hyperparameter tests +def test_hyperparameter_subnet_does_not_exist(subtensor, mocker): + """Tests when the subnet does not exist.""" + subtensor.subnet_exists = mocker.MagicMock(return_value=False) + assert subtensor._get_hyperparameter("Difficulty", 1, None) is None + subtensor.subnet_exists.assert_called_once_with(1, None) + + +def test_hyperparameter_result_is_none(subtensor, mocker): + """Tests when query_subtensor returns None.""" + subtensor.subnet_exists = mocker.MagicMock(return_value=True) + subtensor.query_subtensor = mocker.MagicMock(return_value=None) + assert subtensor._get_hyperparameter("Difficulty", 1, None) is None + subtensor.subnet_exists.assert_called_once_with(1, None) + subtensor.query_subtensor.assert_called_once_with("Difficulty", None, [1]) + + +def test_hyperparameter_result_has_no_value(subtensor, mocker): + """Test when the result has no 
'value' attribute.""" + + subtensor.subnet_exists = mocker.MagicMock(return_value=True) + subtensor.query_subtensor = mocker.MagicMock(return_value=None) + assert subtensor._get_hyperparameter("Difficulty", 1, None) is None + subtensor.subnet_exists.assert_called_once_with(1, None) + subtensor.query_subtensor.assert_called_once_with("Difficulty", None, [1]) + + +def test_hyperparameter_success_int(subtensor, mocker): + """Test when query_subtensor returns an integer value.""" + subtensor.subnet_exists = mocker.MagicMock(return_value=True) + subtensor.query_subtensor = mocker.MagicMock( + return_value=mocker.MagicMock(value=100) + ) + assert subtensor._get_hyperparameter("Difficulty", 1, None) == 100 + subtensor.subnet_exists.assert_called_once_with(1, None) + subtensor.query_subtensor.assert_called_once_with("Difficulty", None, [1]) + + +def test_hyperparameter_success_float(subtensor, mocker): + """Test when query_subtensor returns a float value.""" + subtensor.subnet_exists = mocker.MagicMock(return_value=True) + subtensor.query_subtensor = mocker.MagicMock( + return_value=mocker.MagicMock(value=0.5) + ) + assert subtensor._get_hyperparameter("Difficulty", 1, None) == 0.5 + subtensor.subnet_exists.assert_called_once_with(1, None) + subtensor.query_subtensor.assert_called_once_with("Difficulty", None, [1]) + + +# Tests Hyper parameter calls +@pytest.mark.parametrize( + "method, param_name, value, expected_result_type", + [ + ("rho", "Rho", 1, int), + ("kappa", "Kappa", 1.0, float), + ("difficulty", "Difficulty", 1, int), + ("recycle", "Burn", 1, Balance), + ("immunity_period", "ImmunityPeriod", 1, int), + ("validator_batch_size", "ValidatorBatchSize", 1, int), + ("validator_prune_len", "ValidatorPruneLen", 1, int), + ("validator_logits_divergence", "ValidatorLogitsDivergence", 1.0, float), + ("validator_sequence_length", "ValidatorSequenceLength", 1, int), + ("validator_epochs_per_reset", "ValidatorEpochsPerReset", 1, int), + ("validator_epoch_length", 
"ValidatorEpochLen", 1, int), + ("validator_exclude_quantile", "ValidatorExcludeQuantile", 1.0, float), + ("max_allowed_validators", "MaxAllowedValidators", 1, int), + ("min_allowed_weights", "MinAllowedWeights", 1, int), + ("max_weight_limit", "MaxWeightsLimit", 1, float), + ("adjustment_alpha", "AdjustmentAlpha", 1, float), + ("bonds_moving_avg", "BondsMovingAverage", 1, float), + ("scaling_law_power", "ScalingLawPower", 1, float), + ("synergy_scaling_law_power", "SynergyScalingLawPower", 1, float), + ("subnetwork_n", "SubnetworkN", 1, int), + ("max_n", "MaxAllowedUids", 1, int), + ("blocks_since_epoch", "BlocksSinceEpoch", 1, int), + ("tempo", "Tempo", 1, int), + ], +) +def test_hyper_parameter_success_calls( + subtensor, mocker, method, param_name, value, expected_result_type +): + """ + Tests various hyperparameter methods to ensure they correctly fetch their respective hyperparameters and return the + expected values. + """ + # Prep + subtensor._get_hyperparameter = mocker.MagicMock(return_value=value) + + spy_u16_normalized_float = mocker.spy(subtensor_module, "U16_NORMALIZED_FLOAT") + spy_u64_normalized_float = mocker.spy(subtensor_module, "U64_NORMALIZED_FLOAT") + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + subtensor_method = getattr(subtensor, method) + result = subtensor_method(netuid=7, block=707) + + # Assertions + subtensor._get_hyperparameter.assert_called_once_with( + block=707, netuid=7, param_name=param_name + ) + # if we change the methods logic in the future we have to be make sure the returned type is correct + assert isinstance(result, expected_result_type) + + # Special cases + if method in [ + "kappa", + "validator_logits_divergence", + "validator_exclude_quantile", + "max_weight_limit", + ]: + spy_u16_normalized_float.assert_called_once() + + if method in ["adjustment_alpha", "bonds_moving_avg"]: + spy_u64_normalized_float.assert_called_once() + + if method in ["recycle"]: + spy_balance_from_rao.assert_called_once() 
+ + +def test_blocks_since_last_update_success_calls(subtensor, mocker): + """Tests the weights_rate_limit method to ensure it correctly fetches the LastUpdate hyperparameter.""" + # Prep + uid = 7 + mocked_current_block = 2 + mocked_result = {uid: 1} + subtensor._get_hyperparameter = mocker.MagicMock(return_value=mocked_result) + subtensor.get_current_block = mocker.MagicMock(return_value=mocked_current_block) + + # Call + result = subtensor.blocks_since_last_update(netuid=7, uid=uid) + + # Assertions + subtensor.get_current_block.assert_called_once() + subtensor._get_hyperparameter.assert_called_once_with( + param_name="LastUpdate", netuid=7 + ) + assert result == 1 + # if we change the methods logic in the future we have to be make sure the returned type is correct + assert isinstance(result, int) + + +def test_weights_rate_limit_success_calls(subtensor, mocker): + """Tests the weights_rate_limit method to ensure it correctly fetches the WeightsSetRateLimit hyperparameter.""" + # Prep + subtensor._get_hyperparameter = mocker.MagicMock(return_value=5) + + # Call + result = subtensor.weights_rate_limit(netuid=7) + + # Assertions + subtensor._get_hyperparameter.assert_called_once_with( + param_name="WeightsSetRateLimit", netuid=7 + ) + # if we change the methods logic in the future we have to be make sure the returned type is correct + assert isinstance(result, int) + + +@pytest.fixture +def sample_hyperparameters(): + return MagicMock(spec=SubnetHyperparameters) + + +def get_normalized_value(normalized_data, param_name): + return next( + ( + norm_value + for p_name, _, norm_value in normalized_data + if p_name == param_name + ), + None, + ) + + +@pytest.mark.parametrize( + "param_name, max_value, mid_value, zero_value, is_balance", + [ + ("adjustment_alpha", U64_MAX, U64_MAX / 2, 0, False), + ("max_weight_limit", U16_MAX, U16_MAX / 2, 0, False), + ("difficulty", U64_MAX, U64_MAX / 2, 0, False), + ("min_difficulty", U64_MAX, U64_MAX / 2, 0, False), + 
("max_difficulty", U64_MAX, U64_MAX / 2, 0, False), + ("bonds_moving_avg", U64_MAX, U64_MAX / 2, 0, False), + ("min_burn", 10000000000, 5000000000, 0, True), # These are in rao + ("max_burn", 20000000000, 10000000000, 0, True), + ], + ids=[ + "adjustment-alpha", + "max_weight_limit", + "difficulty", + "min_difficulty", + "max_difficulty", + "bonds_moving_avg", + "min_burn", + "max_burn", + ], +) +def test_hyperparameter_normalization( + sample_hyperparameters, param_name, max_value, mid_value, zero_value, is_balance +): + setattr(sample_hyperparameters, param_name, mid_value) + normalized = normalize_hyperparameters(sample_hyperparameters) + norm_value = get_normalized_value(normalized, param_name) + + # Mid-value test + if is_balance: + numeric_value = float(str(norm_value).lstrip(bittensor.__tao_symbol__)) + expected_tao = mid_value / 1e9 + assert ( + numeric_value == expected_tao + ), f"Mismatch in tao value for {param_name} at mid value" + else: + assert float(norm_value) == 0.5, f"Failed mid-point test for {param_name}" + + # Max-value test + setattr(sample_hyperparameters, param_name, max_value) + normalized = normalize_hyperparameters(sample_hyperparameters) + norm_value = get_normalized_value(normalized, param_name) + + if is_balance: + numeric_value = float(str(norm_value).lstrip(bittensor.__tao_symbol__)) + expected_tao = max_value / 1e9 + assert ( + numeric_value == expected_tao + ), f"Mismatch in tao value for {param_name} at max value" + else: + assert float(norm_value) == 1.0, f"Failed max value test for {param_name}" + + # Zero-value test + setattr(sample_hyperparameters, param_name, zero_value) + normalized = normalize_hyperparameters(sample_hyperparameters) + norm_value = get_normalized_value(normalized, param_name) + + if is_balance: + numeric_value = float(str(norm_value).lstrip(bittensor.__tao_symbol__)) + expected_tao = zero_value / 1e9 + assert ( + numeric_value == expected_tao + ), f"Mismatch in tao value for {param_name} at zero value" + 
else: + assert float(norm_value) == 0.0, f"Failed zero value test for {param_name}" + + +########################### +# Account functions tests # +########################### + + +# `get_total_stake_for_hotkey` tests +def test_get_total_stake_for_hotkey_success(subtensor, mocker): + """Tests successful retrieval of total stake for hotkey.""" + # Prep + subtensor.query_subtensor = mocker.MagicMock(return_value=mocker.MagicMock(value=1)) + fake_ss58_address = "12bzRJfh7arnnfPPUZHeJUaE62QLEwhK48QnH9LXeK2m1iZU" + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.get_total_stake_for_hotkey(ss58_address=fake_ss58_address) + + # Assertions + subtensor.query_subtensor.assert_called_once_with( + "TotalHotkeyStake", None, [fake_ss58_address] + ) + spy_balance_from_rao.assert_called_once() + # if we change the methods logic in the future we have to be make sure the returned type is correct + assert isinstance(result, Balance) + + +def test_get_total_stake_for_hotkey_not_result(subtensor, mocker): + """Tests retrieval of total stake for hotkey when no result is returned.""" + # Prep + subtensor.query_subtensor = mocker.MagicMock(return_value=None) + fake_ss58_address = "12bzRJfh7arnnfPPUZHeJUaE62QLEwhK48QnH9LXeK2m1iZU" + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.get_total_stake_for_hotkey(ss58_address=fake_ss58_address) + + # Assertions + subtensor.query_subtensor.assert_called_once_with( + "TotalHotkeyStake", None, [fake_ss58_address] + ) + spy_balance_from_rao.assert_not_called() + # if we change the methods logic in the future we have to be make sure the returned type is correct + assert isinstance(result, type(None)) + + +def test_get_total_stake_for_hotkey_not_value(subtensor, mocker): + """Tests retrieval of total stake for hotkey when no value attribute is present.""" + # Prep + subtensor.query_subtensor = mocker.MagicMock(return_value=object) + fake_ss58_address = 
"12bzRJfh7arnnfPPUZHeJUaE62QLEwhK48QnH9LXeK2m1iZU" + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.get_total_stake_for_hotkey(ss58_address=fake_ss58_address) + + # Assertions + subtensor.query_subtensor.assert_called_once_with( + "TotalHotkeyStake", None, [fake_ss58_address] + ) + spy_balance_from_rao.assert_not_called() + # if we change the methods logic in the future we have to be make sure the returned type is correct + assert isinstance(subtensor.query_subtensor.return_value, object) + assert not hasattr(result, "value") + + +# `get_total_stake_for_coldkey` tests +def test_get_total_stake_for_coldkey_success(subtensor, mocker): + """Tests successful retrieval of total stake for coldkey.""" + # Prep + subtensor.query_subtensor = mocker.MagicMock(return_value=mocker.MagicMock(value=1)) + fake_ss58_address = "12bzRJfh7arnnfPPUZHeJUaE62QLEwhK48QnH9LXeK2m1iZU" + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.get_total_stake_for_coldkey(ss58_address=fake_ss58_address) + + # Assertions + subtensor.query_subtensor.assert_called_once_with( + "TotalColdkeyStake", None, [fake_ss58_address] + ) + spy_balance_from_rao.assert_called_once() + # if we change the methods logic in the future we have to be make sure the returned type is correct + assert isinstance(result, Balance) + + +def test_get_total_stake_for_coldkey_not_result(subtensor, mocker): + """Tests retrieval of total stake for coldkey when no result is returned.""" + # Prep + subtensor.query_subtensor = mocker.MagicMock(return_value=None) + fake_ss58_address = "12bzRJfh7arnnfPPUZHeJUaE62QLEwhK48QnH9LXeK2m1iZU" + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.get_total_stake_for_coldkey(ss58_address=fake_ss58_address) + + # Assertions + subtensor.query_subtensor.assert_called_once_with( + "TotalColdkeyStake", None, [fake_ss58_address] + ) + spy_balance_from_rao.assert_not_called() + # if we change 
the methods logic in the future we have to be make sure the returned type is correct + assert isinstance(result, type(None)) + + +def test_get_total_stake_for_coldkey_not_value(subtensor, mocker): + """Tests retrieval of total stake for coldkey when no value attribute is present.""" + # Prep + subtensor.query_subtensor = mocker.MagicMock(return_value=object) + fake_ss58_address = "12bzRJfh7arnnfPPUZHeJUaE62QLEwhK48QnH9LXeK2m1iZU" + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.get_total_stake_for_coldkey(ss58_address=fake_ss58_address) + + # Assertions + subtensor.query_subtensor.assert_called_once_with( + "TotalColdkeyStake", None, [fake_ss58_address] + ) + spy_balance_from_rao.assert_not_called() + # if we change the methods logic in the future we have to be make sure the returned type is correct + assert isinstance(subtensor.query_subtensor.return_value, object) + assert not hasattr(result, "value") + + +# `get_stake` tests +def test_get_stake_returns_correct_data(mocker, subtensor): + """Tests that get_stake returns correct data.""" + # Prep + hotkey_ss58 = "test_hotkey" + block = 123 + expected_query_result = [ + (mocker.MagicMock(value="coldkey1"), mocker.MagicMock(value=100)), + (mocker.MagicMock(value="coldkey2"), mocker.MagicMock(value=200)), + ] + mocker.patch.object( + subtensor, "query_map_subtensor", return_value=expected_query_result + ) + + # Call + result = subtensor.get_stake(hotkey_ss58, block) + + # Assertion + assert result == [ + ("coldkey1", Balance.from_rao(100)), + ("coldkey2", Balance.from_rao(200)), + ] + subtensor.query_map_subtensor.assert_called_once_with("Stake", block, [hotkey_ss58]) + + +def test_get_stake_no_block(mocker, subtensor): + """Tests get_stake with no block specified.""" + # Prep + hotkey_ss58 = "test_hotkey" + expected_query_result = [ + (MagicMock(value="coldkey1"), MagicMock(value=100)), + ] + mocker.patch.object( + subtensor, "query_map_subtensor", 
return_value=expected_query_result + ) + + # Call + result = subtensor.get_stake(hotkey_ss58) + + # Assertion + assert result == [("coldkey1", Balance.from_rao(100))] + subtensor.query_map_subtensor.assert_called_once_with("Stake", None, [hotkey_ss58]) + + +def test_get_stake_empty_result(mocker, subtensor): + """Tests get_stake with an empty result.""" + # Prep + hotkey_ss58 = "test_hotkey" + block = 123 + expected_query_result = [] + mocker.patch.object( + subtensor, "query_map_subtensor", return_value=expected_query_result + ) + + # Call + result = subtensor.get_stake(hotkey_ss58, block) + + # Assertion + assert result == [] + subtensor.query_map_subtensor.assert_called_once_with("Stake", block, [hotkey_ss58]) + + +# `does_hotkey_exist` tests +def test_does_hotkey_exist_true(mocker, subtensor): + """Test does_hotkey_exist returns True when hotkey exists and is valid.""" + # Prep + hotkey_ss58 = "test_hotkey" + block = 123 + mock_result = mocker.MagicMock(value="valid_coldkey") + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.does_hotkey_exist(hotkey_ss58, block) + + # Assertions + assert result is True + subtensor.query_subtensor.assert_called_once_with("Owner", block, [hotkey_ss58]) + + +def test_does_hotkey_exist_false_special_value(mocker, subtensor): + """Test does_hotkey_exist returns False when result value is the special value.""" + # Prep + hotkey_ss58 = "test_hotkey" + block = 123 + special_value = "5C4hrfjw9DjXZTzV3MwzrrAr9P1MJhSrvWGWqi1eSuyUpnhM" + mock_result = MagicMock(value=special_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.does_hotkey_exist(hotkey_ss58, block) + + # Assertions + assert result is False + subtensor.query_subtensor.assert_called_once_with("Owner", block, [hotkey_ss58]) + + +def test_does_hotkey_exist_false_no_value(mocker, subtensor): + """Test does_hotkey_exist returns False when result has no value 
attribute.""" + # Prep + hotkey_ss58 = "test_hotkey" + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.does_hotkey_exist(hotkey_ss58, block) + + # Assertions + assert result is False + subtensor.query_subtensor.assert_called_once_with("Owner", block, [hotkey_ss58]) + + +def test_does_hotkey_exist_false_no_result(mocker, subtensor): + """Test does_hotkey_exist returns False when query_subtensor returns None.""" + # Prep + hotkey_ss58 = "test_hotkey" + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + + # Call + result = subtensor.does_hotkey_exist(hotkey_ss58, block) + + # Assertions + assert result is False + subtensor.query_subtensor.assert_called_once_with("Owner", block, [hotkey_ss58]) + + +def test_does_hotkey_exist_no_block(mocker, subtensor): + """Test does_hotkey_exist with no block specified.""" + # Prep + hotkey_ss58 = "test_hotkey" + mock_result = mocker.MagicMock(value="valid_coldkey") + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.does_hotkey_exist(hotkey_ss58) + + # Assertions + assert result is True + subtensor.query_subtensor.assert_called_once_with("Owner", None, [hotkey_ss58]) + + +# `get_hotkey_owner` tests +def test_get_hotkey_owner_exists(mocker, subtensor): + """Test get_hotkey_owner when the hotkey exists.""" + # Prep + hotkey_ss58 = "test_hotkey" + block = 123 + expected_owner = "coldkey_owner" + mock_result = mocker.MagicMock(value=expected_owner) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + mocker.patch.object(subtensor, "does_hotkey_exist", return_value=True) + + # Call + result = subtensor.get_hotkey_owner(hotkey_ss58, block) + + # Assertions + assert result == expected_owner + subtensor.query_subtensor.assert_called_once_with("Owner", block, [hotkey_ss58]) + 
subtensor.does_hotkey_exist.assert_called_once_with(hotkey_ss58, block) + + +def test_get_hotkey_owner_does_not_exist(mocker, subtensor): + """Test get_hotkey_owner when the hotkey does not exist.""" + # Prep + hotkey_ss58 = "test_hotkey" + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + mocker.patch.object(subtensor, "does_hotkey_exist", return_value=False) + + # Call + result = subtensor.get_hotkey_owner(hotkey_ss58, block) + + # Assertions + assert result is None + subtensor.query_subtensor.assert_called_once_with("Owner", block, [hotkey_ss58]) + subtensor.does_hotkey_exist.assert_not_called() + + +def test_get_hotkey_owner_no_block(mocker, subtensor): + """Test get_hotkey_owner with no block specified.""" + # Prep + hotkey_ss58 = "test_hotkey" + expected_owner = "coldkey_owner" + mock_result = mocker.MagicMock(value=expected_owner) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + mocker.patch.object(subtensor, "does_hotkey_exist", return_value=True) + + # Call + result = subtensor.get_hotkey_owner(hotkey_ss58) + + # Assertions + assert result == expected_owner + subtensor.query_subtensor.assert_called_once_with("Owner", None, [hotkey_ss58]) + subtensor.does_hotkey_exist.assert_called_once_with(hotkey_ss58, None) + + +def test_get_hotkey_owner_no_value_attribute(mocker, subtensor): + """Test get_hotkey_owner when the result has no value attribute.""" + # Prep + hotkey_ss58 = "test_hotkey" + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + mocker.patch.object(subtensor, "does_hotkey_exist", return_value=True) + + # Call + result = subtensor.get_hotkey_owner(hotkey_ss58, block) + + # Assertions + assert result is None + subtensor.query_subtensor.assert_called_once_with("Owner", block, [hotkey_ss58]) + subtensor.does_hotkey_exist.assert_not_called() + + +# `get_axon_info` tests +def 
test_get_axon_info_success(mocker, subtensor): + """Test get_axon_info returns correct data when axon information is found.""" + # Prep + netuid = 1 + hotkey_ss58 = "test_hotkey" + block = 123 + mock_result = mocker.MagicMock( + value={ + "ip": "192.168.1.1", + "ip_type": 4, + "port": 8080, + "protocol": "tcp", + "version": "1.0", + "placeholder1": "data1", + "placeholder2": "data2", + } + ) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_axon_info(netuid, hotkey_ss58, block) + + # Asserts + assert result is not None + assert result.ip == "192.168.1.1" + assert result.ip_type == 4 + assert result.port == 8080 + assert result.protocol == "tcp" + assert result.version == "1.0" + assert result.placeholder1 == "data1" + assert result.placeholder2 == "data2" + assert result.hotkey == hotkey_ss58 + assert result.coldkey == "" + subtensor.query_subtensor.assert_called_once_with( + "Axons", block, [netuid, hotkey_ss58] + ) + + +def test_get_axon_info_no_data(mocker, subtensor): + """Test get_axon_info returns None when no axon information is found.""" + # Prep + netuid = 1 + hotkey_ss58 = "test_hotkey" + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + + # Call + result = subtensor.get_axon_info(netuid, hotkey_ss58, block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with( + "Axons", block, [netuid, hotkey_ss58] + ) + + +def test_get_axon_info_no_value_attribute(mocker, subtensor): + """Test get_axon_info returns None when result has no value attribute.""" + # Prep + netuid = 1 + hotkey_ss58 = "test_hotkey" + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_axon_info(netuid, hotkey_ss58, block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with( + "Axons", 
block, [netuid, hotkey_ss58] + ) + + +def test_get_axon_info_no_block(mocker, subtensor): + """Test get_axon_info with no block specified.""" + # Prep + netuid = 1 + hotkey_ss58 = "test_hotkey" + mock_result = mocker.MagicMock( + value={ + "ip": 3232235777, # 192.168.1.1 + "ip_type": 4, + "port": 8080, + "protocol": "tcp", + "version": "1.0", + "placeholder1": "data1", + "placeholder2": "data2", + } + ) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_axon_info(netuid, hotkey_ss58) + + # Asserts + assert result is not None + assert result.ip == "192.168.1.1" + assert result.ip_type == 4 + assert result.port == 8080 + assert result.protocol == "tcp" + assert result.version == "1.0" + assert result.placeholder1 == "data1" + assert result.placeholder2 == "data2" + assert result.hotkey == hotkey_ss58 + assert result.coldkey == "" + subtensor.query_subtensor.assert_called_once_with( + "Axons", None, [netuid, hotkey_ss58] + ) + + +# get_prometheus_info tests +def test_get_prometheus_info_success(mocker, subtensor): + """Test get_prometheus_info returns correct data when information is found.""" + # Prep + netuid = 1 + hotkey_ss58 = "test_hotkey" + block = 123 + mock_result = mocker.MagicMock( + value={ + "ip": 3232235777, # 192.168.1.1 + "ip_type": 4, + "port": 9090, + "version": "1.0", + "block": 1000, + } + ) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_prometheus_info(netuid, hotkey_ss58, block) + + # Asserts + assert result is not None + assert result.ip == "192.168.1.1" + assert result.ip_type == 4 + assert result.port == 9090 + assert result.version == "1.0" + assert result.block == 1000 + subtensor.query_subtensor.assert_called_once_with( + "Prometheus", block, [netuid, hotkey_ss58] + ) + + +def test_get_prometheus_info_no_data(mocker, subtensor): + """Test get_prometheus_info returns None when no information is found.""" + # Prep + 
netuid = 1 + hotkey_ss58 = "test_hotkey" + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + + # Call + result = subtensor.get_prometheus_info(netuid, hotkey_ss58, block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with( + "Prometheus", block, [netuid, hotkey_ss58] + ) + + +def test_get_prometheus_info_no_value_attribute(mocker, subtensor): + """Test get_prometheus_info returns None when result has no value attribute.""" + # Prep + netuid = 1 + hotkey_ss58 = "test_hotkey" + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_prometheus_info(netuid, hotkey_ss58, block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with( + "Prometheus", block, [netuid, hotkey_ss58] + ) + + +def test_get_prometheus_info_no_block(mocker, subtensor): + """Test get_prometheus_info with no block specified.""" + # Prep + netuid = 1 + hotkey_ss58 = "test_hotkey" + mock_result = MagicMock( + value={ + "ip": "192.168.1.1", + "ip_type": 4, + "port": 9090, + "version": "1.0", + "block": 1000, + } + ) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_prometheus_info(netuid, hotkey_ss58) + + # Asserts + assert result is not None + assert result.ip == "192.168.1.1" + assert result.ip_type == 4 + assert result.port == 9090 + assert result.version == "1.0" + assert result.block == 1000 + subtensor.query_subtensor.assert_called_once_with( + "Prometheus", None, [netuid, hotkey_ss58] + ) + + +########################### +# Global Parameters tests # +########################### + + +# `block` property test +def test_block_property(mocker, subtensor): + """Test block property returns the correct block number.""" + expected_block = 123 + mocker.patch.object(subtensor, "get_current_block", 
return_value=expected_block) + + result = subtensor.block + + assert result == expected_block + subtensor.get_current_block.assert_called_once() + + +# `total_issuance` tests +def test_total_issuance_success(mocker, subtensor): + """Test total_issuance returns correct data when issuance information is found.""" + # Prep + block = 123 + issuance_value = 1000 + mock_result = mocker.MagicMock(value=issuance_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.total_issuance(block) + + # Asserts + assert result is not None + subtensor.query_subtensor.assert_called_once_with("TotalIssuance", block) + spy_balance_from_rao.assert_called_once_with( + subtensor.query_subtensor.return_value.value + ) + + +def test_total_issuance_no_data(mocker, subtensor): + """Test total_issuance returns None when no issuance information is found.""" + # Prep + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.total_issuance(block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with("TotalIssuance", block) + spy_balance_from_rao.assert_not_called() + + +def test_total_issuance_no_value_attribute(mocker, subtensor): + """Test total_issuance returns None when result has no value attribute.""" + # Prep + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.total_issuance(block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with("TotalIssuance", block) + spy_balance_from_rao.assert_not_called() + + +def test_total_issuance_no_block(mocker, subtensor): + """Test total_issuance with no block 
specified.""" + # Prep + issuance_value = 1000 + mock_result = mocker.MagicMock(value=issuance_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.total_issuance() + + # Asserts + assert result is not None + subtensor.query_subtensor.assert_called_once_with("TotalIssuance", None) + spy_balance_from_rao.assert_called_once_with( + subtensor.query_subtensor.return_value.value + ) + + +# `total_stake` method tests +def test_total_stake_success(mocker, subtensor): + """Test total_stake returns correct data when stake information is found.""" + # Prep + block = 123 + stake_value = 5000 + mock_result = mocker.MagicMock(value=stake_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.total_stake(block) + + # Asserts + assert result is not None + subtensor.query_subtensor.assert_called_once_with("TotalStake", block) + spy_balance_from_rao.assert_called_once_with( + subtensor.query_subtensor.return_value.value + ) + + +def test_total_stake_no_data(mocker, subtensor): + """Test total_stake returns None when no stake information is found.""" + # Prep + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.total_stake(block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with("TotalStake", block) + spy_balance_from_rao.assert_not_called() + + +def test_total_stake_no_value_attribute(mocker, subtensor): + """Test total_stake returns None when result has no value attribute.""" + # Prep + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + spy_balance_from_rao = mocker.spy(Balance, 
"from_rao") + + # Call + result = subtensor.total_stake(block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with("TotalStake", block) + spy_balance_from_rao.assert_not_called() + + +def test_total_stake_no_block(mocker, subtensor): + """Test total_stake with no block specified.""" + # Prep + stake_value = 5000 + mock_result = mocker.MagicMock(value=stake_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.total_stake() + + # Asserts + assert result is not None + subtensor.query_subtensor.assert_called_once_with("TotalStake", None) + ( + spy_balance_from_rao.assert_called_once_with( + subtensor.query_subtensor.return_value.value + ), + ) + + +# `serving_rate_limit` method tests +def test_serving_rate_limit_success(mocker, subtensor): + """Test serving_rate_limit returns correct data when rate limit information is found.""" + # Prep + netuid = 1 + block = 123 + rate_limit_value = "10" + mocker.patch.object(subtensor, "_get_hyperparameter", return_value=rate_limit_value) + + # Call + result = subtensor.serving_rate_limit(netuid, block) + + # Asserts + assert result is not None + assert result == int(rate_limit_value) + subtensor._get_hyperparameter.assert_called_once_with( + param_name="ServingRateLimit", netuid=netuid, block=block + ) + + +def test_serving_rate_limit_no_data(mocker, subtensor): + """Test serving_rate_limit returns None when no rate limit information is found.""" + # Prep + netuid = 1 + block = 123 + mocker.patch.object(subtensor, "_get_hyperparameter", return_value=None) + + # Call + result = subtensor.serving_rate_limit(netuid, block) + + # Asserts + assert result is None + subtensor._get_hyperparameter.assert_called_once_with( + param_name="ServingRateLimit", netuid=netuid, block=block + ) + + +def test_serving_rate_limit_no_block(mocker, subtensor): + """Test serving_rate_limit with no 
block specified.""" + # Prep + netuid = 1 + rate_limit_value = "10" + mocker.patch.object(subtensor, "_get_hyperparameter", return_value=rate_limit_value) + + # Call + result = subtensor.serving_rate_limit(netuid) + + # Asserts + assert result is not None + assert result == int(rate_limit_value) + subtensor._get_hyperparameter.assert_called_once_with( + param_name="ServingRateLimit", netuid=netuid, block=None + ) + + +# `tx_rate_limit` tests +def test_tx_rate_limit_success(mocker, subtensor): + """Test tx_rate_limit returns correct data when rate limit information is found.""" + # Prep + block = 123 + rate_limit_value = 100 + mock_result = mocker.MagicMock(value=rate_limit_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.tx_rate_limit(block) + + # Asserts + assert result is not None + assert result == rate_limit_value + subtensor.query_subtensor.assert_called_once_with("TxRateLimit", block) + + +def test_tx_rate_limit_no_data(mocker, subtensor): + """Test tx_rate_limit returns None when no rate limit information is found.""" + # Prep + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + + # Call + result = subtensor.tx_rate_limit(block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with("TxRateLimit", block) + + +def test_tx_rate_limit_no_value_attribute(mocker, subtensor): + """Test tx_rate_limit returns None when result has no value attribute.""" + # Prep + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.tx_rate_limit(block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with("TxRateLimit", block) + + +def test_tx_rate_limit_no_block(mocker, subtensor): + """Test tx_rate_limit with no block specified.""" + # Prep + rate_limit_value = 100 + mock_result = 
mocker.MagicMock(value=rate_limit_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.tx_rate_limit() + + # Asserts + assert result is not None + assert result == rate_limit_value + subtensor.query_subtensor.assert_called_once_with("TxRateLimit", None) + + +############################ +# Network Parameters tests # +############################ + + +# `subnet_exists` tests +def test_subnet_exists_success(mocker, subtensor): + """Test subnet_exists returns True when subnet exists.""" + # Prep + netuid = 1 + block = 123 + mock_result = mocker.MagicMock(value=True) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.subnet_exists(netuid, block) + + # Asserts + assert result is True + subtensor.query_subtensor.assert_called_once_with("NetworksAdded", block, [netuid]) + + +def test_subnet_exists_no_data(mocker, subtensor): + """Test subnet_exists returns False when no subnet information is found.""" + # Prep + netuid = 1 + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + + # Call + result = subtensor.subnet_exists(netuid, block) + + # Asserts + assert result is False + subtensor.query_subtensor.assert_called_once_with("NetworksAdded", block, [netuid]) + + +def test_subnet_exists_no_value_attribute(mocker, subtensor): + """Test subnet_exists returns False when result has no value attribute.""" + # Prep + netuid = 1 + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.subnet_exists(netuid, block) + + # Asserts + assert result is False + subtensor.query_subtensor.assert_called_once_with("NetworksAdded", block, [netuid]) + + +def test_subnet_exists_no_block(mocker, subtensor): + """Test subnet_exists with no block specified.""" + # Prep + netuid = 1 + mock_result = 
mocker.MagicMock(value=True) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.subnet_exists(netuid) + + # Asserts + assert result is True + subtensor.query_subtensor.assert_called_once_with("NetworksAdded", None, [netuid]) + + +# `get_all_subnet_netuids` tests +def test_get_all_subnet_netuids_success(mocker, subtensor): + """Test get_all_subnet_netuids returns correct list when netuid information is found.""" + # Prep + block = 123 + mock_netuid1 = mocker.MagicMock(value=1) + mock_netuid2 = mocker.MagicMock(value=2) + mock_result = mocker.MagicMock() + mock_result.records = True + mock_result.__iter__.return_value = [(mock_netuid1, True), (mock_netuid2, True)] + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_all_subnet_netuids(block) + + # Asserts + assert result == [1, 2] + subtensor.query_map_subtensor.assert_called_once_with("NetworksAdded", block) + + +def test_get_all_subnet_netuids_no_data(mocker, subtensor): + """Test get_all_subnet_netuids returns empty list when no netuid information is found.""" + # Prep + block = 123 + mocker.patch.object(subtensor, "query_map_subtensor", return_value=None) + + # Call + result = subtensor.get_all_subnet_netuids(block) + + # Asserts + assert result == [] + subtensor.query_map_subtensor.assert_called_once_with("NetworksAdded", block) + + +def test_get_all_subnet_netuids_no_records_attribute(mocker, subtensor): + """Test get_all_subnet_netuids returns empty list when result has no records attribute.""" + # Prep + block = 123 + mock_result = mocker.MagicMock() + del mock_result.records + mock_result.__iter__.return_value = [] + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_all_subnet_netuids(block) + + # Asserts + assert result == [] + subtensor.query_map_subtensor.assert_called_once_with("NetworksAdded", block) + + +def 
test_get_all_subnet_netuids_no_block(mocker, subtensor): + """Test get_all_subnet_netuids with no block specified.""" + # Prep + mock_netuid1 = mocker.MagicMock(value=1) + mock_netuid2 = mocker.MagicMock(value=2) + mock_result = mocker.MagicMock() + mock_result.records = True + mock_result.__iter__.return_value = [(mock_netuid1, True), (mock_netuid2, True)] + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_all_subnet_netuids() + + # Asserts + assert result == [1, 2] + subtensor.query_map_subtensor.assert_called_once_with("NetworksAdded", None) + + +# `get_total_subnets` tests +def test_get_total_subnets_success(mocker, subtensor): + """Test get_total_subnets returns correct data when total subnet information is found.""" + # Prep + block = 123 + total_subnets_value = 10 + mock_result = mocker.MagicMock(value=total_subnets_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_total_subnets(block) + + # Asserts + assert result is not None + assert result == total_subnets_value + subtensor.query_subtensor.assert_called_once_with("TotalNetworks", block) + + +def test_get_total_subnets_no_data(mocker, subtensor): + """Test get_total_subnets returns None when no total subnet information is found.""" + # Prep + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + + # Call + result = subtensor.get_total_subnets(block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with("TotalNetworks", block) + + +def test_get_total_subnets_no_value_attribute(mocker, subtensor): + """Test get_total_subnets returns None when result has no value attribute.""" + # Prep + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value # Simulating a missing value attribute + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = 
subtensor.get_total_subnets(block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with("TotalNetworks", block) + + +def test_get_total_subnets_no_block(mocker, subtensor): + """Test get_total_subnets with no block specified.""" + # Prep + total_subnets_value = 10 + mock_result = mocker.MagicMock(value=total_subnets_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_total_subnets() + + # Asserts + assert result is not None + assert result == total_subnets_value + subtensor.query_subtensor.assert_called_once_with("TotalNetworks", None) + + +# `get_subnet_modality` tests +def test_get_subnet_modality_success(mocker, subtensor): + """Test get_subnet_modality returns correct data when modality information is found.""" + # Prep + netuid = 1 + block = 123 + modality_value = 42 + mock_result = mocker.MagicMock(value=modality_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnet_modality(netuid, block) + + # Asserts + assert result is not None + assert result == modality_value + subtensor.query_subtensor.assert_called_once_with( + "NetworkModality", block, [netuid] + ) + + +def test_get_subnet_modality_no_data(mocker, subtensor): + """Test get_subnet_modality returns None when no modality information is found.""" + # Prep + netuid = 1 + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + + # Call + result = subtensor.get_subnet_modality(netuid, block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with( + "NetworkModality", block, [netuid] + ) + + +def test_get_subnet_modality_no_value_attribute(mocker, subtensor): + """Test get_subnet_modality returns None when result has no value attribute.""" + # Prep + netuid = 1 + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value # Simulating a missing value 
attribute + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnet_modality(netuid, block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with( + "NetworkModality", block, [netuid] + ) + + +def test_get_subnet_modality_no_block_specified(mocker, subtensor): + """Test get_subnet_modality with no block specified.""" + # Prep + netuid = 1 + modality_value = 42 + mock_result = mocker.MagicMock(value=modality_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnet_modality(netuid) + + # Asserts + assert result is not None + assert result == modality_value + subtensor.query_subtensor.assert_called_once_with("NetworkModality", None, [netuid]) + + +# `get_emission_value_by_subnet` tests +def test_get_emission_value_by_subnet_success(mocker, subtensor): + """Test get_emission_value_by_subnet returns correct data when emission value is found.""" + # Prep + netuid = 1 + block = 123 + emission_value = 1000 + mock_result = mocker.MagicMock(value=emission_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.get_emission_value_by_subnet(netuid, block) + + # Asserts + assert result is not None + subtensor.query_subtensor.assert_called_once_with("EmissionValues", block, [netuid]) + spy_balance_from_rao.assert_called_once_with(emission_value) + assert result == Balance.from_rao(emission_value) + + +def test_get_emission_value_by_subnet_no_data(mocker, subtensor): + """Test get_emission_value_by_subnet returns None when no emission value is found.""" + # Prep + netuid = 1 + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.get_emission_value_by_subnet(netuid, block) + + # 
Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with("EmissionValues", block, [netuid]) + spy_balance_from_rao.assert_not_called() + + +def test_get_emission_value_by_subnet_no_value_attribute(mocker, subtensor): + """Test get_emission_value_by_subnet returns None when result has no value attribute.""" + # Prep + netuid = 1 + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value # Simulating a missing value attribute + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.get_emission_value_by_subnet(netuid, block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with("EmissionValues", block, [netuid]) + spy_balance_from_rao.assert_not_called() + + +def test_get_emission_value_by_subnet_no_block_specified(mocker, subtensor): + """Test get_emission_value_by_subnet with no block specified.""" + # Prep + netuid = 1 + emission_value = 1000 + mock_result = mocker.MagicMock(value=emission_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.get_emission_value_by_subnet(netuid) + + # Asserts + assert result is not None + subtensor.query_subtensor.assert_called_once_with("EmissionValues", None, [netuid]) + spy_balance_from_rao.assert_called_once_with(emission_value) + assert result == Balance.from_rao(emission_value) + + +# `get_subnet_connection_requirements` tests +def test_get_subnet_connection_requirements_success(mocker, subtensor): + """Test get_subnet_connection_requirements returns correct data when requirements are found.""" + # Prep + netuid = 1 + block = 123 + mock_tuple1 = (mocker.MagicMock(value="requirement1"), mocker.MagicMock(value=10)) + mock_tuple2 = (mocker.MagicMock(value="requirement2"), mocker.MagicMock(value=20)) + mock_result = 
mocker.MagicMock() + mock_result.records = [mock_tuple1, mock_tuple2] + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnet_connection_requirements(netuid, block) + + # Asserts + assert result == {"requirement1": 10, "requirement2": 20} + subtensor.query_map_subtensor.assert_called_once_with( + "NetworkConnect", block, [netuid] + ) + + +def test_get_subnet_connection_requirements_no_data(mocker, subtensor): + """Test get_subnet_connection_requirements returns empty dict when no data is found.""" + # Prep + netuid = 1 + block = 123 + mock_result = mocker.MagicMock() + mock_result.records = [] + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnet_connection_requirements(netuid, block) + + # Asserts + assert result == {} + subtensor.query_map_subtensor.assert_called_once_with( + "NetworkConnect", block, [netuid] + ) + + +def test_get_subnet_connection_requirements_no_records_attribute(mocker, subtensor): + """Test get_subnet_connection_requirements returns empty dict when result has no records attribute.""" + # Prep + netuid = 1 + block = 123 + mock_result = mocker.MagicMock() + del mock_result.records # Simulating a missing records attribute + + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnet_connection_requirements(netuid, block) + + # Asserts + assert result == {} + subtensor.query_map_subtensor.assert_called_once_with( + "NetworkConnect", block, [netuid] + ) + + +def test_get_subnet_connection_requirements_no_block_specified(mocker, subtensor): + """Test get_subnet_connection_requirements with no block specified.""" + # Prep + netuid = 1 + mock_tuple1 = (mocker.MagicMock(value="requirement1"), mocker.MagicMock(value=10)) + mock_tuple2 = (mocker.MagicMock(value="requirement2"), mocker.MagicMock(value=20)) + mock_result = mocker.MagicMock() + 
mock_result.records = [mock_tuple1, mock_tuple2] + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnet_connection_requirements(netuid) + + # Asserts + assert result == {"requirement1": 10, "requirement2": 20} + subtensor.query_map_subtensor.assert_called_once_with( + "NetworkConnect", None, [netuid] + ) + + +# `get_subnets` tests +def test_get_subnets_success(mocker, subtensor): + """Test get_subnets returns correct list when subnet information is found.""" + # Prep + block = 123 + mock_netuid1 = mocker.MagicMock(value=1) + mock_netuid2 = mocker.MagicMock(value=2) + mock_result = mocker.MagicMock() + mock_result.records = [(mock_netuid1, True), (mock_netuid2, True)] + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnets(block) + + # Asserts + assert result == [1, 2] + subtensor.query_map_subtensor.assert_called_once_with("NetworksAdded", block) + + +def test_get_subnets_no_data(mocker, subtensor): + """Test get_subnets returns empty list when no subnet information is found.""" + # Prep + block = 123 + mock_result = mocker.MagicMock() + mock_result.records = [] + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnets(block) + + # Asserts + assert result == [] + subtensor.query_map_subtensor.assert_called_once_with("NetworksAdded", block) + + +def test_get_subnets_no_records_attribute(mocker, subtensor): + """Test get_subnets returns empty list when result has no records attribute.""" + # Prep + block = 123 + mock_result = mocker.MagicMock() + del mock_result.records # Simulating a missing records attribute + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnets(block) + + # Asserts + assert result == [] + subtensor.query_map_subtensor.assert_called_once_with("NetworksAdded", block) + + +def 
test_get_subnets_no_block_specified(mocker, subtensor): + """Test get_subnets with no block specified.""" + # Prep + mock_netuid1 = mocker.MagicMock(value=1) + mock_netuid2 = mocker.MagicMock(value=2) + mock_result = mocker.MagicMock() + mock_result.records = [(mock_netuid1, True), (mock_netuid2, True)] + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnets() + + # Asserts + assert result == [1, 2] + subtensor.query_map_subtensor.assert_called_once_with("NetworksAdded", None) + + +# `get_all_subnets_info` tests +def test_get_all_subnets_info_success(mocker, subtensor): + """Test get_all_subnets_info returns correct data when subnet information is found.""" + # Prep + block = 123 + subnet_data = [1, 2, 3] # Mocked response data + mocker.patch.object( + subtensor.substrate, "get_block_hash", return_value="mock_block_hash" + ) + mock_response = {"result": subnet_data} + mocker.patch.object(subtensor.substrate, "rpc_request", return_value=mock_response) + mocker.patch.object( + subtensor_module.SubnetInfo, + "list_from_vec_u8", + return_value="list_from_vec_u80", + ) + + # Call + result = subtensor.get_all_subnets_info(block) + + # Asserts + subtensor.substrate.get_block_hash.assert_called_once_with(block) + subtensor.substrate.rpc_request.assert_called_once_with( + method="subnetInfo_getSubnetsInfo", params=["mock_block_hash"] + ) + subtensor_module.SubnetInfo.list_from_vec_u8.assert_called_once_with(subnet_data) + + +@pytest.mark.parametrize("result_", [[], None]) +def test_get_all_subnets_info_no_data(mocker, subtensor, result_): + """Test get_all_subnets_info returns empty list when no subnet information is found.""" + # Prep + block = 123 + mocker.patch.object( + subtensor.substrate, "get_block_hash", return_value="mock_block_hash" + ) + mock_response = {"result": result_} + mocker.patch.object(subtensor.substrate, "rpc_request", return_value=mock_response) + 
mocker.patch.object(subtensor_module.SubnetInfo, "list_from_vec_u8") + + # Call + result = subtensor.get_all_subnets_info(block) + + # Asserts + assert result == [] + subtensor.substrate.get_block_hash.assert_called_once_with(block) + subtensor.substrate.rpc_request.assert_called_once_with( + method="subnetInfo_getSubnetsInfo", params=["mock_block_hash"] + ) + subtensor_module.SubnetInfo.list_from_vec_u8.assert_not_called() + + +def test_get_all_subnets_info_retry(mocker, subtensor): + """Test get_all_subnets_info retries on failure.""" + # Prep + block = 123 + subnet_data = [1, 2, 3] + mocker.patch.object( + subtensor.substrate, "get_block_hash", return_value="mock_block_hash" + ) + mock_response = {"result": subnet_data} + mock_rpc_request = mocker.patch.object( + subtensor.substrate, + "rpc_request", + side_effect=[Exception, Exception, mock_response], + ) + mocker.patch.object( + subtensor_module.SubnetInfo, "list_from_vec_u8", return_value=["some_data"] + ) + + # Call + result = subtensor.get_all_subnets_info(block) + + # Asserts + subtensor.substrate.get_block_hash.assert_called_with(block) + assert mock_rpc_request.call_count == 3 + subtensor_module.SubnetInfo.list_from_vec_u8.assert_called_once_with(subnet_data) + assert result == ["some_data"] + + +# `get_subnet_info` tests +def test_get_subnet_info_success(mocker, subtensor): + """Test get_subnet_info returns correct data when subnet information is found.""" + # Prep + netuid = 1 + block = 123 + subnet_data = [1, 2, 3] + mocker.patch.object( + subtensor.substrate, "get_block_hash", return_value="mock_block_hash" + ) + mock_response = {"result": subnet_data} + mocker.patch.object(subtensor.substrate, "rpc_request", return_value=mock_response) + mocker.patch.object( + subtensor_module.SubnetInfo, "from_vec_u8", return_value=["from_vec_u8"] + ) + + # Call + result = subtensor.get_subnet_info(netuid, block) + + # Asserts + subtensor.substrate.get_block_hash.assert_called_once_with(block) + 
subtensor.substrate.rpc_request.assert_called_once_with( + method="subnetInfo_getSubnetInfo", params=[netuid, "mock_block_hash"] + ) + subtensor_module.SubnetInfo.from_vec_u8.assert_called_once_with(subnet_data) + + +@pytest.mark.parametrize("result_", [None, {}]) +def test_get_subnet_info_no_data(mocker, subtensor, result_): + """Test get_subnet_info returns None when no subnet information is found.""" + # Prep + netuid = 1 + block = 123 + mocker.patch.object( + subtensor.substrate, "get_block_hash", return_value="mock_block_hash" + ) + mock_response = {"result": result_} + mocker.patch.object(subtensor.substrate, "rpc_request", return_value=mock_response) + mocker.patch.object(subtensor_module.SubnetInfo, "from_vec_u8") + + # Call + result = subtensor.get_subnet_info(netuid, block) + + # Asserts + assert result is None + subtensor.substrate.get_block_hash.assert_called_once_with(block) + subtensor.substrate.rpc_request.assert_called_once_with( + method="subnetInfo_getSubnetInfo", params=[netuid, "mock_block_hash"] + ) + subtensor_module.SubnetInfo.from_vec_u8.assert_not_called() + + +def test_get_subnet_info_retry(mocker, subtensor): + """Test get_subnet_info retries on failure.""" + # Prep + netuid = 1 + block = 123 + subnet_data = [1, 2, 3] + mocker.patch.object( + subtensor.substrate, "get_block_hash", return_value="mock_block_hash" + ) + mock_response = {"result": subnet_data} + mock_rpc_request = mocker.patch.object( + subtensor.substrate, + "rpc_request", + side_effect=[Exception, Exception, mock_response], + ) + mocker.patch.object( + subtensor_module.SubnetInfo, "from_vec_u8", return_value=["from_vec_u8"] + ) + + # Call + result = subtensor.get_subnet_info(netuid, block) + + # Asserts + subtensor.substrate.get_block_hash.assert_called_with(block) + assert mock_rpc_request.call_count == 3 + subtensor_module.SubnetInfo.from_vec_u8.assert_called_once_with(subnet_data) + + +# `get_subnet_hyperparameters` tests +def 
test_get_subnet_hyperparameters_success(mocker, subtensor): + """Test get_subnet_hyperparameters returns correct data when hyperparameters are found.""" + # Prep + netuid = 1 + block = 123 + hex_bytes_result = "0x010203" + bytes_result = bytes.fromhex(hex_bytes_result[2:]) + mocker.patch.object(subtensor, "query_runtime_api", return_value=hex_bytes_result) + mocker.patch.object( + subtensor_module.SubnetHyperparameters, + "from_vec_u8", + return_value=["from_vec_u8"], + ) + + # Call + result = subtensor.get_subnet_hyperparameters(netuid, block) + + # Asserts + subtensor.query_runtime_api.assert_called_once_with( + runtime_api="SubnetInfoRuntimeApi", + method="get_subnet_hyperparams", + params=[netuid], + block=block, + ) + subtensor_module.SubnetHyperparameters.from_vec_u8.assert_called_once_with( + bytes_result + ) + + +def test_get_subnet_hyperparameters_no_data(mocker, subtensor): + """Test get_subnet_hyperparameters returns empty list when no data is found.""" + # Prep + netuid = 1 + block = 123 + mocker.patch.object(subtensor, "query_runtime_api", return_value=None) + mocker.patch.object(subtensor_module.SubnetHyperparameters, "from_vec_u8") + + # Call + result = subtensor.get_subnet_hyperparameters(netuid, block) + + # Asserts + assert result == [] + subtensor.query_runtime_api.assert_called_once_with( + runtime_api="SubnetInfoRuntimeApi", + method="get_subnet_hyperparams", + params=[netuid], + block=block, + ) + subtensor_module.SubnetHyperparameters.from_vec_u8.assert_not_called() + + +def test_get_subnet_hyperparameters_hex_without_prefix(mocker, subtensor): + """Test get_subnet_hyperparameters correctly processes hex string without '0x' prefix.""" + # Prep + netuid = 1 + block = 123 + hex_bytes_result = "010203" + bytes_result = bytes.fromhex(hex_bytes_result) + mocker.patch.object(subtensor, "query_runtime_api", return_value=hex_bytes_result) + mocker.patch.object(subtensor_module.SubnetHyperparameters, "from_vec_u8") + + # Call + result = 
subtensor.get_subnet_hyperparameters(netuid, block) + + # Asserts + subtensor.query_runtime_api.assert_called_once_with( + runtime_api="SubnetInfoRuntimeApi", + method="get_subnet_hyperparams", + params=[netuid], + block=block, + ) + subtensor_module.SubnetHyperparameters.from_vec_u8.assert_called_once_with( + bytes_result + ) + + +# `get_subnet_owner` tests +def test_get_subnet_owner_success(mocker, subtensor): + """Test get_subnet_owner returns correct data when owner information is found.""" + # Prep + netuid = 1 + block = 123 + owner_address = "5F3sa2TJAWMqDhXG6jhV4N8ko9rXPM6twz9mG9m3rrgq3xiJ" + mock_result = mocker.MagicMock(value=owner_address) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnet_owner(netuid, block) + + # Asserts + subtensor.query_subtensor.assert_called_once_with("SubnetOwner", block, [netuid]) + assert result == owner_address + + +def test_get_subnet_owner_no_data(mocker, subtensor): + """Test get_subnet_owner returns None when no owner information is found.""" + # Prep + netuid = 1 + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + + # Call + result = subtensor.get_subnet_owner(netuid, block) + + # Asserts + subtensor.query_subtensor.assert_called_once_with("SubnetOwner", block, [netuid]) + assert result is None + + +def test_get_subnet_owner_no_value_attribute(mocker, subtensor): + """Test get_subnet_owner returns None when result has no value attribute.""" + # Prep + netuid = 1 + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value # Simulating a missing value attribute + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnet_owner(netuid, block) + + # Asserts + subtensor.query_subtensor.assert_called_once_with("SubnetOwner", block, [netuid]) + assert result is None + + +#################### +# Nomination tests # +#################### + + +# 
`is_hotkey_delegate` tests +def test_is_hotkey_delegate_success(mocker, subtensor): + """Test is_hotkey_delegate returns True when hotkey is a delegate.""" + # Prep + hotkey_ss58 = "hotkey_ss58" + block = 123 + mock_delegates = [ + mocker.MagicMock(hotkey_ss58=hotkey_ss58), + mocker.MagicMock(hotkey_ss58="hotkey_ss583"), + ] + mocker.patch.object(subtensor, "get_delegates", return_value=mock_delegates) + + # Call + result = subtensor.is_hotkey_delegate(hotkey_ss58, block) + + # Asserts + subtensor.get_delegates.assert_called_once_with(block=block) + assert result is True + + +def test_is_hotkey_delegate_not_found(mocker, subtensor): + """Test is_hotkey_delegate returns False when hotkey is not a delegate.""" + # Prep + hotkey_ss58 = "hotkey_ss58" + block = 123 + mock_delegates = [mocker.MagicMock(hotkey_ss58="hotkey_ss583")] + mocker.patch.object(subtensor, "get_delegates", return_value=mock_delegates) + + # Call + result = subtensor.is_hotkey_delegate(hotkey_ss58, block) + + # Asserts + subtensor.get_delegates.assert_called_once_with(block=block) + assert result is False + + +# `get_delegate_take` tests +def test_get_delegate_take_success(mocker, subtensor): + """Test get_delegate_take returns correct data when delegate take is found.""" + # Prep + hotkey_ss58 = "hotkey_ss58" + block = 123 + delegate_take_value = 32768 + mock_result = mocker.MagicMock(value=delegate_take_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + spy_u16_normalized_float = mocker.spy(subtensor_module, "U16_NORMALIZED_FLOAT") + + # Call + subtensor.get_delegate_take(hotkey_ss58, block) + + # Asserts + subtensor.query_subtensor.assert_called_once_with("Delegates", block, [hotkey_ss58]) + spy_u16_normalized_float.assert_called_once_with(delegate_take_value) + + +def test_get_delegate_take_no_data(mocker, subtensor): + """Test get_delegate_take returns None when no delegate take is found.""" + # Prep + hotkey_ss58 = "hotkey_ss58" + block = 123 + 
delegate_take_value = 32768 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + spy_u16_normalized_float = mocker.spy(subtensor_module, "U16_NORMALIZED_FLOAT") + + # Call + result = subtensor.get_delegate_take(hotkey_ss58, block) + + # Asserts + subtensor.query_subtensor.assert_called_once_with("Delegates", block, [hotkey_ss58]) + spy_u16_normalized_float.assert_not_called() + assert result is None diff --git a/tests/unit_tests/test_synapse.py b/tests/unit_tests/test_synapse.py index be5465facb..b0ce4f1325 100644 --- a/tests/unit_tests/test_synapse.py +++ b/tests/unit_tests/test_synapse.py @@ -16,16 +16,14 @@ # DEALINGS IN THE SOFTWARE. import json import base64 -from typing import List, Optional - -import pydantic_core import pytest import bittensor +from typing import Optional, ClassVar def test_parse_headers_to_inputs(): class Test(bittensor.Synapse): - key1: List[int] + key1: list[int] # Define a mock headers dictionary to use for testing headers = { @@ -60,7 +58,7 @@ class Test(bittensor.Synapse): def test_from_headers(): class Test(bittensor.Synapse): - key1: List[int] + key1: list[int] # Define a mock headers dictionary to use for testing headers = { @@ -131,14 +129,14 @@ class Test(bittensor.Synapse): a: int # Carried through because required. 
b: int = None # Not carried through headers c: Optional[int] # Required, carried through headers, cannot be None - d: Optional[List[int]] # Required, carried though headers, cannot be None - e: List[int] # Carried through headers - f: Optional[ - int - ] = None # Not Required, Not carried through headers, can be None - g: Optional[ - List[int] - ] = None # Not Required, Not carried though headers, can be None + d: Optional[list[int]] # Required, carried though headers, cannot be None + e: list[int] # Carried through headers + f: Optional[int] = ( + None # Not Required, Not carried through headers, can be None + ) + g: Optional[list[int]] = ( + None # Not Required, Not carried though headers, can be None + ) # Create an instance of the custom Synapse subclass synapse = Test( @@ -152,12 +150,12 @@ class Test(bittensor.Synapse): assert isinstance(synapse, Test) assert synapse.name == "Test" assert synapse.a == 1 - assert synapse.b == None + assert synapse.b is None assert synapse.c == 3 assert synapse.d == [1, 2, 3, 4] assert synapse.e == [1, 2, 3, 4] - assert synapse.f == None - assert synapse.g == None + assert synapse.f is None + assert synapse.g is None # Convert the Test instance to a headers dictionary headers = synapse.to_headers() @@ -169,12 +167,12 @@ class Test(bittensor.Synapse): # Create a new Test from the headers and check its properties next_synapse = synapse.from_headers(synapse.to_headers()) assert next_synapse.a == 0 # Default value is 0 - assert next_synapse.b == None + assert next_synapse.b is None assert next_synapse.c == 0 # Default is 0 assert next_synapse.d == [] # Default is [] assert next_synapse.e == [] # Empty list is default for list types - assert next_synapse.f == None - assert next_synapse.g == None + assert next_synapse.f is None + assert next_synapse.g is None def test_body_hash_override(): @@ -189,18 +187,6 @@ def test_body_hash_override(): synapse_instance.body_hash = [] -def test_required_fields_override(): - # Create a Synapse 
instance - synapse_instance = bittensor.Synapse() - - # Try to set the required_hash_fields property and expect a TypeError - with pytest.raises( - pydantic_core.ValidationError, - match="required_hash_fields\n Field is frozen", - ): - synapse_instance.required_hash_fields = [] - - def test_default_instance_fields_dict_consistency(): synapse_instance = bittensor.Synapse() assert synapse_instance.model_dump() == { @@ -233,5 +219,48 @@ def test_default_instance_fields_dict_consistency(): "signature": None, }, "computed_body_hash": "", - "required_hash_fields": [], } + + +class LegacyHashedSynapse(bittensor.Synapse): + """Legacy Synapse subclass that serialized `required_hash_fields`.""" + + a: int + b: int + c: Optional[int] = None + d: Optional[list[str]] = None + required_hash_fields: Optional[list[str]] = ["b", "a", "d"] + + +class HashedSynapse(bittensor.Synapse): + a: int + b: int + c: Optional[int] = None + d: Optional[list[str]] = None + required_hash_fields: ClassVar[tuple[str, ...]] = ("a", "b", "d") + + +@pytest.mark.parametrize("synapse_cls", [LegacyHashedSynapse, HashedSynapse]) +def test_synapse_body_hash(synapse_cls): + synapse_instance = synapse_cls(a=1, b=2, d=["foobar"]) + assert ( + synapse_instance.body_hash + == "ae06397d08f30f75c91395c59f05c62ac3b62b88250eb78b109213258e6ced0c" + ) + + # Extra non-hashed values should not influence the body hash + synapse_instance_slightly_different = synapse_cls(d=["foobar"], c=3, a=1, b=2) + assert synapse_instance.body_hash == synapse_instance_slightly_different.body_hash + + # Even if someone tries to override the required_hash_fields, it should still be the same + synapse_instance_try_override_hash_fields = synapse_cls( + a=1, b=2, d=["foobar"], required_hash_fields=["a"] + ) + assert ( + synapse_instance.body_hash + == synapse_instance_try_override_hash_fields.body_hash + ) + + # Different hashed values should result in different body hashes + synapse_different = synapse_cls(a=1, b=2) + assert 
synapse_instance.body_hash != synapse_different.body_hash diff --git a/tests/unit_tests/utils/test_balance.py b/tests/unit_tests/utils/test_balance.py index 129af42f01..b99bc111f2 100644 --- a/tests/unit_tests/utils/test_balance.py +++ b/tests/unit_tests/utils/test_balance.py @@ -209,8 +209,8 @@ def test_balance_mul(balance: Union[int, float], balance2: Union[int, float]): prod_ = balance_ * balance2_ assert isinstance(prod_, Balance) - assert prod_.rao == pytest.approx( - rao_ * rao2_, 9 + assert ( + prod_.rao == pytest.approx(rao_ * rao2_, 9) ), f"{balance_} * {balance2_} == {prod_.rao} != {rao_} * {balance2} == {rao_ * balance2}"