From a4199fd40de2a2655034e05b70109eaeb8a941a5 Mon Sep 17 00:00:00 2001
From: Maciej Urbanski
Date: Fri, 26 Apr 2024 18:07:17 +0200
Subject: [PATCH 001/116] fix body_hash check circumvention

---
 bittensor/axon.py                |  2 --
 bittensor/synapse.py             | 23 +++++++++++------------
 tests/unit_tests/test_synapse.py | 20 ++++++++++++++++++++
 3 files changed, 31 insertions(+), 14 deletions(-)

diff --git a/bittensor/axon.py b/bittensor/axon.py
index 34ce9e51f1..f72c024759 100644
--- a/bittensor/axon.py
+++ b/bittensor/axon.py
@@ -696,9 +696,7 @@ async def verify_body_integrity(self, request: Request):
         body = await request.body()
         request_body = body.decode() if isinstance(body, bytes) else body

-        # Gather the required field names from the axon's required_hash_fields dict
         request_name = request.url.path.split("/")[1]
-        required_hash_fields = self.required_hash_fields[request_name]

         # Load the body dict and check if all required field hashes match
         body_dict = json.loads(request_body)
diff --git a/bittensor/synapse.py b/bittensor/synapse.py
index 6b7af3d765..518b9431c6 100644
--- a/bittensor/synapse.py
+++ b/bittensor/synapse.py
@@ -683,21 +683,20 @@ def body_hash(self) -> str:
         Returns:
             str: The SHA3-256 hash as a hexadecimal string, providing a fingerprint of the Synapse instance's data for integrity checks.
         """
-        # Hash the body for verification
         hashes = []

-        # Getting the fields of the instance
-        instance_fields = self.dict()
+        required_hash_fields = self.__class__.__fields__["required_hash_fields"].default
+
+        if required_hash_fields:
+            instance_fields = self.dict()
+            # Preserve backward compatibility in which fields will added in .dict() order
+            # instead of the order one from `self.required_hash_fields`
+            required_hash_fields = [
+                field for field in instance_fields if field in required_hash_fields
+            ]
+            for field in required_hash_fields:
+                hashes.append(bittensor.utils.hash(str(instance_fields[field])))

-        for field, value in instance_fields.items():
-            # If the field is required in the subclass schema, hash and add it.
-            if (
-                self.required_hash_fields is not None
-                and field in self.required_hash_fields
-            ):
-                hashes.append(bittensor.utils.hash(str(value)))
-
-        # Hash and return the hashes that have been concatenated
         return bittensor.utils.hash("".join(hashes))

     @classmethod
diff --git a/tests/unit_tests/test_synapse.py b/tests/unit_tests/test_synapse.py
index 70dd88db76..82502fd317 100644
--- a/tests/unit_tests/test_synapse.py
+++ b/tests/unit_tests/test_synapse.py
@@ -224,3 +224,23 @@ def test_default_instance_fields_dict_consistency():
         "computed_body_hash": "",
         "required_hash_fields": [],
     }
+
+
+def test_synapse_body_hash():
+    class HashedSynapse(bittensor.Synapse):
+        a: int
+        b: int
+        c: typing.Optional[int]
+        d: typing.Optional[typing.List[str]]
+        required_hash_fields: typing.Optional[typing.List[str]] = ["b", "a", "d"]
+
+    synapse_instance = HashedSynapse(a=1, b=2, d=["foobar"])
+    synapse_instance_2 = HashedSynapse(d=["foobar"], c=3, a=1, b=2)
+    synapse_different = HashedSynapse(a=1, b=2)
+
+    assert synapse_instance.body_hash == synapse_instance_2.body_hash
+    assert synapse_instance.body_hash != synapse_different.body_hash
+    assert (
+        synapse_instance.body_hash
+        == "ae06397d08f30f75c91395c59f05c62ac3b62b88250eb78b109213258e6ced0c"
+    )

From df79322efc337e3a0eb95c73a60e3e4d6ebb1911 Mon Sep 17 00:00:00 2001
From: Maciej Urbanski
Date: Sat, 27 Apr 2024 22:47:32 +0200
Subject: [PATCH 002/116] change required_hash_fields to ClassVar

---
 bittensor/axon.py                |  7 ----
 bittensor/synapse.py             | 46 +++++++++++++++---------
 tests/unit_tests/test_synapse.py | 61 +++++++++++++++++++-------------
 3 files changed, 66 insertions(+), 48 deletions(-)

diff --git a/bittensor/axon.py b/bittensor/axon.py
index f72c024759..cd1b8f224d 100644
--- a/bittensor/axon.py
+++ b/bittensor/axon.py
@@ -364,7 +364,6 @@ def __init__(
         self.priority_fns: Dict[str, Optional[Callable]] = {}
         self.forward_fns: Dict[str, Optional[Callable]] = {}
         self.verify_fns: Dict[str, Optional[Callable]] = {}
-        self.required_hash_fields: Dict[str, str] = {}

         # Instantiate FastAPI
         self.app = FastAPI()
@@ -566,12 +565,6 @@ def verify_custom(synapse: MyCustomSynapse):
         )  # Use 'default_verify' if 'verify_fn' is None
         self.forward_fns[request_name] = forward_fn

-        # Parse required hash fields from the forward function protocol defaults
-        required_hash_fields = request_class.__dict__["__fields__"][
-            "required_hash_fields"
-        ].default
-        self.required_hash_fields[request_name] = required_hash_fields
-
         return self

     @classmethod
diff --git a/bittensor/synapse.py b/bittensor/synapse.py
index 518b9431c6..5c0975a557 100644
--- a/bittensor/synapse.py
+++ b/bittensor/synapse.py
@@ -20,11 +20,13 @@
 import base64
 import json
 import sys
+import typing
+import warnings
 import pydantic
 from pydantic.schema import schema

 import bittensor
-from typing import Optional, List, Any, Dict
+from typing import Optional, Any, Dict


 def get_size(obj, seen=None) -> int:
@@ -293,6 +295,8 @@ class Synapse(pydantic.BaseModel):
     5. Body Hash Computation (``computed_body_hash``, ``required_hash_fields``):
         Ensures data integrity and security by computing hashes of transmitted data. Provides users with a
         mechanism to verify data integrity and detect any tampering during transmission.
+        It is recommended that names of fields in `required_hash_fields` are listed in the order they are
+        defined in the class.

     6. Serialization and Deserialization Methods:
         Facilitates the conversion of Synapse objects to and from a format suitable for network transmission.
@@ -480,14 +484,7 @@ def set_name_type(cls, values) -> dict: repr=False, ) - required_hash_fields: Optional[List[str]] = pydantic.Field( - title="required_hash_fields", - description="The list of required fields to compute the body hash.", - examples=["roles", "messages"], - default=[], - allow_mutation=False, - repr=False, - ) + required_hash_fields: typing.ClassVar[typing.Tuple[str, ...]] = () def __setattr__(self, name: str, value: Any): """ @@ -685,15 +682,32 @@ def body_hash(self) -> str: """ hashes = [] - required_hash_fields = self.__class__.__fields__["required_hash_fields"].default + hash_fields_field = self.__class__.__fields__.get("required_hash_fields") + instance_fields = None + if hash_fields_field: + warnings.warn( + "The 'required_hash_fields' field handling deprecated and will be removed. " + "Please update Synapse class definition to use 'required_hash_fields' class variable instead.", + DeprecationWarning, + ) + required_hash_fields = hash_fields_field.default + + if required_hash_fields: + instance_fields = self.dict() + # Preserve backward compatibility in which fields will added in .dict() order + # instead of the order one from `self.required_hash_fields` + required_hash_fields = [ + field for field in instance_fields if field in required_hash_fields + ] + + # Hack to cache the required hash fields names + if len(required_hash_fields) == len(required_hash_fields): + self.__class__.required_hash_fields = tuple(required_hash_fields) + else: + required_hash_fields = self.__class__.required_hash_fields if required_hash_fields: - instance_fields = self.dict() - # Preserve backward compatibility in which fields will added in .dict() order - # instead of the order one from `self.required_hash_fields` - required_hash_fields = [ - field for field in instance_fields if field in required_hash_fields - ] + instance_fields = instance_fields or self.dict() for field in required_hash_fields: hashes.append(bittensor.utils.hash(str(instance_fields[field]))) diff --git a/tests/unit_tests/test_synapse.py b/tests/unit_tests/test_synapse.py index 82502fd317..61e9868878 100644 --- a/tests/unit_tests/test_synapse.py +++ b/tests/unit_tests/test_synapse.py @@ -178,18 +178,6 @@ def test_body_hash_override(): synapse_instance.body_hash = [] -def test_required_fields_override(): - # Create a Synapse instance - synapse_instance = bittensor.Synapse() - - # Try to set the required_hash_fields property and expect a TypeError - with pytest.raises( - TypeError, - match='"required_hash_fields" has allow_mutation set to False and cannot be assigned', - ): - synapse_instance.required_hash_fields = [] - - def test_default_instance_fields_dict_consistency(): synapse_instance = bittensor.Synapse() assert synapse_instance.dict() == { @@ -222,25 +210,48 @@ def test_default_instance_fields_dict_consistency(): "signature": None, }, "computed_body_hash": "", - "required_hash_fields": [], } -def test_synapse_body_hash(): - class HashedSynapse(bittensor.Synapse): - a: int - b: int - c: typing.Optional[int] - d: typing.Optional[typing.List[str]] - required_hash_fields: typing.Optional[typing.List[str]] = ["b", "a", "d"] +class LegacyHashedSynapse(bittensor.Synapse): + """Legacy Synapse subclass that serialized `required_hash_fields`.""" - synapse_instance = HashedSynapse(a=1, b=2, d=["foobar"]) - synapse_instance_2 = HashedSynapse(d=["foobar"], c=3, a=1, b=2) - synapse_different = HashedSynapse(a=1, b=2) + a: int + b: int + c: typing.Optional[int] + d: typing.Optional[typing.List[str]] + required_hash_fields: 
typing.Optional[typing.List[str]] = ["b", "a", "d"] - assert synapse_instance.body_hash == synapse_instance_2.body_hash - assert synapse_instance.body_hash != synapse_different.body_hash + +class HashedSynapse(bittensor.Synapse): + a: int + b: int + c: typing.Optional[int] + d: typing.Optional[typing.List[str]] + required_hash_fields: typing.ClassVar[typing.Tuple[str]] = ("a", "b", "d") + + +@pytest.mark.parametrize("synapse_cls", [LegacyHashedSynapse, HashedSynapse]) +def test_synapse_body_hash(synapse_cls): + synapse_instance = synapse_cls(a=1, b=2, d=["foobar"]) assert ( synapse_instance.body_hash == "ae06397d08f30f75c91395c59f05c62ac3b62b88250eb78b109213258e6ced0c" ) + + # Extra non-hashed values should not influence the body hash + synapse_instance_slightly_different = synapse_cls(d=["foobar"], c=3, a=1, b=2) + assert synapse_instance.body_hash == synapse_instance_slightly_different.body_hash + + # Even if someone tries to override the required_hash_fields, it should still be the same + synapse_instance_try_override_hash_fields = synapse_cls( + a=1, b=2, d=["foobar"], required_hash_fields=["a"] + ) + assert ( + synapse_instance.body_hash + == synapse_instance_try_override_hash_fields.body_hash + ) + + # Different hashed values should result in different body hashes + synapse_different = synapse_cls(a=1, b=2) + assert synapse_instance.body_hash != synapse_different.body_hash From cf176979fdd94da037ed85b11162499ddf5934da Mon Sep 17 00:00:00 2001 From: Maciej Urbanski Date: Sun, 21 Apr 2024 21:00:03 +0200 Subject: [PATCH 003/116] allow forward_fn to accept more arguments --- bittensor/axon.py | 86 ++++++++++++++--------------------------------- 1 file changed, 26 insertions(+), 60 deletions(-) diff --git a/bittensor/axon.py b/bittensor/axon.py index 34ce9e51f1..be7711c611 100644 --- a/bittensor/axon.py +++ b/bittensor/axon.py @@ -466,27 +466,19 @@ def verify_custom(synapse: MyCustomSynapse): offered by this method allows developers to tailor the Axon's behavior to specific requirements and use cases. 
""" - - # Assert 'forward_fn' has exactly one argument forward_sig = signature(forward_fn) - assert ( - len(list(forward_sig.parameters)) == 1 - ), "The passed function must have exactly one argument" - - # Obtain the class of the first argument of 'forward_fn' - request_class = forward_sig.parameters[ - list(forward_sig.parameters)[0] - ].annotation + try: + first_param = next(iter(forward_sig.parameters.values())) + except StopIteration: + raise ValueError( + "The forward_fn first argument must be a subclass of bittensor.Synapse, but it has no arguments" + ) - # Assert that the first argument of 'forward_fn' is a subclass of 'bittensor.Synapse' + param_class = first_param.annotation assert issubclass( - request_class, bittensor.Synapse - ), "The argument of forward_fn must inherit from bittensor.Synapse" - - # Obtain the class name of the first argument of 'forward_fn' - request_name = forward_sig.parameters[ - list(forward_sig.parameters)[0] - ].annotation.__name__ + param_class, bittensor.Synapse + ), "The first argument of forward_fn must inherit from bittensor.Synapse" + request_name = param_class.__name__ # Add the endpoint to the router, making it available on both GET and POST methods self.router.add_api_route( @@ -497,58 +489,34 @@ def verify_custom(synapse: MyCustomSynapse): ) self.app.include_router(self.router) - # Expected signatures for 'blacklist_fn', 'priority_fn' and 'verify_fn' - blacklist_sig = Signature( - [ - Parameter( - "synapse", - Parameter.POSITIONAL_OR_KEYWORD, - annotation=forward_sig.parameters[ - list(forward_sig.parameters)[0] - ].annotation, - ) - ], - return_annotation=Tuple[bool, str], - ) - priority_sig = Signature( - [ - Parameter( - "synapse", - Parameter.POSITIONAL_OR_KEYWORD, - annotation=forward_sig.parameters[ - list(forward_sig.parameters)[0] - ].annotation, - ) - ], - return_annotation=float, - ) - verify_sig = Signature( - [ - Parameter( - "synapse", - Parameter.POSITIONAL_OR_KEYWORD, - annotation=forward_sig.parameters[ - list(forward_sig.parameters)[0] - ].annotation, - ) - ], - return_annotation=None, - ) - # Check the signature of blacklist_fn, priority_fn and verify_fn if they are provided + expected_params = [ + Parameter( + "synapse", + Parameter.POSITIONAL_OR_KEYWORD, + annotation=forward_sig.parameters[ + list(forward_sig.parameters)[0] + ].annotation, + ) + ] if blacklist_fn: + blacklist_sig = Signature( + expected_params, return_annotation=Tuple[bool, str] + ) assert ( signature(blacklist_fn) == blacklist_sig ), "The blacklist_fn function must have the signature: blacklist( synapse: {} ) -> Tuple[bool, str]".format( request_name ) if priority_fn: + priority_sig = Signature(expected_params, return_annotation=float) assert ( signature(priority_fn) == priority_sig ), "The priority_fn function must have the signature: priority( synapse: {} ) -> float".format( request_name ) if verify_fn: + verify_sig = Signature(expected_params, return_annotation=None) assert ( signature(verify_fn) == verify_sig ), "The verify_fn function must have the signature: verify( synapse: {} ) -> None".format( @@ -556,9 +524,7 @@ def verify_custom(synapse: MyCustomSynapse): ) # Store functions in appropriate attribute dictionaries - self.forward_class_types[request_name] = forward_sig.parameters[ - list(forward_sig.parameters)[0] - ].annotation + self.forward_class_types[request_name] = param_class self.blacklist_fns[request_name] = blacklist_fn self.priority_fns[request_name] = priority_fn self.verify_fns[request_name] = ( @@ -567,7 +533,7 @@ def 
verify_custom(synapse: MyCustomSynapse): self.forward_fns[request_name] = forward_fn # Parse required hash fields from the forward function protocol defaults - required_hash_fields = request_class.__dict__["__fields__"][ + required_hash_fields = param_class.__dict__["__fields__"][ "required_hash_fields" ].default self.required_hash_fields[request_name] = required_hash_fields From 22b77d7c23b1d4f8bd86114777277a9857453464 Mon Sep 17 00:00:00 2001 From: Maciej Urbanski Date: Mon, 29 Apr 2024 19:52:39 +0200 Subject: [PATCH 004/116] return synapse.axon.status_code in HTTP status code --- bittensor/axon.py | 104 +++++++++++++++++++++++----------- bittensor/errors.py | 39 +++++++++---- requirements/dev.txt | 1 + tests/unit_tests/test_axon.py | 88 +++++++++++++++++++++++++++- 4 files changed, 186 insertions(+), 46 deletions(-) diff --git a/bittensor/axon.py b/bittensor/axon.py index be7711c611..7bb7694b56 100644 --- a/bittensor/axon.py +++ b/bittensor/axon.py @@ -31,6 +31,9 @@ import argparse import traceback import threading + +from fastapi.routing import serialize_response + import bittensor import contextlib @@ -41,7 +44,7 @@ from starlette.responses import Response from starlette.requests import Request from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint -from typing import List, Optional, Tuple, Callable, Any, Dict +from typing import List, Optional, Tuple, Callable, Any, Dict, Awaitable from bittensor.errors import ( InvalidRequestNameError, @@ -54,6 +57,7 @@ RunException, PostProcessException, InternalServerError, + SynapseException, ) from bittensor.threadpool import PriorityThreadPoolExecutor @@ -377,7 +381,8 @@ def __init__( self.app.include_router(self.router) # Build ourselves as the middleware. - self.app.add_middleware(AxonMiddleware, axon=self) + self.middleware_cls = AxonMiddleware + self.app.add_middleware(self.middleware_cls, axon=self) # Attach default forward. def ping(r: bittensor.Synapse) -> bittensor.Synapse: @@ -480,10 +485,25 @@ def verify_custom(synapse: MyCustomSynapse): ), "The first argument of forward_fn must inherit from bittensor.Synapse" request_name = param_class.__name__ + async def endpoint(*args, **kwargs): + start_time = time.time() + response_synapse = forward_fn(*args, **kwargs) + if isinstance(response_synapse, Awaitable): + response_synapse = await response_synapse + return await self.middleware_cls.synapse_to_response( + synapse=response_synapse, start_time=start_time + ) + + # replace the endpoint signature, but set return annotation to JSONResponse + endpoint.__signature__ = Signature( # type: ignore + parameters=list(forward_sig.parameters.values()), + return_annotation=JSONResponse, + ) + # Add the endpoint to the router, making it available on both GET and POST methods self.router.add_api_route( f"/{request_name}", - forward_fn, + endpoint, methods=["GET", "POST"], dependencies=[Depends(self.verify_body_integrity)], ) @@ -908,7 +928,7 @@ async def default_verify(self, synapse: bittensor.Synapse): # Success self.nonces[endpoint_key] = synapse.dendrite.nonce # type: ignore else: - raise SynapseDendriteNoneException() + raise SynapseDendriteNoneException(synapse=synapse) def create_error_response(synapse: bittensor.Synapse): @@ -1020,7 +1040,14 @@ async def dispatch( try: # Set up the synapse from its headers. 
- synapse: bittensor.Synapse = await self.preprocess(request) + try: + synapse: bittensor.Synapse = await self.preprocess(request) + except Exception as exc: + if isinstance(exc, SynapseException) and exc.synapse is not None: + synapse = exc.synapse + else: + synapse = bittensor.Synapse() + raise # Logs the start of the request processing if synapse.dendrite is not None: @@ -1044,25 +1071,16 @@ async def dispatch( # Call the run function response = await self.run(synapse, call_next, request) - # Call the postprocess function - response = await self.postprocess(synapse, response, start_time) - # Handle errors related to preprocess. except InvalidRequestNameError as e: - if "synapse" not in locals(): - synapse: bittensor.Synapse = bittensor.Synapse() # type: ignore log_and_handle_error(synapse, e, 400, start_time) response = create_error_response(synapse) except SynapseParsingError as e: - if "synapse" not in locals(): - synapse = bittensor.Synapse() log_and_handle_error(synapse, e, 400, start_time) response = create_error_response(synapse) except UnknownSynapseError as e: - if "synapse" not in locals(): - synapse = bittensor.Synapse() log_and_handle_error(synapse, e, 404, start_time) response = create_error_response(synapse) @@ -1292,7 +1310,9 @@ async def blacklist(self, synapse: bittensor.Synapse): raise Exception("Synapse.axon object is None") # We raise an exception to halt the process and return the error message to the requester. - raise BlacklistedException(f"Forbidden. Key is blacklisted: {reason}.") + raise BlacklistedException( + f"Forbidden. Key is blacklisted: {reason}.", synapse=synapse + ) async def priority(self, synapse: bittensor.Synapse): """ @@ -1355,7 +1375,9 @@ async def submit_task( synapse.axon.status_code = 408 # Raise an exception to stop the process and return an appropriate error message to the requester. - raise PriorityException(f"Response timeout after: {synapse.timeout}s") + raise PriorityException( + f"Response timeout after: {synapse.timeout}s", synapse=synapse + ) async def run( self, @@ -1392,25 +1414,26 @@ async def run( bittensor.logging.trace(f"Run exception: {str(e)}") # Set the status code of the synapse to "500" which indicates an internal server error. - if synapse.axon is not None: + if synapse.axon is not None and synapse.axon.status_code is None: synapse.axon.status_code = 500 # Raise an exception to stop the process and return an appropriate error message to the requester. - raise RunException(f"Internal server error with error: {str(e)}") + raise RunException( + f"Internal server error with error: {str(e)}", synapse=synapse + ) # Return the starlet response return response - async def postprocess( - self, synapse: bittensor.Synapse, response: Response, start_time: float - ) -> Response: + @classmethod + async def synapse_to_response( + cls, synapse: bittensor.Synapse, start_time: float + ) -> JSONResponse: """ - Performs the final processing on the response before sending it back to the client. This method - updates the response headers and logs the end of the request processing. + Converts the Synapse object into a JSON response with HTTP headers. Args: synapse (bittensor.Synapse): The Synapse object representing the request. - response (Response): The response generated by processing the request. start_time (float): The timestamp when the request processing started. 
Returns: @@ -1419,24 +1442,37 @@ async def postprocess( Postprocessing is the last step in the request handling process, ensuring that the response is properly formatted and contains all necessary information. """ - # Set the status code of the synapse to "200" which indicates a successful response. - if synapse.axon is not None: + if synapse.axon is None: + synapse.axon = bittensor.TerminalInfo() + + if synapse.axon.status_code is None: synapse.axon.status_code = 200 - # Set the status message of the synapse to "Success". + if synapse.axon.status_code == 200 and not synapse.axon.status_message: synapse.axon.status_message = "Success" + synapse.axon.process_time = time.time() - start_time + + serialized_synapse = await serialize_response(response_content=synapse) + response = JSONResponse( + status_code=synapse.axon.status_code, + content=serialized_synapse, + ) + try: - # Update the response headers with the headers from the synapse. updated_headers = synapse.to_headers() - response.headers.update(updated_headers) except Exception as e: - # If there is an exception during the response header update, we log the exception. raise PostProcessException( - f"Error while parsing or updating response headers. Postprocess exception: {str(e)}." - ) + f"Error while parsing response headers. Postprocess exception: {str(e)}.", + synapse=synapse, + ) from e - # Calculate the processing time by subtracting the start time from the current time. - synapse.axon.process_time = str(time.time() - start_time) # type: ignore + try: + response.headers.update(updated_headers) + except Exception as e: + raise PostProcessException( + f"Error while updating response headers. Postprocess exception: {str(e)}.", + synapse=synapse, + ) from e return response diff --git a/bittensor/errors.py b/bittensor/errors.py index 13cb43fc1b..c821c94728 100644 --- a/bittensor/errors.py +++ b/bittensor/errors.py @@ -14,6 +14,12 @@ # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. 
+from __future__ import annotations + +import typing + +if typing.TYPE_CHECKING: + import bittensor class ChainError(BaseException): @@ -106,7 +112,16 @@ class InvalidRequestNameError(Exception): pass -class UnknownSynapseError(Exception): +class SynapseException(Exception): + def __init__( + self, message="Synapse Exception", synapse: "bittensor.Synapse" | None = None + ): + self.message = message + self.synapse = synapse + super().__init__(self.message) + + +class UnknownSynapseError(SynapseException): r"""This exception is raised when the request name is not found in the Axon's forward_fns dictionary.""" pass @@ -118,43 +133,47 @@ class SynapseParsingError(Exception): pass -class NotVerifiedException(Exception): +class NotVerifiedException(SynapseException): r"""This exception is raised when the request is not verified.""" pass -class BlacklistedException(Exception): +class BlacklistedException(SynapseException): r"""This exception is raised when the request is blacklisted.""" pass -class PriorityException(Exception): +class PriorityException(SynapseException): r"""This exception is raised when the request priority is not met.""" pass -class PostProcessException(Exception): +class PostProcessException(SynapseException): r"""This exception is raised when the response headers cannot be updated.""" pass -class RunException(Exception): +class RunException(SynapseException): r"""This exception is raised when the requested function cannot be executed. Indicates a server error.""" pass -class InternalServerError(Exception): +class InternalServerError(SynapseException): r"""This exception is raised when the requested function fails on the server. Indicates a server error.""" pass -class SynapseDendriteNoneException(Exception): - def __init__(self, message="Synapse Dendrite is None"): +class SynapseDendriteNoneException(SynapseException): + def __init__( + self, + message="Synapse Dendrite is None", + synapse: "bittensor.Synapse" | None = None, + ): self.message = message - super().__init__(self.message) + super().__init__(self.message, synapse) diff --git a/requirements/dev.txt b/requirements/dev.txt index 1d1cad3a85..4ce4238094 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -12,3 +12,4 @@ mypy==1.8.0 types-retry==0.9.9.4 freezegun==1.5.0 torch>=1.13.1 +httpx==0.27.0 diff --git a/tests/unit_tests/test_axon.py b/tests/unit_tests/test_axon.py index 33a3724643..b0eaabf327 100644 --- a/tests/unit_tests/test_axon.py +++ b/tests/unit_tests/test_axon.py @@ -20,15 +20,18 @@ # Standard Lib import pytest import unittest +from dataclasses import dataclass from typing import Any from unittest import IsolatedAsyncioTestCase from unittest.mock import AsyncMock, MagicMock, patch # Third Party from starlette.requests import Request +from fastapi.testclient import TestClient # Bittensor import bittensor +from bittensor import Synapse from bittensor.axon import AxonMiddleware from bittensor.axon import axon as Axon @@ -161,15 +164,20 @@ def axon_instance(): # Mocks +@dataclass class MockWallet: - def __init__(self, hotkey): - self.hotkey = hotkey + hotkey: Any + coldkey: Any = None + coldkeypub: Any = None class MockHotkey: def __init__(self, ss58_address): self.ss58_address = ss58_address + def sign(self, *args, **kwargs): + return f"Signed: {args!r} {kwargs!r}".encode() + class MockInfo: def to_string(self): @@ -440,5 +448,81 @@ async def test_preprocess(self): assert synapse.name == "request_name" +class SynapseHTTPClient(TestClient): + def post_synapse(self, synapse: Synapse): + return 
self.post( + f"/{synapse.__class__.__name__}", + json=synapse.dict(), + headers={"computed_body_hash": synapse.body_hash}, + ) + + +@pytest.mark.asyncio +class TestAxonHTTPAPIResponses: + @pytest.fixture + def axon(self): + return Axon( + ip="192.0.2.1", + external_ip="192.0.2.1", + wallet=MockWallet(MockHotkey("A"), MockHotkey("B"), MockHotkey("PUB")), + ) + + @pytest.fixture + def http_client(self, axon): + return SynapseHTTPClient(axon.app) + + async def no_verify_fn(self, synapse): + return + + async def test_unknown_path(self, http_client): + response = http_client.get("/no_such_path") + assert (response.status_code, response.json()) == ( + 404, + { + "message": "Synapse name 'no_such_path' not found. Available synapses ['Synapse']" + }, + ) + + async def test_ping__no_dendrite(self, http_client): + response = http_client.post_synapse(bittensor.Synapse()) + assert (response.status_code, response.json()) == ( + 401, + { + "message": "Not Verified with error: No SS58 formatted address or public key provided" + }, + ) + + async def test_ping__without_verification(self, http_client, axon): + axon.verify_fns["Synapse"] = self.no_verify_fn + request_synapse = Synapse() + response = http_client.post_synapse(request_synapse) + assert response.status_code == 200 + response_synapse = Synapse(**response.json()) + assert response_synapse.axon.status_code == 200 + + async def test_synapse__explicitly_set_status_code(self, http_client, axon): + class CustomSynapse(Synapse): + pass + + error_message = "Essential resource for CustomSynapse not found" + + async def forward_fn(synapse: CustomSynapse): + synapse.axon.status_code = 404 + synapse.axon.status_message = error_message + return synapse + + axon.attach(forward_fn) + axon.verify_fns["CustomSynapse"] = self.no_verify_fn + + request_synapse = CustomSynapse() + response = http_client.post_synapse(request_synapse) + assert response.status_code == 404 + response_synapse = CustomSynapse(**response.json()) + assert ( + response_synapse.axon.status_code, + response_synapse.axon.status_message, + ) == (404, error_message) + + if __name__ == "__main__": unittest.main() From bb6228f9b56ea078e3eef05442a9893f70f50d80 Mon Sep 17 00:00:00 2001 From: Maciej Urbanski Date: Wed, 15 May 2024 10:21:52 +0200 Subject: [PATCH 005/116] respect SynapseException overrides and hide sensitive internal errors --- bittensor/axon.py | 111 +++++++++++++++------------------- tests/unit_tests/test_axon.py | 62 ++++++++++++++----- 2 files changed, 98 insertions(+), 75 deletions(-) diff --git a/bittensor/axon.py b/bittensor/axon.py index 7bb7694b56..bca31461ab 100644 --- a/bittensor/axon.py +++ b/bittensor/axon.py @@ -31,6 +31,7 @@ import argparse import traceback import threading +import typing from fastapi.routing import serialize_response @@ -54,9 +55,7 @@ NotVerifiedException, BlacklistedException, PriorityException, - RunException, PostProcessException, - InternalServerError, SynapseException, ) from bittensor.threadpool import PriorityThreadPoolExecutor @@ -949,28 +948,53 @@ def create_error_response(synapse: bittensor.Synapse): def log_and_handle_error( synapse: bittensor.Synapse, exception: Exception, - status_code: int, - start_time: float, + status_code: typing.Optional[int] = None, + start_time: typing.Optional[float] = None, ): + if isinstance(exception, SynapseException): + synapse = exception.synapse or synapse # Display the traceback for user clarity. 
bittensor.logging.trace(f"Forward exception: {traceback.format_exc()}") + if synapse.axon is None: + synapse.axon = bittensor.TerminalInfo() + # Set the status code of the synapse to the given status code. + error_id = str(uuid.uuid4()) error_type = exception.__class__.__name__ - error_message = str(exception) - detailed_error_message = f"{error_type}: {error_message}" # Log the detailed error message for internal use - bittensor.logging.error(detailed_error_message) + bittensor.logging.error(f"{error_type}#{error_id}: {exception}") + + if not status_code and synapse.axon.status_code != 100: + status_code = synapse.axon.status_code + status_message = synapse.axon.status_message + if isinstance(exception, SynapseException): + if not status_code: + if isinstance(exception, PriorityException): + status_code = 503 + elif isinstance(exception, UnknownSynapseError): + status_code = 404 + elif isinstance(exception, BlacklistedException): + status_code = 403 + elif isinstance(exception, NotVerifiedException): + status_code = 401 + elif isinstance(exception, (InvalidRequestNameError, SynapseParsingError)): + status_code = 400 + else: + status_code = 500 + status_message = status_message or str(exception) + else: + status_code = status_code or 500 + status_message = status_message or f"Internal Server Error #{error_id}" - if synapse.axon is None: - raise SynapseParsingError(detailed_error_message) # Set a user-friendly error message synapse.axon.status_code = status_code - synapse.axon.status_message = error_message + synapse.axon.status_message = status_message - # Calculate the processing time by subtracting the start time from the current time. - synapse.axon.process_time = str(time.time() - start_time) # type: ignore + if start_time: + # Calculate the processing time by subtracting the start time from the current time. + synapse.axon.process_time = str(time.time() - start_time) # type: ignore return synapse @@ -1073,45 +1097,20 @@ async def dispatch( # Handle errors related to preprocess. except InvalidRequestNameError as e: - log_and_handle_error(synapse, e, 400, start_time) - response = create_error_response(synapse) - - except SynapseParsingError as e: - log_and_handle_error(synapse, e, 400, start_time) - response = create_error_response(synapse) - - except UnknownSynapseError as e: - log_and_handle_error(synapse, e, 404, start_time) + if synapse.axon is None: + synapse.axon = bittensor.TerminalInfo() + synapse.axon.status_code = 400 + synapse.axon.status_message = str(e) + synapse = log_and_handle_error(synapse, e, start_time=start_time) response = create_error_response(synapse) - - # Handle errors related to verify. - except NotVerifiedException as e: - log_and_handle_error(synapse, e, 401, start_time) - response = create_error_response(synapse) - - # Handle errors related to blacklist. - except BlacklistedException as e: - log_and_handle_error(synapse, e, 403, start_time) - response = create_error_response(synapse) - - # Handle errors related to priority. - except PriorityException as e: - log_and_handle_error(synapse, e, 503, start_time) - response = create_error_response(synapse) - - # Handle errors related to run. - except RunException as e: - log_and_handle_error(synapse, e, 500, start_time) - response = create_error_response(synapse) - - # Handle errors related to postprocess. 
- except PostProcessException as e: - log_and_handle_error(synapse, e, 500, start_time) + except SynapseException as e: + synapse = e.synapse or synapse + synapse = log_and_handle_error(synapse, e, start_time=start_time) response = create_error_response(synapse) # Handle all other errors. except Exception as e: - log_and_handle_error(synapse, InternalServerError(str(e)), 500, start_time) + synapse = log_and_handle_error(synapse, e, start_time=start_time) response = create_error_response(synapse) # Logs the end of request processing and returns the response @@ -1186,8 +1185,7 @@ async def preprocess(self, request: Request) -> bittensor.Synapse: "version": str(bittensor.__version_as_int__), "uuid": str(self.axon.uuid), "nonce": f"{time.time_ns()}", - "status_message": "Success", - "status_code": "100", + "status_code": 100, } ) @@ -1256,7 +1254,9 @@ async def verify(self, synapse: bittensor.Synapse): # We raise an exception to stop the process and return the error to the requester. # The error message includes the original exception message. - raise NotVerifiedException(f"Not Verified with error: {str(e)}") + raise NotVerifiedException( + f"Not Verified with error: {str(e)}", synapse=synapse + ) async def blacklist(self, synapse: bittensor.Synapse): """ @@ -1407,20 +1407,9 @@ async def run( response = await call_next(request) except Exception as e: - # If an exception occurs during the execution of the requested function, - # it is caught and handled here. - # Log the exception for debugging purposes. bittensor.logging.trace(f"Run exception: {str(e)}") - - # Set the status code of the synapse to "500" which indicates an internal server error. - if synapse.axon is not None and synapse.axon.status_code is None: - synapse.axon.status_code = 500 - - # Raise an exception to stop the process and return an appropriate error message to the requester. - raise RunException( - f"Internal server error with error: {str(e)}", synapse=synapse - ) + raise # Return the starlet response return response diff --git a/tests/unit_tests/test_axon.py b/tests/unit_tests/test_axon.py index b0eaabf327..cc2eb8824b 100644 --- a/tests/unit_tests/test_axon.py +++ b/tests/unit_tests/test_axon.py @@ -18,20 +18,20 @@ # DEALINGS IN THE SOFTWARE. 
# Standard Lib -import pytest -import unittest +import re from dataclasses import dataclass from typing import Any from unittest import IsolatedAsyncioTestCase from unittest.mock import AsyncMock, MagicMock, patch # Third Party +import pytest from starlette.requests import Request from fastapi.testclient import TestClient # Bittensor import bittensor -from bittensor import Synapse +from bittensor import Synapse, RunException from bittensor.axon import AxonMiddleware from bittensor.axon import axon as Axon @@ -120,7 +120,7 @@ def test_log_and_handle_error(): synapse = log_and_handle_error(synapse, Exception("Error"), 500, 100) assert synapse.axon.status_code == 500 - assert synapse.axon.status_message == "Error" + assert re.match(r"Internal Server Error #[\da-f\-]+", synapse.axon.status_message) assert synapse.axon.process_time is not None @@ -436,8 +436,8 @@ async def test_preprocess(self): assert synapse.axon.version == str(bittensor.__version_as_int__) assert synapse.axon.uuid == "1234" assert synapse.axon.nonce is not None - assert synapse.axon.status_message == "Success" - assert synapse.axon.status_code == "100" + assert synapse.axon.status_message is None + assert synapse.axon.status_code == 100 assert synapse.axon.signature == "0xaabbccdd" # Check if the preprocess function fills the dendrite information into the synapse @@ -467,6 +467,11 @@ def axon(self): wallet=MockWallet(MockHotkey("A"), MockHotkey("B"), MockHotkey("PUB")), ) + @pytest.fixture + def no_verify_axon(self, axon): + axon.default_verify = self.no_verify_fn + return axon + @pytest.fixture def http_client(self, axon): return SynapseHTTPClient(axon.app) @@ -500,29 +505,58 @@ async def test_ping__without_verification(self, http_client, axon): response_synapse = Synapse(**response.json()) assert response_synapse.axon.status_code == 200 - async def test_synapse__explicitly_set_status_code(self, http_client, axon): + @pytest.fixture + def custom_synapse_cls(self): class CustomSynapse(Synapse): pass + return CustomSynapse + + async def test_synapse__explicitly_set_status_code( + self, http_client, axon, custom_synapse_cls, no_verify_axon + ): error_message = "Essential resource for CustomSynapse not found" - async def forward_fn(synapse: CustomSynapse): + async def forward_fn(synapse: custom_synapse_cls): synapse.axon.status_code = 404 synapse.axon.status_message = error_message return synapse axon.attach(forward_fn) - axon.verify_fns["CustomSynapse"] = self.no_verify_fn - request_synapse = CustomSynapse() - response = http_client.post_synapse(request_synapse) + response = http_client.post_synapse(custom_synapse_cls()) assert response.status_code == 404 - response_synapse = CustomSynapse(**response.json()) + response_synapse = custom_synapse_cls(**response.json()) assert ( response_synapse.axon.status_code, response_synapse.axon.status_message, ) == (404, error_message) + async def test_synapse__exception_with_set_status_code( + self, http_client, axon, custom_synapse_cls, no_verify_axon + ): + error_message = "Conflicting request" + + async def forward_fn(synapse: custom_synapse_cls): + synapse.axon.status_code = 409 + raise RunException(message=error_message, synapse=synapse) + + axon.attach(forward_fn) + + response = http_client.post_synapse(custom_synapse_cls()) + assert response.status_code == 409 + assert response.json() == {"message": error_message} + + async def test_synapse__internal_error( + self, http_client, axon, custom_synapse_cls, no_verify_axon + ): + async def forward_fn(synapse: custom_synapse_cls): + raise 
ValueError("error with potentially sensitive information") + + axon.attach(forward_fn) -if __name__ == "__main__": - unittest.main() + response = http_client.post_synapse(custom_synapse_cls()) + assert response.status_code == 500 + response_data = response.json() + assert sorted(response_data.keys()) == ["message"] + assert re.match(r"Internal Server Error #[\da-f\-]+", response_data["message"]) From 49bde43ef74127248d22411875e88b2d2314ea43 Mon Sep 17 00:00:00 2001 From: Maciej Urbanski Date: Wed, 15 May 2024 20:33:21 +0200 Subject: [PATCH 006/116] fix dendrite HTTP error handling and improve test coverage --- bittensor/dendrite.py | 6 +++ requirements/dev.txt | 1 + tests/unit_tests/conftest.py | 8 +++ tests/unit_tests/test_dendrite.py | 82 +++++++++++++++++++++++++++---- 4 files changed, 87 insertions(+), 10 deletions(-) create mode 100644 tests/unit_tests/conftest.py diff --git a/bittensor/dendrite.py b/bittensor/dendrite.py index 9a9202ab31..c0e24b92cf 100644 --- a/bittensor/dendrite.py +++ b/bittensor/dendrite.py @@ -710,6 +710,12 @@ def process_server_response( except: # Ignore errors during attribute setting pass + else: + # If the server responded with an error, update the local synapse state + if local_synapse.axon is None: + local_synapse.axon = bittensor.TerminalInfo() + local_synapse.axon.status_code = server_response.status + local_synapse.axon.status_message = json_response.get("message") # Extract server headers and overwrite None values in local synapse headers server_headers = bittensor.Synapse.from_headers(server_response.headers) # type: ignore diff --git a/requirements/dev.txt b/requirements/dev.txt index 4ce4238094..34184658cf 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -13,3 +13,4 @@ types-retry==0.9.9.4 freezegun==1.5.0 torch>=1.13.1 httpx==0.27.0 +aioresponses==0.7.6 diff --git a/tests/unit_tests/conftest.py b/tests/unit_tests/conftest.py new file mode 100644 index 0000000000..21a091f7af --- /dev/null +++ b/tests/unit_tests/conftest.py @@ -0,0 +1,8 @@ +import pytest +from aioresponses import aioresponses + + +@pytest.fixture +def mock_aioresponse(): + with aioresponses() as m: + yield m diff --git a/tests/unit_tests/test_dendrite.py b/tests/unit_tests/test_dendrite.py index 09219816e8..1c4f54be52 100644 --- a/tests/unit_tests/test_dendrite.py +++ b/tests/unit_tests/test_dendrite.py @@ -46,6 +46,23 @@ def setup_dendrite(): return dendrite_obj +@pytest.fixture +def dendrite_obj(setup_dendrite): + return setup_dendrite + + +@pytest.fixture +def axon_info(): + return bittensor.AxonInfo( + version=1, + ip="127.0.0.1", + port=666, + ip_type=4, + hotkey="hot", + coldkey="cold", + ) + + @pytest.fixture(scope="session") def setup_axon(): axon = bittensor.axon() @@ -61,21 +78,18 @@ def test_init(setup_dendrite): assert dendrite_obj.keypair == setup_dendrite.keypair -def test_str(setup_dendrite): - dendrite_obj = setup_dendrite - expected_string = "dendrite({})".format(setup_dendrite.keypair.ss58_address) +def test_str(dendrite_obj): + expected_string = "dendrite({})".format(dendrite_obj.keypair.ss58_address) assert str(dendrite_obj) == expected_string -def test_repr(setup_dendrite): - dendrite_obj = setup_dendrite - expected_string = "dendrite({})".format(setup_dendrite.keypair.ss58_address) +def test_repr(dendrite_obj): + expected_string = "dendrite({})".format(dendrite_obj.keypair.ss58_address) assert repr(dendrite_obj) == expected_string -def test_close(setup_dendrite, setup_axon): +def test_close(dendrite_obj, setup_axon): axon = setup_axon - 
dendrite_obj = setup_dendrite # Query the axon to open a session dendrite_obj.query(axon, SynapseDummy(input=1)) # Session should be automatically closed after query @@ -83,9 +97,8 @@ def test_close(setup_dendrite, setup_axon): @pytest.mark.asyncio -async def test_aclose(setup_dendrite, setup_axon): +async def test_aclose(dendrite_obj, setup_axon): axon = setup_axon - dendrite_obj = setup_dendrite # Use context manager to open an async session async with dendrite_obj: resp = await dendrite_obj([axon], SynapseDummy(input=1), deserialize=False) @@ -270,3 +283,52 @@ def test_terminal_info_error_cases( version=version, nonce=nonce, ) + + +@pytest.mark.asyncio +async def test_dendrite__call__success_response( + axon_info, dendrite_obj, mock_aioresponse +): + input_synapse = SynapseDummy(input=1) + expected_synapse = SynapseDummy( + **( + input_synapse.dict() + | dict( + output=2, + axon=TerminalInfo( + status_code=200, + status_message="Success", + process_time=0.1, + ), + ) + ) + ) + mock_aioresponse.post( + f"http://127.0.0.1:666/SynapseDummy", + body=expected_synapse.json(), + ) + synapse = await dendrite_obj.call(axon_info, synapse=input_synapse) + + assert synapse.input == 1 + assert synapse.output == 2 + assert synapse.dendrite.status_code == 200 + assert synapse.dendrite.status_message == "Success" + assert synapse.dendrite.process_time >= 0 + + +@pytest.mark.asyncio +async def test_dendrite__call__handles_http_error_response( + axon_info, dendrite_obj, mock_aioresponse +): + status_code = 414 + message = "Custom Error" + + mock_aioresponse.post( + f"http://127.0.0.1:666/SynapseDummy", + status=status_code, + payload={"message": message}, + ) + synapse = await dendrite_obj.call(axon_info, synapse=SynapseDummy(input=1)) + + assert synapse.axon.status_code == synapse.dendrite.status_code == status_code + assert synapse.axon.status_message == synapse.dendrite.status_message == message From 1b6c06e5d09ac0ad9bd94661ce8b47dfc4f71705 Mon Sep 17 00:00:00 2001 From: Roman Date: Tue, 21 May 2024 14:35:34 -0700 Subject: [PATCH 007/116] Part 2 for refactoring bittensor/subtensor.py subtensor().validator_logits_divergence() last refactored method --- bittensor/extrinsics/serving.py | 1 - bittensor/subtensor.py | 282 +++++++++++++++++++------------- 2 files changed, 164 insertions(+), 119 deletions(-) diff --git a/bittensor/extrinsics/serving.py b/bittensor/extrinsics/serving.py index 05bf4c4369..1aefa091ad 100644 --- a/bittensor/extrinsics/serving.py +++ b/bittensor/extrinsics/serving.py @@ -196,7 +196,6 @@ def serve_axon_extrinsic( protocol=4, wait_for_inclusion=wait_for_inclusion, wait_for_finalization=wait_for_finalization, - prompt=prompt, ) return serve_success diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 95ef84f494..b6b01f9962 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -34,7 +34,7 @@ from scalecodec.base import RuntimeConfiguration from scalecodec.exceptions import RemainingScaleBytesNotEmptyException from scalecodec.type_registry import load_type_registry_preset -from scalecodec.types import GenericCall +from scalecodec.types import GenericCall, ScaleType from substrateinterface.base import QueryMapResult, SubstrateInterface, ExtrinsicReceipt from substrateinterface.exceptions import SubstrateRequestException @@ -147,12 +147,6 @@ class subtensor: with market-based speculation, incentivizing neurons to make judicious decisions in their inter-neuronal investments. 
- Args: - network (str): The name of the Bittensor network (e.g., 'finney', 'test', 'archive', 'local') the instance is - connected to, determining the blockchain interaction context. - chain_endpoint (str): The blockchain node endpoint URL, enabling direct communication with the Bittensor - blockchain for transaction processing and data retrieval. - Example Usage:: # Connect to the main Bittensor network (Finney). @@ -1375,7 +1369,7 @@ def get_transfer_fee( def _do_transfer( self, - wallet: "bittensor.wallet", + wallet: bittensor.wallet, dest: str, transfer_balance: Balance, wait_for_inclusion: bool = True, @@ -1391,11 +1385,12 @@ def _do_transfer( wait_for_finalization (bool): If ``true``, waits for finalization. Returns: success (bool): ``True`` if transfer was successful. - block_hash (str): Block hash of the transfer. On success and if wait_for_ finalization/inclusion is ``True``. + block_hash (str): Block hash of the transfer. On success and if wait_for_ finalization/inclusion is + ``True``. error (str): Error message if transfer failed. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( call_module="Balances", @@ -1431,7 +1426,8 @@ def get_existential_deposit(self, block: Optional[int] = None) -> Optional[Balan balances below this threshold can be reaped to conserve network resources. Args: - block (Optional[int], optional): Block number at which to query the deposit amount. If ``None``, the current block is used. + block (Optional[int], optional): Block number at which to query the deposit amount. If ``None``, the + current block is used. Returns: Optional[Balance]: The existential deposit amount, or ``None`` if the query fails. @@ -1448,12 +1444,12 @@ def get_existential_deposit(self, block: Optional[int] = None) -> Optional[Balan return Balance.from_rao(result.value) - ################# - #### Network #### - ################# + ########### + # Network # + ########### def register_subnetwork( self, - wallet: "bittensor.wallet", + wallet: bittensor.wallet, wait_for_inclusion: bool = False, wait_for_finalization=True, prompt: bool = False, @@ -1524,9 +1520,9 @@ def set_hyperparameter( prompt=prompt, ) - ################# - #### Serving #### - ################# + ########### + # Serving # + ########### def serve( self, wallet: "bittensor.wallet", @@ -1538,7 +1534,6 @@ def serve( placeholder2: int = 0, wait_for_inclusion: bool = False, wait_for_finalization=True, - prompt: bool = False, ) -> bool: """ Registers a neuron's serving endpoint on the Bittensor network. This function announces the @@ -1551,10 +1546,12 @@ def serve( port (int): The port number on which the neuron is serving. protocol (int): The protocol type used by the neuron (e.g., GRPC, HTTP). netuid (int): The unique identifier of the subnetwork. - Other arguments: Placeholder parameters for future extensions. - wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. - wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. - prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. + placeholder1 (int, optional): Placeholder parameter for future extensions. Default is `0`. + placeholder2 (int, optional): Placeholder parameter for future extensions. Default is `0`. + wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. 
Default is + `False`. + wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. Default + is `True`. Returns: bool: ``True`` if the serve registration is successful, False otherwise. @@ -1581,7 +1578,6 @@ def serve_axon( axon: "bittensor.axon", wait_for_inclusion: bool = False, wait_for_finalization: bool = True, - prompt: bool = False, ) -> bool: """ Registers an Axon serving endpoint on the Bittensor network for a specific neuron. This function @@ -1593,7 +1589,6 @@ def serve_axon( axon (bittensor.Axon): The Axon instance to be registered for serving. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. - prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. Returns: bool: ``True`` if the Axon serve registration is successful, False otherwise. @@ -1629,7 +1624,7 @@ def _do_serve_axon( enhancing the decentralized computation capabilities of Bittensor. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( call_module="SubtensorModule", @@ -1691,7 +1686,7 @@ def _do_serve_prometheus( error (:func:`Optional[str]`): Error message if serve prometheus failed, ``None`` otherwise. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( call_module="SubtensorModule", @@ -1740,7 +1735,7 @@ def _do_associate_ips( error (:func:`Optional[str]`): Error message if associate IPs failed, None otherwise. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( call_module="SubtensorModule", @@ -1769,9 +1764,9 @@ def make_substrate_call_with_retry(): return make_substrate_call_with_retry() - ################# - #### Staking #### - ################# + ########### + # Staking # + ########### def add_stake( self, wallet: "bittensor.wallet", @@ -1869,7 +1864,7 @@ def _do_stake( StakeError: If the extrinsic failed. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( call_module="SubtensorModule", @@ -1896,9 +1891,9 @@ def make_substrate_call_with_retry(): return make_substrate_call_with_retry() - ################### - #### Unstaking #### - ################### + ############# + # Unstaking # + ############# def unstake_multiple( self, wallet: "bittensor.wallet", @@ -1915,7 +1910,8 @@ def unstake_multiple( Args: wallet (bittensor.wallet): The wallet linked to the coldkey from which the stakes are being withdrawn. hotkey_ss58s (List[str]): A list of hotkey ``SS58`` addresses to unstake from. - amounts (List[Union[Balance, float]], optional): The amounts of TAO to unstake from each hotkey. If not provided, unstakes all available stakes. + amounts (List[Union[Balance, float]], optional): The amounts of TAO to unstake from each hotkey. If not + provided, unstakes all available stakes. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. 
wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. @@ -1995,7 +1991,7 @@ def _do_unstake( StakeError: If the extrinsic failed. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( call_module="SubtensorModule", @@ -2022,9 +2018,9 @@ def make_substrate_call_with_retry(): return make_substrate_call_with_retry() - ################ - #### Senate #### - ################ + ########## + # Senate # + ########## def register_senate( self, @@ -2039,8 +2035,6 @@ def register_senate( Args: wallet (bittensor.wallet): The wallet associated with the neuron from which the stake is being removed. - hotkey_ss58 (Optional[str]): The ``SS58`` address of the hotkey account to unstake from. - amount (Union[Balance, float], optional): The amount of TAO to unstake. If not specified, unstakes all. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. @@ -2068,8 +2062,6 @@ def leave_senate( Args: wallet (bittensor.wallet): The wallet associated with the neuron from which the stake is being removed. - hotkey_ss58 (Optional[str]): The ``SS58`` address of the hotkey account to unstake from. - amount (Union[Balance, float], optional): The amount of TAO to unstake. If not specified, unstakes all. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. @@ -2100,8 +2092,9 @@ def vote_senate( Args: wallet (bittensor.wallet): The wallet associated with the neuron from which the stake is being removed. - hotkey_ss58 (Optional[str]): The ``SS58`` address of the hotkey account to unstake from. - amount (Union[Balance, float], optional): The amount of TAO to unstake. If not specified, unstakes all. + proposal_hash (str): The hash of the proposal being voted on. + proposal_idx (int): The index of the proposal being voted on. + vote (bool): The vote to be cast (True for yes, False for no). wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. 
@@ -2173,7 +2166,7 @@ def get_vote_data( ) if not hasattr(vote_data, "serialize"): return None - return vote_data.serialize() if vote_data != None else None + return vote_data.serialize() if vote_data is not None else None get_proposal_vote_data = get_vote_data @@ -2194,7 +2187,7 @@ def get_senate_members(self, block: Optional[int] = None) -> Optional[List[str]] senate_members = self.query_module("SenateMembers", "Members", block=block) if not hasattr(senate_members, "serialize"): return None - return senate_members.serialize() if senate_members != None else None + return senate_members.serialize() if senate_members is not None else None def get_proposal_call_data( self, proposal_hash: str, block: Optional[int] = None @@ -2208,7 +2201,7 @@ def get_proposal_call_data( block (Optional[int], optional): The blockchain block number at which to query the proposal call data. Returns: - Optional[bittensor.ProposalCallData]: An object containing the proposal's call data, or ``None`` if not found. + Optional[GenericCall]: An object containing the proposal's call data, or ``None`` if not found. This function is crucial for analyzing the types of proposals made within the network and the specific changes or actions they intend to implement or address. @@ -2219,7 +2212,7 @@ def get_proposal_call_data( if not hasattr(proposal_data, "serialize"): return None - return proposal_data.serialize() if proposal_data != None else None + return proposal_data.serialize() if proposal_data is not None else None def get_proposal_hashes(self, block: Optional[int] = None) -> Optional[List[str]]: """ @@ -2241,7 +2234,7 @@ def get_proposal_hashes(self, block: Optional[int] = None) -> Optional[List[str] if not hasattr(proposal_hashes, "serialize"): return None - return proposal_hashes.serialize() if proposal_hashes != None else None + return proposal_hashes.serialize() if proposal_hashes is not None else None def get_proposals( self, block: Optional[int] = None @@ -2255,8 +2248,8 @@ def get_proposals( block (Optional[int], optional): The blockchain block number to query the proposals. Returns: - Optional[Dict[str, Tuple[bittensor.ProposalCallData, bittensor.ProposalVoteData]]]: - A dictionary mapping proposal hashes to their corresponding call and vote data, or ``None`` if not available. + Optional[Dict[str, Tuple[bittensor.ProposalCallData, bittensor.ProposalVoteData]]]: A dictionary mapping + proposal hashes to their corresponding call and vote data, or ``None`` if not available. This function is integral for analyzing the governance activity on the Bittensor network, providing a holistic view of the proposals and their impact or potential changes within the network. @@ -2272,9 +2265,9 @@ def get_proposals( for proposal_hash in proposal_hashes } - ############## - #### Root #### - ############## + ######## + # Root # + ######## def root_register( self, @@ -2313,7 +2306,7 @@ def _do_root_register( wait_for_inclusion: bool = False, wait_for_finalization: bool = True, ) -> Tuple[bool, Optional[str]]: - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): # create extrinsic call call = self.substrate.compose_call( @@ -2384,24 +2377,24 @@ def root_set_weights( prompt=prompt, ) - ######################## - #### Registry Calls #### - ######################## - - """ Queries subtensor registry named storage with params and block. 
""" + ################## + # Registry Calls # + ################## + # Queries subtensor registry named storage with params and block. def query_identity( self, key: str, block: Optional[int] = None, - ) -> Optional[object]: + ) -> Optional[ScaleType]: """ Queries the identity of a neuron on the Bittensor blockchain using the given key. This function retrieves detailed identity information about a specific neuron, which is a crucial aspect of the network's decentralized identity and governance system. NOTE: - See the `Bittensor CLI documentation `_ for supported identity parameters. + See the `Bittensor CLI documentation `_ for supported identity + parameters. Args: key (str): The key used to query the neuron's identity, typically the neuron's ``SS58`` address. @@ -2414,7 +2407,7 @@ def query_identity( network-specific details, providing insights into the neuron's role and status within the Bittensor network. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): return self.substrate.query( module="Registry", @@ -2434,7 +2427,7 @@ def update_identity( self, wallet: "bittensor.wallet", identified: Optional[str] = None, - params: dict = {}, + params: Optional[dict] = None, wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: @@ -2443,11 +2436,13 @@ def update_identity( identity attributes, reflecting changes in their roles, stakes, or other network-specific parameters. NOTE: - See the `Bittensor CLI documentation `_ for supported identity parameters. + See the `Bittensor CLI documentation `_ for supported identity + parameters. Args: wallet (bittensor.wallet): The wallet associated with the neuron whose identity is being updated. - identified (str, optional): The identified ``SS58`` address of the neuron. Defaults to the wallet's coldkey address. + identified (str, optional): The identified ``SS58`` address of the neuron. Defaults to the wallet's coldkey + address. params (dict, optional): A dictionary of parameters to update in the neuron's identity. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. @@ -2461,11 +2456,13 @@ def update_identity( if identified is None: identified = wallet.coldkey.ss58_address + params = {} if params is None else params + call_params = bittensor.utils.wallet_utils.create_identity_dict(**params) call_params["identified"] = identified - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) - def make_substrate_call_with_retry(): + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) + def make_substrate_call_with_retry() -> bool: call = self.substrate.compose_call( call_module="Registry", call_function="set_identity", @@ -2490,12 +2487,31 @@ def make_substrate_call_with_retry(): return make_substrate_call_with_retry() - """ Make some commitment on-chain about arbitary data """ - + # Make some commitment on-chain about arbitrary data. def commit(self, wallet, netuid: int, data: str): + """ + Commits arbitrary data to the Bittensor network by publishing metadata. + + Args: + wallet (bittensor.wallet): The wallet associated with the neuron committing the data. + netuid (int): The unique identifier of the subnetwork. + data (str): The data to be committed to the network. 
+ """ publish_metadata(self, wallet, netuid, f"Raw{len(data)}", data.encode()) def get_commitment(self, netuid: int, uid: int, block: Optional[int] = None) -> str: + """ + Retrieves the on-chain commitment for a specific neuron in the Bittensor network. + + Args: + netuid (int): The unique identifier of the subnetwork. + uid (int): The unique identifier of the neuron. + block (Optional[int], optional): The block number to retrieve the commitment from. If None, the latest block + is used. Default is None. + + Returns: + str: The commitment data as a string. + """ metagraph = self.metagraph(netuid) hotkey = metagraph.hotkeys[uid] @@ -2505,18 +2521,17 @@ def get_commitment(self, netuid: int, uid: int, block: Optional[int] = None) -> return bytes.fromhex(hex_data).decode() - ######################## - #### Standard Calls #### - ######################## - - """ Queries subtensor named storage with params and block. """ + ################## + # Standard Calls # + ################## + # Queries subtensor named storage with params and block. def query_subtensor( self, name: str, block: Optional[int] = None, - params: Optional[List[object]] = [], - ) -> Optional[T]: + params: Optional[list] = None, + ) -> ScaleType: """ Queries named storage from the Subtensor module on the Bittensor blockchain. This function is used to retrieve specific data or parameters from the blockchain, such as stake, rank, or other neuron-specific attributes. @@ -2527,13 +2542,13 @@ def query_subtensor( params (Optional[List[object]], optional): A list of parameters to pass to the query function. Returns: - Optional[object]: An object containing the requested data if found, ``None`` otherwise. + query_response (ScaleType): An object containing the requested data. This query function is essential for accessing detailed information about the network and its neurons, providing valuable insights into the state and dynamics of the Bittensor ecosystem. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): return self.substrate.query( module="SubtensorModule", @@ -2546,17 +2561,17 @@ def make_substrate_call_with_retry(): return make_substrate_call_with_retry() - """ Queries subtensor map storage with params and block. """ - + # Queries subtensor map storage with params and block. def query_map_subtensor( self, name: str, block: Optional[int] = None, - params: Optional[List[object]] = [], + params: Optional[list] = None, ) -> QueryMapResult: """ Queries map storage from the Subtensor module on the Bittensor blockchain. This function is designed to - retrieve a map-like data structure, which can include various neuron-specific details or network-wide attributes. + retrieve a map-like data structure, which can include various neuron-specific details or network-wide + attributes. Args: name (str): The name of the map storage function to query. @@ -2570,7 +2585,7 @@ def query_map_subtensor( relationships within the Bittensor ecosystem, such as inter-neuronal connections and stake distributions. 
""" - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): return self.substrate.query_map( module="SubtensorModule", @@ -2585,7 +2600,7 @@ def make_substrate_call_with_retry(): def query_constant( self, module_name: str, constant_name: str, block: Optional[int] = None - ) -> Optional[object]: + ) -> Optional["ScaleType"]: """ Retrieves a constant from the specified module on the Bittensor blockchain. This function is used to access fixed parameters or values defined within the blockchain's modules, which are essential for @@ -2605,7 +2620,7 @@ def query_constant( """ @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) - def make_substrate_call_with_retry(): + def make_substrate_call_with_retry() -> Optional["ScaleType"]: return self.substrate.get_constant( module_name=module_name, constant_name=constant_name, @@ -2616,15 +2631,14 @@ def make_substrate_call_with_retry(): return make_substrate_call_with_retry() - """ Queries any module storage with params and block. """ - + # Queries any module storage with params and block. def query_module( self, module: str, name: str, block: Optional[int] = None, - params: Optional[List[object]] = [], - ) -> Optional[object]: + params: Optional[list] = None, + ) -> ScaleType: """ Queries any module storage on the Bittensor blockchain with the specified parameters and block number. This function is a generic query interface that allows for flexible and diverse data retrieval from @@ -2644,7 +2658,7 @@ def query_module( """ @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) - def make_substrate_call_with_retry(): + def make_substrate_call_with_retry() -> ScaleType: return self.substrate.query( module=module, storage_function=name, @@ -2656,18 +2670,18 @@ def make_substrate_call_with_retry(): return make_substrate_call_with_retry() - """ Queries any module map storage with params and block. """ - + # Queries any module map storage with params and block. def query_map( self, module: str, name: str, block: Optional[int] = None, - params: Optional[List[object]] = [], - ) -> Optional[object]: + params: Optional[list] = None, + ) -> QueryMapResult: """ Queries map storage from any module on the Bittensor blockchain. This function retrieves data structures - that represent key-value mappings, essential for accessing complex and structured data within the blockchain modules. + that represent key-value mappings, essential for accessing complex and structured data within the blockchain + modules. Args: module (str): The name of the module from which to query the map storage. @@ -2682,8 +2696,8 @@ def query_map( modules, offering insights into the network's state and the relationships between its different components. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) - def make_substrate_call_with_retry(): + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) + def make_substrate_call_with_retry() -> QueryMapResult: return self.substrate.query_map( module=module, storage_function=name, @@ -2700,7 +2714,7 @@ def state_call( method: str, data: str, block: Optional[int] = None, - ) -> Optional[object]: + ) -> Dict[Any, Any]: """ Makes a state call to the Bittensor blockchain, allowing for direct queries of the blockchain's state. This function is typically used for advanced queries that require specific method calls and data inputs. 
@@ -2711,14 +2725,14 @@ def state_call( block (Optional[int], optional): The blockchain block number at which to perform the state call. Returns: - Optional[object]: The result of the state call if successful, ``None`` otherwise. + result (Dict[Any, Any]): The result of the rpc call. The state call function provides a more direct and flexible way of querying blockchain data, useful for specific use cases where standard queries are insufficient. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) - def make_substrate_call_with_retry(): + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) + def make_substrate_call_with_retry() -> Dict[Any, Any]: block_hash = None if block is None else self.substrate.get_block_hash(block) params = [method, data] if block_hash: @@ -2789,9 +2803,7 @@ def _encode_params( call_definition: List[ParamWithTypes], params: Union[List[Any], Dict[str, Any]], ) -> str: - """ - Returns a hex encoded string of the params using their types. - """ + """Returns a hex encoded string of the params using their types.""" param_data = scalecodec.ScaleBytes(b"") for i, param in enumerate(call_definition["params"]): # type: ignore @@ -2806,14 +2818,14 @@ def _encode_params( return param_data.to_hex() - ##################################### - #### Hyper parameter calls. #### - ##################################### + ########################## + # Hyper parameter calls. # + ########################## def rho(self, netuid: int, block: Optional[int] = None) -> Optional[int]: """ - Retrieves the 'Rho' hyperparameter for a specified subnet within the Bittensor network. 'Rho' represents the global inflation rate, which directly influences the network's - token emission rate and economic model. + Retrieves the 'Rho' hyperparameter for a specified subnet within the Bittensor network. 'Rho' represents the + global inflation rate, which directly influences the network's token emission rate and economic model. Note: This is currently fixed such that the Bittensor blockchain emmits 7200 Tao per day. @@ -2887,7 +2899,8 @@ def difficulty(self, netuid: int, block: Optional[int] = None) -> Optional[int]: Optional[int]: The value of the 'Difficulty' hyperparameter if the subnet exists, ``None`` otherwise. The 'Difficulty' parameter directly impacts the network's security and integrity by setting the - computational effort required for validating transactions and participating in the network's consensus mechanism. + computational effort required for validating transactions and participating in the network's consensus + mechanism. """ if not self.subnet_exists(netuid, block): return None @@ -2918,8 +2931,7 @@ def recycle(self, netuid: int, block: Optional[int] = None) -> Optional[Balance] return None return Balance.from_rao(_result.value) - """ Returns network ImmunityPeriod hyper parameter """ - + # Returns network ImmunityPeriod hyper parameter. def immunity_period( self, netuid: int, block: Optional[int] = None ) -> Optional[int]: @@ -2948,7 +2960,18 @@ def immunity_period( def validator_batch_size( self, netuid: int, block: Optional[int] = None ) -> Optional[int]: - """Returns network ValidatorBatchSize hyper parameter""" + """ + Returns network ValidatorBatchSize hyper parameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If None, the latest block + is used. Default is `None`. 
+ + Returns: + Optional[int]: The value of the ValidatorBatchSize hyperparameter, or None if the subnetwork does not exist + or the parameter is not found. + """ if not self.subnet_exists(netuid, block): return None _result = self.query_subtensor("ValidatorBatchSize", block, [netuid]) @@ -2959,7 +2982,18 @@ def validator_batch_size( def validator_prune_len( self, netuid: int, block: Optional[int] = None ) -> Optional[int]: - """Returns network ValidatorPruneLen hyper parameter""" + """ + Returns network ValidatorPruneLen hyper parameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If None, the latest block + is used. Default is `None`. + + Returns: + Optional[int]: The value of the ValidatorPruneLen hyperparameter, or None if the subnetwork does not exist + or the parameter is not found. + """ if not self.subnet_exists(netuid, block): return None _result = self.query_subtensor("ValidatorPruneLen", block, [netuid]) @@ -2970,7 +3004,18 @@ def validator_prune_len( def validator_logits_divergence( self, netuid: int, block: Optional[int] = None ) -> Optional[float]: - """Returns network ValidatorLogitsDivergence hyper parameter""" + """ + Returns network ValidatorLogitsDivergence hyper parameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If None, the latest block + is used. Default is `None`. + + Returns: + Optional[float]: The value of the ValidatorLogitsDivergence hyperparameter, or None if the subnetwork does + not exist or the parameter is not found. + """ if not self.subnet_exists(netuid, block): return None _result = self.query_subtensor("ValidatorLogitsDivergence", block, [netuid]) @@ -3212,6 +3257,7 @@ def get_hotkey_owner( return None if self.does_hotkey_exist(hotkey_ss58, block): return _result.value + return None def get_axon_info( self, netuid: int, hotkey_ss58: str, block: Optional[int] = None From b5ee71756fd351ac4bd67892e434558e8a3adfe8 Mon Sep 17 00:00:00 2001 From: Roman Date: Tue, 21 May 2024 14:35:34 -0700 Subject: [PATCH 008/116] Part 2 for refactoring bittensor/subtensor.py subtensor().validator_logits_divergence() last refactored method --- bittensor/subtensor.py | 94 +++++++++++++++++++++--------------------- 1 file changed, 47 insertions(+), 47 deletions(-) diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index b6b01f9962..2cd8bdc660 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -1426,8 +1426,8 @@ def get_existential_deposit(self, block: Optional[int] = None) -> Optional[Balan balances below this threshold can be reaped to conserve network resources. Args: - block (Optional[int], optional): Block number at which to query the deposit amount. If ``None``, the - current block is used. + block (Optional[int]): Block number at which to query the deposit amount. If ``None``, the current block is + used. Returns: Optional[Balance]: The existential deposit amount, or ``None`` if the query fails. @@ -1449,7 +1449,7 @@ def get_existential_deposit(self, block: Optional[int] = None) -> Optional[Balan ########### def register_subnetwork( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", wait_for_inclusion: bool = False, wait_for_finalization=True, prompt: bool = False, @@ -1546,12 +1546,12 @@ def serve( port (int): The port number on which the neuron is serving. 
protocol (int): The protocol type used by the neuron (e.g., GRPC, HTTP). netuid (int): The unique identifier of the subnetwork. - placeholder1 (int, optional): Placeholder parameter for future extensions. Default is `0`. - placeholder2 (int, optional): Placeholder parameter for future extensions. Default is `0`. + placeholder1 (int, optional): Placeholder parameter for future extensions. Default is ``0``. + placeholder2 (int, optional): Placeholder parameter for future extensions. Default is ``0``. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. Default is - `False`. + ``False``. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. Default - is `True`. + is ``True``. Returns: bool: ``True`` if the serve registration is successful, False otherwise. @@ -2124,7 +2124,7 @@ def is_senate_member(self, hotkey_ss58: str, block: Optional[int] = None) -> boo Args: hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - block (Optional[int], optional): The blockchain block number at which to check senate membership. + block (Optional[int]): The blockchain block number at which to check senate membership. Returns: bool: ``True`` if the neuron is a senate member at the given block, False otherwise. @@ -2153,7 +2153,7 @@ def get_vote_data( Args: proposal_hash (str): The hash of the proposal for which voting data is requested. - block (Optional[int], optional): The blockchain block number to query the voting data. + block (Optional[int]): The blockchain block number to query the voting data. Returns: Optional[ProposalVoteData]: An object containing the proposal's voting data, or ``None`` if not found. @@ -2176,7 +2176,7 @@ def get_senate_members(self, block: Optional[int] = None) -> Optional[List[str]] responsible for governance and decision-making within the network. Args: - block (Optional[int], optional): The blockchain block number at which to retrieve the senate members. + block (Optional[int]): The blockchain block number at which to retrieve the senate members. Returns: Optional[List[str]]: A list of ``SS58`` addresses of current senate members, or ``None`` if not available. @@ -2198,7 +2198,7 @@ def get_proposal_call_data( Args: proposal_hash (str): The hash of the proposal. - block (Optional[int], optional): The blockchain block number at which to query the proposal call data. + block (Optional[int]): The blockchain block number at which to query the proposal call data. Returns: Optional[GenericCall]: An object containing the proposal's call data, or ``None`` if not found. @@ -2220,7 +2220,7 @@ def get_proposal_hashes(self, block: Optional[int] = None) -> Optional[List[str] uniquely identifies a proposal made within the network. Args: - block (Optional[int], optional): The blockchain block number to query the proposal hashes. + block (Optional[int]): The blockchain block number to query the proposal hashes. Returns: Optional[List[str]]: A list of proposal hashes, or ``None`` if not available. @@ -2245,7 +2245,7 @@ def get_proposals( by the senate. Args: - block (Optional[int], optional): The blockchain block number to query the proposals. + block (Optional[int]): The blockchain block number to query the proposals. Returns: Optional[Dict[str, Tuple[bittensor.ProposalCallData, bittensor.ProposalVoteData]]]: A dictionary mapping @@ -2398,7 +2398,7 @@ def query_identity( Args: key (str): The key used to query the neuron's identity, typically the neuron's ``SS58`` address. 
- block (Optional[int], optional): The blockchain block number at which to perform the query. + block (Optional[int]): The blockchain block number at which to perform the query. Returns: Optional[object]: An object containing the identity information of the neuron if found, ``None`` otherwise. @@ -2506,8 +2506,8 @@ def get_commitment(self, netuid: int, uid: int, block: Optional[int] = None) -> Args: netuid (int): The unique identifier of the subnetwork. uid (int): The unique identifier of the neuron. - block (Optional[int], optional): The block number to retrieve the commitment from. If None, the latest block - is used. Default is None. + block (Optional[int]): The block number to retrieve the commitment from. If None, the latest block + is used. Default is ``None``. Returns: str: The commitment data as a string. @@ -2538,7 +2538,7 @@ def query_subtensor( Args: name (str): The name of the storage function to query. - block (Optional[int], optional): The blockchain block number at which to perform the query. + block (Optional[int]): The blockchain block number at which to perform the query. params (Optional[List[object]], optional): A list of parameters to pass to the query function. Returns: @@ -2575,7 +2575,7 @@ def query_map_subtensor( Args: name (str): The name of the map storage function to query. - block (Optional[int], optional): The blockchain block number at which to perform the query. + block (Optional[int]): The blockchain block number at which to perform the query. params (Optional[List[object]], optional): A list of parameters to pass to the query function. Returns: @@ -2609,7 +2609,7 @@ def query_constant( Args: module_name (str): The name of the module containing the constant. constant_name (str): The name of the constant to retrieve. - block (Optional[int], optional): The blockchain block number at which to query the constant. + block (Optional[int]): The blockchain block number at which to query the constant. Returns: Optional[object]: The value of the constant if found, ``None`` otherwise. @@ -2619,8 +2619,8 @@ def query_constant( operational parameters. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) - def make_substrate_call_with_retry() -> Optional["ScaleType"]: + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) + def make_substrate_call_with_retry(): return self.substrate.get_constant( module_name=module_name, constant_name=constant_name, @@ -2647,7 +2647,7 @@ def query_module( Args: module (str): The name of the module from which to query data. name (str): The name of the storage function within the module. - block (Optional[int], optional): The blockchain block number at which to perform the query. + block (Optional[int]): The blockchain block number at which to perform the query. params (Optional[List[object]], optional): A list of parameters to pass to the query function. Returns: @@ -2686,7 +2686,7 @@ def query_map( Args: module (str): The name of the module from which to query the map storage. name (str): The specific storage function within the module to query. - block (Optional[int], optional): The blockchain block number at which to perform the query. + block (Optional[int]): The blockchain block number at which to perform the query. params (Optional[List[object]], optional): Parameters to be passed to the query. Returns: @@ -2722,7 +2722,7 @@ def state_call( Args: method (str): The method name for the state call. data (str): The data to be passed to the method. 
- block (Optional[int], optional): The blockchain block number at which to perform the state call. + block (Optional[int]): The blockchain block number at which to perform the state call. Returns: result (Dict[Any, Any]): The result of the rpc call. @@ -2757,7 +2757,7 @@ def query_runtime_api( runtime_api (str): The name of the runtime API to query. method (str): The specific method within the runtime API to call. params (Optional[List[ParamWithTypes]], optional): The parameters to pass to the method call. - block (Optional[int], optional): The blockchain block number at which to perform the query. + block (Optional[int]): The blockchain block number at which to perform the query. Returns: Optional[bytes]: The Scale Bytes encoded result from the runtime API call, or ``None`` if the call fails. @@ -2832,7 +2832,7 @@ def rho(self, netuid: int, block: Optional[int] = None) -> Optional[int]: Args: netuid (int): The unique identifier of the subnet. - block (Optional[int], optional): The blockchain block number at which to query the parameter. + block (Optional[int]): The blockchain block number at which to query the parameter. Returns: Optional[int]: The value of the 'Rho' hyperparameter if the subnet exists, ``None`` otherwise. @@ -2862,7 +2862,7 @@ def kappa(self, netuid: int, block: Optional[int] = None) -> Optional[float]: Args: netuid (int): The unique identifier of the subnet. - block (Optional[int], optional): The blockchain block number for the query. + block (Optional[int]): The blockchain block number for the query. Returns: Optional[float]: The value of the 'Kappa' hyperparameter if the subnet exists, None otherwise. @@ -2893,7 +2893,7 @@ def difficulty(self, netuid: int, block: Optional[int] = None) -> Optional[int]: Args: netuid (int): The unique identifier of the subnet. - block (Optional[int], optional): The blockchain block number for the query. + block (Optional[int]): The blockchain block number for the query. Returns: Optional[int]: The value of the 'Difficulty' hyperparameter if the subnet exists, ``None`` otherwise. @@ -2916,7 +2916,7 @@ def recycle(self, netuid: int, block: Optional[int] = None) -> Optional[Balance] Args: netuid (int): The unique identifier of the subnet. - block (Optional[int], optional): The blockchain block number for the query. + block (Optional[int]): The blockchain block number for the query. Returns: Optional[Balance]: The value of the 'Burn' hyperparameter if the subnet exists, None otherwise. @@ -2941,7 +2941,7 @@ def immunity_period( Args: netuid (int): The unique identifier of the subnet. - block (Optional[int], optional): The blockchain block number for the query. + block (Optional[int]): The blockchain block number for the query. Returns: Optional[int]: The value of the 'ImmunityPeriod' hyperparameter if the subnet exists, ``None`` otherwise. @@ -2965,8 +2965,8 @@ def validator_batch_size( Args: netuid (int): The unique identifier of the subnetwork. - block (Optional[int], optional): The block number to retrieve the parameter from. If None, the latest block - is used. Default is `None`. + block (Optional[int]): The block number to retrieve the parameter from. If None, the latest block + is used. Default is ``None``. Returns: Optional[int]: The value of the ValidatorBatchSize hyperparameter, or None if the subnetwork does not exist @@ -2987,8 +2987,8 @@ def validator_prune_len( Args: netuid (int): The unique identifier of the subnetwork. - block (Optional[int], optional): The block number to retrieve the parameter from. 
If None, the latest block - is used. Default is `None`. + block (Optional[int]): The block number to retrieve the parameter from. If None, the latest block + is used. Default is ``None``. Returns: Optional[int]: The value of the ValidatorPruneLen hyperparameter, or None if the subnetwork does not exist @@ -3009,8 +3009,8 @@ def validator_logits_divergence( Args: netuid (int): The unique identifier of the subnetwork. - block (Optional[int], optional): The block number to retrieve the parameter from. If None, the latest block - is used. Default is `None`. + block (Optional[int]): The block number to retrieve the parameter from. If None, the latest block + is used. Default is ``None``. Returns: Optional[float]: The value of the ValidatorLogitsDivergence hyperparameter, or None if the subnetwork does @@ -3983,7 +3983,7 @@ def is_hotkey_registered_any( Args: hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - block (Optional[int], optional): The blockchain block number at which to perform the check. + block (Optional[int]): The blockchain block number at which to perform the check. Returns: bool: ``True`` if the hotkey is registered on any subnet, False otherwise. @@ -4001,7 +4001,7 @@ def is_hotkey_registered_on_subnet( Args: hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. netuid (int): The unique identifier of the subnet. - block (Optional[int], optional): The blockchain block number at which to perform the check. + block (Optional[int]): The blockchain block number at which to perform the check. Returns: bool: ``True`` if the hotkey is registered on the specified subnet, False otherwise. @@ -4025,8 +4025,8 @@ def is_hotkey_registered( Args: hotkey_ss58 (str): The SS58 address of the neuron's hotkey. - netuid (Optional[int], optional): The unique identifier of the subnet to check the registration. If ``None``, the registration is checked across all subnets. - block (Optional[int], optional): The blockchain block number at which to perform the query. + netuid (Optional[int]): The unique identifier of the subnet to check the registration. If ``None``, the registration is checked across all subnets. + block (Optional[int]): The blockchain block number at which to perform the query. Returns: bool: ``True`` if the hotkey is registered in the specified context (either any subnet or a specific subnet), ``False`` otherwise. @@ -4049,7 +4049,7 @@ def get_uid_for_hotkey_on_subnet( Args: hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. netuid (int): The unique identifier of the subnet. - block (Optional[int], optional): The blockchain block number for the query. + block (Optional[int]): The blockchain block number for the query. Returns: Optional[int]: The UID of the neuron if it is registered on the subnet, ``None`` otherwise. @@ -4072,7 +4072,7 @@ def get_all_uids_for_hotkey( Args: hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - block (Optional[int], optional): The blockchain block number at which to perform the query. + block (Optional[int]): The blockchain block number at which to perform the query. Returns: List[int]: A list of UIDs associated with the given hotkey across various subnets. @@ -4095,7 +4095,7 @@ def get_netuids_for_hotkey( Args: hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - block (Optional[int], optional): The blockchain block number at which to perform the query. + block (Optional[int]): The blockchain block number at which to perform the query. 
Returns: List[int]: A list of netuids where the neuron is a member. @@ -4114,7 +4114,7 @@ def get_neuron_for_pubkey_and_subnet( Args: hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. netuid (int): The unique identifier of the subnet. - block (Optional[int], optional): The blockchain block number at which to perform the query. + block (Optional[int]): The blockchain block number at which to perform the query. Returns: Optional[NeuronInfo]: Detailed information about the neuron if found, ``None`` otherwise. @@ -4138,7 +4138,7 @@ def get_all_neurons_for_pubkey( Args: hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - block (Optional[int], optional): The blockchain block number for the query. + block (Optional[int]): The blockchain block number for the query. Returns: List[NeuronInfo]: A list of NeuronInfo objects detailing the neuron's presence across various subnets. @@ -4161,7 +4161,7 @@ def neuron_has_validator_permit( Args: uid (int): The unique identifier of the neuron. netuid (int): The unique identifier of the subnet. - block (Optional[int], optional): The blockchain block number for the query. + block (Optional[int]): The blockchain block number for the query. Returns: Optional[bool]: ``True`` if the neuron has a validator permit, False otherwise. @@ -4185,7 +4185,7 @@ def neuron_for_wallet( Args: wallet (bittensor.wallet): The wallet associated with the neuron. netuid (int): The unique identifier of the subnet. - block (Optional[int], optional): The blockchain block number at which to perform the query. + block (Optional[int]): The blockchain block number at which to perform the query. Returns: Optional[NeuronInfo]: Detailed information about the neuron if found, ``None`` otherwise. From 540604ef890f49042749e9d1020e7710887d8d7d Mon Sep 17 00:00:00 2001 From: Roman Date: Tue, 21 May 2024 16:58:56 -0700 Subject: [PATCH 009/116] Part 2 request for review comments. Also, high-level refactoring (explicit places) was carried out for the entire module. More detailed refactoring will continue with the "validator_logits_divergence" method. --- bittensor/subtensor.py | 252 ++++++++++++++++++++++------------------- 1 file changed, 134 insertions(+), 118 deletions(-) diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 5b4f1d383a..e3f674657d 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -25,7 +25,7 @@ import functools import socket import time -from typing import List, Dict, Union, Optional, Tuple, TypedDict, Any, TypeVar +from typing import List, Dict, Union, Optional, Tuple, TypedDict, Any import numpy as np import scalecodec @@ -38,10 +38,9 @@ from substrateinterface.base import QueryMapResult, SubstrateInterface, ExtrinsicReceipt from substrateinterface.exceptions import SubstrateRequestException - import bittensor -from bittensor.utils import torch from bittensor.btlogging import logging as _logger +from bittensor.utils import torch from .chain_data import ( NeuronInfo, DelegateInfo, @@ -102,11 +101,8 @@ from .utils.registration import POWSolution from .utils.subtensor import get_subtensor_errors - KEY_NONCE: Dict[str, int] = {} -T = TypeVar("T") - ####### # Monkey patch in caching the convert_type_string method ####### @@ -257,7 +253,7 @@ def __init__( url=self.chain_endpoint, type_registry=bittensor.__type_registry__, ) - except ConnectionRefusedError as e: + except ConnectionRefusedError: _logger.error( f"Could not connect to {self.network} network with {self.chain_endpoint} chain endpoint. 
Exiting...", ) @@ -298,7 +294,7 @@ def __repr__(self) -> str: return self.__str__() @staticmethod - def config() -> bittensor.config: + def config() -> "bittensor.config": """ Creates and returns a Bittensor configuration object. @@ -319,7 +315,7 @@ def help(cls): parser.print_help() @classmethod - def add_args(cls, parser: argparse.ArgumentParser, prefix: Optional[str] = None): + def add_args(cls, parser: "argparse.ArgumentParser", prefix: Optional[str] = None): """ Adds command-line arguments to the provided ArgumentParser for configuring the Subtensor settings. @@ -418,7 +414,7 @@ def determine_chain_endpoint_and_network(network: str): return "unknown", network @staticmethod - def setup_config(network: str, config: bittensor.config): + def setup_config(network: str, config: "bittensor.config"): """ Sets up and returns the configuration for the Subtensor network and endpoint. @@ -498,7 +494,7 @@ def close(self): ############## def nominate( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", wait_for_finalization: bool = False, wait_for_inclusion: bool = True, ) -> bool: @@ -528,7 +524,7 @@ def nominate( def delegate( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", delegate_ss58: Optional[str] = None, amount: Optional[Union[Balance, float]] = None, wait_for_inclusion: bool = True, @@ -567,7 +563,7 @@ def delegate( def undelegate( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", delegate_ss58: Optional[str] = None, amount: Optional[Union[Balance, float]] = None, wait_for_inclusion: bool = True, @@ -604,7 +600,7 @@ def undelegate( def set_take( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", delegate_ss58: Optional[str] = None, take: float = 0.0, wait_for_inclusion: bool = True, @@ -670,7 +666,7 @@ def set_take( def send_extrinsic( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", module: str, function: str, params: dict, @@ -774,7 +770,7 @@ def send_extrinsic( ############### def set_weights( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", netuid: int, uids: Union[NDArray[np.int64], "torch.LongTensor", list], weights: Union[NDArray[np.float32], "torch.FloatTensor", list], @@ -792,8 +788,10 @@ def set_weights( Args: wallet (bittensor.wallet): The wallet associated with the neuron setting the weights. netuid (int): The unique identifier of the subnet. - uids (Union[NDArray[np.int64], torch.LongTensor, list]): The list of neuron UIDs that the weights are being set for. - weights (Union[NDArray[np.float32], torch.FloatTensor, list]): The corresponding weights to be set for each UID. + uids (Union[NDArray[np.int64], torch.LongTensor, list]): The list of neuron UIDs that the weights are being + set for. + weights (Union[NDArray[np.float32], torch.FloatTensor, list]): The corresponding weights to be set for each + UID. version_key (int, optional): Version key for compatibility with the network. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. 
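# An illustrative sketch of calling set_weights() as documented above, assuming a
# registered validator hotkey on the target subnet. The netuid, UIDs and weights are
# placeholders; the return format may be a bool or a (success, message) pair depending
# on the library version.
import numpy as np
import bittensor

subtensor = bittensor.subtensor(network="finney")
wallet = bittensor.wallet(name="my_coldkey", hotkey="my_validator")

uids = np.array([0, 1, 2], dtype=np.int64)
weights = np.array([0.5, 0.3, 0.2], dtype=np.float32)  # normalized internally before emission

result = subtensor.set_weights(
    wallet=wallet,
    netuid=1,
    uids=uids,
    weights=weights,
    wait_for_inclusion=True,
    prompt=False,
)
print("set_weights result:", result)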
@@ -836,7 +834,7 @@ def set_weights( def _do_set_weights( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", uids: List[int], vals: List[int], netuid: int, @@ -905,7 +903,7 @@ def make_substrate_call_with_retry(): ################ def register( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", netuid: int, wait_for_inclusion: bool = False, wait_for_finalization: bool = True, @@ -967,8 +965,8 @@ def register( def swap_hotkey( self, - wallet: bittensor.wallet, - new_wallet: bittensor.wallet, + wallet: "bittensor.wallet", + new_wallet: "bittensor.wallet", wait_for_inclusion: bool = False, wait_for_finalization: bool = True, prompt: bool = False, @@ -1001,7 +999,7 @@ def swap_hotkey( def run_faucet( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", wait_for_inclusion: bool = False, wait_for_finalization: bool = True, prompt: bool = False, @@ -1065,7 +1063,7 @@ def run_faucet( def burned_register( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", netuid: int, wait_for_inclusion: bool = False, wait_for_finalization: bool = True, @@ -1099,7 +1097,7 @@ def burned_register( def _do_pow_register( self, netuid: int, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", pow_result: POWSolution, wait_for_inclusion: bool = False, wait_for_finalization: bool = True, @@ -1161,7 +1159,7 @@ def make_substrate_call_with_retry(): def _do_burned_register( self, netuid: int, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", wait_for_inclusion: bool = False, wait_for_finalization: bool = True, ) -> Tuple[bool, Optional[str]]: @@ -1218,8 +1216,8 @@ def make_substrate_call_with_retry(): def _do_swap_hotkey( self, - wallet: bittensor.wallet, - new_wallet: bittensor.wallet, + wallet: "bittensor.wallet", + new_wallet: "bittensor.wallet", wait_for_inclusion: bool = False, wait_for_finalization: bool = True, ) -> Tuple[bool, Optional[str]]: @@ -1277,7 +1275,7 @@ def make_substrate_call_with_retry(): ############ def transfer( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", dest: str, amount: Union[Balance, float], wait_for_inclusion: bool = True, @@ -1314,8 +1312,8 @@ def transfer( ) def get_transfer_fee( - self, wallet: bittensor.wallet, dest: str, value: Union[Balance, float, int] - ) -> Balance: + self, wallet: "bittensor.wallet", dest: str, value: Union["Balance", float, int] + ) -> "Balance": """ Calculates the transaction fee for transferring tokens from a wallet to a specified destination address. This function simulates the transfer to estimate the associated cost, taking into account the current @@ -1372,9 +1370,9 @@ def get_transfer_fee( def _do_transfer( self, - wallet: bittensor.wallet, + wallet: "bittensor.wallet", dest: str, - transfer_balance: Balance, + transfer_balance: "Balance", wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> Tuple[bool, Optional[str], Optional[str]]: @@ -1422,7 +1420,9 @@ def make_substrate_call_with_retry(): return make_substrate_call_with_retry() - def get_existential_deposit(self, block: Optional[int] = None) -> Optional[Balance]: + def get_existential_deposit( + self, block: Optional[int] = None + ) -> Optional["Balance"]: """ Retrieves the existential deposit amount for the Bittensor blockchain. The existential deposit is the minimum amount of TAO required for an account to exist on the blockchain. 
Accounts with @@ -1718,7 +1718,7 @@ def make_substrate_call_with_retry(): def _do_associate_ips( self, wallet: "bittensor.wallet", - ip_info_list: List[IPInfo], + ip_info_list: List["IPInfo"], netuid: int, wait_for_inclusion: bool = False, wait_for_finalization: bool = True, @@ -1774,7 +1774,7 @@ def add_stake( self, wallet: "bittensor.wallet", hotkey_ss58: Optional[str] = None, - amount: Optional[Union[Balance, float]] = None, + amount: Optional[Union["Balance", float]] = None, wait_for_inclusion: bool = True, wait_for_finalization: bool = False, prompt: bool = False, @@ -1812,7 +1812,7 @@ def add_stake_multiple( self, wallet: "bittensor.wallet", hotkey_ss58s: List[str], - amounts: Optional[List[Union[Balance, float]]] = None, + amounts: Optional[List[Union["Balance", float]]] = None, wait_for_inclusion: bool = True, wait_for_finalization: bool = False, prompt: bool = False, @@ -1849,7 +1849,7 @@ def _do_stake( self, wallet: "bittensor.wallet", hotkey_ss58: str, - amount: Balance, + amount: "Balance", wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: @@ -1901,7 +1901,7 @@ def unstake_multiple( self, wallet: "bittensor.wallet", hotkey_ss58s: List[str], - amounts: Optional[List[Union[Balance, float]]] = None, + amounts: Optional[List[Union["Balance", float]]] = None, wait_for_inclusion: bool = True, wait_for_finalization: bool = False, prompt: bool = False, @@ -1939,7 +1939,7 @@ def unstake( self, wallet: "bittensor.wallet", hotkey_ss58: Optional[str] = None, - amount: Optional[Union[Balance, float]] = None, + amount: Optional[Union["Balance", float]] = None, wait_for_inclusion: bool = True, wait_for_finalization: bool = False, prompt: bool = False, @@ -1976,7 +1976,7 @@ def _do_unstake( self, wallet: "bittensor.wallet", hotkey_ss58: str, - amount: Balance, + amount: "Balance", wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: @@ -2194,7 +2194,7 @@ def get_senate_members(self, block: Optional[int] = None) -> Optional[List[str]] def get_proposal_call_data( self, proposal_hash: str, block: Optional[int] = None - ) -> Optional[GenericCall]: + ) -> Optional["GenericCall"]: """ Retrieves the call data of a specific proposal on the Bittensor blockchain. This data provides detailed information about the proposal, including its purpose and specifications. @@ -2241,7 +2241,7 @@ def get_proposal_hashes(self, block: Optional[int] = None) -> Optional[List[str] def get_proposals( self, block: Optional[int] = None - ) -> Optional[Dict[str, Tuple[GenericCall, ProposalVoteData]]]: + ) -> Optional[Dict[str, Tuple["GenericCall", "ProposalVoteData"]]]: """ Retrieves all active proposals on the Bittensor blockchain, along with their call and voting data. This comprehensive view allows for a thorough understanding of the proposals and their reception @@ -2356,8 +2356,10 @@ def root_set_weights( Args: wallet (bittensor.wallet): The wallet associated with the neuron setting the weights. - netuids (Union[NDArray[np.int64], torch.LongTensor, list]): The list of neuron UIDs for which weights are being set. - weights (Union[NDArray[np.float32], torch.FloatTensor, list]): The corresponding weights to be set for each UID. + netuids (Union[NDArray[np.int64], torch.LongTensor, list]): The list of neuron UIDs for which weights are + being set. + weights (Union[NDArray[np.float32], torch.FloatTensor, list]): The corresponding weights to be set for each + UID. version_key (int, optional): Version key for compatibility with the network. 
wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. @@ -2389,7 +2391,7 @@ def query_identity( self, key: str, block: Optional[int] = None, - ) -> Optional[ScaleType]: + ) -> Optional["ScaleType"]: """ Queries the identity of a neuron on the Bittensor blockchain using the given key. This function retrieves detailed identity information about a specific neuron, which is a crucial aspect of the network's decentralized @@ -2404,7 +2406,8 @@ def query_identity( block (Optional[int]): The blockchain block number at which to perform the query. Returns: - Optional[object]: An object containing the identity information of the neuron if found, ``None`` otherwise. + Optional[ScaleType]: An object containing the identity information of the neuron if found, ``None`` + otherwise. The identity information can include various attributes such as the neuron's stake, rank, and other network-specific details, providing insights into the neuron's role and status within the Bittensor network. @@ -2534,7 +2537,7 @@ def query_subtensor( name: str, block: Optional[int] = None, params: Optional[list] = None, - ) -> ScaleType: + ) -> "ScaleType": """ Queries named storage from the Subtensor module on the Bittensor blockchain. This function is used to retrieve specific data or parameters from the blockchain, such as stake, rank, or other neuron-specific attributes. @@ -2570,7 +2573,7 @@ def query_map_subtensor( name: str, block: Optional[int] = None, params: Optional[list] = None, - ) -> QueryMapResult: + ) -> "QueryMapResult": """ Queries map storage from the Subtensor module on the Bittensor blockchain. This function is designed to retrieve a map-like data structure, which can include various neuron-specific details or network-wide @@ -2615,7 +2618,7 @@ def query_constant( block (Optional[int]): The blockchain block number at which to query the constant. Returns: - Optional[object]: The value of the constant if found, ``None`` otherwise. + Optional[ScaleType]: The value of the constant if found, ``None`` otherwise. Constants queried through this function can include critical network parameters such as inflation rates, consensus rules, or validation thresholds, providing a deeper understanding of the Bittensor network's @@ -2641,7 +2644,7 @@ def query_module( name: str, block: Optional[int] = None, params: Optional[list] = None, - ) -> ScaleType: + ) -> "ScaleType": """ Queries any module storage on the Bittensor blockchain with the specified parameters and block number. This function is a generic query interface that allows for flexible and diverse data retrieval from @@ -2654,14 +2657,14 @@ def query_module( params (Optional[List[object]], optional): A list of parameters to pass to the query function. Returns: - Optional[object]: An object containing the requested data if found, ``None`` otherwise. + Optional[ScaleType]: An object containing the requested data if found, ``None`` otherwise. This versatile query function is key to accessing a wide range of data and insights from different parts of the Bittensor blockchain, enhancing the understanding and analysis of the network's state and dynamics. 
""" - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) - def make_substrate_call_with_retry() -> ScaleType: + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) + def make_substrate_call_with_retry() -> "ScaleType": return self.substrate.query( module=module, storage_function=name, @@ -2693,14 +2696,14 @@ def query_map( params (Optional[List[object]], optional): Parameters to be passed to the query. Returns: - Optional[object]: A data structure representing the map storage if found, ``None`` otherwise. + result (QueryMapResult): A data structure representing the map storage if found, ``None`` otherwise. This function is particularly useful for retrieving detailed and structured data from various blockchain modules, offering insights into the network's state and the relationships between its different components. """ @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) - def make_substrate_call_with_retry() -> QueryMapResult: + def make_substrate_call_with_retry() -> "QueryMapResult": return self.substrate.query_map( module=module, storage_function=name, @@ -2803,7 +2806,7 @@ def query_runtime_api( def _encode_params( self, - call_definition: List[ParamWithTypes], + call_definition: List["ParamWithTypes"], params: Union[List[Any], Dict[str, Any]], ) -> str: """Returns a hex encoded string of the params using their types.""" @@ -2912,7 +2915,7 @@ def difficulty(self, netuid: int, block: Optional[int] = None) -> Optional[int]: return None return _result.value - def recycle(self, netuid: int, block: Optional[int] = None) -> Optional[Balance]: + def recycle(self, netuid: int, block: Optional[int] = None) -> Optional["Balance"]: """ Retrieves the 'Burn' hyperparameter for a specified subnet. The 'Burn' parameter represents the amount of Tao that is effectively recycled within the Bittensor network. @@ -3203,9 +3206,9 @@ def tempo(self, netuid: int, block: Optional[int] = None) -> Optional[int]: return None return _result.value - ########################## - #### Account functions ### - ########################## + ##################### + # Account functions # + ##################### def get_total_stake_for_hotkey( self, ss58_address: str, block: Optional[int] = None @@ -3356,7 +3359,9 @@ def serving_rate_limit( ) -> Optional[int]: """ Retrieves the serving rate limit for a specific subnet within the Bittensor network. - This rate limit determines how often you can change your node's IP address on the blockchain. Expressed in number of blocks. Applies to both subnet validator and subnet miner nodes. Used when you move your node to a new machine. + This rate limit determines how often you can change your node's IP address on the blockchain. Expressed in + number of blocks. Applies to both subnet validator and subnet miner nodes. Used when you move your node to a new + machine. Args: netuid (int): The unique identifier of the subnet. 
@@ -3396,9 +3401,9 @@ def tx_rate_limit(self, block: Optional[int] = None) -> Optional[int]: return None return _result.value - ##################################### - #### Network Parameters #### - ##################################### + ###################### + # Network Parameters # + ###################### def subnet_exists(self, netuid: int, block: Optional[int] = None) -> bool: """ @@ -3516,7 +3521,7 @@ def get_subnet_connection_requirements( """ result = self.query_map_subtensor("NetworkConnect", block, [netuid]) if result.records: - return {str(tuple[0].value): tuple[1].value for tuple in result.records} + return {str(tuple_[0].value): tuple_[1].value for tuple_ in result.records} else: return {} @@ -3558,7 +3563,7 @@ def get_all_subnets_info(self, block: Optional[int] = None) -> List[SubnetInfo]: the roles of different subnets, and their unique features. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): block_hash = None if block is None else self.substrate.get_block_hash(block) params = [] @@ -3595,7 +3600,7 @@ def get_subnet_info( subnet, including its governance, performance, and role within the broader network. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): block_hash = None if block is None else self.substrate.get_block_hash(block) params = [netuid] @@ -3670,9 +3675,9 @@ def get_subnet_owner( return None return _result.value - #################### - #### Nomination #### - #################### + ############## + # Nomination # + ############## def is_hotkey_delegate(self, hotkey_ss58: str, block: Optional[int] = None) -> bool: """ Determines whether a given hotkey (public key) is a delegate on the Bittensor network. This function @@ -3726,7 +3731,8 @@ def get_nominators_for_hotkey( block (Optional[int], optional): The blockchain block number for the query. Returns: - Union[List[Tuple[str, Balance]], int]: A list of tuples containing each nominator's address and staked amount or 0. + Union[List[Tuple[str, Balance]], int]: A list of tuples containing each nominator's address and staked amount + or 0. This function provides insights into the neuron's support network within the Bittensor ecosystem, indicating its trust and collaboration relationships. @@ -3755,10 +3761,10 @@ def get_delegate_by_hotkey( the Bittensor network's consensus and governance structures. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) - def make_substrate_call_with_retry(encoded_hotkey: List[int]): - block_hash = None if block == None else self.substrate.get_block_hash(block) - params = [encoded_hotkey] + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) + def make_substrate_call_with_retry(encoded_hotkey_: List[int]): + block_hash = None if block is None else self.substrate.get_block_hash(block) + params = [encoded_hotkey_] if block_hash: params = params + [block_hash] return self.substrate.rpc_request( @@ -3777,9 +3783,11 @@ def make_substrate_call_with_retry(encoded_hotkey: List[int]): def get_delegates_lite(self, block: Optional[int] = None) -> List[DelegateInfoLite]: """ - Retrieves a lighter list of all delegate neurons within the Bittensor network. This function provides an overview of the neurons that are actively involved in the network's delegation system. 
+ Retrieves a lighter list of all delegate neurons within the Bittensor network. This function provides an + overview of the neurons that are actively involved in the network's delegation system. - Analyzing the delegate population offers insights into the network's governance dynamics and the distribution of trust and responsibility among participating neurons. + Analyzing the delegate population offers insights into the network's governance dynamics and the distribution + of trust and responsibility among participating neurons. This is a lighter version of :func:`get_delegates`. @@ -3812,9 +3820,11 @@ def make_substrate_call_with_retry(): def get_delegates(self, block: Optional[int] = None) -> List[DelegateInfo]: """ - Retrieves a list of all delegate neurons within the Bittensor network. This function provides an overview of the neurons that are actively involved in the network's delegation system. + Retrieves a list of all delegate neurons within the Bittensor network. This function provides an overview of the + neurons that are actively involved in the network's delegation system. - Analyzing the delegate population offers insights into the network's governance dynamics and the distribution of trust and responsibility among participating neurons. + Analyzing the delegate population offers insights into the network's governance dynamics and the distribution of + trust and responsibility among participating neurons. For a lighter version of this function, see :func:`get_delegates_lite`. @@ -3857,16 +3867,17 @@ def get_delegated( block (Optional[int], optional): The blockchain block number for the query. Returns: - List[Tuple[DelegateInfo, Balance]]: A list of tuples, each containing a delegate's information and staked amount. + List[Tuple[DelegateInfo, Balance]]: A list of tuples, each containing a delegate's information and staked + amount. This function is important for account holders to understand their stake allocations and their involvement in the network's delegation and consensus mechanisms. 
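# A hypothetical sketch of reading delegation state with the helpers above. The coldkey
# address is a placeholder, and the attribute names are assumed to follow the
# DelegateInfo / DelegateInfoLite chain-data classes imported at the top of this module.
import bittensor

subtensor = bittensor.subtensor(network="finney")

# Cheap listing of all delegates (no per-nominator breakdown).
for delegate in subtensor.get_delegates_lite():
    print(delegate.delegate_ss58, delegate.take)

# Stake a particular coldkey has delegated, as (DelegateInfo, Balance) pairs.
for delegate_info, staked in subtensor.get_delegated(coldkey_ss58="5F...placeholder"):
    print(delegate_info.hotkey_ss58, staked)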
""" - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) - def make_substrate_call_with_retry(encoded_coldkey: List[int]): - block_hash = None if block == None else self.substrate.get_block_hash(block) - params = [encoded_coldkey] + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) + def make_substrate_call_with_retry(encoded_coldkey_: List[int]): + block_hash = None if block is None else self.substrate.get_block_hash(block) + params = [encoded_coldkey_] if block_hash: params = params + [block_hash] return self.substrate.rpc_request( @@ -3883,9 +3894,9 @@ def make_substrate_call_with_retry(encoded_coldkey: List[int]): return DelegateInfo.delegated_list_from_vec_u8(result) - ########################### - #### Stake Information #### - ########################### + ##################### + # Stake Information # + ##################### def get_stake_info_for_coldkey( self, coldkey_ss58: str, block: Optional[int] = None @@ -3965,7 +3976,7 @@ def get_stake_info_for_coldkeys( def get_minimum_required_stake( self, ): - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): return self.substrate.query( module="SubtensorModule", storage_function="NominatorMinRequiredStake" @@ -3974,9 +3985,9 @@ def make_substrate_call_with_retry(): result = make_substrate_call_with_retry() return Balance.from_rao(result.decode()) - ######################################## - #### Neuron information per subnet #### - ######################################## + ################################# + # Neuron information per subnet # + ################################# def is_hotkey_registered_any( self, hotkey_ss58: str, block: Optional[int] = None @@ -4012,7 +4023,7 @@ def is_hotkey_registered_on_subnet( This function helps in assessing the participation of a neuron in a particular subnet, indicating its specific area of operation or influence within the network. """ - return self.get_uid_for_hotkey_on_subnet(hotkey_ss58, netuid, block) != None + return self.get_uid_for_hotkey_on_subnet(hotkey_ss58, netuid, block) is not None def is_hotkey_registered( self, @@ -4028,11 +4039,13 @@ def is_hotkey_registered( Args: hotkey_ss58 (str): The SS58 address of the neuron's hotkey. - netuid (Optional[int]): The unique identifier of the subnet to check the registration. If ``None``, the registration is checked across all subnets. + netuid (Optional[int]): The unique identifier of the subnet to check the registration. If ``None``, the + registration is checked across all subnets. block (Optional[int]): The blockchain block number at which to perform the query. Returns: - bool: ``True`` if the hotkey is registered in the specified context (either any subnet or a specific subnet), ``False`` otherwise. + bool: ``True`` if the hotkey is registered in the specified context (either any subnet or a specific + subnet), ``False`` otherwise. This function is important for verifying the active status of neurons in the Bittensor network. 
It aids in understanding whether a neuron is eligible to participate in network processes such as consensus, @@ -4222,9 +4235,9 @@ def neuron_for_uid( if uid is None: return NeuronInfo._null_neuron() - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): - block_hash = None if block == None else self.substrate.get_block_hash(block) + block_hash = None if block is None else self.substrate.get_block_hash(block) params = [netuid, uid] if block_hash: params = params + [block_hash] @@ -4367,7 +4380,8 @@ def metagraph( bittensor.Metagraph: The metagraph representing the subnet's structure and neuron relationships. The metagraph is an essential tool for understanding the topology and dynamics of the Bittensor - network's decentralized architecture, particularly in relation to neuron interconnectivity and consensus processes. + network's decentralized architecture, particularly in relation to neuron interconnectivity and consensus + processes. """ metagraph_ = bittensor.metagraph( network=self.network, netuid=netuid, lite=lite, sync=False @@ -4444,7 +4458,8 @@ def bonds( block (Optional[int]): The blockchain block number for the query. Returns: - List[Tuple[int, List[Tuple[int, int]]]]: A list of tuples mapping each neuron's UID to its bonds with other neurons. + List[Tuple[int, List[Tuple[int, int]]]]: A list of tuples mapping each neuron's UID to its bonds with other + neurons. Understanding bond distributions is crucial for analyzing the trust dynamics and market behavior within the subnet. It reflects how neurons recognize and invest in each other's intelligence and @@ -4462,7 +4477,7 @@ def bonds( def associated_validator_ip_info( self, netuid: int, block: Optional[int] = None - ) -> Optional[List[IPInfo]]: + ) -> Optional[List["IPInfo"]]: """ Retrieves the list of all validator IP addresses associated with a specific subnet in the Bittensor network. This information is crucial for network communication and the identification of validator nodes. @@ -4472,7 +4487,8 @@ def associated_validator_ip_info( block (Optional[int]): The blockchain block number for the query. Returns: - Optional[List[IPInfo]]: A list of IPInfo objects for validator nodes in the subnet, or ``None`` if no validators are associated. + Optional[List[IPInfo]]: A list of IPInfo objects for validator nodes in the subnet, or ``None`` if no + validators are associated. Validator IP information is key for establishing secure and reliable connections within the network, facilitating consensus and validation processes critical for the network's integrity and performance. 
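A note on the recurring retry-decorator change in the hunks above: with the `retry` package these arguments mean up to `tries` attempts, sleeping `delay` seconds after the first failure and multiplying that sleep by `backoff` (capped at `max_delay`) after each further failure, so moving `delay` from 2 to 1 shortens the waits for three attempts from roughly 2s then 4s to 1s then 2s. A minimal sketch of the same settings; the function name is illustrative, not taken from the diff:

    from retry import retry

    @retry(delay=1, tries=3, backoff=2, max_delay=4)
    def flaky_substrate_query():
        # attempt 1 -> sleep 1s -> attempt 2 -> sleep 2s -> attempt 3 -> raise if still failing
        raise ConnectionError("transient RPC failure")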
@@ -4520,9 +4536,9 @@ def get_subnet_burn_cost(self, block: Optional[int] = None) -> Optional[str]: return lock_cost - ################ - ## Extrinsics ## - ################ + ############## + # Extrinsics # + ############## def _do_delegation( self, @@ -4532,7 +4548,7 @@ def _do_delegation( wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( call_module="SubtensorModule", @@ -4566,7 +4582,7 @@ def _do_undelegation( wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( call_module="SubtensorModule", @@ -4601,7 +4617,7 @@ def _do_nominate( wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( call_module="SubtensorModule", @@ -4635,7 +4651,7 @@ def _do_increase_take( wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: - @retry(delay=2, tries=3, backoff=2, max_delay=4) + @retry(delay=1, tries=3, backoff=2, max_delay=4) def make_substrate_call_with_retry(): with self.substrate as substrate: call = substrate.compose_call( @@ -4673,7 +4689,7 @@ def _do_decrease_take( wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: - @retry(delay=2, tries=3, backoff=2, max_delay=4) + @retry(delay=1, tries=3, backoff=2, max_delay=4) def make_substrate_call_with_retry(): with self.substrate as substrate: call = substrate.compose_call( @@ -4703,9 +4719,9 @@ def make_substrate_call_with_retry(): return make_substrate_call_with_retry() - ################ - #### Legacy #### - ################ + ########## + # Legacy # + ########## def get_balance(self, address: str, block: Optional[int] = None) -> Balance: """ @@ -4724,7 +4740,7 @@ def get_balance(self, address: str, block: Optional[int] = None) -> Balance: """ try: - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): return self.substrate.query( module="System", @@ -4755,9 +4771,9 @@ def get_current_block(self) -> int: operations on the blockchain. It serves as a reference point for network activities and data synchronization. """ - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): - return self.substrate.get_block_number(None) + return self.substrate.get_block_number(None) # type: ignore return make_substrate_call_with_retry() @@ -4776,7 +4792,7 @@ def get_balances(self, block: Optional[int] = None) -> Dict[str, Balance]: including the distribution of financial resources and the financial status of network participants. 
""" - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): return self.substrate.query_map( module="System", @@ -4795,7 +4811,7 @@ def make_substrate_call_with_retry(): @staticmethod def _null_neuron() -> NeuronInfo: - neuron = NeuronInfo( # type: ignore + neuron = NeuronInfo( uid=0, netuid=0, active=0, @@ -4816,7 +4832,7 @@ def _null_neuron() -> NeuronInfo: is_null=True, coldkey="000000000000000000000000000000000000000000000000", hotkey="000000000000000000000000000000000000000000000000", - ) + ) # type: ignore return neuron def get_block_hash(self, block_id: int) -> str: From 24acf1e9aa2f0db1b9ad7917d72e977ad998af21 Mon Sep 17 00:00:00 2001 From: Liam Date: Wed, 22 May 2024 12:07:03 +0400 Subject: [PATCH 010/116] fix: sufix runtime error --- bittensor/extrinsics/delegation.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/bittensor/extrinsics/delegation.py b/bittensor/extrinsics/delegation.py index 66accfc983..9583b80a76 100644 --- a/bittensor/extrinsics/delegation.py +++ b/bittensor/extrinsics/delegation.py @@ -409,7 +409,7 @@ def decrease_take_extrinsic( ) bittensor.logging.success( prefix="Decrease Delegate Take", - sufix="Finalized: " + str(success), + suffix="Finalized: " + str(success), ) return success @@ -419,7 +419,7 @@ def decrease_take_extrinsic( ":cross_mark: [red]Failed[/red]: error:{}".format(e) ) bittensor.logging.warning( - prefix="Set weights", sufix="Failed: " + str(e) + prefix="Set weights", suffix="Failed: " + str(e) ) return False @@ -469,7 +469,7 @@ def increase_take_extrinsic( ) bittensor.logging.success( prefix="Increase Delegate Take", - sufix="Finalized: " + str(success), + suffix="Finalized: " + str(success), ) return success @@ -479,14 +479,14 @@ def increase_take_extrinsic( ":cross_mark: [red]Failed[/red]: error:{}".format(e) ) bittensor.logging.warning( - prefix="Set weights", sufix="Failed: " + str(e) + prefix="Set weights", suffix="Failed: " + str(e) ) except TakeError as e: bittensor.__console__.print( ":cross_mark: [red]Failed[/red]: error:{}".format(e) ) bittensor.logging.warning( - prefix="Set weights", sufix="Failed: " + str(e) + prefix="Set weights", suffix="Failed: " + str(e) ) return False From e8d036652b93278d1fb13eef3ecab5bfe77ab06a Mon Sep 17 00:00:00 2001 From: Liam Date: Wed, 22 May 2024 12:17:28 +0400 Subject: [PATCH 011/116] ci: e2e tests runs on ubuntu --- .github/workflows/e2e-subtensor-tests.yaml | 85 ++++++++++++++++++++++ 1 file changed, 85 insertions(+) create mode 100644 .github/workflows/e2e-subtensor-tests.yaml diff --git a/.github/workflows/e2e-subtensor-tests.yaml b/.github/workflows/e2e-subtensor-tests.yaml new file mode 100644 index 0000000000..4a55b71c97 --- /dev/null +++ b/.github/workflows/e2e-subtensor-tests.yaml @@ -0,0 +1,85 @@ +name: E2E Subtensor Tests + +concurrency: + group: e2e-subtensor-${{ github.ref }} + cancel-in-progress: true + +on: + ## Run automatically for all PRs against main, regardless of what the changes are + ## to be safe and so we can more easily force re-run the CI when github is being + ## weird by using a blank commit + push: + branches: [main, development, staging] + + ## + # Run automatically for PRs against default/main branch if Rust files change + pull_request: + branches: [main, development, staging] + + ## Allow running workflow manually from the Actions tab + workflow_dispatch: + inputs: + verbose: + description: "Output more information when 
triggered manually" + required: false + default: "" + +env: + CARGO_TERM_COLOR: always + VERBOSE: ${{ github.events.input.verbose }} + +jobs: + run: + runs-on: ubuntu-22.04 + strategy: + matrix: + rust-branch: + - nightly-2024-03-05 + rust-target: + - x86_64-unknown-linux-gnu + # - x86_64-apple-darwin + os: + - ubuntu-latest + # - macos-latest + include: + - os: ubuntu-latest + # - os: macos-latest + env: + RELEASE_NAME: development + RUSTV: ${{ matrix.rust-branch }} + RUST_BACKTRACE: full + RUST_BIN_DIR: target/${{ matrix.rust-target }} + TARGET: ${{ matrix.rust-target }} + steps: + - name: Check-out repository under $GITHUB_WORKSPACE + uses: actions/checkout@v2 + + - name: Install dependencies + run: | + sudo apt-get update && + sudo apt-get install -y clang curl libssl-dev llvm libudev-dev protobuf-compiler + + - name: Install Rust ${{ matrix.rust-branch }} + uses: actions-rs/toolchain@v1.0.6 + with: + toolchain: ${{ matrix.rust-branch }} + components: rustfmt + profile: minimal + + - name: Add wasm32-unknown-unknown target + run: | + rustup target add wasm32-unknown-unknown --toolchain stable-x86_64-unknown-linux-gnu + rustup component add rust-src --toolchain stable-x86_64-unknown-linux-gnu + + - name: Clone subtensor repo + run: git clone https://github.com/opentensor/subtensor.git + + - name: Setup subtensor repo + working-directory: ${{ github.workspace }}/subtensor + run: git checkout development + + - name: Run tests + run: | + python3 -m pip install -e . + python3 -m pip install torch + LOCALNET_SH_PATH="./subtensor/scripts/localnet.sh" pytest tests/e2e_tests/ -s From cb4e3a6b1f90724f70cb256be9a0160d5214491f Mon Sep 17 00:00:00 2001 From: Maciej Urbanski Date: Wed, 22 May 2024 12:33:54 +0200 Subject: [PATCH 012/116] revert to Optional[] syntax as `| None` is not supported on Python 3.9 --- tests/unit_tests/test_synapse.py | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/tests/unit_tests/test_synapse.py b/tests/unit_tests/test_synapse.py index ee00da2261..6be99520c1 100644 --- a/tests/unit_tests/test_synapse.py +++ b/tests/unit_tests/test_synapse.py @@ -17,7 +17,7 @@ import json import base64 import typing -from typing import List, Optional +from typing import Optional import pytest import bittensor @@ -130,13 +130,15 @@ def test_custom_synapse(): class Test(bittensor.Synapse): a: int # Carried through because required. 
b: int = None # Not carried through headers - c: int | None # Required, carried through headers, cannot be None - d: list[int] | None # Required, carried though headers, cannot be None + c: Optional[int] # Required, carried through headers, cannot be None + d: Optional[list[int]] # Required, carried though headers, cannot be None e: list[int] # Carried through headers - f: int | None = None # Not Required, Not carried through headers, can be None - g: list[ + f: Optional[ int - ] | None = None # Not Required, Not carried though headers, can be None + ] = None # Not Required, Not carried through headers, can be None + g: Optional[ + list[int] + ] = None # Not Required, Not carried though headers, can be None # Create an instance of the custom Synapse subclass synapse = Test( @@ -227,16 +229,16 @@ class LegacyHashedSynapse(bittensor.Synapse): a: int b: int - c: int | None = None - d: list[str] | None = None - required_hash_fields: list[str] | None = ["b", "a", "d"] + c: Optional[int] = None + d: Optional[list[str]] = None + required_hash_fields: Optional[list[str]] = ["b", "a", "d"] class HashedSynapse(bittensor.Synapse): a: int b: int - c: int | None = None - d: list[str] | None = None + c: Optional[int] = None + d: Optional[list[str]] = None required_hash_fields: typing.ClassVar[tuple[str, ...]] = ("a", "b", "d") From bb758b3dd4f2c94ad6094072d1359e7439fadb43 Mon Sep 17 00:00:00 2001 From: Maciej Urbanski Date: Wed, 22 May 2024 13:59:02 +0200 Subject: [PATCH 013/116] fix PoW and other functions not working with USE_TORCH=0 despite torch being available --- bittensor/extrinsics/registration.py | 14 ++- bittensor/utils/registration.py | 99 +++++++++++-------- bittensor/utils/weight_utils.py | 26 ++--- example.env | 11 +-- tests/integration_tests/test_cli.py | 2 - .../test_subtensor_integration.py | 8 -- tests/unit_tests/conftest.py | 6 ++ .../extrinsics/test_registration.py | 5 - tests/unit_tests/extrinsics/test_root.py | 5 - tests/unit_tests/utils/test_registration.py | 45 +++++++++ tests/unit_tests/utils/test_weight_utils.py | 60 +++++++++++ 11 files changed, 192 insertions(+), 89 deletions(-) create mode 100644 tests/unit_tests/conftest.py create mode 100644 tests/unit_tests/utils/test_registration.py diff --git a/bittensor/extrinsics/registration.py b/bittensor/extrinsics/registration.py index 8be4963180..879214ad92 100644 --- a/bittensor/extrinsics/registration.py +++ b/bittensor/extrinsics/registration.py @@ -20,7 +20,12 @@ import time from rich.prompt import Confirm from typing import List, Union, Optional, Tuple -from bittensor.utils.registration import POWSolution, create_pow, torch, use_torch +from bittensor.utils.registration import ( + POWSolution, + create_pow, + torch, + log_no_torch_error, +) def register_extrinsic( @@ -100,7 +105,8 @@ def register_extrinsic( ): return False - if not use_torch(): + if not torch: + log_no_torch_error() return False # Attempt rolling registration. 
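The `if not torch` gate above replaces the old `use_torch()` check: `torch` imported from `bittensor.utils.registration` is a lazy proxy (defined further down in this patch) whose truthiness reflects whether the real torch package can actually be imported, independently of the `USE_TORCH` flag. A rough sketch of the pattern, with a hypothetical caller name:

    from bittensor.utils.registration import torch, log_no_torch_error

    def pow_entry_point() -> bool:
        # Hypothetical wrapper mirroring the guard used in register_extrinsic above.
        if not torch:              # falsy only when torch is genuinely unavailable
            log_no_torch_error()   # logs the install hint instead of failing later
            return False
        # ...safe to build torch tensors for the PoW solver from here on...
        return True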
@@ -380,8 +386,8 @@ def run_faucet_extrinsic( ): return False, "" - if not use_torch(): - torch.error() + if not torch: + log_no_torch_error() return False, "Requires torch" # Unlock coldkey diff --git a/bittensor/utils/registration.py b/bittensor/utils/registration.py index ff3816ddbb..43cda9aceb 100644 --- a/bittensor/utils/registration.py +++ b/bittensor/utils/registration.py @@ -1,16 +1,20 @@ import binascii +import functools import hashlib import math import multiprocessing import os import random import time +import typing from dataclasses import dataclass from datetime import timedelta from queue import Empty, Full from typing import Any, Callable, Dict, List, Optional, Tuple, Union import backoff +import numpy + import bittensor from Crypto.Hash import keccak from rich import console as rich_console @@ -19,62 +23,77 @@ from .formatting import get_human_readable, millify from ._register_cuda import solve_cuda -try: - import torch -except ImportError: - torch = None - def use_torch() -> bool: + """Force the use of torch over numpy for certain operations.""" return True if os.getenv("USE_TORCH") == "1" else False -class Torch: - def __init__(self): - self._transformed = False +def legacy_torch_api_compat(func): + """Decorator to convert numpy arrays to torch tensors before passing them to the function. + Args: + func (function): + Function to be decorated. + Returns: + decorated (function): + Decorated function. + """ - @staticmethod - def _error(): - bittensor.logging.warning( - "This command requires torch. You can install torch for bittensor" - ' with `pip install bittensor[torch]` or `pip install ".[torch]"`' - " if installing from source, and then run the command with USE_TORCH=1 {command}" - ) + @functools.wraps(func) + def decorated(*args, **kwargs): + if use_torch(): + # if argument is a Torch tensor, convert it to numpy + args = [ + arg.cpu().numpy() if isinstance(arg, torch.Tensor) else arg + for arg in args + ] + kwargs = { + key: value.cpu().numpy() if isinstance(value, torch.Tensor) else value + for key, value in kwargs.items() + } + ret = func(*args, **kwargs) + if use_torch(): + # if return value is a numpy array, convert it to Torch tensor + if isinstance(ret, numpy.ndarray): + ret = torch.from_numpy(ret) + return ret + + return decorated + + +@functools.cache +def _get_real_torch(): + try: + import torch as _real_torch + except ImportError: + _real_torch = None + return _real_torch - def error(self): - self._error() - def _transform(self): - try: - import torch as real_torch +def log_no_torch_error(): + bittensor.btlogging.error( + "This command requires torch. 
You can install torch for bittensor" + ' with `pip install bittensor[torch]` or `pip install ".[torch]"`' + " if installing from source, and then run the command with USE_TORCH=1 {command}" + ) - self.__dict__.update(real_torch.__dict__) - self._transformed = True - except ImportError: - self._error() +class LazyLoadedTorch: def __bool__(self): - return False + return bool(_get_real_torch()) def __getattr__(self, name): - if not self._transformed and use_torch(): - self._transform() - if self._transformed: - return getattr(self, name) + if real_torch := _get_real_torch(): + return getattr(real_torch, name) else: - self._error() - - def __call__(self, *args, **kwargs): - if not self._transformed and use_torch(): - self._transform() - if self._transformed: - return self(*args, **kwargs) - else: - self._error() + log_no_torch_error() + raise ImportError("torch not installed") -if not torch or not use_torch(): - torch = Torch() +if typing.TYPE_CHECKING: + import torch +else: + torch = LazyLoadedTorch() class CUDAException(Exception): diff --git a/bittensor/utils/weight_utils.py b/bittensor/utils/weight_utils.py index 109951e14b..9bd8606c9d 100644 --- a/bittensor/utils/weight_utils.py +++ b/bittensor/utils/weight_utils.py @@ -22,12 +22,13 @@ import bittensor from numpy.typing import NDArray from typing import Tuple, List, Union -from bittensor.utils.registration import torch, use_torch +from bittensor.utils.registration import torch, use_torch, legacy_torch_api_compat U32_MAX = 4294967295 U16_MAX = 65535 +@legacy_torch_api_compat def normalize_max_weight( x: Union[NDArray[np.float32], "torch.FloatTensor"], limit: float = 0.1 ) -> Union[NDArray[np.float32], "torch.FloatTensor"]: @@ -43,14 +44,8 @@ def normalize_max_weight( """ epsilon = 1e-7 # For numerical stability after normalization - weights = x.clone() if use_torch() else x.copy() - if use_torch(): - values, _ = torch.sort(weights) - else: - values = np.sort(weights) - - if use_torch() and x.sum() == 0 or len(x) * limit <= 1: - return torch.ones_like(x) / x.size(0) + weights = x.copy() + values = np.sort(weights) if x.sum() == 0 or x.shape[0] * limit <= 1: return np.ones_like(x) / x.shape[0] @@ -61,18 +56,11 @@ def normalize_max_weight( return weights / weights.sum() # Find the cumlative sum and sorted tensor - cumsum = ( - torch.cumsum(estimation, 0) if use_torch() else np.cumsum(estimation, 0) - ) + cumsum = np.cumsum(estimation, 0) # Determine the index of cutoff - estimation_sum_data = [ - (len(values) - i - 1) * estimation[i] for i in range(len(values)) - ] - estimation_sum = ( - torch.tensor(estimation_sum_data) - if use_torch() - else np.array(estimation_sum_data) + estimation_sum = np.array( + [(len(values) - i - 1) * estimation[i] for i in range(len(values))] ) n_values = (estimation / (estimation_sum + cumsum + epsilon) < limit).sum() diff --git a/example.env b/example.env index de5fb400ed..35d405fb58 100644 --- a/example.env +++ b/example.env @@ -1,6 +1,5 @@ -# To use Torch functionality in bittensor, you must set the USE_TORCH flag to 1: -USE_TORCH=1 - -# If set to 0 (or anything else), you will use the numpy functions. -# This is generally what you want unless you have a specific reason for using torch -# such as POW registration or legacy interoperability. \ No newline at end of file +# To use legacy Torch-based of bittensor, you must set USE_TORCH=1 +USE_TORCH=0 +# If set to 0 (or anything else than 1), it will use current, numpy-based, bittensor interface. 
+# This is generally what you want unless you want legacy interoperability. +# Please note that the legacy interface is deprecated, and is not tested nearly as much. diff --git a/tests/integration_tests/test_cli.py b/tests/integration_tests/test_cli.py index a449604a80..c20c905549 100644 --- a/tests/integration_tests/test_cli.py +++ b/tests/integration_tests/test_cli.py @@ -2090,7 +2090,6 @@ def test_register(self, _): def test_pow_register(self, _): # Not the best way to do this, but I need to finish these tests, and unittest doesn't make this # as simple as pytest - os.environ["USE_TORCH"] = "1" config = self.config config.command = "subnets" config.subcommand = "pow_register" @@ -2114,7 +2113,6 @@ class MockException(Exception): mock_create_wallet.assert_called_once() self.assertEqual(mock_is_stale.call_count, 1) - del os.environ["USE_TORCH"] def test_stake(self, _): amount_to_stake: Balance = Balance.from_tao(0.5) diff --git a/tests/integration_tests/test_subtensor_integration.py b/tests/integration_tests/test_subtensor_integration.py index 5c2ff0cf34..845a73ee7d 100644 --- a/tests/integration_tests/test_subtensor_integration.py +++ b/tests/integration_tests/test_subtensor_integration.py @@ -423,7 +423,6 @@ def test_is_hotkey_registered_not_registered(self): self.assertFalse(registered, msg="Hotkey should not be registered") def test_registration_multiprocessed_already_registered(self): - os.environ["USE_TORCH"] = "1" workblocks_before_is_registered = random.randint(5, 10) # return False each work block but return True after a random number of blocks is_registered_return_values = ( @@ -477,10 +476,8 @@ def test_registration_multiprocessed_already_registered(self): self.subtensor.is_hotkey_registered.call_count == workblocks_before_is_registered + 2 ) - del os.environ["USE_TORCH"] def test_registration_partly_failed(self): - os.environ["USE_TORCH"] = "1" do_pow_register_mock = MagicMock( side_effect=[(False, "Failed"), (False, "Failed"), (True, None)] ) @@ -514,10 +511,8 @@ def is_registered_side_effect(*args, **kwargs): ), msg="Registration should succeed", ) - del os.environ["USE_TORCH"] def test_registration_failed(self): - os.environ["USE_TORCH"] = "1" is_registered_return_values = [False for _ in range(100)] current_block = [i for i in range(0, 100)] mock_neuron = MagicMock() @@ -551,11 +546,9 @@ def test_registration_failed(self): msg="Registration should fail", ) self.assertEqual(mock_create_pow.call_count, 3) - del os.environ["USE_TORCH"] def test_registration_stale_then_continue(self): # verify that after a stale solution, the solve will continue without exiting - os.environ["USE_TORCH"] = "1" class ExitEarly(Exception): pass @@ -596,7 +589,6 @@ class ExitEarly(Exception): 1, msg="only tries to submit once, then exits", ) - del os.environ["USE_TORCH"] def test_defaults_to_finney(self): sub = bittensor.subtensor() diff --git a/tests/unit_tests/conftest.py b/tests/unit_tests/conftest.py new file mode 100644 index 0000000000..b22bbecf39 --- /dev/null +++ b/tests/unit_tests/conftest.py @@ -0,0 +1,6 @@ +import pytest + + +@pytest.fixture +def force_legacy_torch_compat_api(monkeypatch): + monkeypatch.setenv("USE_TORCH", "1") diff --git a/tests/unit_tests/extrinsics/test_registration.py b/tests/unit_tests/extrinsics/test_registration.py index 861ce6b462..bad8552b17 100644 --- a/tests/unit_tests/extrinsics/test_registration.py +++ b/tests/unit_tests/extrinsics/test_registration.py @@ -50,11 +50,6 @@ def mock_new_wallet(): return mock -@pytest.fixture(autouse=True) -def 
set_use_torch_env(monkeypatch): - monkeypatch.setenv("USE_TORCH", "1") - - @pytest.mark.parametrize( "wait_for_inclusion,wait_for_finalization,prompt,cuda,dev_id,tpb,num_processes,update_interval,log_verbose,expected", [ diff --git a/tests/unit_tests/extrinsics/test_root.py b/tests/unit_tests/extrinsics/test_root.py index 131ca2303d..4806a022a8 100644 --- a/tests/unit_tests/extrinsics/test_root.py +++ b/tests/unit_tests/extrinsics/test_root.py @@ -21,11 +21,6 @@ def mock_wallet(): return mock -@pytest.fixture(autouse=True) -def set_use_torch_env(monkeypatch): - monkeypatch.setenv("USE_TORCH", "1") - - @pytest.mark.parametrize( "wait_for_inclusion, wait_for_finalization, hotkey_registered, registration_success, prompt, user_response, expected_result", [ diff --git a/tests/unit_tests/utils/test_registration.py b/tests/unit_tests/utils/test_registration.py new file mode 100644 index 0000000000..a6861783a4 --- /dev/null +++ b/tests/unit_tests/utils/test_registration.py @@ -0,0 +1,45 @@ +import pytest + +from bittensor.utils.registration import LazyLoadedTorch + + +class MockBittensorLogging: + def __init__(self): + self.messages = [] + + def error(self, message): + self.messages.append(message) + + +@pytest.fixture +def mock_bittensor_logging(monkeypatch): + mock_logger = MockBittensorLogging() + monkeypatch.setattr("bittensor.btlogging", mock_logger) + return mock_logger + + +def test_lazy_loaded_torch__torch_installed(monkeypatch, mock_bittensor_logging): + import torch + + lazy_torch = LazyLoadedTorch() + + assert bool(torch) is True + + assert lazy_torch.nn is torch.nn + with pytest.raises(AttributeError): + lazy_torch.no_such_thing + + +def test_lazy_loaded_torch__no_torch(monkeypatch, mock_bittensor_logging): + monkeypatch.setattr("bittensor.utils.registration._get_real_torch", lambda: None) + + torch = LazyLoadedTorch() + + assert not torch + + with pytest.raises(ImportError): + torch.some_attribute + + # Check if the error message is logged correctly + assert len(mock_bittensor_logging.messages) == 1 + assert "This command requires torch." 
in mock_bittensor_logging.messages[0] diff --git a/tests/unit_tests/utils/test_weight_utils.py b/tests/unit_tests/utils/test_weight_utils.py index f315edcdce..178ecc6415 100644 --- a/tests/unit_tests/utils/test_weight_utils.py +++ b/tests/unit_tests/utils/test_weight_utils.py @@ -21,6 +21,8 @@ import bittensor.utils.weight_utils as weight_utils import pytest +from bittensor.utils import torch + def test_convert_weight_and_uids(): uids = np.arange(10) @@ -110,6 +112,64 @@ def test_normalize_with_max_weight(): assert np.abs(y - z).sum() < epsilon +def test_normalize_with_max_weight__legacy_torch_api_compat( + force_legacy_torch_compat_api, +): + weights = torch.rand(1000) + wn = weight_utils.normalize_max_weight(weights, limit=0.01) + assert wn.max() <= 0.01 + + weights = torch.zeros(1000) + wn = weight_utils.normalize_max_weight(weights, limit=0.01) + assert wn.max() <= 0.01 + + weights = torch.rand(1000) + wn = weight_utils.normalize_max_weight(weights, limit=0.02) + assert wn.max() <= 0.02 + + weights = torch.zeros(1000) + wn = weight_utils.normalize_max_weight(weights, limit=0.02) + assert wn.max() <= 0.02 + + weights = torch.rand(1000) + wn = weight_utils.normalize_max_weight(weights, limit=0.03) + assert wn.max() <= 0.03 + + weights = torch.zeros(1000) + wn = weight_utils.normalize_max_weight(weights, limit=0.03) + assert wn.max() <= 0.03 + + # Check for Limit + limit = 0.001 + weights = torch.rand(2000) + w = weights / weights.sum() + wn = weight_utils.normalize_max_weight(weights, limit=limit) + assert (w.max() >= limit and (limit - wn.max()).abs() < 0.001) or ( + w.max() < limit and wn.max() < limit + ) + + # Check for Zeros + limit = 0.01 + weights = torch.zeros(2000) + wn = weight_utils.normalize_max_weight(weights, limit=limit) + assert wn.max() == 1 / 2000 + + # Check for Ordering after normalization + weights = torch.rand(100) + wn = weight_utils.normalize_max_weight(weights, limit=1) + assert torch.equal(wn, weights / weights.sum()) + + # Check for eplison changes + eplison = 0.01 + weights, _ = torch.sort(torch.rand(100)) + x = weights / weights.sum() + limit = x[-10] + change = eplison * limit + y = weight_utils.normalize_max_weight(x, limit=limit - change) + z = weight_utils.normalize_max_weight(x, limit=limit + change) + assert (y - z).abs().sum() < eplison + + @pytest.mark.parametrize( "test_id, n, uids, weights, expected", [ From 730d24f53a1de8076ece139c5be47f1536e9fa67 Mon Sep 17 00:00:00 2001 From: Maciej Urbanski Date: Wed, 22 May 2024 15:57:43 +0200 Subject: [PATCH 014/116] make legacy_torch_api_compat docstring more explicit --- bittensor/utils/registration.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/bittensor/utils/registration.py b/bittensor/utils/registration.py index 43cda9aceb..6c504b05f1 100644 --- a/bittensor/utils/registration.py +++ b/bittensor/utils/registration.py @@ -30,10 +30,12 @@ def use_torch() -> bool: def legacy_torch_api_compat(func): - """Decorator to convert numpy arrays to torch tensors before passing them to the function. + """ + Convert function operating on numpy Input&Output to legacy torch Input&Output API if `use_torch()` is True. + Args: func (function): - Function to be decorated. + Function with numpy Input/Output to be decorated. Returns: decorated (function): Decorated function. 
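With the USE_TORCH changes above, a numpy-based helper such as `normalize_max_weight` can still serve legacy torch callers through `legacy_torch_api_compat`. A rough usage sketch, assuming torch is installed and mirroring the `test_normalize_with_max_weight__legacy_torch_api_compat` test added above:

    import os
    os.environ["USE_TORCH"] = "1"  # opt in to the legacy torch input/output behaviour

    import torch
    from bittensor.utils import weight_utils

    weights = torch.rand(1000)                                      # torch input from a legacy caller
    norm = weight_utils.normalize_max_weight(weights, limit=0.02)   # body runs on numpy internally
    assert isinstance(norm, torch.Tensor)                           # decorator converts the result back
    assert norm.max() <= 0.02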
From 542eba8763a66e9c68457b023481f639692d64d1 Mon Sep 17 00:00:00 2001 From: Maciej Urbanski Date: Wed, 22 May 2024 15:58:41 +0200 Subject: [PATCH 015/116] fix test_normalize_with_max_weight__legacy_torch_api_compat flakeiness caused by rounding errors --- tests/unit_tests/utils/test_weight_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit_tests/utils/test_weight_utils.py b/tests/unit_tests/utils/test_weight_utils.py index 178ecc6415..3ffc85ede3 100644 --- a/tests/unit_tests/utils/test_weight_utils.py +++ b/tests/unit_tests/utils/test_weight_utils.py @@ -157,7 +157,7 @@ def test_normalize_with_max_weight__legacy_torch_api_compat( # Check for Ordering after normalization weights = torch.rand(100) wn = weight_utils.normalize_max_weight(weights, limit=1) - assert torch.equal(wn, weights / weights.sum()) + assert torch.isclose(wn, weights / weights.sum(), atol=1e-08, rtol=0).all() # Check for eplison changes eplison = 0.01 From 67df81aafedc309075039464bfd219fe3ed36338 Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Wed, 22 May 2024 17:32:47 +0200 Subject: [PATCH 016/116] Further implementation of legacy_torch_api_compat. --- bittensor/commands/root.py | 32 ++----- bittensor/extrinsics/root.py | 29 ++----- bittensor/subtensor.py | 2 + tests/unit_tests/extrinsics/test_root.py | 94 +++++++++++++++++++++ tests/unit_tests/utils/test_weight_utils.py | 8 +- 5 files changed, 114 insertions(+), 51 deletions(-) diff --git a/bittensor/commands/root.py b/bittensor/commands/root.py index 75c6cb15f6..97f10231e8 100644 --- a/bittensor/commands/root.py +++ b/bittensor/commands/root.py @@ -24,7 +24,6 @@ from rich.prompt import Prompt from rich.table import Table from .utils import get_delegates_details, DelegatesDetails -from bittensor.utils.registration import torch, use_torch from . 
import defaults @@ -283,7 +282,7 @@ def run(cli: "bittensor.cli"): def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): r"""Set weights for root network.""" wallet = bittensor.wallet(config=cli.config) - subnets: List[bittensor.SubnetInfo] = subtensor.get_all_subnets_info() + # subnets: List[bittensor.SubnetInfo] = subtensor.get_all_subnets_info() root = subtensor.metagraph(0, lite=False) try: @@ -301,11 +300,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): f"Boosting weight for netuid {cli.config.netuid} from {prev_weight} -> {new_weight}" ) my_weights[cli.config.netuid] = new_weight - all_netuids = ( - torch.tensor(list(range(len(my_weights)))) - if use_torch() - else np.arange(len(my_weights)) - ) + all_netuids = np.arange(len(my_weights)) bittensor.__console__.print("Setting root weights...") subtensor.root_set_weights( @@ -405,7 +400,7 @@ def run(cli: "bittensor.cli"): @staticmethod def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): wallet = bittensor.wallet(config=cli.config) - subnets: List[bittensor.SubnetInfo] = subtensor.get_all_subnets_info() + # subnets: List[bittensor.SubnetInfo] = subtensor.get_all_subnets_info() bittensor.__console__.print( "Slashing weight for subnet: {} by amount: {}".format( @@ -423,11 +418,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): my_weights = root.weights[my_uid] my_weights[cli.config.netuid] -= cli.config.amount my_weights[my_weights < 0] = 0 # Ensure weights don't go negative - all_netuids = ( - torch.tensor(list(range(len(my_weights)))) - if use_torch() - else np.arange(len(my_weights)) - ) + all_netuids = np.arange(len(my_weights)) subtensor.root_set_weights( wallet=wallet, @@ -529,23 +520,12 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): # Parse from string matched_netuids = list(map(int, re.split(r"[ ,]+", cli.config.netuids))) - netuids = ( - torch.tensor(matched_netuids, dtype=torch.long) - if use_torch() - else np.array(matched_netuids, dtype=np.int64) - ) + netuids = np.array(matched_netuids, dtype=np.int64) matched_weights = [ float(weight) for weight in re.split(r"[ ,]+", cli.config.weights) ] - weights = ( - torch.tensor(matched_weights, dtype=torch.float32) - if use_torch() - else np.array( - matched_weights, - dtype=np.float32, - ) - ) + weights = np.array(matched_weights, dtype=np.float32) # Run the set weights operation. subtensor.root_set_weights( diff --git a/bittensor/extrinsics/root.py b/bittensor/extrinsics/root.py index 0254ffa523..2cb11bbd69 100644 --- a/bittensor/extrinsics/root.py +++ b/bittensor/extrinsics/root.py @@ -26,7 +26,7 @@ from typing import Union, List import bittensor.utils.weight_utils as weight_utils from bittensor.btlogging.defines import BITTENSOR_LOGGER_NAME -from bittensor.utils.registration import torch, use_torch +from bittensor.utils.registration import torch, legacy_torch_api_compat logger = logging.getLogger(BITTENSOR_LOGGER_NAME) @@ -100,6 +100,7 @@ def root_register_extrinsic( ) +@legacy_torch_api_compat def set_root_weights_extrinsic( subtensor: "bittensor.subtensor", wallet: "bittensor.wallet", @@ -133,36 +134,22 @@ def set_root_weights_extrinsic( """ # First convert types. 
if isinstance(netuids, list): - netuids = ( - torch.tensor(netuids, dtype=torch.int64) - if use_torch() - else np.array(netuids, dtype=np.int64) - ) + netuids = np.array(netuids, dtype=np.int64) if isinstance(weights, list): - weights = ( - torch.tensor(weights, dtype=torch.float32) - if use_torch() - else np.array(weights, dtype=np.float32) - ) + weights = np.array(weights, dtype=np.float32) # Get weight restrictions. min_allowed_weights = subtensor.min_allowed_weights(netuid=0) max_weight_limit = subtensor.max_weight_limit(netuid=0) # Get non zero values. - non_zero_weight_idx = ( - torch.argwhere(weights > 0).squeeze(dim=1) - if use_torch() - else np.argwhere(weights > 0).squeeze(axis=1) - ) + non_zero_weight_idx = np.argwhere(weights > 0).squeeze(axis=1) + non_zero_weight_uids = netuids[non_zero_weight_idx] non_zero_weights = weights[non_zero_weight_idx] - non_zero_weights_size = ( - non_zero_weights.numel() if use_torch() else non_zero_weights.size - ) - if non_zero_weights_size < min_allowed_weights: + if non_zero_weights.size < min_allowed_weights: raise ValueError( "The minimum number of weights required to set weights is {}, got {}".format( - min_allowed_weights, non_zero_weights_size + min_allowed_weights, non_zero_weights.size ) ) diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 409549117c..789f23f62e 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -101,6 +101,7 @@ from .utils.balance import Balance from .utils.registration import POWSolution from .utils.subtensor import get_subtensor_errors +from .utils.registration import legacy_torch_api_compat KEY_NONCE: Dict[str, int] = {} @@ -2347,6 +2348,7 @@ def make_substrate_call_with_retry(): return make_substrate_call_with_retry() + @legacy_torch_api_compat def root_set_weights( self, wallet: "bittensor.wallet", diff --git a/tests/unit_tests/extrinsics/test_root.py b/tests/unit_tests/extrinsics/test_root.py index 4806a022a8..84132bb60d 100644 --- a/tests/unit_tests/extrinsics/test_root.py +++ b/tests/unit_tests/extrinsics/test_root.py @@ -212,3 +212,97 @@ def test_set_root_weights_extrinsic( mock_confirm.assert_called_once() else: mock_confirm.assert_not_called() + + +@pytest.mark.parametrize( + "wait_for_inclusion, wait_for_finalization, netuids, weights, prompt, user_response, expected_success", + [ + (True, False, [1, 2], [0.5, 0.5], True, True, True), # Success - weights set + ( + False, + False, + [1, 2], + [0.5, 0.5], + False, + None, + True, + ), # Success - weights set no wait + ( + True, + False, + [1, 2], + [2000, 20], + True, + True, + True, + ), # Success - large value to be normalized + ( + True, + False, + [1, 2], + [2000, 0], + True, + True, + True, + ), # Success - single large value + ( + True, + False, + [1, 2], + [0.5, 0.5], + True, + False, + False, + ), # Failure - prompt declined + ( + True, + False, + [1, 2], + [0.5, 0.5], + False, + None, + False, + ), # Failure - setting weights failed + ( + True, + False, + [], + [], + None, + False, + False, + ), # Exception catched - ValueError 'min() arg is an empty sequence' + ], + ids=[ + "success-weights-set", + "success-not-wait", + "success-large-value", + "success-single-value", + "failure-user-declines", + "failure-setting-weights", + "failure-value-error-exception", + ], +) +def test_set_root_weights_extrinsic_torch( + mock_subtensor, + mock_wallet, + wait_for_inclusion, + wait_for_finalization, + netuids, + weights, + prompt, + user_response, + expected_success, + force_legacy_torch_compat_api, +): + 
test_set_root_weights_extrinsic( + mock_subtensor, + mock_wallet, + wait_for_inclusion, + wait_for_finalization, + netuids, + weights, + prompt, + user_response, + expected_success, + ) diff --git a/tests/unit_tests/utils/test_weight_utils.py b/tests/unit_tests/utils/test_weight_utils.py index 3ffc85ede3..0a42a9c9b3 100644 --- a/tests/unit_tests/utils/test_weight_utils.py +++ b/tests/unit_tests/utils/test_weight_utils.py @@ -159,15 +159,15 @@ def test_normalize_with_max_weight__legacy_torch_api_compat( wn = weight_utils.normalize_max_weight(weights, limit=1) assert torch.isclose(wn, weights / weights.sum(), atol=1e-08, rtol=0).all() - # Check for eplison changes - eplison = 0.01 + # Check for epsilon changes + epsilon = 0.01 weights, _ = torch.sort(torch.rand(100)) x = weights / weights.sum() limit = x[-10] - change = eplison * limit + change = epsilon * limit y = weight_utils.normalize_max_weight(x, limit=limit - change) z = weight_utils.normalize_max_weight(x, limit=limit + change) - assert (y - z).abs().sum() < eplison + assert (y - z).abs().sum() < epsilon @pytest.mark.parametrize( From c822a6e03ca47ec3f1216aec2ea3ef579f1ac66d Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Wed, 22 May 2024 18:19:24 +0200 Subject: [PATCH 017/116] Removed unused comments. --- bittensor/commands/root.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/bittensor/commands/root.py b/bittensor/commands/root.py index 97f10231e8..a3658d03ea 100644 --- a/bittensor/commands/root.py +++ b/bittensor/commands/root.py @@ -282,7 +282,6 @@ def run(cli: "bittensor.cli"): def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): r"""Set weights for root network.""" wallet = bittensor.wallet(config=cli.config) - # subnets: List[bittensor.SubnetInfo] = subtensor.get_all_subnets_info() root = subtensor.metagraph(0, lite=False) try: @@ -400,7 +399,6 @@ def run(cli: "bittensor.cli"): @staticmethod def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): wallet = bittensor.wallet(config=cli.config) - # subnets: List[bittensor.SubnetInfo] = subtensor.get_all_subnets_info() bittensor.__console__.print( "Slashing weight for subnet: {} by amount: {}".format( From b60210fa0b210deddb28e25ca1578e782b8e0232 Mon Sep 17 00:00:00 2001 From: opendansor Date: Wed, 22 May 2024 16:58:11 -0700 Subject: [PATCH 018/116] Remove unused parameters and fix comments --- bittensor/subnets.py | 6 +----- bittensor/synapse.py | 4 ++-- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/bittensor/subnets.py b/bittensor/subnets.py index c10d4716a4..836a20dcb7 100644 --- a/bittensor/subnets.py +++ b/bittensor/subnets.py @@ -49,8 +49,6 @@ async def query_api( axons: Union[bt.axon, List[bt.axon]], deserialize: Optional[bool] = False, timeout: Optional[int] = 12, - n: Optional[float] = 0.1, - uid: Optional[int] = None, **kwargs: Optional[Any], ) -> Any: """ @@ -60,15 +58,13 @@ async def query_api( axons (Union[bt.axon, List[bt.axon]]): The list of axon(s) to query. deserialize (bool, optional): Whether to deserialize the responses. Defaults to False. timeout (int, optional): The timeout in seconds for the query. Defaults to 12. - n (float, optional): The fraction of top nodes to consider based on stake. Defaults to 0.1. - uid (int, optional): The specific UID of the API node to query. Defaults to None. **kwargs: Keyword arguments for the prepare_synapse_fn. Returns: Any: The result of the process_responses_fn. 
""" synapse = self.prepare_synapse(**kwargs) - bt.logging.debug(f"Quering valdidator axons with synapse {synapse.name}...") + bt.logging.debug(f"Querying validator axons with synapse {synapse.name}...") responses = await self.dendrite( axons=axons, synapse=synapse, diff --git a/bittensor/synapse.py b/bittensor/synapse.py index c2971202c0..c265dda939 100644 --- a/bittensor/synapse.py +++ b/bittensor/synapse.py @@ -80,7 +80,7 @@ def cast_int(raw: str) -> int: int or None: The converted integer, or ``None`` if the input was ``None``. """ - return int(raw) if raw != None else raw # type: ignore + return int(raw) if raw is not None else raw # type: ignore def cast_float(raw: str) -> float: @@ -96,7 +96,7 @@ def cast_float(raw: str) -> float: float or None: The converted float, or ``None`` if the input was ``None``. """ - return float(raw) if raw != None else raw # type: ignore + return float(raw) if raw is not None else raw # type: ignore class TerminalInfo(BaseModel): From 1f36ad12001ef026b425032678c75d6aa29189aa Mon Sep 17 00:00:00 2001 From: Roman Date: Wed, 22 May 2024 16:07:15 -0700 Subject: [PATCH 019/116] Since the refactoring of this module would turn into a large project, it was decided to divide it into several stages. This is part 2: current part affects everything up to the subtensor().tempo() method. The "Hyper parameter calls" section was refactored by DRY. Part 1 https://github.com/opentensor/bittensor/pull/1911 Part 2 https://github.com/opentensor/bittensor/pull/1913 --- bittensor/subtensor.py | 468 +++++++++++++++++++---------- tests/unit_tests/test_subtensor.py | 140 ++++++++- 2 files changed, 441 insertions(+), 167 deletions(-) diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 9e7f810fe3..737a3b556b 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -111,7 +111,7 @@ original_convert_type_string = RuntimeConfiguration.convert_type_string @functools.lru_cache(maxsize=None) - def convert_type_string(cls, name): + def convert_type_string(_, name): return original_convert_type_string(name) RuntimeConfiguration.convert_type_string = convert_type_string @@ -2557,7 +2557,7 @@ def query_subtensor( """ @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) - def make_substrate_call_with_retry(): + def make_substrate_call_with_retry() -> "ScaleType": return self.substrate.query( module="SubtensorModule", storage_function=name, @@ -2830,6 +2830,30 @@ def _encode_params( # Hyper parameter calls. # ########################## + def _get_hyperparameter( + self, param_name: str, netuid: int, block: Optional[int] = None + ) -> Optional[Any]: + """ + Retrieves a specified hyperparameter for a specific subnet. + + Args: + param_name (str): The name of the hyperparameter to retrieve. + netuid (int): The unique identifier of the subnet. + block (Optional[int]): The blockchain block number for the query. + + Returns: + Optional[Union[int, float]]: The value of the specified hyperparameter if the subnet exists, ``None`` + otherwise. + """ + if not self.subnet_exists(netuid, block): + return None + + result = self.query_subtensor(param_name, block, [netuid]) + if result is None or not hasattr(result, "value"): + return None + + return result.value + def rho(self, netuid: int, block: Optional[int] = None) -> Optional[int]: """ Retrieves the 'Rho' hyperparameter for a specified subnet within the Bittensor network. 
'Rho' represents the @@ -2855,12 +2879,8 @@ def rho(self, netuid: int, block: Optional[int] = None) -> Optional[int]: 'Rho' is essential for understanding the network's economic dynamics, affecting the reward distribution and incentive structures across the network's neurons. """ - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("Rho", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + call = self._get_hyperparameter(param_name="Rho", netuid=netuid, block=block) + return None if call is None else int(call) def kappa(self, netuid: int, block: Optional[int] = None) -> Optional[float]: """ @@ -2885,13 +2905,8 @@ def kappa(self, netuid: int, block: Optional[int] = None) -> Optional[float]: Understanding 'Kappa' is crucial for analyzing stake dynamics and the consensus mechanism within the network, as it plays a significant role in neuron ranking and incentive allocation processes. """ - if not self.subnet_exists(netuid, block): - return None - - _result = self.query_subtensor("Kappa", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return U16_NORMALIZED_FLOAT(_result.value) + call = self._get_hyperparameter(param_name="Kappa", netuid=netuid, block=block) + return None if call is None else U16_NORMALIZED_FLOAT(int(call)) def difficulty(self, netuid: int, block: Optional[int] = None) -> Optional[int]: """ @@ -2910,12 +2925,12 @@ def difficulty(self, netuid: int, block: Optional[int] = None) -> Optional[int]: computational effort required for validating transactions and participating in the network's consensus mechanism. """ - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("Difficulty", block, [netuid]) - if not hasattr(_result, "value") or _result is None: + call = self._get_hyperparameter( + param_name="Difficulty", netuid=netuid, block=block + ) + if call is None: return None - return _result.value + return int(call) def recycle(self, netuid: int, block: Optional[int] = None) -> Optional["Balance"]: """ @@ -2932,12 +2947,8 @@ def recycle(self, netuid: int, block: Optional[int] = None) -> Optional["Balance Understanding the 'Burn' rate is essential for analyzing the network registration usage, particularly how it is correlated with user activity and the overall cost of participation in a given subnet. """ - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("Burn", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return Balance.from_rao(_result.value) + call = self._get_hyperparameter(param_name="Burn", netuid=netuid, block=block) + return None if call is None else Balance.from_rao(int(call)) # Returns network ImmunityPeriod hyper parameter. def immunity_period( @@ -2958,12 +2969,10 @@ def immunity_period( participants have a grace period to establish themselves and contribute to the network without facing immediate punitive actions. 
""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("ImmunityPeriod", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + call = self._get_hyperparameter( + param_name="ImmunityPeriod", netuid=netuid, block=block + ) + return None if call is None else int(call) def validator_batch_size( self, netuid: int, block: Optional[int] = None @@ -2980,12 +2989,10 @@ def validator_batch_size( Optional[int]: The value of the ValidatorBatchSize hyperparameter, or None if the subnetwork does not exist or the parameter is not found. """ - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("ValidatorBatchSize", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + call = self._get_hyperparameter( + param_name="ValidatorBatchSize", netuid=netuid, block=block + ) + return None if call is None else int(call) def validator_prune_len( self, netuid: int, block: Optional[int] = None @@ -3002,12 +3009,10 @@ def validator_prune_len( Optional[int]: The value of the ValidatorPruneLen hyperparameter, or None if the subnetwork does not exist or the parameter is not found. """ - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("ValidatorPruneLen", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + call = self._get_hyperparameter( + param_name="ValidatorPruneLen", netuid=netuid, block=block + ) + return None if call is None else int(call) def validator_logits_divergence( self, netuid: int, block: Optional[int] = None @@ -3024,189 +3029,320 @@ def validator_logits_divergence( Optional[float]: The value of the ValidatorLogitsDivergence hyperparameter, or None if the subnetwork does not exist or the parameter is not found. """ - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("ValidatorLogitsDivergence", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return U16_NORMALIZED_FLOAT(_result.value) + call = self._get_hyperparameter( + param_name="ValidatorLogitsDivergence", netuid=netuid, block=block + ) + return None if call is None else U16_NORMALIZED_FLOAT(int(call)) def validator_sequence_length( self, netuid: int, block: Optional[int] = None ) -> Optional[int]: - """Returns network ValidatorSequenceLength hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("ValidatorSequenceLength", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + """ + Returns network ValidatorSequenceLength hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[int]: The value of the ValidatorSequenceLength hyperparameter, or ``None`` if the subnetwork does + not exist or the parameter is not found. 
+ """ + call = self._get_hyperparameter( + param_name="ValidatorSequenceLength", netuid=netuid, block=block + ) + return None if call is None else int(call) def validator_epochs_per_reset( self, netuid: int, block: Optional[int] = None ) -> Optional[int]: - """Returns network ValidatorEpochsPerReset hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("ValidatorEpochsPerReset", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + """ + Returns network ValidatorEpochsPerReset hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[int]: The value of the ValidatorEpochsPerReset hyperparameter, or ``None`` if the subnetwork does + not exist or the parameter is not found. + """ + call = self._get_hyperparameter( + param_name="ValidatorEpochsPerReset", netuid=netuid, block=block + ) + return None if call is None else int(call) def validator_epoch_length( self, netuid: int, block: Optional[int] = None ) -> Optional[int]: - """Returns network ValidatorEpochLen hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("ValidatorEpochLen", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None + """ + Returns network ValidatorEpochLen hyperparameter. - return _result.value + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[int]: The value of the ValidatorEpochLen hyperparameter, or ``None`` if the subnetwork does not + exist or the parameter is not found. + """ + call = self._get_hyperparameter( + param_name="ValidatorEpochLen", netuid=netuid, block=block + ) + return None if call is None else int(call) def validator_exclude_quantile( self, netuid: int, block: Optional[int] = None ) -> Optional[float]: - """Returns network ValidatorEpochLen hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("ValidatorExcludeQuantile", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return U16_NORMALIZED_FLOAT(_result.value) + """ + Returns network ValidatorExcludeQuantile hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[float]: The value of the ValidatorExcludeQuantile hyperparameter, or ``None`` if the subnetwork + does not exist or the parameter is not found. 
+ """ + call = self._get_hyperparameter( + param_name="ValidatorExcludeQuantile", netuid=netuid, block=block + ) + return None if call is None else U16_NORMALIZED_FLOAT(int(call)) def max_allowed_validators( self, netuid: int, block: Optional[int] = None ) -> Optional[int]: - """Returns network MaxAllowedValidators hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("MaxAllowedValidators", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + """ + Returns network ValidatorExcludeQuantile hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[float]: The value of the ValidatorExcludeQuantile hyperparameter, or ``None`` if the subnetwork + does not exist or the parameter is not found. + """ + call = self._get_hyperparameter( + param_name="MaxAllowedValidators", netuid=netuid, block=block + ) + return None if call is None else int(call) def min_allowed_weights( self, netuid: int, block: Optional[int] = None ) -> Optional[int]: - """Returns network MinAllowedWeights hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("MinAllowedWeights", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + """ + Returns network MinAllowedWeights hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[int]: The value of the MinAllowedWeights hyperparameter, or ``None`` if the subnetwork does not + exist or the parameter is not found. + """ + call = self._get_hyperparameter( + param_name="MinAllowedWeights", block=block, netuid=netuid + ) + return None if call is None else int(call) def max_weight_limit( self, netuid: int, block: Optional[int] = None ) -> Optional[float]: - """Returns network MaxWeightsLimit hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("MaxWeightsLimit", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return U16_NORMALIZED_FLOAT(_result.value) + """ + Returns network MaxWeightsLimit hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[float]: The value of the MaxWeightsLimit hyperparameter, or ``None`` if the subnetwork does not + exist or the parameter is not found. + """ + call = self._get_hyperparameter( + param_name="MaxWeightsLimit", block=block, netuid=netuid + ) + return None if call is None else U16_NORMALIZED_FLOAT(int(call)) def adjustment_alpha( self, netuid: int, block: Optional[int] = None ) -> Optional[float]: - """Returns network AdjustmentAlpha hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("AdjustmentAlpha", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return U64_NORMALIZED_FLOAT(_result.value) + """ + Returns network AdjustmentAlpha hyperparameter. 
+
+        Args:
+            netuid (int): The unique identifier of the subnetwork.
+            block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest
+                block is used. Default is ``None``.
+
+        Returns:
+            Optional[float]: The value of the AdjustmentAlpha hyperparameter, or ``None`` if the subnetwork does not
+                exist or the parameter is not found.
+        """
+        call = self._get_hyperparameter(
+            param_name="AdjustmentAlpha", block=block, netuid=netuid
+        )
+        return None if call is None else U64_NORMALIZED_FLOAT(int(call))

     def bonds_moving_avg(
         self, netuid: int, block: Optional[int] = None
     ) -> Optional[float]:
-        """Returns network BondsMovingAverage hyper parameter"""
-        if not self.subnet_exists(netuid, block):
-            return None
-        _result = self.query_subtensor("BondsMovingAverage", block, [netuid])
-        if not hasattr(_result, "value") or _result is None:
-            return None
-        return U64_NORMALIZED_FLOAT(_result.value)
+        """
+        Returns network BondsMovingAverage hyperparameter.
+
+        Args:
+            netuid (int): The unique identifier of the subnetwork.
+            block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest
+                block is used. Default is ``None``.
+
+        Returns:
+            Optional[float]: The value of the BondsMovingAverage hyperparameter, or ``None`` if the subnetwork does not
+                exist or the parameter is not found.
+        """
+        call = self._get_hyperparameter(
+            param_name="BondsMovingAverage", netuid=netuid, block=block
+        )
+        return None if call is None else U64_NORMALIZED_FLOAT(int(call))

     def scaling_law_power(
         self, netuid: int, block: Optional[int] = None
     ) -> Optional[float]:
         """Returns network ScalingLawPower hyper parameter"""
-        if not self.subnet_exists(netuid, block):
-            return None
-        _result = self.query_subtensor("ScalingLawPower", block, [netuid])
-        if not hasattr(_result, "value") or _result is None:
-            return None
-        return _result.value / 100.0
+        call = self._get_hyperparameter(
+            param_name="ScalingLawPower", netuid=netuid, block=block
+        )
+        return None if call is None else int(call) / 100.0

     def synergy_scaling_law_power(
         self, netuid: int, block: Optional[int] = None
     ) -> Optional[float]:
-        """Returns network SynergyScalingLawPower hyper parameter"""
-        if not self.subnet_exists(netuid, block):
-            return None
-        _result = self.query_subtensor("SynergyScalingLawPower", block, [netuid])
-        if not hasattr(_result, "value") or _result is None:
-            return None
-        return _result.value / 100.0
+        """
+        Returns network SynergyScalingLawPower hyperparameter.
+
+        Args:
+            netuid (int): The unique identifier of the subnetwork.
+            block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest
+                block is used. Default is ``None``.
+
+        Returns:
+            Optional[float]: The value of the SynergyScalingLawPower hyperparameter, or ``None`` if the subnetwork
+                does not exist or the parameter is not found.
+        """
+        call = self._get_hyperparameter(
+            param_name="SynergyScalingLawPower", netuid=netuid, block=block
+        )
+        return None if call is None else int(call) / 100.0

     def subnetwork_n(self, netuid: int, block: Optional[int] = None) -> Optional[int]:
-        """Returns network SubnetworkN hyper parameter"""
-        if not self.subnet_exists(netuid, block):
-            return None
-        _result = self.query_subtensor("SubnetworkN", block, [netuid])
-        if not hasattr(_result, "value") or _result is None:
-            return None
-        return _result.value
+        """
+        Returns network SubnetworkN hyperparameter.
+
+        Args:
+            netuid (int): The unique identifier of the subnetwork.
+ block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[int]: The value of the SubnetworkN hyperparameter, or ``None`` if the subnetwork does not + exist or the parameter is not found. + """ + call = self._get_hyperparameter( + param_name="SubnetworkN", netuid=netuid, block=block + ) + return None if call is None else int(call) def max_n(self, netuid: int, block: Optional[int] = None) -> Optional[int]: - """Returns network MaxAllowedUids hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("MaxAllowedUids", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + """ + Returns network MaxAllowedUids hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[int]: The value of the MaxAllowedUids hyperparameter, or ``None`` if the subnetwork does not + exist or the parameter is not found. + """ + call = self._get_hyperparameter( + param_name="MaxAllowedUids", netuid=netuid, block=block + ) + return None if call is None else int(call) def blocks_since_epoch( self, netuid: int, block: Optional[int] = None ) -> Optional[int]: - """Returns network BlocksSinceLastStep hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("BlocksSinceEpoch", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + """ + Returns network BlocksSinceEpoch hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[int]: The value of the BlocksSinceEpoch hyperparameter, or ``None`` if the subnetwork does not + exist or the parameter is not found. + """ + call = self._get_hyperparameter( + param_name="BlocksSinceEpoch", netuid=netuid, block=block + ) + return None if call is None else int(call) def blocks_since_last_update(self, netuid: int, uid: int) -> Optional[int]: - if not self.subnet_exists(netuid): - return None - _result = self.query_subtensor("LastUpdate", None, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None + """ + Returns the number of blocks since the last update for a specific UID in the subnetwork. - return self.get_current_block() - _result.value[uid] + Args: + netuid (int): The unique identifier of the subnetwork. + uid (int): The unique identifier of the neuron. + + Returns: + Optional[int]: The number of blocks since the last update, or ``None`` if the subnetwork or UID does not + exist. + """ + call = self._get_hyperparameter(param_name="LastUpdate", netuid=netuid) + return None if call is None else self.get_current_block() - int(call[uid]) def weights_rate_limit(self, netuid: int) -> Optional[int]: - if not self.subnet_exists(netuid): - return None - _result = self.query_subtensor("WeightsSetRateLimit", None, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + """ + Returns network WeightsSetRateLimit hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. 
+ + Returns: + Optional[int]: The value of the WeightsSetRateLimit hyperparameter, or ``None`` if the subnetwork does not + exist or the parameter is not found. + """ + call = self._get_hyperparameter(param_name="WeightsSetRateLimit", netuid=netuid) + return None if call is None else int(call) def tempo(self, netuid: int, block: Optional[int] = None) -> Optional[int]: - """Returns network Tempo hyper parameter""" - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("Tempo", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + """ + Returns network Tempo hyperparameter. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[int]: The value of the Tempo hyperparameter, or ``None`` if the subnetwork does not + exist or the parameter is not found. + """ + call = self._get_hyperparameter(param_name="Tempo", netuid=netuid, block=block) + return None if call is None else int(call) ##################### # Account functions # diff --git a/tests/unit_tests/test_subtensor.py b/tests/unit_tests/test_subtensor.py index fd8d0c12ed..861ec2e2fb 100644 --- a/tests/unit_tests/test_subtensor.py +++ b/tests/unit_tests/test_subtensor.py @@ -25,7 +25,7 @@ # Application import bittensor -from bittensor.subtensor import subtensor as Subtensor, _logger +from bittensor.subtensor import subtensor as Subtensor, _logger, Balance from bittensor import subtensor_module @@ -275,6 +275,7 @@ def test_determine_chain_endpoint_and_network( assert result_endpoint == expected_endpoint +# Subtensor().get_error_info_by_index tests @pytest.fixture def substrate(): class MockSubstrate: @@ -311,3 +312,140 @@ def test_get_error_info_by_index_unknown_error(subtensor): mock_logger.assert_called_once_with( f"Subtensor returned an error with an unknown index: {fake_index}" ) + + +# Subtensor()._get_hyperparameter tests +def test_hyperparameter_subnet_does_not_exist(subtensor, mocker): + """Tests when the subnet does not exist.""" + subtensor.subnet_exists = mocker.MagicMock(return_value=False) + assert subtensor._get_hyperparameter("Difficulty", 1, None) is None + subtensor.subnet_exists.assert_called_once_with(1, None) + + +def test_hyperparameter_result_is_none(subtensor, mocker): + """Tests when query_subtensor returns None.""" + subtensor.subnet_exists = mocker.MagicMock(return_value=True) + subtensor.query_subtensor = mocker.MagicMock(return_value=None) + assert subtensor._get_hyperparameter("Difficulty", 1, None) is None + subtensor.subnet_exists.assert_called_once_with(1, None) + subtensor.query_subtensor.assert_called_once_with("Difficulty", None, [1]) + + +def test_hyperparameter_result_has_no_value(subtensor, mocker): + """Test when the result has no 'value' attribute.""" + + subtensor.subnet_exists = mocker.MagicMock(return_value=True) + subtensor.query_subtensor = mocker.MagicMock(return_value=None) + assert subtensor._get_hyperparameter("Difficulty", 1, None) is None + subtensor.subnet_exists.assert_called_once_with(1, None) + subtensor.query_subtensor.assert_called_once_with("Difficulty", None, [1]) + + +def test_hyperparameter_success_int(subtensor, mocker): + """Test when query_subtensor returns an integer value.""" + subtensor.subnet_exists = mocker.MagicMock(return_value=True) + subtensor.query_subtensor = mocker.MagicMock( + 
return_value=mocker.MagicMock(value=100) + ) + assert subtensor._get_hyperparameter("Difficulty", 1, None) == 100 + subtensor.subnet_exists.assert_called_once_with(1, None) + subtensor.query_subtensor.assert_called_once_with("Difficulty", None, [1]) + + +def test_hyperparameter_success_float(subtensor, mocker): + """Test when query_subtensor returns a float value.""" + subtensor.subnet_exists = mocker.MagicMock(return_value=True) + subtensor.query_subtensor = mocker.MagicMock( + return_value=mocker.MagicMock(value=0.5) + ) + assert subtensor._get_hyperparameter("Difficulty", 1, None) == 0.5 + subtensor.subnet_exists.assert_called_once_with(1, None) + subtensor.query_subtensor.assert_called_once_with("Difficulty", None, [1]) + + +# Tests Hyper parameter calls +@pytest.mark.parametrize( + "method, param_name, value, expected_result_type", + [ + ("rho", "Rho", 1, int), + ("kappa", "Kappa", 1.0, float), + ("difficulty", "Difficulty", 1, int), + ("recycle", "Burn", 1, Balance), + ("immunity_period", "ImmunityPeriod", 1, int), + ("validator_batch_size", "ValidatorBatchSize", 1, int), + ("validator_prune_len", "ValidatorPruneLen", 1, int), + ("validator_logits_divergence", "ValidatorLogitsDivergence", 1.0, float), + ("validator_sequence_length", "ValidatorSequenceLength", 1, int), + ("validator_epochs_per_reset", "ValidatorEpochsPerReset", 1, int), + ("validator_epoch_length", "ValidatorEpochLen", 1, int), + ("validator_exclude_quantile", "ValidatorExcludeQuantile", 1.0, float), + ("max_allowed_validators", "MaxAllowedValidators", 1, int), + ("min_allowed_weights", "MinAllowedWeights", 1, int), + ("max_weight_limit", "MaxWeightsLimit", 1, float), + ("adjustment_alpha", "AdjustmentAlpha", 1, float), + ("bonds_moving_avg", "BondsMovingAverage", 1, float), + ("scaling_law_power", "ScalingLawPower", 1, float), + ("synergy_scaling_law_power", "SynergyScalingLawPower", 1, float), + ("subnetwork_n", "SubnetworkN", 1, int), + ("max_n", "MaxAllowedUids", 1, int), + ("blocks_since_epoch", "BlocksSinceEpoch", 1, int), + ("tempo", "Tempo", 1, int), + ], +) +def test_hyper_parameter_success_calls( + subtensor, mocker, method, param_name, value, expected_result_type +): + """ + Tests various hyperparameter methods to ensure they correctly fetch their respective hyperparameters and return the + expected values. 
+    """
+    # Prep
+    subtensor._get_hyperparameter = mocker.MagicMock(return_value=value)
+
+    # Call
+    subtensor_method = getattr(subtensor, method)
+    result = subtensor_method(netuid=7, block=707)
+
+    # Assertions
+    subtensor._get_hyperparameter.assert_called_once_with(
+        block=707, netuid=7, param_name=param_name
+    )
+    # if we change the method's logic in the future we have to make sure that the returned type is correct
+    assert isinstance(result, expected_result_type)
+
+
+def test_blocks_since_last_update_success_calls(subtensor, mocker):
+    """Tests the blocks_since_last_update method to ensure it correctly fetches the LastUpdate hyperparameter."""
+    # Prep
+    uid = 7
+    mocked_current_block = 2
+    mocked_result = {uid: 1}
+    subtensor._get_hyperparameter = mocker.MagicMock(return_value=mocked_result)
+    subtensor.get_current_block = mocker.MagicMock(return_value=mocked_current_block)
+
+    # Call
+    result = subtensor.blocks_since_last_update(netuid=7, uid=uid)
+
+    # Assertions
+    subtensor._get_hyperparameter.assert_called_once_with(
+        param_name="LastUpdate", netuid=7
+    )
+    assert result == 1
+    # if we change the method's logic in the future we have to make sure that the returned type is correct
+    assert isinstance(result, int)
+
+
+def test_weights_rate_limit_success_calls(subtensor, mocker):
+    """Tests the weights_rate_limit method to ensure it correctly fetches the WeightsSetRateLimit hyperparameter."""
+    # Prep
+    subtensor._get_hyperparameter = mocker.MagicMock(return_value=5)
+
+    # Call
+    result = subtensor.weights_rate_limit(netuid=7)
+
+    # Assertions
+    subtensor._get_hyperparameter.assert_called_once_with(
+        param_name="WeightsSetRateLimit", netuid=7
+    )
+    # if we change the method's logic in the future we have to make sure that the returned type is correct
+    assert isinstance(result, int)

From 3489ee6f25fb110af2c977cfbc3f5376c682cec0 Mon Sep 17 00:00:00 2001
From: opendansor
Date: Wed, 22 May 2024 17:42:26 -0700
Subject: [PATCH 020/116] Refactor imports for required hash field, and fix
 naming in the comments.

---
 bittensor/synapse.py             | 7 +++----
 tests/unit_tests/test_synapse.py | 6 ++----
 2 files changed, 5 insertions(+), 8 deletions(-)

diff --git a/bittensor/synapse.py b/bittensor/synapse.py
index c265dda939..05edd8bc2e 100644
--- a/bittensor/synapse.py
+++ b/bittensor/synapse.py
@@ -20,7 +20,6 @@
 import base64
 import json
 import sys
-import typing
 import warnings
 
 from pydantic import (
@@ -31,7 +30,7 @@
     model_validator,
 )
 import bittensor
-from typing import Optional, Any, Dict
+from typing import Optional, Any, Dict, ClassVar, Tuple
 
 
 def get_size(obj, seen=None) -> int:
@@ -482,7 +481,7 @@ def set_name_type(cls, values) -> dict:
         repr=False,
     )
 
-    required_hash_fields: typing.ClassVar[typing.Tuple[str, ...]] = ()
+    required_hash_fields: ClassVar[Tuple[str, ...]] = ()
 
     _extract_total_size = field_validator("total_size", mode="before")(cast_int)
 
@@ -676,7 +675,7 @@ def body_hash(self) -> str:
 
         Process:
 
-        1. Iterates over each required field as specified in ``required_fields_hash``.
+        1. Iterates over each required field as specified in ``required_hash_fields``.
         2. Concatenates the string representation of these fields.
         3. Applies SHA3-256 hashing to the concatenated string to produce a unique fingerprint of the data.
 
diff --git a/tests/unit_tests/test_synapse.py b/tests/unit_tests/test_synapse.py
index 6be99520c1..37a0b76ec8 100644
--- a/tests/unit_tests/test_synapse.py
+++ b/tests/unit_tests/test_synapse.py
@@ -16,11 +16,9 @@
 # DEALINGS IN THE SOFTWARE.
import json import base64 -import typing -from typing import Optional - import pytest import bittensor +from typing import Optional, ClassVar def test_parse_headers_to_inputs(): @@ -239,7 +237,7 @@ class HashedSynapse(bittensor.Synapse): b: int c: Optional[int] = None d: Optional[list[str]] = None - required_hash_fields: typing.ClassVar[tuple[str, ...]] = ("a", "b", "d") + required_hash_fields: ClassVar[tuple[str, ...]] = ("a", "b", "d") @pytest.mark.parametrize("synapse_cls", [LegacyHashedSynapse, HashedSynapse]) From afa165a3b892e781ed9c5180363a1c69a50980b5 Mon Sep 17 00:00:00 2001 From: Roman Date: Wed, 22 May 2024 17:59:32 -0700 Subject: [PATCH 021/116] Improve tests logic for Part 3 of subtensor refactoring --- tests/unit_tests/test_subtensor.py | 28 +++++++++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/tests/unit_tests/test_subtensor.py b/tests/unit_tests/test_subtensor.py index 861ec2e2fb..e6386bc288 100644 --- a/tests/unit_tests/test_subtensor.py +++ b/tests/unit_tests/test_subtensor.py @@ -25,7 +25,13 @@ # Application import bittensor -from bittensor.subtensor import subtensor as Subtensor, _logger, Balance +from bittensor.subtensor import ( + subtensor as Subtensor, + _logger, + Balance, + U16_NORMALIZED_FLOAT, + U64_NORMALIZED_FLOAT, +) from bittensor import subtensor_module @@ -402,6 +408,10 @@ def test_hyper_parameter_success_calls( # Prep subtensor._get_hyperparameter = mocker.MagicMock(return_value=value) + spy_u16_normalized_float = mocker.spy(subtensor_module, "U16_NORMALIZED_FLOAT") + spy_u64_normalized_float = mocker.spy(subtensor_module, "U64_NORMALIZED_FLOAT") + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + # Call subtensor_method = getattr(subtensor, method) result = subtensor_method(netuid=7, block=707) @@ -413,6 +423,21 @@ def test_hyper_parameter_success_calls( # if we change the methods logic in the future we have to be make sure tha returned type is correct assert isinstance(result, expected_result_type) + # Special cases + if method in [ + "kappa", + "validator_logits_divergence", + "validator_exclude_quantile", + "max_weight_limit", + ]: + spy_u16_normalized_float.assert_called_once() + + if method in ["adjustment_alpha", "bonds_moving_avg"]: + spy_u64_normalized_float.assert_called_once() + + if method in ["recycle"]: + spy_balance_from_rao.assert_called_once() + def test_blocks_since_last_update_success_calls(subtensor, mocker): """Tests the weights_rate_limit method to ensure it correctly fetches the LastUpdate hyperparameter.""" @@ -427,6 +452,7 @@ def test_blocks_since_last_update_success_calls(subtensor, mocker): result = subtensor.blocks_since_last_update(netuid=7, uid=uid) # Assertions + subtensor.get_current_block.assert_called_once() subtensor._get_hyperparameter.assert_called_once_with( param_name="LastUpdate", netuid=7 ) From 555f506ffa2a4d6467d4cb0e0ceb2c328264618d Mon Sep 17 00:00:00 2001 From: opendansor Date: Thu, 23 May 2024 11:08:22 -0700 Subject: [PATCH 022/116] Update Pydantic Requirement to at least version 2.3 or greater. 
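For reviewers who want to sanity-check an existing environment against the new floor, here is a minimal sketch (illustrative only, not part of this patch; it relies on the `packaging` package already pinned in requirements/prod.txt):

    # Hypothetical check that the installed pydantic falls inside the new ">=2.3, <3" range.
    from importlib.metadata import version
    from packaging.version import Version

    installed = Version(version("pydantic"))
    assert Version("2.3") <= installed < Version("3"), f"unsupported pydantic {installed}"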
--- requirements/prod.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/prod.txt b/requirements/prod.txt index 6b15fe442d..d5bbf44b87 100644 --- a/requirements/prod.txt +++ b/requirements/prod.txt @@ -19,7 +19,7 @@ packaging pycryptodome>=3.18.0,<4.0.0 pyyaml password_strength -pydantic>=2.0, <3 +pydantic>=2.3, <3 PyNaCl>=1.3.0,<=1.5.0 pytest-asyncio python-Levenshtein From c7c29dcf499426475096dc954eccf29d8f23848a Mon Sep 17 00:00:00 2001 From: Ibraheem Nadeem Date: Wed, 15 May 2024 17:48:11 -0700 Subject: [PATCH 023/116] Feat: Added normalized hyperparams --- bittensor/commands/network.py | 14 +++++++--- bittensor/commands/utils.py | 48 ++++++++++++++++++++++++++++++++++- 2 files changed, 58 insertions(+), 4 deletions(-) diff --git a/bittensor/commands/network.py b/bittensor/commands/network.py index 64fbd272f6..b1a78e905d 100644 --- a/bittensor/commands/network.py +++ b/bittensor/commands/network.py @@ -21,7 +21,12 @@ from rich.prompt import Prompt from rich.table import Table from typing import List, Optional, Dict -from .utils import get_delegates_details, DelegatesDetails, check_netuid_set +from .utils import ( + get_delegates_details, + DelegatesDetails, + check_netuid_set, + normalize_hyperparameters, +) from .identity import SetIdentityCommand console = bittensor.__console__ @@ -489,9 +494,12 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): ) table.add_column("[overline white]HYPERPARAMETER", style="bold white") table.add_column("[overline white]VALUE", style="green") + table.add_column("[overline white]NORMALIZED", style="cyan") - for param in subnet.__dict__: - table.add_row(" " + param, str(subnet.__dict__[param])) + normalized_values = normalize_hyperparameters(subnet) + + for param, value, norm_value in normalized_values: + table.add_row(" " + param, value, norm_value) bittensor.__console__.print(table) diff --git a/bittensor/commands/utils.py b/bittensor/commands/utils.py index 4ea8fa3dd1..2c7f96a37e 100644 --- a/bittensor/commands/utils.py +++ b/bittensor/commands/utils.py @@ -20,7 +20,9 @@ import bittensor import requests from bittensor.utils.registration import torch -from typing import List, Dict, Any, Optional +from bittensor.utils.balance import Balance +from bittensor.utils import U64_NORMALIZED_FLOAT, U16_NORMALIZED_FLOAT +from typing import List, Dict, Any, Optional, Tuple from rich.prompt import Confirm, PromptBase from dataclasses import dataclass from . import defaults @@ -194,6 +196,50 @@ def filter_netuids_by_registered_hotkeys( return list(set(netuids)) +def normalize_hyperparameters( + subnet: bittensor.SubnetHyperparameters, +) -> List[Tuple[str, str, str]]: + """ + Normalizes the hyperparameters of a subnet. + + Args: + subnet: The subnet hyperparameters object. + + Returns: + A list of tuples containing the parameter name, value, and normalized value. 
+ """ + param_mappings = { + "adjustment_alpha": U64_NORMALIZED_FLOAT, + "min_difficulty": U64_NORMALIZED_FLOAT, + "max_difficulty": U64_NORMALIZED_FLOAT, + "difficulty": U64_NORMALIZED_FLOAT, + "bonds_moving_avg": U64_NORMALIZED_FLOAT, + "max_weight_limit": U16_NORMALIZED_FLOAT, + "kappa": U16_NORMALIZED_FLOAT, + "min_burn": Balance.from_rao, + "max_burn": Balance.from_rao, + } + + normalized_values: List[Tuple[str, str, str]] = [] + subnet_dict = subnet.__dict__ + + for param, value in subnet_dict.items(): + try: + if param in param_mappings: + norm_value = param_mappings[param](value) + if isinstance(norm_value, float): + norm_value = f"{norm_value:.{10}g}" + else: + norm_value = value + except Exception as e: + bittensor.logging.error(f"Error normalizing parameter '{param}': {e}") + norm_value = "-" + + normalized_values.append((param, str(value), str(norm_value))) + + return normalized_values + + @dataclass class DelegatesDetails: name: str From bb1a95a1ee5216311ff708b70eed600d3635bf93 Mon Sep 17 00:00:00 2001 From: Roman Date: Thu, 23 May 2024 16:00:31 -0700 Subject: [PATCH 024/116] Since the refactoring of this module would turn into a large project, it was decided to divide it into several stages. This is part 4: current part affects everything up to the subtensor().tx_rate_limit() method. Test coverage improved. Part 1 https://github.com/opentensor/bittensor/pull/1911 Part 2 https://github.com/opentensor/bittensor/pull/1913 Part 3 https://github.com/opentensor/bittensor/pull/1923 --- bittensor/subtensor.py | 202 +++++-- tests/unit_tests/test_subtensor.py | 843 +++++++++++++++++++++++++++++ 2 files changed, 994 insertions(+), 51 deletions(-) diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 737a3b556b..d41bae5404 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -1177,8 +1177,7 @@ def _do_burned_register( wait_for_finalization (bool): Whether to wait for the transaction to be finalized. Default is True. Returns: - Tuple[bool, Optional[str]]: A tuple containing a boolean indicating success or failure, and an optional - error message. + Tuple[bool, Optional[str]]: A tuple containing a boolean indicating success or failure, and an optional error message. """ @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) @@ -2393,7 +2392,7 @@ def query_identity( self, key: str, block: Optional[int] = None, - ) -> Optional["ScaleType"]: + ) -> dict: """ Queries the identity of a neuron on the Bittensor blockchain using the given key. This function retrieves detailed identity information about a specific neuron, which is a crucial aspect of the network's decentralized @@ -2408,15 +2407,14 @@ def query_identity( block (Optional[int]): The blockchain block number at which to perform the query. Returns: - Optional[ScaleType]: An object containing the identity information of the neuron if found, ``None`` - otherwise. + result (dict): An object containing the identity information of the neuron if found, ``None`` otherwise. The identity information can include various attributes such as the neuron's stake, rank, and other network-specific details, providing insights into the neuron's role and status within the Bittensor network. 
""" @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) - def make_substrate_call_with_retry(): + def make_substrate_call_with_retry() -> "ScaleType": return self.substrate.query( module="Registry", storage_function="IdentityOf", @@ -2427,6 +2425,7 @@ def make_substrate_call_with_retry(): ) identity_info = make_substrate_call_with_retry() + return bittensor.utils.wallet_utils.decode_hex_identity_dict( identity_info.value["info"] ) @@ -3351,62 +3350,150 @@ def tempo(self, netuid: int, block: Optional[int] = None) -> Optional[int]: def get_total_stake_for_hotkey( self, ss58_address: str, block: Optional[int] = None ) -> Optional["Balance"]: - """Returns the total stake held on a hotkey including delegative""" + """ + Returns the total stake held on a hotkey including delegative. + + Args: + ss58_address (str): The SS58 address of the hotkey. + block (Optional[int], optional): The block number to retrieve the stake from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[Balance]: The total stake held on the hotkey, or ``None`` if the hotkey does not + exist or the stake is not found. + """ _result = self.query_subtensor("TotalHotkeyStake", block, [ss58_address]) - if not hasattr(_result, "value") or _result is None: - return None - return Balance.from_rao(_result.value) + return ( + None + if _result is None or not hasattr(_result, "value") + else Balance.from_rao(_result.value) + ) def get_total_stake_for_coldkey( self, ss58_address: str, block: Optional[int] = None ) -> Optional["Balance"]: - """Returns the total stake held on a coldkey across all hotkeys including delegates""" + """ + Returns the total stake held on a coldkey. + + Args: + ss58_address (str): The SS58 address of the coldkey. + block (Optional[int], optional): The block number to retrieve the stake from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[Balance]: The total stake held on the coldkey, or ``None`` if the coldkey does not + exist or the stake is not found. + """ _result = self.query_subtensor("TotalColdkeyStake", block, [ss58_address]) - if not hasattr(_result, "value") or _result is None: - return None - return Balance.from_rao(_result.value) + return ( + None + if _result is None or not hasattr(_result, "value") + else Balance.from_rao(_result.value) + ) def get_stake_for_coldkey_and_hotkey( self, hotkey_ss58: str, coldkey_ss58: str, block: Optional[int] = None ) -> Optional["Balance"]: - """Returns the stake under a coldkey - hotkey pairing""" + """ + Returns the stake under a coldkey - hotkey pairing. + + Args: + hotkey_ss58 (str): The SS58 address of the hotkey. + coldkey_ss58 (str): The SS58 address of the coldkey. + block (Optional[int], optional): The block number to retrieve the stake from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[Balance]: The stake under the coldkey - hotkey pairing, or ``None`` if the pairing does not + exist or the stake is not found. 
+ """ _result = self.query_subtensor("Stake", block, [hotkey_ss58, coldkey_ss58]) - if not hasattr(_result, "value") or _result is None: - return None - return Balance.from_rao(_result.value) + return ( + None + if _result is None or not hasattr(_result, "value") + else Balance.from_rao(_result.value) + ) def get_stake( self, hotkey_ss58: str, block: Optional[int] = None ) -> List[Tuple[str, "Balance"]]: - """Returns a list of stake tuples (coldkey, balance) for each delegating coldkey including the owner""" + """ + Returns a list of stake tuples (coldkey, balance) for each delegating coldkey including the owner. + + Args: + hotkey_ss58 (str): The SS58 address of the hotkey. + block (Optional[int], optional): The block number to retrieve the stakes from. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + List[Tuple[str, Balance]]: A list of tuples, each containing a coldkey SS58 address and the corresponding + balance staked by that coldkey. + """ return [ (r[0].value, Balance.from_rao(r[1].value)) for r in self.query_map_subtensor("Stake", block, [hotkey_ss58]) ] def does_hotkey_exist(self, hotkey_ss58: str, block: Optional[int] = None) -> bool: - """Returns true if the hotkey is known by the chain and there are accounts.""" - _result = self.query_subtensor("Owner", block, [hotkey_ss58]) - if not hasattr(_result, "value") or _result is None: - return False + """ + Returns true if the hotkey is known by the chain and there are accounts. - return _result.value != "5C4hrfjw9DjXZTzV3MwzrrAr9P1MJhSrvWGWqi1eSuyUpnhM" + Args: + hotkey_ss58 (str): The SS58 address of the hotkey. + block (Optional[int], optional): The block number to check the hotkey against. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + bool: ``True`` if the hotkey is known by the chain and there are accounts, ``False`` otherwise. + """ + _result = self.query_subtensor("Owner", block, [hotkey_ss58]) + return ( + False + if not _result or not hasattr(_result, "value") + else _result.value != "5C4hrfjw9DjXZTzV3MwzrrAr9P1MJhSrvWGWqi1eSuyUpnhM" + ) def get_hotkey_owner( self, hotkey_ss58: str, block: Optional[int] = None ) -> Optional[str]: - """Returns the coldkey owner of the passed hotkey""" + """ + Returns the coldkey owner of the passed hotkey. + + Args: + hotkey_ss58 (str): The SS58 address of the hotkey. + block (Optional[int], optional): The block number to check the hotkey owner against. If ``None``, the latest + block is used. Default is ``None``. + + Returns: + Optional[str]: The SS58 address of the coldkey owner, or ``None`` if the hotkey does not exist or the owner + is not found. + """ _result = self.query_subtensor("Owner", block, [hotkey_ss58]) - if not hasattr(_result, "value") or _result is None: - return None - if self.does_hotkey_exist(hotkey_ss58, block): - return _result.value - return None + return ( + None + if not _result + or not hasattr(_result, "value") + or not self.does_hotkey_exist(hotkey_ss58, block) + else _result.value + ) + # TODO: check if someone still use this method. bittensor not. def get_axon_info( self, netuid: int, hotkey_ss58: str, block: Optional[int] = None ) -> Optional[AxonInfo]: - """Returns the axon information for this hotkey account""" + """ + Returns the axon information for this hotkey account. + + Args: + netuid (int): The unique identifier of the subnetwork. + hotkey_ss58 (str): The SS58 address of the hotkey. + block (Optional[int], optional): The block number to retrieve the axon information from. 
If ``None``, the + latest block is used. Default is ``None``. + + Returns: + Optional[AxonInfo]: An AxonInfo object containing the axon information, or ``None`` if the axon information + is not found. + """ result = self.query_subtensor("Axons", block, [netuid, hotkey_ss58]) if result is not None and hasattr(result, "value"): return AxonInfo( @@ -3420,15 +3507,27 @@ def get_axon_info( hotkey=hotkey_ss58, coldkey="", ) - return None + # TODO: check if someone still use this method. bittensor not. def get_prometheus_info( self, netuid: int, hotkey_ss58: str, block: Optional[int] = None ) -> Optional[PrometheusInfo]: - """Returns the prometheus information for this hotkey account""" + """ + Returns the prometheus information for this hotkey account. + + Args: + netuid (int): The unique identifier of the subnetwork. + hotkey_ss58 (str): The SS58 address of the hotkey. + block (Optional[int], optional): The block number to retrieve the prometheus information from. If ``None``, + the latest block is used. Default is ``None``. + + Returns: + Optional[PrometheusInfo]: A PrometheusInfo object containing the prometheus information, or ``None`` if the + prometheus information is not found. + """ result = self.query_subtensor("Prometheus", block, [netuid, hotkey_ss58]) - if result is not None: + if result is not None and hasattr(result, "value"): return PrometheusInfo( ip=networking.int_to_ip(result.value["ip"]), ip_type=result.value["ip_type"], @@ -3436,8 +3535,7 @@ def get_prometheus_info( version=result.value["version"], block=result.value["block"], ) - else: - return None + return None ##################### # Global Parameters # @@ -3467,9 +3565,11 @@ def total_issuance(self, block: Optional[int] = None) -> Optional[Balance]: of the currency and providing insights into the network's economic health and inflationary trends. """ _result = self.query_subtensor("TotalIssuance", block) - if not hasattr(_result, "value") or _result is None: - return None - return Balance.from_rao(_result.value) + return ( + None + if not _result or not hasattr(_result, "value") + else Balance.from_rao(_result.value) + ) def total_stake(self, block: Optional[int] = None) -> Optional[Balance]: """ @@ -3488,9 +3588,11 @@ def total_stake(self, block: Optional[int] = None) -> Optional[Balance]: consensus and incentive mechanisms. """ _result = self.query_subtensor("TotalStake", block) - if not hasattr(_result, "value") or _result is None: - return None - return Balance.from_rao(_result.value) + return ( + None + if _result is None or not hasattr(_result, "value") + else Balance.from_rao(_result.value) + ) def serving_rate_limit( self, netuid: int, block: Optional[int] = None @@ -3512,12 +3614,10 @@ def serving_rate_limit( overuse of resources by individual neurons. It helps ensure a balanced distribution of service requests across the network. """ - if not self.subnet_exists(netuid, block): - return None - _result = self.query_subtensor("ServingRateLimit", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + call = self._get_hyperparameter( + param_name="ServingRateLimit", netuid=netuid, block=block + ) + return None if call is None else int(call) def tx_rate_limit(self, block: Optional[int] = None) -> Optional[int]: """ @@ -3535,9 +3635,9 @@ def tx_rate_limit(self, block: Optional[int] = None) -> Optional[int]: maintaining efficient and timely transaction processing. 
""" _result = self.query_subtensor("TxRateLimit", block) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + return ( + None if _result is None or not hasattr(_result, "value") else _result.value + ) ###################### # Network Parameters # diff --git a/tests/unit_tests/test_subtensor.py b/tests/unit_tests/test_subtensor.py index e6386bc288..c3bf8c29ec 100644 --- a/tests/unit_tests/test_subtensor.py +++ b/tests/unit_tests/test_subtensor.py @@ -475,3 +475,846 @@ def test_weights_rate_limit_success_calls(subtensor, mocker): ) # if we change the methods logic in the future we have to be make sure tha returned type is correct assert isinstance(result, int) + + +########################### +# Account functions tests # +########################### + + +# `get_total_stake_for_hotkey` tests +def test_get_total_stake_for_hotkey_success(subtensor, mocker): + """Tests successful retrieval of total stake for hotkey.""" + # Prep + subtensor.query_subtensor = mocker.MagicMock(return_value=mocker.MagicMock(value=1)) + fake_ss58_address = "12bzRJfh7arnnfPPUZHeJUaE62QLEwhK48QnH9LXeK2m1iZU" + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.get_total_stake_for_hotkey(ss58_address=fake_ss58_address) + + # Assertions + subtensor.query_subtensor.assert_called_once_with( + "TotalHotkeyStake", None, [fake_ss58_address] + ) + spy_balance_from_rao.assert_called_once() + # if we change the methods logic in the future we have to be make sure tha returned type is correct + assert isinstance(result, Balance) + + +def test_get_total_stake_for_hotkey_not_result(subtensor, mocker): + """Tests retrieval of total stake for hotkey when no result is returned.""" + # Prep + subtensor.query_subtensor = mocker.MagicMock(return_value=None) + fake_ss58_address = "12bzRJfh7arnnfPPUZHeJUaE62QLEwhK48QnH9LXeK2m1iZU" + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.get_total_stake_for_hotkey(ss58_address=fake_ss58_address) + + # Assertions + subtensor.query_subtensor.assert_called_once_with( + "TotalHotkeyStake", None, [fake_ss58_address] + ) + spy_balance_from_rao.assert_not_called() + # if we change the methods logic in the future we have to be make sure tha returned type is correct + assert isinstance(result, type(None)) + + +def test_get_total_stake_for_hotkey_not_value(subtensor, mocker): + """Tests retrieval of total stake for hotkey when no value attribute is present.""" + # Prep + subtensor.query_subtensor = mocker.MagicMock(return_value=object) + fake_ss58_address = "12bzRJfh7arnnfPPUZHeJUaE62QLEwhK48QnH9LXeK2m1iZU" + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.get_total_stake_for_hotkey(ss58_address=fake_ss58_address) + + # Assertions + subtensor.query_subtensor.assert_called_once_with( + "TotalHotkeyStake", None, [fake_ss58_address] + ) + spy_balance_from_rao.assert_not_called() + # if we change the methods logic in the future we have to be make sure tha returned type is correct + assert isinstance(subtensor.query_subtensor.return_value, object) + assert not hasattr(result, "value") + + +# `get_total_stake_for_coldkey` tests +def test_get_total_stake_for_coldkey_success(subtensor, mocker): + """Tests successful retrieval of total stake for coldkey.""" + # Prep + subtensor.query_subtensor = mocker.MagicMock(return_value=mocker.MagicMock(value=1)) + fake_ss58_address = "12bzRJfh7arnnfPPUZHeJUaE62QLEwhK48QnH9LXeK2m1iZU" + spy_balance_from_rao = 
mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.get_total_stake_for_coldkey(ss58_address=fake_ss58_address) + + # Assertions + subtensor.query_subtensor.assert_called_once_with( + "TotalColdkeyStake", None, [fake_ss58_address] + ) + spy_balance_from_rao.assert_called_once() + # if we change the methods logic in the future we have to be make sure tha returned type is correct + assert isinstance(result, Balance) + + +def test_get_total_stake_for_coldkey_not_result(subtensor, mocker): + """Tests retrieval of total stake for coldkey when no result is returned.""" + # Prep + subtensor.query_subtensor = mocker.MagicMock(return_value=None) + fake_ss58_address = "12bzRJfh7arnnfPPUZHeJUaE62QLEwhK48QnH9LXeK2m1iZU" + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.get_total_stake_for_coldkey(ss58_address=fake_ss58_address) + + # Assertions + subtensor.query_subtensor.assert_called_once_with( + "TotalColdkeyStake", None, [fake_ss58_address] + ) + spy_balance_from_rao.assert_not_called() + # if we change the methods logic in the future we have to be make sure tha returned type is correct + assert isinstance(result, type(None)) + + +def test_get_total_stake_for_coldkey_not_value(subtensor, mocker): + """Tests retrieval of total stake for coldkey when no value attribute is present.""" + # Prep + subtensor.query_subtensor = mocker.MagicMock(return_value=object) + fake_ss58_address = "12bzRJfh7arnnfPPUZHeJUaE62QLEwhK48QnH9LXeK2m1iZU" + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.get_total_stake_for_coldkey(ss58_address=fake_ss58_address) + + # Assertions + subtensor.query_subtensor.assert_called_once_with( + "TotalColdkeyStake", None, [fake_ss58_address] + ) + spy_balance_from_rao.assert_not_called() + # if we change the methods logic in the future we have to be make sure tha returned type is correct + assert isinstance(subtensor.query_subtensor.return_value, object) + assert not hasattr(result, "value") + + +# `get_stake` tests +def test_get_stake_returns_correct_data(mocker, subtensor): + """Tests that get_stake returns correct data.""" + # Prep + hotkey_ss58 = "test_hotkey" + block = 123 + expected_query_result = [ + (mocker.MagicMock(value="coldkey1"), mocker.MagicMock(value=100)), + (mocker.MagicMock(value="coldkey2"), mocker.MagicMock(value=200)), + ] + mocker.patch.object( + subtensor, "query_map_subtensor", return_value=expected_query_result + ) + + # Call + result = subtensor.get_stake(hotkey_ss58, block) + + # Assertion + assert result == [ + ("coldkey1", Balance.from_rao(100)), + ("coldkey2", Balance.from_rao(200)), + ] + subtensor.query_map_subtensor.assert_called_once_with("Stake", block, [hotkey_ss58]) + + +def test_get_stake_no_block(mocker, subtensor): + """Tests get_stake with no block specified.""" + # Prep + hotkey_ss58 = "test_hotkey" + expected_query_result = [ + (MagicMock(value="coldkey1"), MagicMock(value=100)), + ] + mocker.patch.object( + subtensor, "query_map_subtensor", return_value=expected_query_result + ) + + # Call + result = subtensor.get_stake(hotkey_ss58) + + # Assertion + assert result == [("coldkey1", Balance.from_rao(100))] + subtensor.query_map_subtensor.assert_called_once_with("Stake", None, [hotkey_ss58]) + + +def test_get_stake_empty_result(mocker, subtensor): + """Tests get_stake with an empty result.""" + # Prep + hotkey_ss58 = "test_hotkey" + block = 123 + expected_query_result = [] + mocker.patch.object( + subtensor, "query_map_subtensor", 
return_value=expected_query_result + ) + + # Call + result = subtensor.get_stake(hotkey_ss58, block) + + # Assertion + assert result == [] + subtensor.query_map_subtensor.assert_called_once_with("Stake", block, [hotkey_ss58]) + + +# `does_hotkey_exist` tests +def test_does_hotkey_exist_true(mocker, subtensor): + """Test does_hotkey_exist returns True when hotkey exists and is valid.""" + # Prep + hotkey_ss58 = "test_hotkey" + block = 123 + mock_result = mocker.MagicMock(value="valid_coldkey") + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.does_hotkey_exist(hotkey_ss58, block) + + # Assertions + assert result is True + subtensor.query_subtensor.assert_called_once_with("Owner", block, [hotkey_ss58]) + + +def test_does_hotkey_exist_false_special_value(mocker, subtensor): + """Test does_hotkey_exist returns False when result value is the special value.""" + # Prep + hotkey_ss58 = "test_hotkey" + block = 123 + special_value = "5C4hrfjw9DjXZTzV3MwzrrAr9P1MJhSrvWGWqi1eSuyUpnhM" + mock_result = MagicMock(value=special_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.does_hotkey_exist(hotkey_ss58, block) + + # Assertions + assert result is False + subtensor.query_subtensor.assert_called_once_with("Owner", block, [hotkey_ss58]) + + +def test_does_hotkey_exist_false_no_value(mocker, subtensor): + """Test does_hotkey_exist returns False when result has no value attribute.""" + # Prep + hotkey_ss58 = "test_hotkey" + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.does_hotkey_exist(hotkey_ss58, block) + + # Assertions + assert result is False + subtensor.query_subtensor.assert_called_once_with("Owner", block, [hotkey_ss58]) + + +def test_does_hotkey_exist_false_no_result(mocker, subtensor): + """Test does_hotkey_exist returns False when query_subtensor returns None.""" + # Prep + hotkey_ss58 = "test_hotkey" + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + + # Call + result = subtensor.does_hotkey_exist(hotkey_ss58, block) + + # Assertions + assert result is False + subtensor.query_subtensor.assert_called_once_with("Owner", block, [hotkey_ss58]) + + +def test_does_hotkey_exist_no_block(mocker, subtensor): + """Test does_hotkey_exist with no block specified.""" + # Prep + hotkey_ss58 = "test_hotkey" + mock_result = mocker.MagicMock(value="valid_coldkey") + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.does_hotkey_exist(hotkey_ss58) + + # Assertions + assert result is True + subtensor.query_subtensor.assert_called_once_with("Owner", None, [hotkey_ss58]) + + +# `get_hotkey_owner` tests +def test_get_hotkey_owner_exists(mocker, subtensor): + """Test get_hotkey_owner when the hotkey exists.""" + # Prep + hotkey_ss58 = "test_hotkey" + block = 123 + expected_owner = "coldkey_owner" + mock_result = mocker.MagicMock(value=expected_owner) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + mocker.patch.object(subtensor, "does_hotkey_exist", return_value=True) + + # Call + result = subtensor.get_hotkey_owner(hotkey_ss58, block) + + # Assertions + assert result == expected_owner + subtensor.query_subtensor.assert_called_once_with("Owner", block, [hotkey_ss58]) + subtensor.does_hotkey_exist.assert_called_once_with(hotkey_ss58, 
block) + + +def test_get_hotkey_owner_does_not_exist(mocker, subtensor): + """Test get_hotkey_owner when the hotkey does not exist.""" + # Prep + hotkey_ss58 = "test_hotkey" + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + mocker.patch.object(subtensor, "does_hotkey_exist", return_value=False) + + # Call + result = subtensor.get_hotkey_owner(hotkey_ss58, block) + + # Assertions + assert result is None + subtensor.query_subtensor.assert_called_once_with("Owner", block, [hotkey_ss58]) + subtensor.does_hotkey_exist.assert_not_called() + + +def test_get_hotkey_owner_no_block(mocker, subtensor): + """Test get_hotkey_owner with no block specified.""" + # Prep + hotkey_ss58 = "test_hotkey" + expected_owner = "coldkey_owner" + mock_result = mocker.MagicMock(value=expected_owner) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + mocker.patch.object(subtensor, "does_hotkey_exist", return_value=True) + + # Call + result = subtensor.get_hotkey_owner(hotkey_ss58) + + # Assertions + assert result == expected_owner + subtensor.query_subtensor.assert_called_once_with("Owner", None, [hotkey_ss58]) + subtensor.does_hotkey_exist.assert_called_once_with(hotkey_ss58, None) + + +def test_get_hotkey_owner_no_value_attribute(mocker, subtensor): + """Test get_hotkey_owner when the result has no value attribute.""" + # Prep + hotkey_ss58 = "test_hotkey" + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + mocker.patch.object(subtensor, "does_hotkey_exist", return_value=True) + + # Call + result = subtensor.get_hotkey_owner(hotkey_ss58, block) + + # Assertions + assert result is None + subtensor.query_subtensor.assert_called_once_with("Owner", block, [hotkey_ss58]) + subtensor.does_hotkey_exist.assert_not_called() + + +# `get_axon_info` tests +def test_get_axon_info_success(mocker, subtensor): + """Test get_axon_info returns correct data when axon information is found.""" + # Prep + netuid = 1 + hotkey_ss58 = "test_hotkey" + block = 123 + mock_result = mocker.MagicMock( + value={ + "ip": "192.168.1.1", + "ip_type": 4, + "port": 8080, + "protocol": "tcp", + "version": "1.0", + "placeholder1": "data1", + "placeholder2": "data2", + } + ) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_axon_info(netuid, hotkey_ss58, block) + + # Asserts + assert result is not None + assert result.ip == "192.168.1.1" + assert result.ip_type == 4 + assert result.port == 8080 + assert result.protocol == "tcp" + assert result.version == "1.0" + assert result.placeholder1 == "data1" + assert result.placeholder2 == "data2" + assert result.hotkey == hotkey_ss58 + assert result.coldkey == "" + subtensor.query_subtensor.assert_called_once_with( + "Axons", block, [netuid, hotkey_ss58] + ) + + +def test_get_axon_info_no_data(mocker, subtensor): + """Test get_axon_info returns None when no axon information is found.""" + # Prep + netuid = 1 + hotkey_ss58 = "test_hotkey" + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + + # Call + result = subtensor.get_axon_info(netuid, hotkey_ss58, block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with( + "Axons", block, [netuid, hotkey_ss58] + ) + + +def test_get_axon_info_no_value_attribute(mocker, subtensor): + """Test get_axon_info returns None when result has no value attribute.""" + # Prep + netuid = 
1 + hotkey_ss58 = "test_hotkey" + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_axon_info(netuid, hotkey_ss58, block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with( + "Axons", block, [netuid, hotkey_ss58] + ) + + +def test_get_axon_info_no_block(mocker, subtensor): + """Test get_axon_info with no block specified.""" + # Prep + netuid = 1 + hotkey_ss58 = "test_hotkey" + mock_result = mocker.MagicMock( + value={ + "ip": 3232235777, # 192.168.1.1 + "ip_type": 4, + "port": 8080, + "protocol": "tcp", + "version": "1.0", + "placeholder1": "data1", + "placeholder2": "data2", + } + ) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_axon_info(netuid, hotkey_ss58) + + # Asserts + assert result is not None + assert result.ip == "192.168.1.1" + assert result.ip_type == 4 + assert result.port == 8080 + assert result.protocol == "tcp" + assert result.version == "1.0" + assert result.placeholder1 == "data1" + assert result.placeholder2 == "data2" + assert result.hotkey == hotkey_ss58 + assert result.coldkey == "" + subtensor.query_subtensor.assert_called_once_with( + "Axons", None, [netuid, hotkey_ss58] + ) + + +# get_prometheus_info tests +def test_get_prometheus_info_success(mocker, subtensor): + """Test get_prometheus_info returns correct data when information is found.""" + # Prep + netuid = 1 + hotkey_ss58 = "test_hotkey" + block = 123 + mock_result = mocker.MagicMock( + value={ + "ip": 3232235777, # 192.168.1.1 + "ip_type": 4, + "port": 9090, + "version": "1.0", + "block": 1000, + } + ) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_prometheus_info(netuid, hotkey_ss58, block) + + # Asserts + assert result is not None + assert result.ip == "192.168.1.1" + assert result.ip_type == 4 + assert result.port == 9090 + assert result.version == "1.0" + assert result.block == 1000 + subtensor.query_subtensor.assert_called_once_with( + "Prometheus", block, [netuid, hotkey_ss58] + ) + + +def test_get_prometheus_info_no_data(mocker, subtensor): + """Test get_prometheus_info returns None when no information is found.""" + # Prep + netuid = 1 + hotkey_ss58 = "test_hotkey" + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + + # Call + result = subtensor.get_prometheus_info(netuid, hotkey_ss58, block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with( + "Prometheus", block, [netuid, hotkey_ss58] + ) + + +def test_get_prometheus_info_no_value_attribute(mocker, subtensor): + """Test get_prometheus_info returns None when result has no value attribute.""" + # Prep + netuid = 1 + hotkey_ss58 = "test_hotkey" + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_prometheus_info(netuid, hotkey_ss58, block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with( + "Prometheus", block, [netuid, hotkey_ss58] + ) + + +def test_get_prometheus_info_no_block(mocker, subtensor): + """Test get_prometheus_info with no block specified.""" + # Prep + netuid = 1 + hotkey_ss58 = "test_hotkey" + mock_result = MagicMock( + value={ + "ip": "192.168.1.1", + "ip_type": 4, + "port": 9090, + 
"version": "1.0", + "block": 1000, + } + ) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_prometheus_info(netuid, hotkey_ss58) + + # Asserts + assert result is not None + assert result.ip == "192.168.1.1" + assert result.ip_type == 4 + assert result.port == 9090 + assert result.version == "1.0" + assert result.block == 1000 + subtensor.query_subtensor.assert_called_once_with( + "Prometheus", None, [netuid, hotkey_ss58] + ) + + +########################### +# Global Parameters tests # +########################### + + +# `block` property test +def test_block_property(mocker, subtensor): + """Test block property returns the correct block number.""" + expected_block = 123 + mocker.patch.object(subtensor, "get_current_block", return_value=expected_block) + + result = subtensor.block + + assert result == expected_block + subtensor.get_current_block.assert_called_once() + + +# `total_issuance` tests +def test_total_issuance_success(mocker, subtensor): + """Test total_issuance returns correct data when issuance information is found.""" + # Prep + block = 123 + issuance_value = 1000 + mock_result = mocker.MagicMock(value=issuance_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.total_issuance(block) + + # Asserts + assert result is not None + subtensor.query_subtensor.assert_called_once_with("TotalIssuance", block) + spy_balance_from_rao.assert_called_once_with( + subtensor.query_subtensor.return_value.value + ) + + +def test_total_issuance_no_data(mocker, subtensor): + """Test total_issuance returns None when no issuance information is found.""" + # Prep + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.total_issuance(block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with("TotalIssuance", block) + spy_balance_from_rao.assert_not_called() + + +def test_total_issuance_no_value_attribute(mocker, subtensor): + """Test total_issuance returns None when result has no value attribute.""" + # Prep + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.total_issuance(block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with("TotalIssuance", block) + spy_balance_from_rao.assert_not_called() + + +def test_total_issuance_no_block(mocker, subtensor): + """Test total_issuance with no block specified.""" + # Prep + issuance_value = 1000 + mock_result = mocker.MagicMock(value=issuance_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.total_issuance() + + # Asserts + assert result is not None + subtensor.query_subtensor.assert_called_once_with("TotalIssuance", None) + spy_balance_from_rao.assert_called_once_with( + subtensor.query_subtensor.return_value.value + ) + + +# `total_stake` method tests +def test_total_stake_success(mocker, subtensor): + """Test total_stake returns correct data when stake information is found.""" + # Prep + block = 123 + stake_value = 5000 + mock_result = mocker.MagicMock(value=stake_value) + 
mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.total_stake(block) + + # Asserts + assert result is not None + subtensor.query_subtensor.assert_called_once_with("TotalStake", block) + spy_balance_from_rao.assert_called_once_with( + subtensor.query_subtensor.return_value.value + ) + + +def test_total_stake_no_data(mocker, subtensor): + """Test total_stake returns None when no stake information is found.""" + # Prep + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.total_stake(block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with("TotalStake", block) + spy_balance_from_rao.assert_not_called() + + +def test_total_stake_no_value_attribute(mocker, subtensor): + """Test total_stake returns None when result has no value attribute.""" + # Prep + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.total_stake(block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with("TotalStake", block) + spy_balance_from_rao.assert_not_called() + + +def test_total_stake_no_block(mocker, subtensor): + """Test total_stake with no block specified.""" + # Prep + stake_value = 5000 + mock_result = mocker.MagicMock(value=stake_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.total_stake() + + # Asserts + assert result is not None + subtensor.query_subtensor.assert_called_once_with("TotalStake", None) + spy_balance_from_rao.assert_called_once_with( + subtensor.query_subtensor.return_value.value + ), + + +# `serving_rate_limit` method tests +def test_serving_rate_limit_success(mocker, subtensor): + """Test serving_rate_limit returns correct data when rate limit information is found.""" + # Prep + netuid = 1 + block = 123 + rate_limit_value = "10" + mocker.patch.object(subtensor, "_get_hyperparameter", return_value=rate_limit_value) + + # Call + result = subtensor.serving_rate_limit(netuid, block) + + # Asserts + assert result is not None + assert result == int(rate_limit_value) + # subtensor._get_hyperparameter.assert_called_once_with("ServingRateLimit", netuid=netuid, block=block) + + +def test_serving_rate_limit_no_data(mocker, subtensor): + """Test serving_rate_limit returns None when no rate limit information is found.""" + # Prep + netuid = 1 + block = 123 + mocker.patch.object(subtensor, "_get_hyperparameter", return_value=None) + + # Call + result = subtensor.serving_rate_limit(netuid, block) + + # Asserts + assert result is None + subtensor._get_hyperparameter.assert_called_once_with( + param_name="ServingRateLimit", netuid=netuid, block=block + ) + + +def test_serving_rate_limit_no_block(mocker, subtensor): + """Test serving_rate_limit with no block specified.""" + # Prep + netuid = 1 + rate_limit_value = "10" + mocker.patch.object(subtensor, "_get_hyperparameter", return_value=rate_limit_value) + + # Call + result = subtensor.serving_rate_limit(netuid) + + # Asserts + assert result is not None + assert result == int(rate_limit_value) + subtensor._get_hyperparameter.assert_called_once_with( 
+ param_name="ServingRateLimit", netuid=netuid, block=None + ) + + +# `tx_rate_limit` tests +def test_tx_rate_limit_success(mocker, subtensor): + """Test tx_rate_limit returns correct data when rate limit information is found.""" + # Prep + block = 123 + rate_limit_value = 100 + mock_result = mocker.MagicMock(value=rate_limit_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.tx_rate_limit(block) + + # Asserts + assert result is not None + assert result == rate_limit_value + subtensor.query_subtensor.assert_called_once_with("TxRateLimit", block) + + +def test_tx_rate_limit_no_data(mocker, subtensor): + """Test tx_rate_limit returns None when no rate limit information is found.""" + # Prep + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + + # Call + result = subtensor.tx_rate_limit(block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with("TxRateLimit", block) + + +def test_tx_rate_limit_no_value_attribute(mocker, subtensor): + """Test tx_rate_limit returns None when result has no value attribute.""" + # Prep + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.tx_rate_limit(block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with("TxRateLimit", block) + + +def test_tx_rate_limit_no_block(mocker, subtensor): + """Test tx_rate_limit with no block specified.""" + # Prep + rate_limit_value = 100 + mock_result = mocker.MagicMock(value=rate_limit_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.tx_rate_limit() + + # Asserts + assert result is not None + assert result == rate_limit_value + subtensor.query_subtensor.assert_called_once_with("TxRateLimit", None) + + +############################ +# Network Parameters tests # +############################ From 279a3a1690de9b6596588e3886c16f12bc1b73b2 Mon Sep 17 00:00:00 2001 From: ibraheem-opentensor Date: Thu, 23 May 2024 17:43:51 -0700 Subject: [PATCH 025/116] Normalization: Added tests and covered sudo get command --- bittensor/commands/network.py | 9 ++- bittensor/commands/utils.py | 2 +- tests/unit_tests/test_subtensor.py | 88 ++++++++++++++++++++++++++++++ 3 files changed, 95 insertions(+), 4 deletions(-) diff --git a/bittensor/commands/network.py b/bittensor/commands/network.py index b1a78e905d..4c8f659b24 100644 --- a/bittensor/commands/network.py +++ b/bittensor/commands/network.py @@ -492,7 +492,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): table.title = "[white]Subnet Hyperparameters - NETUID: {} - {}".format( cli.config.netuid, subtensor.network ) - table.add_column("[overline white]HYPERPARAMETER", style="bold white") + table.add_column("[overline white]HYPERPARAMETER", style="white") table.add_column("[overline white]VALUE", style="green") table.add_column("[overline white]NORMALIZED", style="cyan") @@ -601,9 +601,12 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): ) table.add_column("[overline white]HYPERPARAMETER", style="white") table.add_column("[overline white]VALUE", style="green") + table.add_column("[overline white]NORMALIZED", style="cyan") + + normalized_values = normalize_hyperparameters(subnet) - for param in subnet.__dict__: - table.add_row(param, str(subnet.__dict__[param])) + for param, value, norm_value 
in normalized_values: + table.add_row(" " + param, value, norm_value) bittensor.__console__.print(table) diff --git a/bittensor/commands/utils.py b/bittensor/commands/utils.py index 2c7f96a37e..1694d3bc5e 100644 --- a/bittensor/commands/utils.py +++ b/bittensor/commands/utils.py @@ -232,7 +232,7 @@ def normalize_hyperparameters( else: norm_value = value except Exception as e: - bittensor.logging.error(f"Error normalizing parameter '{param}': {e}") + bittensor.logging.warning(f"Error normalizing parameter '{param}': {e}") norm_value = "-" normalized_values.append((param, str(value), str(norm_value))) diff --git a/tests/unit_tests/test_subtensor.py b/tests/unit_tests/test_subtensor.py index e6386bc288..6caf5ad30a 100644 --- a/tests/unit_tests/test_subtensor.py +++ b/tests/unit_tests/test_subtensor.py @@ -32,7 +32,10 @@ U16_NORMALIZED_FLOAT, U64_NORMALIZED_FLOAT, ) +from bittensor.chain_data import SubnetHyperparameters, U16_MAX, U64_MAX +from bittensor.commands.utils import normalize_hyperparameters from bittensor import subtensor_module +from bittensor.utils.balance import Balance def test_serve_axon_with_external_ip_set(): @@ -475,3 +478,88 @@ def test_weights_rate_limit_success_calls(subtensor, mocker): ) # if we change the methods logic in the future we have to be make sure tha returned type is correct assert isinstance(result, int) + + +@pytest.fixture +def sample_hyperparameters(): + return MagicMock(spec=SubnetHyperparameters) + + +def get_normalized_value(normalized_data, param_name): + return next( + ( + norm_value + for p_name, _, norm_value in normalized_data + if p_name == param_name + ), + None, + ) + + +@pytest.mark.parametrize( + "param_name, max_value, mid_value, zero_value, is_balance", + [ + ("adjustment_alpha", U64_MAX, U64_MAX / 2, 0, False), + ("max_weight_limit", U16_MAX, U16_MAX / 2, 0, False), + ("difficulty", U64_MAX, U64_MAX / 2, 0, False), + ("min_difficulty", U64_MAX, U64_MAX / 2, 0, False), + ("max_difficulty", U64_MAX, U64_MAX / 2, 0, False), + ("bonds_moving_avg", U64_MAX, U64_MAX / 2, 0, False), + ("min_burn", 10000000000, 5000000000, 0, True), # These are in rao + ("max_burn", 20000000000, 10000000000, 0, True), + ], + ids=[ + "adjustment-alpha", + "max_weight_limit", + "difficulty", + "min_difficulty", + "max_difficulty", + "bonds_moving_avg", + "min_burn", + "max_burn", + ], +) +def test_hyperparameter_normalization( + sample_hyperparameters, param_name, max_value, mid_value, zero_value, is_balance +): + setattr(sample_hyperparameters, param_name, mid_value) + normalized = normalize_hyperparameters(sample_hyperparameters) + norm_value = get_normalized_value(normalized, param_name) + + # Mid-value test + if is_balance: + numeric_value = float(str(norm_value).lstrip(bittensor.__tao_symbol__)) + expected_tao = mid_value / 1e9 + assert ( + numeric_value == expected_tao + ), f"Mismatch in tao value for {param_name} at mid value" + else: + assert float(norm_value) == 0.5, f"Failed mid-point test for {param_name}" + + # Max-value test + setattr(sample_hyperparameters, param_name, max_value) + normalized = normalize_hyperparameters(sample_hyperparameters) + norm_value = get_normalized_value(normalized, param_name) + + if is_balance: + numeric_value = float(str(norm_value).lstrip(bittensor.__tao_symbol__)) + expected_tao = max_value / 1e9 + assert ( + numeric_value == expected_tao + ), f"Mismatch in tao value for {param_name} at max value" + else: + assert float(norm_value) == 1.0, f"Failed max value test for {param_name}" + + # Zero-value test + 
setattr(sample_hyperparameters, param_name, zero_value)
+    normalized = normalize_hyperparameters(sample_hyperparameters)
+    norm_value = get_normalized_value(normalized, param_name)
+
+    if is_balance:
+        numeric_value = float(str(norm_value).lstrip(bittensor.__tao_symbol__))
+        expected_tao = zero_value / 1e9
+        assert (
+            numeric_value == expected_tao
+        ), f"Mismatch in tao value for {param_name} at zero value"
+    else:
+        assert float(norm_value) == 0.0, f"Failed zero value test for {param_name}"

From e793bdf36bc53434278fdc2114f60bacb6a159ca Mon Sep 17 00:00:00 2001
From: Roman
Date: Thu, 23 May 2024 20:31:20 -0700
Subject: [PATCH 026/116] Since the refactoring of this module would turn into
 a large project, it was decided to divide it into several stages.

This is part 5: the current part affects everything in this module except for
renaming class `subtensor` according to the CamelCase rule.

Test coverage improved.

Part 1 https://github.com/opentensor/bittensor/pull/1911
Part 2 https://github.com/opentensor/bittensor/pull/1913
Part 3 https://github.com/opentensor/bittensor/pull/1923
Part 4 https://github.com/opentensor/bittensor/pull/1931
---
 bittensor/subtensor.py             | 321 +++++++----
 tests/unit_tests/test_subtensor.py | 865 ++++++++++++++++++++++++++++-
 2 files changed, 1079 insertions(+), 107 deletions(-)

diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py
index d41bae5404..90c7d74fbb 100644
--- a/bittensor/subtensor.py
+++ b/bittensor/subtensor.py
@@ -2741,10 +2741,11 @@ def state_call(
         @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger)
         def make_substrate_call_with_retry() -> Dict[Any, Any]:
             block_hash = None if block is None else self.substrate.get_block_hash(block)
-            params = [method, data]
-            if block_hash:
-                params = params + [block_hash]
-            return self.substrate.rpc_request(method="state_call", params=params)
+
+            return self.substrate.rpc_request(
+                method="state_call",
+                params=[method, data, block_hash] if block_hash else [method, data],
+            )
 
         return make_substrate_call_with_retry()
 
@@ -3101,12 +3102,10 @@ def validator_exclude_quantile(
 
         Args:
             netuid (int): The unique identifier of the subnetwork.
-            block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest
-                block is used. Default is ``None``.
+            block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest block is used. Default is ``None``.
 
         Returns:
-            Optional[float]: The value of the ValidatorExcludeQuantile hyperparameter, or ``None`` if the subnetwork
-                does not exist or the parameter is not found.
+            Optional[float]: The value of the ValidatorExcludeQuantile hyperparameter, or ``None`` if the subnetwork does not exist or the parameter is not found.
         """
         call = self._get_hyperparameter(
             param_name="ValidatorExcludeQuantile", netuid=netuid, block=block
@@ -3658,9 +3657,9 @@ def subnet_exists(self, netuid: int, block: Optional[int] = None) -> bool:
         enabling a deeper understanding of the network's structure and composition.
""" _result = self.query_subtensor("NetworksAdded", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return False - return _result.value + return ( + False if _result is None or not hasattr(_result, "value") else _result.value + ) def get_all_subnet_netuids(self, block: Optional[int] = None) -> List[int]: """ @@ -3675,14 +3674,12 @@ def get_all_subnet_netuids(self, block: Optional[int] = None) -> List[int]: This function provides a comprehensive view of the subnets within the Bittensor network, offering insights into its diversity and scale. """ - subnet_netuids = [] result = self.query_map_subtensor("NetworksAdded", block) - if result.records: - for netuid, exists in result: - if exists: - subnet_netuids.append(netuid.value) - - return subnet_netuids + return ( + [] + if result is None or not hasattr(result, "records") + else [netuid.value for netuid, exists in result if exists] + ) def get_total_subnets(self, block: Optional[int] = None) -> Optional[int]: """ @@ -3698,17 +3695,27 @@ def get_total_subnets(self, block: Optional[int] = None) -> Optional[int]: the extent of its decentralized infrastructure. """ _result = self.query_subtensor("TotalNetworks", block) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + return ( + None if _result is None or not hasattr(_result, "value") else _result.value + ) def get_subnet_modality( self, netuid: int, block: Optional[int] = None ) -> Optional[int]: + """ + Returns the NetworkModality hyperparameter for a specific subnetwork. + + Args: + netuid (int): The unique identifier of the subnetwork. + block (Optional[int], optional): The block number to retrieve the parameter from. If ``None``, the latest block is used. Default is ``None``. + + Returns: + Optional[int]: The value of the NetworkModality hyperparameter, or ``None`` if the subnetwork does not exist or the parameter is not found. + """ _result = self.query_subtensor("NetworkModality", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + return ( + None if _result is None or not hasattr(_result, "value") else _result.value + ) def get_subnet_connection_requirement( self, netuid_0: int, netuid_1: int, block: Optional[int] = None @@ -3736,9 +3743,11 @@ def get_emission_value_by_subnet( reward mechanisms within the subnet. """ _result = self.query_subtensor("EmissionValues", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return Balance.from_rao(_result.value) + return ( + None + if _result is None or not hasattr(_result, "value") + else Balance.from_rao(_result.value) + ) def get_subnet_connection_requirements( self, netuid: int, block: Optional[int] = None @@ -3758,10 +3767,11 @@ def get_subnet_connection_requirements( with specific subnets, ensuring compliance with their connection standards. """ result = self.query_map_subtensor("NetworkConnect", block, [netuid]) - if result.records: - return {str(tuple_[0].value): tuple_[1].value for tuple_ in result.records} - else: - return {} + return ( + {str(netuid.value): exists.value for netuid, exists in result.records} + if result and hasattr(result, "records") + else {} + ) def get_subnets(self, block: Optional[int] = None) -> List[int]: """ @@ -3777,14 +3787,12 @@ def get_subnets(self, block: Optional[int] = None) -> List[int]: This function is valuable for understanding the network's structure and the diversity of subnets available for neuron participation and collaboration. 
""" - subnets = [] result = self.query_map_subtensor("NetworksAdded", block) - if result.records: - for network in result.records: - subnets.append(network[0].value) - return subnets - else: - return [] + return ( + [network[0].value for network in result.records] + if result and hasattr(result, "records") + else [] + ) def get_all_subnets_info(self, block: Optional[int] = None) -> List[SubnetInfo]: """ @@ -3804,18 +3812,16 @@ def get_all_subnets_info(self, block: Optional[int] = None) -> List[SubnetInfo]: @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): block_hash = None if block is None else self.substrate.get_block_hash(block) - params = [] - if block_hash: - params = params + [block_hash] + return self.substrate.rpc_request( method="subnetInfo_getSubnetsInfo", # custom rpc method - params=params, + params=[block_hash] if block_hash else [], ) json_body = make_substrate_call_with_retry() - result = json_body["result"] + result = json_body.get("result", None) - if result in (None, []): + if not result: return [] return SubnetInfo.list_from_vec_u8(result) @@ -3841,18 +3847,16 @@ def get_subnet_info( @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): block_hash = None if block is None else self.substrate.get_block_hash(block) - params = [netuid] - if block_hash: - params = params + [block_hash] + return self.substrate.rpc_request( method="subnetInfo_getSubnetInfo", # custom rpc method - params=params, + params=[netuid, block_hash] if block_hash else [netuid], ) json_body = make_substrate_call_with_retry() - result = json_body["result"] + result = json_body.get("result", None) - if result in (None, []): + if not result: return None return SubnetInfo.from_vec_u8(result) @@ -3877,17 +3881,17 @@ def get_subnet_hyperparameters( hex_bytes_result = self.query_runtime_api( runtime_api="SubnetInfoRuntimeApi", method="get_subnet_hyperparams", - params=[netuid], # type: ignore + params=[netuid], block=block, ) if hex_bytes_result is None: return [] - if hex_bytes_result.startswith("0x"): # type: ignore - bytes_result = bytes.fromhex(hex_bytes_result[2:]) # type: ignore + if hex_bytes_result.startswith("0x"): + bytes_result = bytes.fromhex(hex_bytes_result[2:]) else: - bytes_result = bytes.fromhex(hex_bytes_result) # type: ignore + bytes_result = bytes.fromhex(hex_bytes_result) return SubnetHyperparameters.from_vec_u8(bytes_result) # type: ignore @@ -3909,9 +3913,7 @@ def get_subnet_owner( which can be important for decision-making and collaboration within the network. """ _result = self.query_subtensor("SubnetOwner", block, [netuid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + return _result.value if _result and hasattr(_result, "value") else None ############## # Nomination # @@ -3953,9 +3955,11 @@ def get_delegate_take( the distribution of rewards among neurons and their nominators. """ _result = self.query_subtensor("Delegates", block, [hotkey_ss58]) - if not hasattr(_result, "value") or _result is None: - return None - return U16_NORMALIZED_FLOAT(_result.value) + return ( + U16_NORMALIZED_FLOAT(_result.value) + if _result and hasattr(_result, "value") + else None + ) def get_nominators_for_hotkey( self, hotkey_ss58: str, block: Optional[int] = None @@ -3969,17 +3973,17 @@ def get_nominators_for_hotkey( block (Optional[int], optional): The blockchain block number for the query. 
Returns: - Union[List[Tuple[str, Balance]], int]: A list of tuples containing each nominator's address and staked amount - or 0. + Union[List[Tuple[str, Balance]], int]: A list of tuples containing each nominator's address and staked amount or 0. This function provides insights into the neuron's support network within the Bittensor ecosystem, indicating its trust and collaboration relationships. """ result = self.query_map_subtensor("Stake", block, [hotkey_ss58]) - if result.records: - return [(record[0].value, record[1].value) for record in result.records] - else: - return 0 + return ( + [(record[0].value, record[1].value) for record in result.records] + if result and hasattr(result, "records") + else 0 + ) def get_delegate_by_hotkey( self, hotkey_ss58: str, block: Optional[int] = None @@ -4002,19 +4006,19 @@ def get_delegate_by_hotkey( @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(encoded_hotkey_: List[int]): block_hash = None if block is None else self.substrate.get_block_hash(block) - params = [encoded_hotkey_] - if block_hash: - params = params + [block_hash] + return self.substrate.rpc_request( method="delegateInfo_getDelegate", # custom rpc method - params=params, + params=[encoded_hotkey_, block_hash] + if block_hash + else [encoded_hotkey_], ) encoded_hotkey = ss58_to_vec_u8(hotkey_ss58) json_body = make_substrate_call_with_retry(encoded_hotkey) - result = json_body["result"] + result = json_body.get("result", None) - if result in (None, []): + if not result: return None return DelegateInfo.from_vec_u8(result) @@ -4040,18 +4044,16 @@ def get_delegates_lite(self, block: Optional[int] = None) -> List[DelegateInfoLi @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): block_hash = None if block is None else self.substrate.get_block_hash(block) - params = [] - if block_hash: - params.extend([block_hash]) + return self.substrate.rpc_request( method="delegateInfo_getDelegatesLite", # custom rpc method - params=params, + params=[block_hash] if block_hash else [], ) json_body = make_substrate_call_with_retry() - result = json_body["result"] + result = json_body.get("result", None) - if result in (None, []): + if not result: return [] return [DelegateInfoLite(**d) for d in result] @@ -4077,18 +4079,16 @@ def get_delegates(self, block: Optional[int] = None) -> List[DelegateInfo]: @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): block_hash = None if block is None else self.substrate.get_block_hash(block) - params = [] - if block_hash: - params.extend([block_hash]) + return self.substrate.rpc_request( method="delegateInfo_getDelegates", # custom rpc method - params=params, + params=[block_hash] if block_hash else [], ) json_body = make_substrate_call_with_retry() - result = json_body["result"] + result = json_body.get("result", None) - if result in (None, []): + if not result: return [] return DelegateInfo.list_from_vec_u8(result) @@ -4115,19 +4115,19 @@ def get_delegated( @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(encoded_coldkey_: List[int]): block_hash = None if block is None else self.substrate.get_block_hash(block) - params = [encoded_coldkey_] - if block_hash: - params = params + [block_hash] + return self.substrate.rpc_request( - method="delegateInfo_getDelegated", # custom rpc method - params=params, + method="delegateInfo_getDelegated", + params=[block_hash, 
encoded_coldkey_] + if block_hash + else [encoded_coldkey_], ) encoded_coldkey = ss58_to_vec_u8(coldkey_ss58) json_body = make_substrate_call_with_retry(encoded_coldkey) - result = json_body["result"] + result = json_body.get("result", None) - if result in (None, []): + if not result: return [] return DelegateInfo.delegated_list_from_vec_u8(result) @@ -4213,7 +4213,19 @@ def get_stake_info_for_coldkeys( def get_minimum_required_stake( self, - ): + ) -> Balance: + """ + Returns the minimum required stake for nominators in the Subtensor network. + + This method retries the substrate call up to three times with exponential backoff in case of failures. + + Returns: + Balance: The minimum required stake as a Balance object. + + Raises: + Exception: If the substrate call fails after the maximum number of retries. + """ + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): return self.substrate.query( @@ -4312,9 +4324,7 @@ def get_uid_for_hotkey_on_subnet( operational and governance activities on a particular subnet. """ _result = self.query_subtensor("Uids", block, [netuid, hotkey_ss58]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + return _result.value if _result and hasattr(_result, "value") else None def get_all_uids_for_hotkey( self, hotkey_ss58: str, block: Optional[int] = None @@ -4355,7 +4365,11 @@ def get_netuids_for_hotkey( List[int]: A list of netuids where the neuron is a member. """ result = self.query_map_subtensor("IsNetworkMember", block, [hotkey_ss58]) - return [record[0].value for record in result.records if record[1]] + return ( + [record[0].value for record in result.records if record[1]] + if result and hasattr(result, "records") + else [] + ) def get_neuron_for_pubkey_and_subnet( self, hotkey_ss58: str, netuid: int, block: Optional[int] = None @@ -4384,7 +4398,7 @@ def get_neuron_for_pubkey_and_subnet( def get_all_neurons_for_pubkey( self, hotkey_ss58: str, block: Optional[int] = None - ) -> Optional[List[NeuronInfo]]: + ) -> List[NeuronInfo]: """ Retrieves information about all neuron instances associated with a given public key (hotkey ``SS58`` address) across different subnets of the Bittensor network. This function aggregates neuron data @@ -4402,7 +4416,7 @@ def get_all_neurons_for_pubkey( """ netuids = self.get_netuids_for_hotkey(hotkey_ss58, block) uids = [self.get_uid_for_hotkey_on_subnet(hotkey_ss58, net) for net in netuids] - return [self.neuron_for_uid(uid, net) for uid, net in list(zip(uids, netuids))] # type: ignore + return [self.neuron_for_uid(uid, net) for uid, net in list(zip(uids, netuids))] def neuron_has_validator_permit( self, uid: int, netuid: int, block: Optional[int] = None @@ -4453,7 +4467,7 @@ def neuron_for_wallet( def neuron_for_uid( self, uid: Optional[int], netuid: int, block: Optional[int] = None - ) -> Optional[NeuronInfo]: + ) -> NeuronInfo: """ Retrieves detailed information about a specific neuron identified by its unique identifier (UID) within a specified subnet (netuid) of the Bittensor network. This function provides a comprehensive @@ -4465,12 +4479,13 @@ def neuron_for_uid( block (Optional[int], optional): The blockchain block number for the query. Returns: - Optional[NeuronInfo]: Detailed information about the neuron if found, ``None`` otherwise. + NeuronInfo: Detailed information about the neuron if found, ``None`` otherwise. 
This function is crucial for analyzing individual neurons' contributions and status within a specific subnet, offering insights into their roles in the network's consensus and validation mechanisms. """ if uid is None: + # TODO: fix `Access to a protected member _null_neuron of a class` error when chane_data.py refactoring. return NeuronInfo._null_neuron() @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) @@ -4484,9 +4499,10 @@ def make_substrate_call_with_retry(): ) json_body = make_substrate_call_with_retry() - result = json_body["result"] + result = json_body.get("result", None) - if result in (None, []): + if not result: + # TODO: fix `Access to a protected member _null_neuron of a class` error when chane_data.py refactoring. return NeuronInfo._null_neuron() return NeuronInfo.from_vec_u8(result) @@ -4542,6 +4558,7 @@ def neuron_for_uid_lite( subnet without the need for comprehensive data retrieval. """ if uid is None: + # TODO: fix `Access to a protected member _null_neuron of a class` error when chane_data.py refactoring. return NeuronInfoLite._null_neuron() hex_bytes_result = self.query_runtime_api( @@ -4555,6 +4572,7 @@ def neuron_for_uid_lite( ) if hex_bytes_result is None: + # TODO: fix `Access to a protected member _null_neuron of a class` error when chane_data.py refactoring. return NeuronInfoLite._null_neuron() if hex_bytes_result.startswith("0x"): @@ -4786,6 +4804,23 @@ def _do_delegation( wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: + """ + Delegates a specified amount of stake to a delegate's hotkey. + + This method sends a transaction to add stake to a delegate's hotkey and retries the call up to three times + with exponential backoff in case of failures. + + Args: + wallet (bittensor.wallet): The wallet from which the stake will be delegated. + delegate_ss58 (str): The SS58 address of the delegate's hotkey. + amount (Balance): The amount of stake to be delegated. + wait_for_inclusion (bool, optional): Whether to wait for the transaction to be included in a block. Default is ``True``. + wait_for_finalization (bool, optional): Whether to wait for the transaction to be finalized. Default is ``False``. + + Returns: + bool: ``True`` if the delegation is successful, ``False`` otherwise. + """ + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( @@ -4820,6 +4855,23 @@ def _do_undelegation( wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: + """ + Removes a specified amount of stake from a delegate's hotkey. + + This method sends a transaction to remove stake from a delegate's hotkey and retries the call up to three times + with exponential backoff in case of failures. + + Args: + wallet (bittensor.wallet): The wallet from which the stake will be removed. + delegate_ss58 (str): The SS58 address of the delegate's hotkey. + amount (Balance): The amount of stake to be removed. + wait_for_inclusion (bool, optional): Whether to wait for the transaction to be included in a block. Default is ``True``. + wait_for_finalization (bool, optional): Whether to wait for the transaction to be finalized. Default is ``False``. + + Returns: + bool: ``True`` if the undelegation is successful, ``False`` otherwise. 
+ """ + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( @@ -4855,6 +4907,21 @@ def _do_nominate( wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: + """ + Nominates the wallet's hotkey to become a delegate. + + This method sends a transaction to nominate the wallet's hotkey to become a delegate and retries the call up to + three times with exponential backoff in case of failures. + + Args: + wallet (bittensor.wallet): The wallet whose hotkey will be nominated. + wait_for_inclusion (bool, optional): Whether to wait for the transaction to be included in a block. Default is ``True``. + wait_for_finalization (bool, optional): Whether to wait for the transaction to be finalized. Default is ``False``. + + Returns: + bool: ``True`` if the nomination is successful, ``False`` otherwise. + """ + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( @@ -4889,6 +4956,23 @@ def _do_increase_take( wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: + """ + Increases the take rate for a delegate's hotkey. + + This method sends a transaction to increase the take rate for a delegate's hotkey and retries the call up to + three times with exponential backoff in case of failures. + + Args: + wallet (bittensor.wallet): The wallet from which the transaction will be signed. + hotkey_ss58 (str): The SS58 address of the delegate's hotkey. + take (int): The new take rate to be set. + wait_for_inclusion (bool, optional): Whether to wait for the transaction to be included in a block. Default is ``True``. + wait_for_finalization (bool, optional): Whether to wait for the transaction to be finalized. Default is ``False``. + + Returns: + bool: ``True`` if the take rate increase is successful, ``False`` otherwise. + """ + @retry(delay=1, tries=3, backoff=2, max_delay=4) def make_substrate_call_with_retry(): with self.substrate as substrate: @@ -4927,6 +5011,23 @@ def _do_decrease_take( wait_for_inclusion: bool = True, wait_for_finalization: bool = False, ) -> bool: + """ + Decreases the take rate for a delegate's hotkey. + + This method sends a transaction to decrease the take rate for a delegate's hotkey and retries the call up to + three times with exponential backoff in case of failures. + + Args: + wallet (bittensor.wallet): The wallet from which the transaction will be signed. + hotkey_ss58 (str): The SS58 address of the delegate's hotkey. + take (int): The new take rate to be set. + wait_for_inclusion (bool, optional): Whether to wait for the transaction to be included in a block. Default is ``True``. + wait_for_finalization (bool, optional): Whether to wait for the transaction to be finalized. Default is ``False``. + + Returns: + bool: ``True`` if the take rate decrease is successful, ``False`` otherwise. 
+ """ + @retry(delay=1, tries=3, backoff=2, max_delay=4) def make_substrate_call_with_retry(): with self.substrate as substrate: @@ -5035,9 +5136,9 @@ def make_substrate_call_with_retry(): return self.substrate.query_map( module="System", storage_function="Account", - block_hash=( - None if block is None else self.substrate.get_block_hash(block) - ), + block_hash=None + if block is None + else self.substrate.get_block_hash(block), ) result = make_substrate_call_with_retry() @@ -5047,6 +5148,7 @@ def make_substrate_call_with_retry(): return_dict[r[0].value] = bal return return_dict + # TODO: check with the teem if this is used anywhere outside. in bittensor no @staticmethod def _null_neuron() -> NeuronInfo: neuron = NeuronInfo( @@ -5092,8 +5194,15 @@ def get_block_hash(self, block_id: int) -> str: return self.substrate.get_block_hash(block_id=block_id) def get_error_info_by_index(self, error_index: int) -> Tuple[str, str]: - """Returns the error name and description from the Subtensor error list.""" + """ + Returns the error name and description from the Subtensor error list. + Args: + error_index (int): The index of the error to retrieve. + + Returns: + Tuple[str, str]: A tuple containing the error name and description from substrate metadata. If the error index is not found, returns ("Unknown Error", "") and logs a warning. + """ unknown_error = ("Unknown Error", "") if not self._subtensor_errors: diff --git a/tests/unit_tests/test_subtensor.py b/tests/unit_tests/test_subtensor.py index c3bf8c29ec..d8ea5836bd 100644 --- a/tests/unit_tests/test_subtensor.py +++ b/tests/unit_tests/test_subtensor.py @@ -30,7 +30,6 @@ _logger, Balance, U16_NORMALIZED_FLOAT, - U64_NORMALIZED_FLOAT, ) from bittensor import subtensor_module @@ -1318,3 +1317,867 @@ def test_tx_rate_limit_no_block(mocker, subtensor): ############################ # Network Parameters tests # ############################ + + +# `subnet_exists` tests +def test_subnet_exists_success(mocker, subtensor): + """Test subnet_exists returns True when subnet exists.""" + # Prep + netuid = 1 + block = 123 + mock_result = mocker.MagicMock(value=True) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.subnet_exists(netuid, block) + + # Asserts + assert result is True + subtensor.query_subtensor.assert_called_once_with("NetworksAdded", block, [netuid]) + + +def test_subnet_exists_no_data(mocker, subtensor): + """Test subnet_exists returns False when no subnet information is found.""" + # Prep + netuid = 1 + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + + # Call + result = subtensor.subnet_exists(netuid, block) + + # Asserts + assert result is False + subtensor.query_subtensor.assert_called_once_with("NetworksAdded", block, [netuid]) + + +def test_subnet_exists_no_value_attribute(mocker, subtensor): + """Test subnet_exists returns False when result has no value attribute.""" + # Prep + netuid = 1 + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.subnet_exists(netuid, block) + + # Asserts + assert result is False + subtensor.query_subtensor.assert_called_once_with("NetworksAdded", block, [netuid]) + + +def test_subnet_exists_no_block(mocker, subtensor): + """Test subnet_exists with no block specified.""" + # Prep + netuid = 1 + mock_result = mocker.MagicMock(value=True) + mocker.patch.object(subtensor, "query_subtensor", 
return_value=mock_result) + + # Call + result = subtensor.subnet_exists(netuid) + + # Asserts + assert result is True + subtensor.query_subtensor.assert_called_once_with("NetworksAdded", None, [netuid]) + + +# `get_all_subnet_netuids` tests +def test_get_all_subnet_netuids_success(mocker, subtensor): + """Test get_all_subnet_netuids returns correct list when netuid information is found.""" + # Prep + block = 123 + mock_netuid1 = mocker.MagicMock(value=1) + mock_netuid2 = mocker.MagicMock(value=2) + mock_result = mocker.MagicMock() + mock_result.records = True + mock_result.__iter__.return_value = [(mock_netuid1, True), (mock_netuid2, True)] + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_all_subnet_netuids(block) + + # Asserts + assert result == [1, 2] + subtensor.query_map_subtensor.assert_called_once_with("NetworksAdded", block) + + +def test_get_all_subnet_netuids_no_data(mocker, subtensor): + """Test get_all_subnet_netuids returns empty list when no netuid information is found.""" + # Prep + block = 123 + mocker.patch.object(subtensor, "query_map_subtensor", return_value=None) + + # Call + result = subtensor.get_all_subnet_netuids(block) + + # Asserts + assert result == [] + subtensor.query_map_subtensor.assert_called_once_with("NetworksAdded", block) + + +def test_get_all_subnet_netuids_no_records_attribute(mocker, subtensor): + """Test get_all_subnet_netuids returns empty list when result has no records attribute.""" + # Prep + block = 123 + mock_result = mocker.MagicMock() + del mock_result.records + mock_result.__iter__.return_value = [] + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_all_subnet_netuids(block) + + # Asserts + assert result == [] + subtensor.query_map_subtensor.assert_called_once_with("NetworksAdded", block) + + +def test_get_all_subnet_netuids_no_block(mocker, subtensor): + """Test get_all_subnet_netuids with no block specified.""" + # Prep + mock_netuid1 = mocker.MagicMock(value=1) + mock_netuid2 = mocker.MagicMock(value=2) + mock_result = mocker.MagicMock() + mock_result.records = True + mock_result.__iter__.return_value = [(mock_netuid1, True), (mock_netuid2, True)] + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_all_subnet_netuids() + + # Asserts + assert result == [1, 2] + subtensor.query_map_subtensor.assert_called_once_with("NetworksAdded", None) + + +# `get_total_subnets` tests +def test_get_total_subnets_success(mocker, subtensor): + """Test get_total_subnets returns correct data when total subnet information is found.""" + # Prep + block = 123 + total_subnets_value = 10 + mock_result = mocker.MagicMock(value=total_subnets_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_total_subnets(block) + + # Asserts + assert result is not None + assert result == total_subnets_value + subtensor.query_subtensor.assert_called_once_with("TotalNetworks", block) + + +def test_get_total_subnets_no_data(mocker, subtensor): + """Test get_total_subnets returns None when no total subnet information is found.""" + # Prep + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + + # Call + result = subtensor.get_total_subnets(block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with("TotalNetworks", block) + + +def 
test_get_total_subnets_no_value_attribute(mocker, subtensor): + """Test get_total_subnets returns None when result has no value attribute.""" + # Prep + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value # Simulating a missing value attribute + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_total_subnets(block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with("TotalNetworks", block) + + +def test_get_total_subnets_no_block(mocker, subtensor): + """Test get_total_subnets with no block specified.""" + # Prep + total_subnets_value = 10 + mock_result = mocker.MagicMock(value=total_subnets_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_total_subnets() + + # Asserts + assert result is not None + assert result == total_subnets_value + subtensor.query_subtensor.assert_called_once_with("TotalNetworks", None) + + +# `get_subnet_modality` tests +def test_get_subnet_modality_success(mocker, subtensor): + """Test get_subnet_modality returns correct data when modality information is found.""" + # Prep + netuid = 1 + block = 123 + modality_value = 42 + mock_result = mocker.MagicMock(value=modality_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnet_modality(netuid, block) + + # Asserts + assert result is not None + assert result == modality_value + subtensor.query_subtensor.assert_called_once_with( + "NetworkModality", block, [netuid] + ) + + +def test_get_subnet_modality_no_data(mocker, subtensor): + """Test get_subnet_modality returns None when no modality information is found.""" + # Prep + netuid = 1 + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + + # Call + result = subtensor.get_subnet_modality(netuid, block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with( + "NetworkModality", block, [netuid] + ) + + +def test_get_subnet_modality_no_value_attribute(mocker, subtensor): + """Test get_subnet_modality returns None when result has no value attribute.""" + # Prep + netuid = 1 + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value # Simulating a missing value attribute + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnet_modality(netuid, block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with( + "NetworkModality", block, [netuid] + ) + + +def test_get_subnet_modality_no_block_specified(mocker, subtensor): + """Test get_subnet_modality with no block specified.""" + # Prep + netuid = 1 + modality_value = 42 + mock_result = mocker.MagicMock(value=modality_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnet_modality(netuid) + + # Asserts + assert result is not None + assert result == modality_value + subtensor.query_subtensor.assert_called_once_with("NetworkModality", None, [netuid]) + + +# `get_emission_value_by_subnet` tests +def test_get_emission_value_by_subnet_success(mocker, subtensor): + """Test get_emission_value_by_subnet returns correct data when emission value is found.""" + # Prep + netuid = 1 + block = 123 + emission_value = 1000 + mock_result = mocker.MagicMock(value=emission_value) + mocker.patch.object(subtensor, "query_subtensor", 
return_value=mock_result) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.get_emission_value_by_subnet(netuid, block) + + # Asserts + assert result is not None + subtensor.query_subtensor.assert_called_once_with("EmissionValues", block, [netuid]) + spy_balance_from_rao.assert_called_once_with(emission_value) + assert result == Balance.from_rao(emission_value) + + +def test_get_emission_value_by_subnet_no_data(mocker, subtensor): + """Test get_emission_value_by_subnet returns None when no emission value is found.""" + # Prep + netuid = 1 + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.get_emission_value_by_subnet(netuid, block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with("EmissionValues", block, [netuid]) + spy_balance_from_rao.assert_not_called() + + +def test_get_emission_value_by_subnet_no_value_attribute(mocker, subtensor): + """Test get_emission_value_by_subnet returns None when result has no value attribute.""" + # Prep + netuid = 1 + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value # Simulating a missing value attribute + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.get_emission_value_by_subnet(netuid, block) + + # Asserts + assert result is None + subtensor.query_subtensor.assert_called_once_with("EmissionValues", block, [netuid]) + spy_balance_from_rao.assert_not_called() + + +def test_get_emission_value_by_subnet_no_block_specified(mocker, subtensor): + """Test get_emission_value_by_subnet with no block specified.""" + # Prep + netuid = 1 + emission_value = 1000 + mock_result = mocker.MagicMock(value=emission_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + spy_balance_from_rao = mocker.spy(Balance, "from_rao") + + # Call + result = subtensor.get_emission_value_by_subnet(netuid) + + # Asserts + assert result is not None + subtensor.query_subtensor.assert_called_once_with("EmissionValues", None, [netuid]) + spy_balance_from_rao.assert_called_once_with(emission_value) + assert result == Balance.from_rao(emission_value) + + +# `get_subnet_connection_requirements` tests +def test_get_subnet_connection_requirements_success(mocker, subtensor): + """Test get_subnet_connection_requirements returns correct data when requirements are found.""" + # Prep + netuid = 1 + block = 123 + mock_tuple1 = (mocker.MagicMock(value="requirement1"), mocker.MagicMock(value=10)) + mock_tuple2 = (mocker.MagicMock(value="requirement2"), mocker.MagicMock(value=20)) + mock_result = mocker.MagicMock() + mock_result.records = [mock_tuple1, mock_tuple2] + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnet_connection_requirements(netuid, block) + + # Asserts + assert result == {"requirement1": 10, "requirement2": 20} + subtensor.query_map_subtensor.assert_called_once_with( + "NetworkConnect", block, [netuid] + ) + + +def test_get_subnet_connection_requirements_no_data(mocker, subtensor): + """Test get_subnet_connection_requirements returns empty dict when no data is found.""" + # Prep + netuid = 1 + block = 123 + mock_result = mocker.MagicMock() + mock_result.records = [] + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # 
Call + result = subtensor.get_subnet_connection_requirements(netuid, block) + + # Asserts + assert result == {} + subtensor.query_map_subtensor.assert_called_once_with( + "NetworkConnect", block, [netuid] + ) + + +def test_get_subnet_connection_requirements_no_records_attribute(mocker, subtensor): + """Test get_subnet_connection_requirements returns empty dict when result has no records attribute.""" + # Prep + netuid = 1 + block = 123 + mock_result = mocker.MagicMock() + del mock_result.records # Simulating a missing records attribute + + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnet_connection_requirements(netuid, block) + + # Asserts + assert result == {} + subtensor.query_map_subtensor.assert_called_once_with( + "NetworkConnect", block, [netuid] + ) + + +def test_get_subnet_connection_requirements_no_block_specified(mocker, subtensor): + """Test get_subnet_connection_requirements with no block specified.""" + # Prep + netuid = 1 + mock_tuple1 = (mocker.MagicMock(value="requirement1"), mocker.MagicMock(value=10)) + mock_tuple2 = (mocker.MagicMock(value="requirement2"), mocker.MagicMock(value=20)) + mock_result = mocker.MagicMock() + mock_result.records = [mock_tuple1, mock_tuple2] + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnet_connection_requirements(netuid) + + # Asserts + assert result == {"requirement1": 10, "requirement2": 20} + subtensor.query_map_subtensor.assert_called_once_with( + "NetworkConnect", None, [netuid] + ) + + +# `get_subnets` tests +def test_get_subnets_success(mocker, subtensor): + """Test get_subnets returns correct list when subnet information is found.""" + # Prep + block = 123 + mock_netuid1 = mocker.MagicMock(value=1) + mock_netuid2 = mocker.MagicMock(value=2) + mock_result = mocker.MagicMock() + mock_result.records = [(mock_netuid1, True), (mock_netuid2, True)] + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnets(block) + + # Asserts + assert result == [1, 2] + subtensor.query_map_subtensor.assert_called_once_with("NetworksAdded", block) + + +def test_get_subnets_no_data(mocker, subtensor): + """Test get_subnets returns empty list when no subnet information is found.""" + # Prep + block = 123 + mock_result = mocker.MagicMock() + mock_result.records = [] + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnets(block) + + # Asserts + assert result == [] + subtensor.query_map_subtensor.assert_called_once_with("NetworksAdded", block) + + +def test_get_subnets_no_records_attribute(mocker, subtensor): + """Test get_subnets returns empty list when result has no records attribute.""" + # Prep + block = 123 + mock_result = mocker.MagicMock() + del mock_result.records # Simulating a missing records attribute + mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnets(block) + + # Asserts + assert result == [] + subtensor.query_map_subtensor.assert_called_once_with("NetworksAdded", block) + + +def test_get_subnets_no_block_specified(mocker, subtensor): + """Test get_subnets with no block specified.""" + # Prep + mock_netuid1 = mocker.MagicMock(value=1) + mock_netuid2 = mocker.MagicMock(value=2) + mock_result = mocker.MagicMock() + mock_result.records = [(mock_netuid1, True), (mock_netuid2, True)] + 
mocker.patch.object(subtensor, "query_map_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnets() + + # Asserts + assert result == [1, 2] + subtensor.query_map_subtensor.assert_called_once_with("NetworksAdded", None) + + +# `get_all_subnets_info` tests +def test_get_all_subnets_info_success(mocker, subtensor): + """Test get_all_subnets_info returns correct data when subnet information is found.""" + # Prep + block = 123 + subnet_data = [1, 2, 3] # Mocked response data + mocker.patch.object( + subtensor.substrate, "get_block_hash", return_value="mock_block_hash" + ) + mock_response = {"result": subnet_data} + mocker.patch.object(subtensor.substrate, "rpc_request", return_value=mock_response) + mocker.patch.object( + subtensor_module.SubnetInfo, + "list_from_vec_u8", + return_value="list_from_vec_u80", + ) + + # Call + result = subtensor.get_all_subnets_info(block) + + # Asserts + subtensor.substrate.get_block_hash.assert_called_once_with(block) + subtensor.substrate.rpc_request.assert_called_once_with( + method="subnetInfo_getSubnetsInfo", params=["mock_block_hash"] + ) + subtensor_module.SubnetInfo.list_from_vec_u8.assert_called_once_with(subnet_data) + + +@pytest.mark.parametrize("result_", [[], None]) +def test_get_all_subnets_info_no_data(mocker, subtensor, result_): + """Test get_all_subnets_info returns empty list when no subnet information is found.""" + # Prep + block = 123 + mocker.patch.object( + subtensor.substrate, "get_block_hash", return_value="mock_block_hash" + ) + mock_response = {"result": result_} + mocker.patch.object(subtensor.substrate, "rpc_request", return_value=mock_response) + mocker.patch.object(subtensor_module.SubnetInfo, "list_from_vec_u8") + + # Call + result = subtensor.get_all_subnets_info(block) + + # Asserts + assert result == [] + subtensor.substrate.get_block_hash.assert_called_once_with(block) + subtensor.substrate.rpc_request.assert_called_once_with( + method="subnetInfo_getSubnetsInfo", params=["mock_block_hash"] + ) + subtensor_module.SubnetInfo.list_from_vec_u8.assert_not_called() + + +def test_get_all_subnets_info_retry(mocker, subtensor): + """Test get_all_subnets_info retries on failure.""" + # Prep + block = 123 + subnet_data = [1, 2, 3] + mocker.patch.object( + subtensor.substrate, "get_block_hash", return_value="mock_block_hash" + ) + mock_response = {"result": subnet_data} + mock_rpc_request = mocker.patch.object( + subtensor.substrate, + "rpc_request", + side_effect=[Exception, Exception, mock_response], + ) + mocker.patch.object( + subtensor_module.SubnetInfo, "list_from_vec_u8", return_value=["some_data"] + ) + + # Call + result = subtensor.get_all_subnets_info(block) + + # Asserts + subtensor.substrate.get_block_hash.assert_called_with(block) + assert mock_rpc_request.call_count == 3 + subtensor_module.SubnetInfo.list_from_vec_u8.assert_called_once_with(subnet_data) + assert result == ["some_data"] + + +# `get_subnet_info` tests +def test_get_subnet_info_success(mocker, subtensor): + """Test get_subnet_info returns correct data when subnet information is found.""" + # Prep + netuid = 1 + block = 123 + subnet_data = [1, 2, 3] + mocker.patch.object( + subtensor.substrate, "get_block_hash", return_value="mock_block_hash" + ) + mock_response = {"result": subnet_data} + mocker.patch.object(subtensor.substrate, "rpc_request", return_value=mock_response) + mocker.patch.object( + subtensor_module.SubnetInfo, "from_vec_u8", return_value=["from_vec_u8"] + ) + + # Call + result = subtensor.get_subnet_info(netuid, block) + 
+ # Asserts + subtensor.substrate.get_block_hash.assert_called_once_with(block) + subtensor.substrate.rpc_request.assert_called_once_with( + method="subnetInfo_getSubnetInfo", params=[netuid, "mock_block_hash"] + ) + subtensor_module.SubnetInfo.from_vec_u8.assert_called_once_with(subnet_data) + + +@pytest.mark.parametrize("result_", [None, {}]) +def test_get_subnet_info_no_data(mocker, subtensor, result_): + """Test get_subnet_info returns None when no subnet information is found.""" + # Prep + netuid = 1 + block = 123 + mocker.patch.object( + subtensor.substrate, "get_block_hash", return_value="mock_block_hash" + ) + mock_response = {"result": result_} + mocker.patch.object(subtensor.substrate, "rpc_request", return_value=mock_response) + mocker.patch.object(subtensor_module.SubnetInfo, "from_vec_u8") + + # Call + result = subtensor.get_subnet_info(netuid, block) + + # Asserts + assert result is None + subtensor.substrate.get_block_hash.assert_called_once_with(block) + subtensor.substrate.rpc_request.assert_called_once_with( + method="subnetInfo_getSubnetInfo", params=[netuid, "mock_block_hash"] + ) + subtensor_module.SubnetInfo.from_vec_u8.assert_not_called() + + +def test_get_subnet_info_retry(mocker, subtensor): + """Test get_subnet_info retries on failure.""" + # Prep + netuid = 1 + block = 123 + subnet_data = [1, 2, 3] + mocker.patch.object( + subtensor.substrate, "get_block_hash", return_value="mock_block_hash" + ) + mock_response = {"result": subnet_data} + mock_rpc_request = mocker.patch.object( + subtensor.substrate, + "rpc_request", + side_effect=[Exception, Exception, mock_response], + ) + mocker.patch.object( + subtensor_module.SubnetInfo, "from_vec_u8", return_value=["from_vec_u8"] + ) + + # Call + result = subtensor.get_subnet_info(netuid, block) + + # Asserts + subtensor.substrate.get_block_hash.assert_called_with(block) + assert mock_rpc_request.call_count == 3 + subtensor_module.SubnetInfo.from_vec_u8.assert_called_once_with(subnet_data) + + +# `get_subnet_hyperparameters` tests +def test_get_subnet_hyperparameters_success(mocker, subtensor): + """Test get_subnet_hyperparameters returns correct data when hyperparameters are found.""" + # Prep + netuid = 1 + block = 123 + hex_bytes_result = "0x010203" + bytes_result = bytes.fromhex(hex_bytes_result[2:]) + mocker.patch.object(subtensor, "query_runtime_api", return_value=hex_bytes_result) + mocker.patch.object( + subtensor_module.SubnetHyperparameters, + "from_vec_u8", + return_value=["from_vec_u8"], + ) + + # Call + result = subtensor.get_subnet_hyperparameters(netuid, block) + + # Asserts + subtensor.query_runtime_api.assert_called_once_with( + runtime_api="SubnetInfoRuntimeApi", + method="get_subnet_hyperparams", + params=[netuid], + block=block, + ) + subtensor_module.SubnetHyperparameters.from_vec_u8.assert_called_once_with( + bytes_result + ) + + +def test_get_subnet_hyperparameters_no_data(mocker, subtensor): + """Test get_subnet_hyperparameters returns empty list when no data is found.""" + # Prep + netuid = 1 + block = 123 + mocker.patch.object(subtensor, "query_runtime_api", return_value=None) + mocker.patch.object(subtensor_module.SubnetHyperparameters, "from_vec_u8") + + # Call + result = subtensor.get_subnet_hyperparameters(netuid, block) + + # Asserts + assert result == [] + subtensor.query_runtime_api.assert_called_once_with( + runtime_api="SubnetInfoRuntimeApi", + method="get_subnet_hyperparams", + params=[netuid], + block=block, + ) + subtensor_module.SubnetHyperparameters.from_vec_u8.assert_not_called() + + 
+def test_get_subnet_hyperparameters_hex_without_prefix(mocker, subtensor): + """Test get_subnet_hyperparameters correctly processes hex string without '0x' prefix.""" + # Prep + netuid = 1 + block = 123 + hex_bytes_result = "010203" + bytes_result = bytes.fromhex(hex_bytes_result) + mocker.patch.object(subtensor, "query_runtime_api", return_value=hex_bytes_result) + mocker.patch.object(subtensor_module.SubnetHyperparameters, "from_vec_u8") + + # Call + result = subtensor.get_subnet_hyperparameters(netuid, block) + + # Asserts + subtensor.query_runtime_api.assert_called_once_with( + runtime_api="SubnetInfoRuntimeApi", + method="get_subnet_hyperparams", + params=[netuid], + block=block, + ) + subtensor_module.SubnetHyperparameters.from_vec_u8.assert_called_once_with( + bytes_result + ) + + +# `get_subnet_owner` tests +def test_get_subnet_owner_success(mocker, subtensor): + """Test get_subnet_owner returns correct data when owner information is found.""" + # Prep + netuid = 1 + block = 123 + owner_address = "5F3sa2TJAWMqDhXG6jhV4N8ko9rXPM6twz9mG9m3rrgq3xiJ" + mock_result = mocker.MagicMock(value=owner_address) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnet_owner(netuid, block) + + # Asserts + subtensor.query_subtensor.assert_called_once_with("SubnetOwner", block, [netuid]) + assert result == owner_address + + +def test_get_subnet_owner_no_data(mocker, subtensor): + """Test get_subnet_owner returns None when no owner information is found.""" + # Prep + netuid = 1 + block = 123 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + + # Call + result = subtensor.get_subnet_owner(netuid, block) + + # Asserts + subtensor.query_subtensor.assert_called_once_with("SubnetOwner", block, [netuid]) + assert result is None + + +def test_get_subnet_owner_no_value_attribute(mocker, subtensor): + """Test get_subnet_owner returns None when result has no value attribute.""" + # Prep + netuid = 1 + block = 123 + mock_result = mocker.MagicMock() + del mock_result.value # Simulating a missing value attribute + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + + # Call + result = subtensor.get_subnet_owner(netuid, block) + + # Asserts + subtensor.query_subtensor.assert_called_once_with("SubnetOwner", block, [netuid]) + assert result is None + + +#################### +# Nomination tests # +#################### + + +# `is_hotkey_delegate` tests +def test_is_hotkey_delegate_success(mocker, subtensor): + """Test is_hotkey_delegate returns True when hotkey is a delegate.""" + # Prep + hotkey_ss58 = "hotkey_ss58" + block = 123 + mock_delegates = [ + mocker.MagicMock(hotkey_ss58=hotkey_ss58), + mocker.MagicMock(hotkey_ss58="hotkey_ss583"), + ] + mocker.patch.object(subtensor, "get_delegates", return_value=mock_delegates) + + # Call + result = subtensor.is_hotkey_delegate(hotkey_ss58, block) + + # Asserts + subtensor.get_delegates.assert_called_once_with(block=block) + assert result is True + + +def test_is_hotkey_delegate_not_found(mocker, subtensor): + """Test is_hotkey_delegate returns False when hotkey is not a delegate.""" + # Prep + hotkey_ss58 = "hotkey_ss58" + block = 123 + mock_delegates = [mocker.MagicMock(hotkey_ss58="hotkey_ss583")] + mocker.patch.object(subtensor, "get_delegates", return_value=mock_delegates) + + # Call + result = subtensor.is_hotkey_delegate(hotkey_ss58, block) + + # Asserts + subtensor.get_delegates.assert_called_once_with(block=block) + assert result is False + + +# 
`get_delegate_take` tests +def test_get_delegate_take_success(mocker, subtensor): + """Test get_delegate_take returns correct data when delegate take is found.""" + # Prep + hotkey_ss58 = "hotkey_ss58" + block = 123 + delegate_take_value = 32768 + mock_result = mocker.MagicMock(value=delegate_take_value) + mocker.patch.object(subtensor, "query_subtensor", return_value=mock_result) + spy_u16_normalized_float = mocker.spy(subtensor_module, "U16_NORMALIZED_FLOAT") + + # Call + subtensor.get_delegate_take(hotkey_ss58, block) + + # Asserts + subtensor.query_subtensor.assert_called_once_with("Delegates", block, [hotkey_ss58]) + spy_u16_normalized_float.assert_called_once_with(delegate_take_value) + + +def test_get_delegate_take_no_data(mocker, subtensor): + """Test get_delegate_take returns None when no delegate take is found.""" + # Prep + hotkey_ss58 = "hotkey_ss58" + block = 123 + delegate_take_value = 32768 + mocker.patch.object(subtensor, "query_subtensor", return_value=None) + spy_u16_normalized_float = mocker.spy(subtensor_module, "U16_NORMALIZED_FLOAT") + + # Call + result = subtensor.get_delegate_take(hotkey_ss58, block) + + # Asserts + subtensor.query_subtensor.assert_called_once_with("Delegates", block, [hotkey_ss58]) + spy_u16_normalized_float.assert_not_called() + assert result is None + + +# `get_delegate_by_hotkey` tests From d9c705b08db81082b8fd4c2b0124d01e9a3dc5e8 Mon Sep 17 00:00:00 2001 From: Roman Date: Thu, 23 May 2024 21:57:18 -0700 Subject: [PATCH 027/116] Since the refactoring of this module would turn into a large project, it was decided to divide it into several stages. This is part 6: Renaming class 1 to comply with PEP8 and avoid namespace conflict. Refactor all references to the changed name. Part 1 https://github.com/opentensor/bittensor/pull/1911 Part 2 https://github.com/opentensor/bittensor/pull/1913 Part 3 https://github.com/opentensor/bittensor/pull/1923 Part 4 https://github.com/opentensor/bittensor/pull/1931 Part 5 https://github.com/opentensor/bittensor/pull/1934 --- bittensor/__init__.py | 17 +++++++++---- bittensor/mock/subtensor_mock.py | 4 ++-- bittensor/subtensor.py | 24 +++++++++++-------- bittensor/utils/wallet_utils.py | 22 ++++++++--------- .../integration_tests/test_cli_no_network.py | 8 +++---- .../test_subtensor_integration.py | 16 ++++--------- .../unit_tests/extrinsics/test_delegation.py | 2 +- tests/unit_tests/extrinsics/test_network.py | 2 +- .../unit_tests/extrinsics/test_prometheus.py | 2 +- .../extrinsics/test_registration.py | 2 +- tests/unit_tests/extrinsics/test_root.py | 2 +- tests/unit_tests/extrinsics/test_serving.py | 2 +- tests/unit_tests/test_subtensor.py | 2 +- 13 files changed, 56 insertions(+), 49 deletions(-) diff --git a/bittensor/__init__.py b/bittensor/__init__.py index 04c178e3bf..9f8a866576 100644 --- a/bittensor/__init__.py +++ b/bittensor/__init__.py @@ -79,7 +79,7 @@ def debug(on: bool = True): # Pip address for versioning __pipaddress__ = "https://pypi.org/pypi/bittensor/json" -# Raw github url for delegates registry file +# Raw GitHub url for delegates registry file __delegates_details_url__: str = "https://raw.githubusercontent.com/opentensor/bittensor-delegates/main/public/delegates.json" # Substrate ss58_format @@ -92,6 +92,7 @@ def debug(on: bool = True): __finney_entrypoint__ = "wss://entrypoint-finney.opentensor.ai:443" +__finney_test_entrypoint__ = "wss://test.finney.opentensor.ai:443/" __finney_test_entrypoint__ = "wss://test.finney.opentensor.ai:443/" __archive_entrypoint__ = 
"wss://archive.chain.opentensor.ai:443/" @@ -106,7 +107,7 @@ def debug(on: bool = True): __rao_symbol__: str = chr(0x03C1) # Block Explorers map network to explorer url -## Must all be polkadotjs explorer urls +# Must all be polkadotjs explorer urls __network_explorer_map__ = { "opentensor": { "local": "https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Fentrypoint-finney.opentensor.ai%3A443#/explorer", @@ -233,7 +234,9 @@ def debug(on: bool = True): UnstakeError, ) -from substrateinterface import Keypair as Keypair +from substrateinterface import Keypair + +_ = Keypair from .config import InvalidConfigFile, DefaultConfig, config, T from .keyfile import ( serialized_keypair_to_keyfile_data, @@ -287,8 +290,14 @@ def debug(on: bool = True): ProposalVoteData, ) +# Allows avoiding name spacing conflicts and continue access to the `subtensor` module with `subtensor_module` name from . import subtensor as subtensor_module -from .subtensor import subtensor as subtensor + +# Double import allows using class `Subtensor` by referencing `bittensor.Subtensor` and `bittensor.subtensor`. +# This will be available for a while until we remove reference `bittensor.subtensor` +from .subtensor import Subtensor +from .subtensor import Subtensor as subtensor + from .cli import cli as cli, COMMANDS as ALL_COMMANDS from .btlogging import logging from .metagraph import metagraph as metagraph diff --git a/bittensor/mock/subtensor_mock.py b/bittensor/mock/subtensor_mock.py index 4ca08cfb22..b6be74095b 100644 --- a/bittensor/mock/subtensor_mock.py +++ b/bittensor/mock/subtensor_mock.py @@ -35,7 +35,7 @@ AxonInfo, ) from ..errors import ChainQueryError -from ..subtensor import subtensor +from ..subtensor import Subtensor from ..utils import RAOPERTAO, U16_NORMALIZED_FLOAT from ..utils.balance import Balance from ..utils.registration import POWSolution @@ -196,7 +196,7 @@ class MockChainState(TypedDict): SubtensorModule: MockSubtensorState -class MockSubtensor(subtensor): +class MockSubtensor(Subtensor): """ A Mock Subtensor class for running tests. This should mock only methods that make queries to the chain. diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 90c7d74fbb..8551e27d33 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -123,7 +123,7 @@ class ParamWithTypes(TypedDict): type: str # ScaleType string of the parameter. -class subtensor: +class Subtensor: """ The Subtensor class in Bittensor serves as a crucial interface for interacting with the Bittensor blockchain, facilitating a range of operations essential for the decentralized machine learning network. @@ -223,11 +223,11 @@ def __init__( network = None if config is None: - config = subtensor.config() + config = Subtensor.config() self.config = copy.deepcopy(config) # type: ignore # Setup config.subtensor.network and config.subtensor.chain_endpoint - self.chain_endpoint, self.network = subtensor.setup_config(network, config) # type: ignore + self.chain_endpoint, self.network = Subtensor.setup_config(network, config) # type: ignore if ( self.network == "finney" @@ -304,7 +304,7 @@ def config() -> "bittensor.config": `subtensor.add_args` method. 
""" parser = argparse.ArgumentParser() - subtensor.add_args(parser) + Subtensor.add_args(parser) return bittensor.config(parser, args=[]) @classmethod @@ -439,13 +439,13 @@ def setup_config(network: str, config: "bittensor.config"): ( evaluated_network, evaluated_endpoint, - ) = subtensor.determine_chain_endpoint_and_network(network) + ) = Subtensor.determine_chain_endpoint_and_network(network) else: if config.get("__is_set", {}).get("subtensor.chain_endpoint"): ( evaluated_network, evaluated_endpoint, - ) = subtensor.determine_chain_endpoint_and_network( + ) = Subtensor.determine_chain_endpoint_and_network( config.subtensor.chain_endpoint ) @@ -453,7 +453,7 @@ def setup_config(network: str, config: "bittensor.config"): ( evaluated_network, evaluated_endpoint, - ) = subtensor.determine_chain_endpoint_and_network( + ) = Subtensor.determine_chain_endpoint_and_network( config.subtensor.network ) @@ -461,7 +461,7 @@ def setup_config(network: str, config: "bittensor.config"): ( evaluated_network, evaluated_endpoint, - ) = subtensor.determine_chain_endpoint_and_network( + ) = Subtensor.determine_chain_endpoint_and_network( config.subtensor.chain_endpoint ) @@ -469,7 +469,7 @@ def setup_config(network: str, config: "bittensor.config"): ( evaluated_network, evaluated_endpoint, - ) = subtensor.determine_chain_endpoint_and_network( + ) = Subtensor.determine_chain_endpoint_and_network( config.subtensor.network ) @@ -477,7 +477,7 @@ def setup_config(network: str, config: "bittensor.config"): ( evaluated_network, evaluated_endpoint, - ) = subtensor.determine_chain_endpoint_and_network( + ) = Subtensor.determine_chain_endpoint_and_network( bittensor.defaults.subtensor.network ) @@ -5216,3 +5216,7 @@ def get_error_info_by_index(self, error_index: int) -> Tuple[str, str]: ) return name, description + + +# TODO: remove this after fully migrate `bittensor.subtensor` to `bittensor.Subtensor` in `bittensor/__init__.py` +subtensor = Subtensor diff --git a/bittensor/utils/wallet_utils.py b/bittensor/utils/wallet_utils.py index 78a7ed065c..3cb4149693 100644 --- a/bittensor/utils/wallet_utils.py +++ b/bittensor/utils/wallet_utils.py @@ -8,7 +8,7 @@ # the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, # and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -# The above copyright notice and this permission notice shall be included in all copies or substantial portions of +# The above copyright notice and this permission notice shall be included in all copies or large portions of # the Software. # THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO @@ -119,18 +119,18 @@ def create_identity_dict( Creates a dictionary with structure for identity extrinsic. Must fit within 64 bits. Args: - display (str): String to be converted and stored under 'display'. - legal (str): String to be converted and stored under 'legal'. - web (str): String to be converted and stored under 'web'. - riot (str): String to be converted and stored under 'riot'. - email (str): String to be converted and stored under 'email'. - pgp_fingerprint (str): String to be converted and stored under 'pgp_fingerprint'. - image (str): String to be converted and stored under 'image'. - info (str): String to be converted and stored under 'info'. - twitter (str): String to be converted and stored under 'twitter'. + display (str): String to be converted and stored under 'display'. 
+ legal (str): String to be converted and stored under 'legal'. + web (str): String to be converted and stored under 'web'. + riot (str): String to be converted and stored under 'riot'. + email (str): String to be converted and stored under 'email'. + pgp_fingerprint (str): String to be converted and stored under 'pgp_fingerprint'. + image (str): String to be converted and stored under 'image'. + info (str): String to be converted and stored under 'info'. + twitter (str): String to be converted and stored under 'twitter'. Returns: - dict: A dictionary with the specified structure and byte string conversions. + dict: A dictionary with the specified structure and byte string conversions. Raises: ValueError: If pgp_fingerprint is not exactly 20 bytes long when encoded. diff --git a/tests/integration_tests/test_cli_no_network.py b/tests/integration_tests/test_cli_no_network.py index 18eb26cd8e..b0df5a8778 100644 --- a/tests/integration_tests/test_cli_no_network.py +++ b/tests/integration_tests/test_cli_no_network.py @@ -1099,7 +1099,7 @@ def test_delegate_prompt_hotkey(self, _): delegate_ss58 = _get_mock_coldkey(0) with patch("bittensor.commands.delegates.show_delegates"): with patch( - "bittensor.subtensor.subtensor.get_delegates", + "bittensor.subtensor.Subtensor.get_delegates", return_value=[ bittensor.DelegateInfo( hotkey_ss58=delegate_ss58, # return delegate with mock coldkey @@ -1186,7 +1186,7 @@ def test_undelegate_prompt_hotkey(self, _): delegate_ss58 = _get_mock_coldkey(0) with patch("bittensor.commands.delegates.show_delegates"): with patch( - "bittensor.subtensor.subtensor.get_delegates", + "bittensor.subtensor.Subtensor.get_delegates", return_value=[ bittensor.DelegateInfo( hotkey_ss58=delegate_ss58, # return delegate with mock coldkey @@ -1271,9 +1271,9 @@ def test_vote_command_prompt_proposal_hash(self, _): mock_proposal_hash = "mock_proposal_hash" - with patch("bittensor.subtensor.subtensor.is_senate_member", return_value=True): + with patch("bittensor.subtensor.Subtensor.is_senate_member", return_value=True): with patch( - "bittensor.subtensor.subtensor.get_vote_data", + "bittensor.subtensor.Subtensor.get_vote_data", return_value={"index": 1}, ): # Patch command to exit early diff --git a/tests/integration_tests/test_subtensor_integration.py b/tests/integration_tests/test_subtensor_integration.py index 845a73ee7d..f693cf61a3 100644 --- a/tests/integration_tests/test_subtensor_integration.py +++ b/tests/integration_tests/test_subtensor_integration.py @@ -17,20 +17,17 @@ # DEALINGS IN THE SOFTWARE. 
import random -import socket -import os import unittest from queue import Empty as QueueEmpty from unittest.mock import MagicMock, patch -from types import SimpleNamespace + +import pytest +from substrateinterface import Keypair import bittensor from bittensor.mock import MockSubtensor -import pytest from bittensor.utils.balance import Balance -from substrateinterface import Keypair from tests.helpers import ( - _get_mock_hotkey, _get_mock_coldkey, MockConsole, _get_mock_keypair, @@ -463,11 +460,8 @@ def test_registration_multiprocessed_already_registered(self): mock_set_status.__exit__ = MagicMock(return_value=True) # should return True - assert ( - self.subtensor.register( - wallet=wallet, netuid=3, num_processes=3, update_interval=5 - ) - == True + assert self.subtensor.register( + wallet=wallet, netuid=3, num_processes=3, update_interval=5 ) # calls until True and once again before exiting subtensor class diff --git a/tests/unit_tests/extrinsics/test_delegation.py b/tests/unit_tests/extrinsics/test_delegation.py index 9a321dbe64..42dcf4e706 100644 --- a/tests/unit_tests/extrinsics/test_delegation.py +++ b/tests/unit_tests/extrinsics/test_delegation.py @@ -1,6 +1,6 @@ import pytest from unittest.mock import MagicMock, patch -from bittensor.subtensor import subtensor as Subtensor +from bittensor.subtensor import Subtensor from bittensor.wallet import wallet as Wallet from bittensor.utils.balance import Balance from bittensor.extrinsics.delegation import ( diff --git a/tests/unit_tests/extrinsics/test_network.py b/tests/unit_tests/extrinsics/test_network.py index a11f53111f..67df030ffe 100644 --- a/tests/unit_tests/extrinsics/test_network.py +++ b/tests/unit_tests/extrinsics/test_network.py @@ -1,6 +1,6 @@ import pytest from unittest.mock import MagicMock, patch -from bittensor.subtensor import subtensor as Subtensor +from bittensor.subtensor import Subtensor from bittensor.wallet import wallet as Wallet from bittensor.extrinsics.network import ( set_hyperparameter_extrinsic, diff --git a/tests/unit_tests/extrinsics/test_prometheus.py b/tests/unit_tests/extrinsics/test_prometheus.py index 0458206701..7d9c975fbc 100644 --- a/tests/unit_tests/extrinsics/test_prometheus.py +++ b/tests/unit_tests/extrinsics/test_prometheus.py @@ -1,7 +1,7 @@ import pytest from unittest.mock import MagicMock, patch import bittensor -from bittensor.subtensor import subtensor as Subtensor +from bittensor.subtensor import Subtensor from bittensor.wallet import wallet as Wallet from bittensor.extrinsics.prometheus import prometheus_extrinsic diff --git a/tests/unit_tests/extrinsics/test_registration.py b/tests/unit_tests/extrinsics/test_registration.py index bad8552b17..ccae48452d 100644 --- a/tests/unit_tests/extrinsics/test_registration.py +++ b/tests/unit_tests/extrinsics/test_registration.py @@ -1,6 +1,6 @@ import pytest from unittest.mock import MagicMock, patch -from bittensor.subtensor import subtensor as Subtensor +from bittensor.subtensor import Subtensor from bittensor.wallet import wallet as Wallet from bittensor.utils.registration import POWSolution from bittensor.extrinsics.registration import ( diff --git a/tests/unit_tests/extrinsics/test_root.py b/tests/unit_tests/extrinsics/test_root.py index 84132bb60d..2bc1be632b 100644 --- a/tests/unit_tests/extrinsics/test_root.py +++ b/tests/unit_tests/extrinsics/test_root.py @@ -1,6 +1,6 @@ import pytest from unittest.mock import MagicMock, patch -from bittensor.subtensor import subtensor as Subtensor +from bittensor.subtensor import Subtensor from 
bittensor.extrinsics.root import ( root_register_extrinsic, set_root_weights_extrinsic, diff --git a/tests/unit_tests/extrinsics/test_serving.py b/tests/unit_tests/extrinsics/test_serving.py index 513fa5df52..bf975e195a 100644 --- a/tests/unit_tests/extrinsics/test_serving.py +++ b/tests/unit_tests/extrinsics/test_serving.py @@ -1,7 +1,7 @@ import pytest from unittest.mock import MagicMock, patch -from bittensor.subtensor import subtensor as Subtensor +from bittensor.subtensor import Subtensor from bittensor.wallet import wallet as Wallet from bittensor.axon import axon as Axon from bittensor.extrinsics.serving import ( diff --git a/tests/unit_tests/test_subtensor.py b/tests/unit_tests/test_subtensor.py index d8ea5836bd..b9863490d6 100644 --- a/tests/unit_tests/test_subtensor.py +++ b/tests/unit_tests/test_subtensor.py @@ -26,7 +26,7 @@ # Application import bittensor from bittensor.subtensor import ( - subtensor as Subtensor, + Subtensor, _logger, Balance, U16_NORMALIZED_FLOAT, From fbf5a456946e0671fad338f82ad8f8445b335714 Mon Sep 17 00:00:00 2001 From: Maciej Urbanski Date: Thu, 23 May 2024 07:51:00 +0200 Subject: [PATCH 029/116] less verbose synapse exceptions --- bittensor/axon.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/bittensor/axon.py b/bittensor/axon.py index 959e009c3a..476ed52db2 100644 --- a/bittensor/axon.py +++ b/bittensor/axon.py @@ -942,8 +942,10 @@ def log_and_handle_error( ): if isinstance(exception, SynapseException): synapse = exception.synapse or synapse - # Display the traceback for user clarity. - bittensor.logging.trace(f"Forward exception: {traceback.format_exc()}") + + bittensor.logging.trace(f"Forward handled exception: {exception}") + else: + bittensor.logging.trace(f"Forward exception: {traceback.format_exc()}") if synapse.axon is None: synapse.axon = bittensor.TerminalInfo() From 6cf8e6e9ff8ee1e9b8089d1a5e799a4f694285c0 Mon Sep 17 00:00:00 2001 From: Roman Date: Fri, 24 May 2024 08:03:24 -0700 Subject: [PATCH 030/116] Remove line duplication. 
Use # noqa: F401 for import --- bittensor/__init__.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/bittensor/__init__.py b/bittensor/__init__.py index 9f8a866576..8ec8019728 100644 --- a/bittensor/__init__.py +++ b/bittensor/__init__.py @@ -92,7 +92,6 @@ def debug(on: bool = True): __finney_entrypoint__ = "wss://entrypoint-finney.opentensor.ai:443" -__finney_test_entrypoint__ = "wss://test.finney.opentensor.ai:443/" __finney_test_entrypoint__ = "wss://test.finney.opentensor.ai:443/" __archive_entrypoint__ = "wss://archive.chain.opentensor.ai:443/" @@ -234,9 +233,7 @@ def debug(on: bool = True): UnstakeError, ) -from substrateinterface import Keypair - -_ = Keypair +from substrateinterface import Keypair # noqa: F401 from .config import InvalidConfigFile, DefaultConfig, config, T from .keyfile import ( serialized_keypair_to_keyfile_data, From 32cff029240051e68f19c3aede889adbc2f5f449 Mon Sep 17 00:00:00 2001 From: Liam Date: Fri, 24 May 2024 19:18:40 +0400 Subject: [PATCH 031/116] fix: use subtensorci --- .github/workflows/e2e-subtensor-tests.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/e2e-subtensor-tests.yaml b/.github/workflows/e2e-subtensor-tests.yaml index 4a55b71c97..896d2142f9 100644 --- a/.github/workflows/e2e-subtensor-tests.yaml +++ b/.github/workflows/e2e-subtensor-tests.yaml @@ -30,7 +30,7 @@ env: jobs: run: - runs-on: ubuntu-22.04 + runs-on: SubtensorCI strategy: matrix: rust-branch: From 1b0168a81c43c53ef48605ea7a4e6c94f9eda1f2 Mon Sep 17 00:00:00 2001 From: Roman Date: Fri, 24 May 2024 11:12:17 -0700 Subject: [PATCH 032/116] Fixes based on Ben's review. --- bittensor/subtensor.py | 82 +++++++++++------------------- tests/unit_tests/test_subtensor.py | 26 +++++----- 2 files changed, 43 insertions(+), 65 deletions(-) diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 90c7d74fbb..e81f5568f1 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -3364,7 +3364,7 @@ def get_total_stake_for_hotkey( _result = self.query_subtensor("TotalHotkeyStake", block, [ss58_address]) return ( None - if _result is None or not hasattr(_result, "value") + if not getattr(_result, "value", None) else Balance.from_rao(_result.value) ) @@ -3386,7 +3386,7 @@ def get_total_stake_for_coldkey( _result = self.query_subtensor("TotalColdkeyStake", block, [ss58_address]) return ( None - if _result is None or not hasattr(_result, "value") + if not getattr(_result, "value", None) else Balance.from_rao(_result.value) ) @@ -3409,7 +3409,7 @@ def get_stake_for_coldkey_and_hotkey( _result = self.query_subtensor("Stake", block, [hotkey_ss58, coldkey_ss58]) return ( None - if _result is None or not hasattr(_result, "value") + if not getattr(_result, "value", None) else Balance.from_rao(_result.value) ) @@ -3448,7 +3448,7 @@ def does_hotkey_exist(self, hotkey_ss58: str, block: Optional[int] = None) -> bo _result = self.query_subtensor("Owner", block, [hotkey_ss58]) return ( False - if not _result or not hasattr(_result, "value") + if not getattr(_result, "value", None) else _result.value != "5C4hrfjw9DjXZTzV3MwzrrAr9P1MJhSrvWGWqi1eSuyUpnhM" ) @@ -3470,8 +3470,7 @@ def get_hotkey_owner( _result = self.query_subtensor("Owner", block, [hotkey_ss58]) return ( None - if not _result - or not hasattr(_result, "value") + if not getattr(_result, "value", None) or not self.does_hotkey_exist(hotkey_ss58, block) else _result.value ) @@ -3508,7 +3507,7 @@ def get_axon_info( ) return None - # TODO: check if someone still use this method. 
bittensor not. + # It is used in subtensor in neuron_info, and serving def get_prometheus_info( self, netuid: int, hotkey_ss58: str, block: Optional[int] = None ) -> Optional[PrometheusInfo]: @@ -3566,7 +3565,7 @@ def total_issuance(self, block: Optional[int] = None) -> Optional[Balance]: _result = self.query_subtensor("TotalIssuance", block) return ( None - if not _result or not hasattr(_result, "value") + if not getattr(_result, "value", None) else Balance.from_rao(_result.value) ) @@ -3589,7 +3588,7 @@ def total_stake(self, block: Optional[int] = None) -> Optional[Balance]: _result = self.query_subtensor("TotalStake", block) return ( None - if _result is None or not hasattr(_result, "value") + if not getattr(_result, "value", None) else Balance.from_rao(_result.value) ) @@ -3634,9 +3633,7 @@ def tx_rate_limit(self, block: Optional[int] = None) -> Optional[int]: maintaining efficient and timely transaction processing. """ _result = self.query_subtensor("TxRateLimit", block) - return ( - None if _result is None or not hasattr(_result, "value") else _result.value - ) + return getattr(_result, "value", None) ###################### # Network Parameters # @@ -3657,9 +3654,7 @@ def subnet_exists(self, netuid: int, block: Optional[int] = None) -> bool: enabling a deeper understanding of the network's structure and composition. """ _result = self.query_subtensor("NetworksAdded", block, [netuid]) - return ( - False if _result is None or not hasattr(_result, "value") else _result.value - ) + return getattr(_result, "value", False) def get_all_subnet_netuids(self, block: Optional[int] = None) -> List[int]: """ @@ -3695,9 +3690,7 @@ def get_total_subnets(self, block: Optional[int] = None) -> Optional[int]: the extent of its decentralized infrastructure. """ _result = self.query_subtensor("TotalNetworks", block) - return ( - None if _result is None or not hasattr(_result, "value") else _result.value - ) + return getattr(_result, "value", None) def get_subnet_modality( self, netuid: int, block: Optional[int] = None @@ -3713,17 +3706,13 @@ def get_subnet_modality( Optional[int]: The value of the NetworkModality hyperparameter, or ``None`` if the subnetwork does not exist or the parameter is not found. 
""" _result = self.query_subtensor("NetworkModality", block, [netuid]) - return ( - None if _result is None or not hasattr(_result, "value") else _result.value - ) + return getattr(_result, "value", None) def get_subnet_connection_requirement( self, netuid_0: int, netuid_1: int, block: Optional[int] = None ) -> Optional[int]: _result = self.query_subtensor("NetworkConnect", block, [netuid_0, netuid_1]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + return getattr(_result, "value", None) def get_emission_value_by_subnet( self, netuid: int, block: Optional[int] = None @@ -3745,7 +3734,7 @@ def get_emission_value_by_subnet( _result = self.query_subtensor("EmissionValues", block, [netuid]) return ( None - if _result is None or not hasattr(_result, "value") + if not getattr(_result, "value", None) else Balance.from_rao(_result.value) ) @@ -3819,9 +3808,8 @@ def make_substrate_call_with_retry(): ) json_body = make_substrate_call_with_retry() - result = json_body.get("result", None) - if not result: + if not (result := json_body.get("result", None)): return [] return SubnetInfo.list_from_vec_u8(result) @@ -3854,9 +3842,8 @@ def make_substrate_call_with_retry(): ) json_body = make_substrate_call_with_retry() - result = json_body.get("result", None) - if not result: + if not (result := json_body.get("result", None)): return None return SubnetInfo.from_vec_u8(result) @@ -3913,7 +3900,7 @@ def get_subnet_owner( which can be important for decision-making and collaboration within the network. """ _result = self.query_subtensor("SubnetOwner", block, [netuid]) - return _result.value if _result and hasattr(_result, "value") else None + return getattr(_result, "value", None) ############## # Nomination # @@ -3956,9 +3943,9 @@ def get_delegate_take( """ _result = self.query_subtensor("Delegates", block, [hotkey_ss58]) return ( - U16_NORMALIZED_FLOAT(_result.value) - if _result and hasattr(_result, "value") - else None + None + if not getattr(_result, "value", None) + else U16_NORMALIZED_FLOAT(_result.value) ) def get_nominators_for_hotkey( @@ -4016,9 +4003,8 @@ def make_substrate_call_with_retry(encoded_hotkey_: List[int]): encoded_hotkey = ss58_to_vec_u8(hotkey_ss58) json_body = make_substrate_call_with_retry(encoded_hotkey) - result = json_body.get("result", None) - if not result: + if not (result := json_body.get("result", None)): return None return DelegateInfo.from_vec_u8(result) @@ -4051,9 +4037,8 @@ def make_substrate_call_with_retry(): ) json_body = make_substrate_call_with_retry() - result = json_body.get("result", None) - if not result: + if not (result := json_body.get("result", None)): return [] return [DelegateInfoLite(**d) for d in result] @@ -4086,9 +4071,8 @@ def make_substrate_call_with_retry(): ) json_body = make_substrate_call_with_retry() - result = json_body.get("result", None) - if not result: + if not (result := json_body.get("result", None)): return [] return DelegateInfo.list_from_vec_u8(result) @@ -4125,9 +4109,8 @@ def make_substrate_call_with_retry(encoded_coldkey_: List[int]): encoded_coldkey = ss58_to_vec_u8(coldkey_ss58) json_body = make_substrate_call_with_retry(encoded_coldkey) - result = json_body.get("result", None) - if not result: + if not (result := json_body.get("result", None)): return [] return DelegateInfo.delegated_list_from_vec_u8(result) @@ -4324,7 +4307,7 @@ def get_uid_for_hotkey_on_subnet( operational and governance activities on a particular subnet. 
""" _result = self.query_subtensor("Uids", block, [netuid, hotkey_ss58]) - return _result.value if _result and hasattr(_result, "value") else None + return getattr(_result, "value", None) def get_all_uids_for_hotkey( self, hotkey_ss58: str, block: Optional[int] = None @@ -4438,9 +4421,7 @@ def neuron_has_validator_permit( subnet, particularly regarding its involvement in network validation and governance. """ _result = self.query_subtensor("ValidatorPermit", block, [netuid, uid]) - if not hasattr(_result, "value") or _result is None: - return None - return _result.value + return getattr(_result, "value", None) def neuron_for_wallet( self, wallet: "bittensor.wallet", netuid: int, block: Optional[int] = None @@ -4499,9 +4480,8 @@ def make_substrate_call_with_retry(): ) json_body = make_substrate_call_with_retry() - result = json_body.get("result", None) - if not result: + if not (result := json_body.get("result", None)): # TODO: fix `Access to a protected member _null_neuron of a class` error when chane_data.py refactoring. return NeuronInfo._null_neuron() @@ -5136,9 +5116,9 @@ def make_substrate_call_with_retry(): return self.substrate.query_map( module="System", storage_function="Account", - block_hash=None - if block is None - else self.substrate.get_block_hash(block), + block_hash=( + None if block is None else self.substrate.get_block_hash(block) + ), ) result = make_substrate_call_with_retry() @@ -5148,7 +5128,7 @@ def make_substrate_call_with_retry(): return_dict[r[0].value] = bal return return_dict - # TODO: check with the teem if this is used anywhere outside. in bittensor no + # TODO: check with the team if this is used anywhere externally. not in bittensor @staticmethod def _null_neuron() -> NeuronInfo: neuron = NeuronInfo( diff --git a/tests/unit_tests/test_subtensor.py b/tests/unit_tests/test_subtensor.py index d8ea5836bd..2366a1de86 100644 --- a/tests/unit_tests/test_subtensor.py +++ b/tests/unit_tests/test_subtensor.py @@ -29,7 +29,6 @@ subtensor as Subtensor, _logger, Balance, - U16_NORMALIZED_FLOAT, ) from bittensor import subtensor_module @@ -419,7 +418,7 @@ def test_hyper_parameter_success_calls( subtensor._get_hyperparameter.assert_called_once_with( block=707, netuid=7, param_name=param_name ) - # if we change the methods logic in the future we have to be make sure tha returned type is correct + # if we change the methods logic in the future we have to be make sure the returned type is correct assert isinstance(result, expected_result_type) # Special cases @@ -456,7 +455,7 @@ def test_blocks_since_last_update_success_calls(subtensor, mocker): param_name="LastUpdate", netuid=7 ) assert result == 1 - # if we change the methods logic in the future we have to be make sure tha returned type is correct + # if we change the methods logic in the future we have to be make sure the returned type is correct assert isinstance(result, int) @@ -472,7 +471,7 @@ def test_weights_rate_limit_success_calls(subtensor, mocker): subtensor._get_hyperparameter.assert_called_once_with( param_name="WeightsSetRateLimit", netuid=7 ) - # if we change the methods logic in the future we have to be make sure tha returned type is correct + # if we change the methods logic in the future we have to be make sure the returned type is correct assert isinstance(result, int) @@ -497,7 +496,7 @@ def test_get_total_stake_for_hotkey_success(subtensor, mocker): "TotalHotkeyStake", None, [fake_ss58_address] ) spy_balance_from_rao.assert_called_once() - # if we change the methods logic in the future we have to be 
make sure tha returned type is correct + # if we change the methods logic in the future we have to be make sure the returned type is correct assert isinstance(result, Balance) @@ -516,7 +515,7 @@ def test_get_total_stake_for_hotkey_not_result(subtensor, mocker): "TotalHotkeyStake", None, [fake_ss58_address] ) spy_balance_from_rao.assert_not_called() - # if we change the methods logic in the future we have to be make sure tha returned type is correct + # if we change the methods logic in the future we have to be make sure the returned type is correct assert isinstance(result, type(None)) @@ -535,7 +534,7 @@ def test_get_total_stake_for_hotkey_not_value(subtensor, mocker): "TotalHotkeyStake", None, [fake_ss58_address] ) spy_balance_from_rao.assert_not_called() - # if we change the methods logic in the future we have to be make sure tha returned type is correct + # if we change the methods logic in the future we have to be make sure the returned type is correct assert isinstance(subtensor.query_subtensor.return_value, object) assert not hasattr(result, "value") @@ -556,7 +555,7 @@ def test_get_total_stake_for_coldkey_success(subtensor, mocker): "TotalColdkeyStake", None, [fake_ss58_address] ) spy_balance_from_rao.assert_called_once() - # if we change the methods logic in the future we have to be make sure tha returned type is correct + # if we change the methods logic in the future we have to be make sure the returned type is correct assert isinstance(result, Balance) @@ -575,7 +574,7 @@ def test_get_total_stake_for_coldkey_not_result(subtensor, mocker): "TotalColdkeyStake", None, [fake_ss58_address] ) spy_balance_from_rao.assert_not_called() - # if we change the methods logic in the future we have to be make sure tha returned type is correct + # if we change the methods logic in the future we have to be make sure the returned type is correct assert isinstance(result, type(None)) @@ -594,7 +593,7 @@ def test_get_total_stake_for_coldkey_not_value(subtensor, mocker): "TotalColdkeyStake", None, [fake_ss58_address] ) spy_balance_from_rao.assert_not_called() - # if we change the methods logic in the future we have to be make sure tha returned type is correct + # if we change the methods logic in the future we have to be make sure the returned type is correct assert isinstance(subtensor.query_subtensor.return_value, object) assert not hasattr(result, "value") @@ -1212,7 +1211,9 @@ def test_serving_rate_limit_success(mocker, subtensor): # Asserts assert result is not None assert result == int(rate_limit_value) - # subtensor._get_hyperparameter.assert_called_once_with("ServingRateLimit", netuid=netuid, block=block) + subtensor._get_hyperparameter.assert_called_once_with( + param_name="ServingRateLimit", netuid=netuid, block=block + ) def test_serving_rate_limit_no_data(mocker, subtensor): @@ -2178,6 +2179,3 @@ def test_get_delegate_take_no_data(mocker, subtensor): subtensor.query_subtensor.assert_called_once_with("Delegates", block, [hotkey_ss58]) spy_u16_normalized_float.assert_not_called() assert result is None - - -# `get_delegate_by_hotkey` tests From e464eace5599322af94a8f57cc29628eae1476a7 Mon Sep 17 00:00:00 2001 From: Roman Date: Fri, 24 May 2024 11:29:38 -0700 Subject: [PATCH 033/116] Fixes for fixes based on Ben's review --- bittensor/subtensor.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index e81f5568f1..eb9f1c5862 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -3364,7 +3364,7 
@@ def get_total_stake_for_hotkey( _result = self.query_subtensor("TotalHotkeyStake", block, [ss58_address]) return ( None - if not getattr(_result, "value", None) + if getattr(_result, "value", None) is None else Balance.from_rao(_result.value) ) @@ -3386,7 +3386,7 @@ def get_total_stake_for_coldkey( _result = self.query_subtensor("TotalColdkeyStake", block, [ss58_address]) return ( None - if not getattr(_result, "value", None) + if getattr(_result, "value", None) is None else Balance.from_rao(_result.value) ) @@ -3409,7 +3409,7 @@ def get_stake_for_coldkey_and_hotkey( _result = self.query_subtensor("Stake", block, [hotkey_ss58, coldkey_ss58]) return ( None - if not getattr(_result, "value", None) + if getattr(_result, "value", None) is None else Balance.from_rao(_result.value) ) @@ -3448,7 +3448,7 @@ def does_hotkey_exist(self, hotkey_ss58: str, block: Optional[int] = None) -> bo _result = self.query_subtensor("Owner", block, [hotkey_ss58]) return ( False - if not getattr(_result, "value", None) + if getattr(_result, "value", None) is None else _result.value != "5C4hrfjw9DjXZTzV3MwzrrAr9P1MJhSrvWGWqi1eSuyUpnhM" ) @@ -3470,7 +3470,7 @@ def get_hotkey_owner( _result = self.query_subtensor("Owner", block, [hotkey_ss58]) return ( None - if not getattr(_result, "value", None) + if getattr(_result, "value", None) is None or not self.does_hotkey_exist(hotkey_ss58, block) else _result.value ) @@ -3565,7 +3565,7 @@ def total_issuance(self, block: Optional[int] = None) -> Optional[Balance]: _result = self.query_subtensor("TotalIssuance", block) return ( None - if not getattr(_result, "value", None) + if getattr(_result, "value", None) is None else Balance.from_rao(_result.value) ) @@ -3588,7 +3588,7 @@ def total_stake(self, block: Optional[int] = None) -> Optional[Balance]: _result = self.query_subtensor("TotalStake", block) return ( None - if not getattr(_result, "value", None) + if getattr(_result, "value", None) is None else Balance.from_rao(_result.value) ) @@ -3734,7 +3734,7 @@ def get_emission_value_by_subnet( _result = self.query_subtensor("EmissionValues", block, [netuid]) return ( None - if not getattr(_result, "value", None) + if getattr(_result, "value", None) is None else Balance.from_rao(_result.value) ) @@ -3944,7 +3944,7 @@ def get_delegate_take( _result = self.query_subtensor("Delegates", block, [hotkey_ss58]) return ( None - if not getattr(_result, "value", None) + if getattr(_result, "value", None) is None else U16_NORMALIZED_FLOAT(_result.value) ) From 672a53a79e8ec3db3206b4b68504e2636d0c1ae1 Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Fri, 24 May 2024 21:30:06 +0200 Subject: [PATCH 034/116] Added tests to cover all compatibility layer cases. 
--- bittensor/tensor.py | 125 +++++++++-------- bittensor/utils/weight_utils.py | 2 +- tests/unit_tests/test_chain_data.py | 67 +++++++++ tests/unit_tests/test_metagraph.py | 2 +- tests/unit_tests/test_tensor.py | 109 +++++++++++++++ tests/unit_tests/utils/test_weight_utils.py | 147 ++++++++++++++++++++ 6 files changed, 392 insertions(+), 60 deletions(-) diff --git a/bittensor/tensor.py b/bittensor/tensor.py index e6bf6de3d1..ab46560d99 100644 --- a/bittensor/tensor.py +++ b/bittensor/tensor.py @@ -24,33 +24,54 @@ from bittensor.utils.registration import torch, use_torch from pydantic import ConfigDict, BaseModel, Field, field_validator -NUMPY_DTYPES = { - "float16": np.float16, - "float32": np.float32, - "float64": np.float64, - "uint8": np.uint8, - "int16": np.int16, - "int8": np.int8, - "int32": np.int32, - "int64": np.int64, - "bool": bool, -} - -if use_torch(): - TORCH_DTYPES = { - "torch.float16": torch.float16, - "torch.float32": torch.float32, - "torch.float64": torch.float64, - "torch.uint8": torch.uint8, - "torch.int16": torch.int16, - "torch.int8": torch.int8, - "torch.int32": torch.int32, - "torch.int64": torch.int64, - "torch.bool": torch.bool, - } - - -def cast_dtype(raw: Union[None, np.dtype, "torch.dtype", str]) -> str: + +class DTypes(dict): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.torch: bool = False + self.update( + { + "float16": np.float16, + "float32": np.float32, + "float64": np.float64, + "uint8": np.uint8, + "int16": np.int16, + "int8": np.int8, + "int32": np.int32, + "int64": np.int64, + "bool": bool, + } + ) + + def __getitem__(self, key): + self._add_torch() + return super().__getitem__(key) + + def __contains__(self, key): + self._add_torch() + return super().__contains__(key) + + def _add_torch(self): + if self.torch is False: + torch_dtypes = { + "torch.float16": torch.float16, + "torch.float32": torch.float32, + "torch.float64": torch.float64, + "torch.uint8": torch.uint8, + "torch.int16": torch.int16, + "torch.int8": torch.int8, + "torch.int32": torch.int32, + "torch.int64": torch.int64, + "torch.bool": torch.bool, + } + self.update(torch_dtypes) + self.torch = True + + +dtypes = DTypes() + + +def cast_dtype(raw: Union[None, np.dtype, "torch.dtype", str]) -> Optional[str]: """ Casts the raw value to a string representing the `numpy data type `_, or the @@ -67,21 +88,16 @@ def cast_dtype(raw: Union[None, np.dtype, "torch.dtype", str]) -> str: """ if not raw: return None - if isinstance(raw, np.dtype): - return NUMPY_DTYPES[raw] - elif use_torch(): - if isinstance(raw, torch.dtype): - return TORCH_DTYPES[raw] + if use_torch() and isinstance(raw, torch.dtype): + return dtypes[raw] + elif isinstance(raw, np.dtype): + return dtypes[raw] elif isinstance(raw, str): if use_torch(): - assert ( - raw in TORCH_DTYPES - ), f"{raw} not a valid torch type in dict {TORCH_DTYPES}" + assert raw in dtypes, f"{raw} not a valid torch type in dict {dtypes}" return raw else: - assert ( - raw in NUMPY_DTYPES - ), f"{raw} not a valid numpy type in dict {NUMPY_DTYPES}" + assert raw in dtypes, f"{raw} not a valid numpy type in dict {dtypes}" return raw else: raise Exception( @@ -89,7 +105,7 @@ def cast_dtype(raw: Union[None, np.dtype, "torch.dtype", str]) -> str: ) -def cast_shape(raw: Union[None, List[int], str]) -> str: +def cast_shape(raw: Union[None, List[int], str]) -> Optional[Union[str, list]]: """ Casts the raw value to a string representing the tensor shape. 
@@ -105,9 +121,7 @@ def cast_shape(raw: Union[None, List[int], str]) -> str: if not raw: return None elif isinstance(raw, list): - if len(raw) == 0: - return raw - elif isinstance(raw[0], int): + if len(raw) == 0 or isinstance(raw[0], int): return raw else: raise Exception(f"{raw} list elements are not of type int") @@ -124,7 +138,7 @@ class tensor: def __new__(cls, tensor: Union[list, np.ndarray, "torch.Tensor"]): if isinstance(tensor, list) or isinstance(tensor, np.ndarray): tensor = torch.tensor(tensor) if use_torch() else np.array(tensor) - return Tensor.serialize(tensor=tensor) + return Tensor.serialize(tensor_=tensor) class Tensor(BaseModel): @@ -170,20 +184,20 @@ def deserialize(self) -> Union["np.ndarray", "torch.Tensor"]: # Reshape does not work for (0) or [0] if not (len(shape) == 1 and shape[0] == 0): torch_object = torch_object.reshape(shape) - return torch_object.type(TORCH_DTYPES[self.dtype]) + return torch_object.type(dtypes[self.dtype]) else: # Reshape does not work for (0) or [0] if not (len(shape) == 1 and shape[0] == 0): numpy_object = numpy_object.reshape(shape) - return numpy_object.astype(NUMPY_DTYPES[self.dtype]) + return numpy_object.astype(dtypes[self.dtype]) @staticmethod - def serialize(tensor: Union["np.ndarray", "torch.Tensor"]) -> "Tensor": + def serialize(tensor_: Union["np.ndarray", "torch.Tensor"]) -> "Tensor": """ Serializes the given tensor. Args: - tensor (np.array or torch.Tensor): The tensor to serialize. + tensor_ (np.array or torch.Tensor): The tensor to serialize. Returns: Tensor: The serialized tensor. @@ -191,19 +205,14 @@ def serialize(tensor: Union["np.ndarray", "torch.Tensor"]) -> "Tensor": Raises: Exception: If the serialization process encounters an error. """ - dtype = str(tensor.dtype) - shape = list(tensor.shape) + dtype = str(tensor_.dtype) + shape = list(tensor_.shape) if len(shape) == 0: shape = [0] - if use_torch(): - torch_numpy = tensor.cpu().detach().numpy().copy() - data_buffer = base64.b64encode( - msgpack.packb(torch_numpy, default=msgpack_numpy.encode) - ).decode("utf-8") - else: - data_buffer = base64.b64encode( - msgpack.packb(tensor, default=msgpack_numpy.encode) - ).decode("utf-8") + tensor__ = tensor_.cpu().detach().numpy().copy() if use_torch() else tensor_ + data_buffer = base64.b64encode( + msgpack.packb(tensor__, default=msgpack_numpy.encode) + ).decode("utf-8") return Tensor(buffer=data_buffer, shape=shape, dtype=dtype) # Represents the tensor buffer data. 
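The tensor.py changes above fold the separate numpy and torch dtype tables into a single, lazily extended `dtypes` mapping and route both backends through one msgpack serialization path. A minimal round-trip sketch of the numpy path, mirroring the unit tests added later in this patch (assumes the torch compatibility flag is left off):

    import numpy as np
    import bittensor

    original = np.array([1, 2, 3, 4], dtype=np.int64)

    # bittensor.tensor(...) routes through Tensor.serialize, producing a pydantic
    # model with a base64/msgpack buffer plus string dtype and list shape fields.
    serialized = bittensor.tensor(original)
    assert serialized.dtype == "int64"
    assert serialized.shape == [4]

    # deserialize() unpacks the buffer and restores dtype and shape through the
    # shared dtypes mapping.
    restored = serialized.deserialize()
    assert isinstance(restored, np.ndarray)
    assert restored.tolist() == [1, 2, 3, 4]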
diff --git a/bittensor/utils/weight_utils.py b/bittensor/utils/weight_utils.py index 9bd8606c9d..2810a9a0c1 100644 --- a/bittensor/utils/weight_utils.py +++ b/bittensor/utils/weight_utils.py @@ -55,7 +55,7 @@ def normalize_max_weight( if estimation.max() <= limit: return weights / weights.sum() - # Find the cumlative sum and sorted tensor + # Find the cumulative sum and sorted tensor cumsum = np.cumsum(estimation, 0) # Determine the index of cutoff diff --git a/tests/unit_tests/test_chain_data.py b/tests/unit_tests/test_chain_data.py index 2cc842d9a2..a6474bbee9 100644 --- a/tests/unit_tests/test_chain_data.py +++ b/tests/unit_tests/test_chain_data.py @@ -1,5 +1,6 @@ import pytest import bittensor +import torch from bittensor.chain_data import AxonInfo, ChainDataType, DelegateInfo, NeuronInfo SS58_FORMAT = bittensor.__ss58_format__ @@ -204,6 +205,36 @@ def test_to_parameter_dict(axon_info, test_case): assert result[key] == value, f"Test case: {test_case}" +@pytest.mark.parametrize( + "axon_info, test_case", + [ + ( + AxonInfo( + version=1, + ip="127.0.0.1", + port=8080, + ip_type=4, + hotkey="hot", + coldkey="cold", + ), + "ID_to_parameter_dict", + ), + ], +) +def test_to_parameter_dict_torch( + axon_info, + test_case, + force_legacy_torch_compat_api, +): + result = axon_info.to_parameter_dict() + + # Assert + assert isinstance(result, torch.nn.ParameterDict) + for key, value in axon_info.__dict__.items(): + assert key in result + assert result[key] == value, f"Test case: {test_case}" + + @pytest.mark.parametrize( "parameter_dict, expected, test_case", [ @@ -236,6 +267,42 @@ def test_from_parameter_dict(parameter_dict, expected, test_case): assert result == expected, f"Test case: {test_case}" +@pytest.mark.parametrize( + "parameter_dict, expected, test_case", + [ + ( + torch.nn.ParameterDict( + { + "version": 1, + "ip": "127.0.0.1", + "port": 8080, + "ip_type": 4, + "hotkey": "hot", + "coldkey": "cold", + } + ), + AxonInfo( + version=1, + ip="127.0.0.1", + port=8080, + ip_type=4, + hotkey="hot", + coldkey="cold", + ), + "ID_from_parameter_dict", + ), + ], +) +def test_from_parameter_dict_torch( + parameter_dict, expected, test_case, force_legacy_torch_compat_api +): + # Act + result = AxonInfo.from_parameter_dict(parameter_dict) + + # Assert + assert result == expected, f"Test case: {test_case}" + + def create_neuron_info_decoded( hotkey, coldkey, diff --git a/tests/unit_tests/test_metagraph.py b/tests/unit_tests/test_metagraph.py index 38d2cf14cb..af0dbdba76 100644 --- a/tests/unit_tests/test_metagraph.py +++ b/tests/unit_tests/test_metagraph.py @@ -85,7 +85,7 @@ def test_set_metagraph_attributes(mock_environment): metagraph.consensus, np.array([neuron.consensus for neuron in neurons], dtype=np.float32), ) - == True + is True ) # Similarly for other attributes... 
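The torch-flavoured tests above and below all request a `force_legacy_torch_compat_api` fixture. That fixture lives in the shared test conftest rather than in this patch; a plausible sketch of it, assuming the legacy switch is the `USE_TORCH` environment variable consulted by `bittensor.utils.registration.use_torch()`:

    import pytest

    @pytest.fixture
    def force_legacy_torch_compat_api(monkeypatch):
        # Flip the compatibility flag for a single test so that use_torch()
        # returns True and the torch-backed code paths are exercised.
        monkeypatch.setenv("USE_TORCH", "1")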
diff --git a/tests/unit_tests/test_tensor.py b/tests/unit_tests/test_tensor.py index 94d8f7cd52..9939b397e7 100644 --- a/tests/unit_tests/test_tensor.py +++ b/tests/unit_tests/test_tensor.py @@ -18,6 +18,7 @@ import numpy as np import bittensor import numpy +import torch # This is a fixture that creates an example tensor for testing @@ -30,6 +31,15 @@ def example_tensor(): return bittensor.tensor(data) +@pytest.fixture +def example_tensor_torch(force_legacy_torch_compat_api): + # Create a tensor from a list using PyTorch + data = torch.tensor([1, 2, 3, 4]) + + # Serialize the tensor into a Tensor instance and return it + return bittensor.tensor(data) + + def test_deserialize(example_tensor): # Deserialize the tensor from the Tensor instance tensor = example_tensor.deserialize() @@ -39,6 +49,13 @@ def test_deserialize(example_tensor): assert tensor.tolist() == [1, 2, 3, 4] +def test_deserialize_torch(example_tensor_torch, force_legacy_torch_compat_api): + tensor = example_tensor_torch.deserialize() + # Check that the result is a PyTorch tensor with the correct values + assert isinstance(tensor, torch.Tensor) + assert tensor.tolist() == [1, 2, 3, 4] + + def test_serialize(example_tensor): # Check that the serialized tensor is an instance of Tensor assert isinstance(example_tensor, bittensor.Tensor) @@ -70,6 +87,37 @@ def test_serialize(example_tensor): assert example_tensor.shape == example_tensor.shape +def test_serialize_torch(example_tensor_torch, force_legacy_torch_compat_api): + # Check that the serialized tensor is an instance of Tensor + assert isinstance(example_tensor_torch, bittensor.Tensor) + + # Check that the Tensor instance has the correct buffer, dtype, and shape + assert example_tensor_torch.buffer == example_tensor_torch.buffer + assert example_tensor_torch.dtype == example_tensor_torch.dtype + assert example_tensor_torch.shape == example_tensor_torch.shape + + assert isinstance(example_tensor_torch.tolist(), list) + + # Check that the Tensor instance has the correct buffer, dtype, and shape + assert example_tensor_torch.buffer == example_tensor_torch.buffer + assert example_tensor_torch.dtype == example_tensor_torch.dtype + assert example_tensor_torch.shape == example_tensor_torch.shape + + assert isinstance(example_tensor_torch.numpy(), numpy.ndarray) + + # Check that the Tensor instance has the correct buffer, dtype, and shape + assert example_tensor_torch.buffer == example_tensor_torch.buffer + assert example_tensor_torch.dtype == example_tensor_torch.dtype + assert example_tensor_torch.shape == example_tensor_torch.shape + + assert isinstance(example_tensor_torch.tensor(), torch.Tensor) + + # Check that the Tensor instance has the correct buffer, dtype, and shape + assert example_tensor_torch.buffer == example_tensor_torch.buffer + assert example_tensor_torch.dtype == example_tensor_torch.dtype + assert example_tensor_torch.shape == example_tensor_torch.shape + + def test_buffer_field(): # Create a Tensor instance with a specified buffer, dtype, and shape tensor = bittensor.Tensor( @@ -80,6 +128,16 @@ def test_buffer_field(): assert tensor.buffer == "0x321e13edqwds231231231232131" +def test_buffer_field_torch(force_legacy_torch_compat_api): + # Create a Tensor instance with a specified buffer, dtype, and shape + tensor = bittensor.Tensor( + buffer="0x321e13edqwds231231231232131", dtype="torch.float32", shape=[3, 3] + ) + + # Check that the buffer field matches the provided value + assert tensor.buffer == "0x321e13edqwds231231231232131" + + def test_dtype_field(): # 
Create a Tensor instance with a specified buffer, dtype, and shape tensor = bittensor.Tensor( @@ -90,6 +148,13 @@ def test_dtype_field(): assert tensor.dtype == "float32" +def test_dtype_field_torch(force_legacy_torch_compat_api): + tensor = bittensor.Tensor( + buffer="0x321e13edqwds231231231232131", dtype="torch.float32", shape=[3, 3] + ) + assert tensor.dtype == "torch.float32" + + def test_shape_field(): # Create a Tensor instance with a specified buffer, dtype, and shape tensor = bittensor.Tensor( @@ -100,6 +165,13 @@ def test_shape_field(): assert tensor.shape == [3, 3] +def test_shape_field_torch(force_legacy_torch_compat_api): + tensor = bittensor.Tensor( + buffer="0x321e13edqwds231231231232131", dtype="torch.float32", shape=[3, 3] + ) + assert tensor.shape == [3, 3] + + def test_serialize_all_types(): bittensor.tensor(np.array([1], dtype=np.float16)) bittensor.tensor(np.array([1], dtype=np.float32)) @@ -110,6 +182,16 @@ def test_serialize_all_types(): bittensor.tensor(np.array([1], dtype=bool)) +def test_serialize_all_types_torch(force_legacy_torch_compat_api): + bittensor.tensor(torch.tensor([1], dtype=torch.float16)) + bittensor.tensor(torch.tensor([1], dtype=torch.float32)) + bittensor.tensor(torch.tensor([1], dtype=torch.float64)) + bittensor.tensor(torch.tensor([1], dtype=torch.uint8)) + bittensor.tensor(torch.tensor([1], dtype=torch.int32)) + bittensor.tensor(torch.tensor([1], dtype=torch.int64)) + bittensor.tensor(torch.tensor([1], dtype=torch.bool)) + + def test_serialize_all_types_equality(): rng = np.random.default_rng() @@ -132,3 +214,30 @@ def test_serialize_all_types_equality(): tensor = rng.standard_normal((100,), dtype=np.float32) < 0.5 assert np.all(bittensor.tensor(tensor).tensor() == tensor) + + +def test_serialize_all_types_equality_torch(force_legacy_torch_compat_api): + torchtensor = torch.randn([100], dtype=torch.float16) + assert torch.all(bittensor.tensor(torchtensor).tensor() == torchtensor) + + torchtensor = torch.randn([100], dtype=torch.float32) + assert torch.all(bittensor.tensor(torchtensor).tensor() == torchtensor) + + torchtensor = torch.randn([100], dtype=torch.float64) + assert torch.all(bittensor.tensor(torchtensor).tensor() == torchtensor) + + torchtensor = torch.randint(255, 256, (1000,), dtype=torch.uint8) + assert torch.all(bittensor.tensor(torchtensor).tensor() == torchtensor) + + torchtensor = torch.randint( + 2_147_483_646, 2_147_483_647, (1000,), dtype=torch.int32 + ) + assert torch.all(bittensor.tensor(torchtensor).tensor() == torchtensor) + + torchtensor = torch.randint( + 9_223_372_036_854_775_806, 9_223_372_036_854_775_807, (1000,), dtype=torch.int64 + ) + assert torch.all(bittensor.tensor(torchtensor).tensor() == torchtensor) + + torchtensor = torch.randn([100], dtype=torch.float32) < 0.5 + assert torch.all(bittensor.tensor(torchtensor).tensor() == torchtensor) diff --git a/tests/unit_tests/utils/test_weight_utils.py b/tests/unit_tests/utils/test_weight_utils.py index 0a42a9c9b3..edf334db50 100644 --- a/tests/unit_tests/utils/test_weight_utils.py +++ b/tests/unit_tests/utils/test_weight_utils.py @@ -56,6 +56,36 @@ def test_convert_weight_and_uids(): weight_utils.convert_weights_and_uids_for_emit(uids, weights) +def test_convert_weight_and_uids_torch(force_legacy_torch_compat_api): + uids = torch.tensor(list(range(10))) + weights = torch.rand(10) + weight_utils.convert_weights_and_uids_for_emit(uids, weights) + + # min weight < 0 + weights[5] = -1 + with pytest.raises(ValueError) as pytest_wrapped_e: + 
weight_utils.convert_weights_and_uids_for_emit(uids, weights) + # min uid < 0 + weights[5] = 0 + uids[3] = -1 + with pytest.raises(ValueError) as pytest_wrapped_e: + weight_utils.convert_weights_and_uids_for_emit(uids, weights) + # len(uids) != len(weights) + uids[3] = 3 + with pytest.raises(ValueError) as pytest_wrapped_e: + weight_utils.convert_weights_and_uids_for_emit(uids, weights[1:]) + + # sum(weights) == 0 + weights = torch.zeros(10) + weight_utils.convert_weights_and_uids_for_emit(uids, weights) + + # test for overflow and underflow + for _ in range(5): + uids = torch.tensor(list(range(10))) + weights = torch.rand(10) + weight_utils.convert_weights_and_uids_for_emit(uids, weights) + + def test_normalize_with_max_weight(): weights = np.random.rand(1000) wn = weight_utils.normalize_max_weight(weights, limit=0.01) @@ -187,6 +217,37 @@ def test_convert_weight_uids_and_vals_to_tensor_happy_path( assert np.allclose(result, expected), f"Failed {test_id}" +@pytest.mark.parametrize( + "test_id, n, uids, weights, subnets, expected", + [ + ( + "happy-path-1", + 3, + [0, 1, 2], + [15, 5, 80], + [0, 1, 2], + torch.tensor([0.15, 0.05, 0.8]), + ), + ( + "happy-path-2", + 3, + [0, 2], + [300, 300], + [0, 1, 2], + torch.tensor([0.5, 0.0, 0.5]), + ), + ], +) +def test_convert_weight_uids_and_vals_to_tensor_happy_path_torch( + test_id, n, uids, weights, subnets, expected, force_legacy_torch_compat_api +): + # Act + result = weight_utils.convert_weight_uids_and_vals_to_tensor(n, uids, weights) + + # Assert + assert torch.allclose(result, expected), f"Failed {test_id}" + + @pytest.mark.parametrize( "test_id, n, uids, weights, expected", [ @@ -254,6 +315,39 @@ def test_convert_root_weight_uids_and_vals_to_tensor_happy_paths( assert np.allclose(result, expected, atol=1e-4), f"Failed {test_id}" +@pytest.mark.parametrize( + "test_id, n, uids, weights, subnets, expected", + [ + ( + "edge-1", + 1, + [0], + [0], + [0], + torch.tensor([0.0]), + ), # Single neuron with zero weight + ( + "edge-2", + 2, + [0, 1], + [0, 0], + [0, 1], + torch.tensor([0.0, 0.0]), + ), # All zero weights + ], +) +def test_convert_root_weight_uids_and_vals_to_tensor_edge_cases( + test_id, n, uids, weights, subnets, expected, force_legacy_torch_compat_api +): + # Act + result = weight_utils.convert_root_weight_uids_and_vals_to_tensor( + n, uids, weights, subnets + ) + + # Assert + assert torch.allclose(result, expected, atol=1e-4), f"Failed {test_id}" + + @pytest.mark.parametrize( "test_id, n, uids, weights, subnets, expected", [ @@ -333,6 +427,36 @@ def test_happy_path(test_id, n, uids, bonds, expected_output): assert np.array_equal(result, expected_output), f"Failed {test_id}" +@pytest.mark.parametrize( + "test_id, n, uids, bonds, expected_output", + [ + ( + "happy-path-1", + 5, + [1, 3, 4], + [10, 20, 30], + torch.tensor([0, 10, 0, 20, 30], dtype=torch.int64), + ), + ( + "happy-path-2", + 3, + [0, 1, 2], + [7, 8, 9], + torch.tensor([7, 8, 9], dtype=torch.int64), + ), + ("happy-path-3", 4, [2], [15], torch.tensor([0, 0, 15, 0], dtype=torch.int64)), + ], +) +def test_happy_path_torch( + test_id, n, uids, bonds, expected_output, force_legacy_torch_compat_api +): + # Act + result = weight_utils.convert_bond_uids_and_vals_to_tensor(n, uids, bonds) + + # Assert + assert torch.equal(result, expected_output), f"Failed {test_id}" + + @pytest.mark.parametrize( "test_id, n, uids, bonds, expected_output", [ @@ -354,6 +478,29 @@ def test_edge_cases(test_id, n, uids, bonds, expected_output): assert np.array_equal(result, expected_output), 
f"Failed {test_id}" +@pytest.mark.parametrize( + "test_id, n, uids, bonds, expected_output", + [ + ("edge-1", 1, [0], [0], torch.tensor([0], dtype=torch.int64)), # Single element + ( + "edge-2", + 10, + [], + [], + torch.zeros(10, dtype=torch.int64), + ), # Empty uids and bonds + ], +) +def test_edge_cases_torch( + test_id, n, uids, bonds, expected_output, force_legacy_torch_compat_api +): + # Act + result = weight_utils.convert_bond_uids_and_vals_to_tensor(n, uids, bonds) + + # Assert + assert torch.equal(result, expected_output), f"Failed {test_id}" + + @pytest.mark.parametrize( "test_id, n, uids, bonds, exception", [ From 451327807190cc74446d2942210998abbdbd50a1 Mon Sep 17 00:00:00 2001 From: Roman Date: Fri, 24 May 2024 13:33:13 -0700 Subject: [PATCH 035/116] Fixes based on Ben's review --- bittensor/utils/wallet_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bittensor/utils/wallet_utils.py b/bittensor/utils/wallet_utils.py index 3cb4149693..39218c33f0 100644 --- a/bittensor/utils/wallet_utils.py +++ b/bittensor/utils/wallet_utils.py @@ -8,7 +8,7 @@ # the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, # and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -# The above copyright notice and this permission notice shall be included in all copies or large portions of +# The above copyright notice and this permission notice shall be included in all copies or substantial portions of # the Software. # THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO From cb93237c0bdac5dd2f593370be261d1aa7530b2b Mon Sep 17 00:00:00 2001 From: opendansor Date: Fri, 24 May 2024 13:51:35 -0700 Subject: [PATCH 036/116] Add Commit weights and reveal weights function --- bittensor/cli.py | 14 + bittensor/commands/__init__.py | 1 + bittensor/commands/weights.py | 232 +++++++++++++++++ bittensor/extrinsics/commit_weights.py | 119 +++++++++ bittensor/subtensor.py | 245 +++++++++++++++++- bittensor/utils/weight_utils.py | 27 ++ .../integration_tests/test_cli_no_network.py | 2 + .../test_subtensor_integration.py | 220 +++++++++++++++- 8 files changed, 853 insertions(+), 7 deletions(-) create mode 100644 bittensor/commands/weights.py create mode 100644 bittensor/extrinsics/commit_weights.py diff --git a/bittensor/cli.py b/bittensor/cli.py index fb8f628335..49e188317c 100644 --- a/bittensor/cli.py +++ b/bittensor/cli.py @@ -68,6 +68,8 @@ VoteCommand, WalletBalanceCommand, WalletCreateCommand, + CommitWeightCommand, + RevealWeightCommand, ) # Create a console instance for CLI display. 
@@ -93,6 +95,9 @@ "sudos": "sudo", "i": "info", "info": "info", + "weights": "weight", + "wt": "weight", + "weight": "weight", } COMMANDS = { "subnets": { @@ -166,6 +171,15 @@ "remove": UnStakeCommand, }, }, + "weight": { + "name": "weight", + "aliases": ["wt", "weights"], + "help": "Commands for managing weight for subnets.", + "commands": { + "commit": CommitWeightCommand, + "reveal": RevealWeightCommand, + }, + }, "sudo": { "name": "sudo", "aliases": ["su", "sudos"], diff --git a/bittensor/commands/__init__.py b/bittensor/commands/__init__.py index 27883be2f8..2ccea346a4 100644 --- a/bittensor/commands/__init__.py +++ b/bittensor/commands/__init__.py @@ -91,6 +91,7 @@ WalletBalanceCommand, GetWalletHistoryCommand, ) +from .weights import CommitWeightCommand, RevealWeightCommand from .transfer import TransferCommand from .inspect import InspectCommand from .metagraph import MetagraphCommand diff --git a/bittensor/commands/weights.py b/bittensor/commands/weights.py new file mode 100644 index 0000000000..9499c60c95 --- /dev/null +++ b/bittensor/commands/weights.py @@ -0,0 +1,232 @@ +import argparse +import re + +import torch +from rich.prompt import Prompt + +import bittensor +from . import defaults + + +class CommitWeightCommand: + """ + Executes the ``commit`` command to commit weights for specific subnet on the Bittensor network. + + Usage: + The command allows committing weights for a specific subnet. Users need to specify the netuid (network unique identifier), corresponding UIDs, and weights they wish to commit. + + Optional arguments: + - ``--netuid`` (int): The netuid of the subnet for which weights are to be commited. + - ``--uids`` (str): Corresponding UIDs for the specified netuid, in comma-separated format. + - ``--weights`` (str): Corresponding weights for the specified UIDs, in comma-separated format. + + Example usage: + $ btcli wt commit --netuid 1 --uids 1,2,3,4 --weights 0.1,0.2,0.3,0.4 + + Note: + This command is used to commit weights for a specific subnet and requires the user to have the necessary permissions. 
+ """ + + @staticmethod + def run(cli: "bittensor.cli"): + r"""Commit weights for a specific subnet.""" + try: + subtensor: "bittensor.subtensor" = bittensor.subtensor(config=cli.config, log_verbose=False) + CommitWeightCommand._run(cli, subtensor) + finally: + if "subtensor" in locals(): + subtensor.close() + bittensor.logging.debug("closing subtensor connection") + + @staticmethod + def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): + r"""Commit weights for a specific subnet""" + wallet = bittensor.wallet(config=cli.config) + + # Get values if not set + if not cli.config.is_set("netuid"): + cli.config.netuid = int(Prompt.ask(f"Enter netuid")) + + if not cli.config.is_set("uids"): + cli.config.uids = Prompt.ask(f"Enter UIDs (comma-separated)") + + if not cli.config.is_set("weights"): + cli.config.weights = Prompt.ask(f"Enter weights (comma-separated)") + + # Parse from string + netuid = cli.config.netuid + uids = torch.tensor( + list(map(int, re.split(r"[ ,]+", cli.config.uids))), + dtype=torch.int64 + ) + weights = torch.tensor( + list(map(int, re.split(r"[ ,]+", cli.config.weights))), + dtype=torch.float32 + ) + + # Run the commit weights operation + success, message = subtensor.commit_weights( + wallet=wallet, + netuid=netuid, + uids=uids, + weights=weights, + wait_for_inclusion=cli.config.wait_for_inclusion, + wait_for_finalization=cli.config.wait_for_finalization, + prompt=cli.config.prompt, + ) + + # Result + if success: + bittensor.__console__.print(f"Weights committed successfully") + else: + bittensor.__console__.print(f"Failed to commit weights: {message}") + + @staticmethod + def add_args(parser: argparse.ArgumentParser): + parser = parser.add_parser( + "commit", help="""Commit weights for a specific subnet.""" + ) + parser.add_argument("--netuid", dest="netuid", type=int, required=False) + parser.add_argument("--uids", dest="uids", type=str, required=False) + parser.add_argument("--weights", dest="weights", type=str, required=False) + parser.add_argument( + "--wait-for-inclusion", + dest="wait_for_inclusion", + action="store_true", + default=False, + ) + parser.add_argument( + "--wait-for-finalization", + dest="wait_for_finalization", + action="store_true", + default=True, + ) + parser.add_argument( + "--prompt", + dest="prompt", + action="store_true", + default=True, + ) + + bittensor.wallet.add_args(parser) + bittensor.subtensor.add_args(parser) + + @staticmethod + def check_config(config: "bittensor.config"): + if not config.is_set("wallet.name") and not config.no_prompt: + wallet_name = Prompt.ask("Enter wallet name", default=defaults.wallet.name) + config.wallet.name = str(wallet_name) + if not config.is_set("wallet.hotkey") and not config.no_prompt: + hotkey = Prompt.ask("Enter hotkey name", default=defaults.wallet.hotkey) + config.wallet.hotkey = str(hotkey) + + +class RevealWeightCommand: + """ + Executes the ``reveal`` command to reveal weights for a specific subnet on the Bittensor network. + Usage: + The command allows revealing weights for a specific subnet. Users need to specify the netuid (network unique identifier), corresponding UIDs, and weights they wish to reveal. + Optional arguments: + - ``--netuid`` (int): The netuid of the subnet for which weights are to be revealed. + - ``--uids`` (str): Corresponding UIDs for the specified netuid, in comma-separated format. + - ``--weights`` (str): Corresponding weights for the specified UIDs, in comma-separated format. 
+ Example usage:: + $ btcli wt reveal --netuid 1 --uids 1,2,3,4 --weights 0.1,0.2,0.3,0.4 + Note: + This command is used to reveal weights for a specific subnet and requires the user to have the necessary permissions. + """ + + @staticmethod + def run(cli: "bittensor.cli"): + r"""Reveal weights for a specific subnet.""" + try: + subtensor: "bittensor.subtensor" = bittensor.subtensor( + config=cli.config, log_verbose=False + ) + RevealWeightCommand._run(cli, subtensor) + finally: + if "subtensor" in locals(): + subtensor.close() + bittensor.logging.debug("closing subtensor connection") + + @staticmethod + def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): + r"""Reveal weights for a specific subnet.""" + wallet = bittensor.wallet(config=cli.config) + + # Get values if not set. + if not cli.config.is_set("netuid"): + cli.config.netuid = int(Prompt.ask(f"Enter netuid")) + + if not cli.config.is_set("uids"): + cli.config.uids = Prompt.ask(f"Enter UIDs (comma-separated)") + + if not cli.config.is_set("weights"): + cli.config.weights = Prompt.ask(f"Enter weights (comma-separated)") + + # Parse from string + netuid = cli.config.netuid + uids = torch.tensor( + list(map(int, re.split(r"[ ,]+", cli.config.uids))), + dtype=torch.int64, + ) + weights = torch.tensor( + list(map(float, re.split(r"[ ,]+", cli.config.weights))), + dtype=torch.float32, + ) + + # Run the reveal weights operation. + success, message = subtensor.reveal_weights( + wallet=wallet, + netuid=netuid, + uids=uids, + weights=weights, + version_key=0, + wait_for_inclusion=cli.config.wait_for_inclusion, + wait_for_finalization=cli.config.wait_for_finalization, + prompt=cli.config.prompt, + ) + + if success: + bittensor.__console__.print(f"Weights revealed successfully") + else: + bittensor.__console__.print(f"Failed to reveal weights: {message}") + + @staticmethod + def add_args(parser: argparse.ArgumentParser): + parser = parser.add_parser( + "reveal", help="""Reveal weights for a specific subnet.""" + ) + parser.add_argument("--netuid", dest="netuid", type=int, required=False) + parser.add_argument("--uids", dest="uids", type=str, required=False) + parser.add_argument("--weights", dest="weights", type=str, required=False) + parser.add_argument( + "--wait-for-inclusion", + dest="wait_for_inclusion", + action="store_true", + default=False, + ) + parser.add_argument( + "--wait-for-finalization", + dest="wait_for_finalization", + action="store_true", + default=True, + ) + parser.add_argument( + "--prompt", + dest="prompt", + action="store_true", + default=True, + ) + + bittensor.wallet.add_args(parser) + bittensor.subtensor.add_args(parser) + + @staticmethod + def check_config(config: "bittensor.config"): + if not config.is_set("wallet.name") and not config.no_prompt: + wallet_name = Prompt.ask("Enter wallet name", default=defaults.wallet.name) + config.wallet.name = str(wallet_name) + if not config.is_set("wallet.hotkey") and not config.no_prompt: + hotkey = Prompt.ask("Enter hotkey name", default=defaults.wallet.hotkey) + config.wallet.hotkey = str(hotkey) diff --git a/bittensor/extrinsics/commit_weights.py b/bittensor/extrinsics/commit_weights.py new file mode 100644 index 0000000000..59fd2f2776 --- /dev/null +++ b/bittensor/extrinsics/commit_weights.py @@ -0,0 +1,119 @@ +# The MIT License (MIT) +# Copyright © 2021 Yuma Rao +# Copyright © 2023 Opentensor Foundation + +# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated +# documentation files (the “Software”), to 
deal in the Software without restriction, including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, +# and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all copies or substantial portions of +# the Software. + +# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO +# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +# DEALINGS IN THE SOFTWARE. + +from typing import Tuple, List + +import bittensor + + +def commit_weights_extrinsic( + subtensor: "bittensor.subtensor", + wallet: "bittensor.wallet", + netuid: int, + commit_hash: str, + wait_for_inclusion: bool = False, + wait_for_finalization: bool = False, + prompt: bool = False, +) -> Tuple[bool, str]: + """ + Commits a hash of the neuron's weights to the Bittensor blockchain using the provided wallet. + This function is a wrapper around the `_do_commit_weights` method, handling user prompts and error messages. + Args: + subtensor (bittensor.subtensor): The subtensor instance used for blockchain interaction. + wallet (bittensor.wallet): The wallet associated with the neuron committing the weights. + netuid (int): The unique identifier of the subnet. + commit_hash (str): The hash of the neuron's weights to be committed. + wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. + wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. + prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. + Returns: + Tuple[bool, str]: ``True`` if the weight commitment is successful, False otherwise. And `msg`, a string + value describing the success or potential error. + This function provides a user-friendly interface for committing weights to the Bittensor blockchain, ensuring proper + error handling and user interaction when required. + """ + if prompt: + if not input("Would you like to commit weights? (y/n) ").lower() == "y": + return False, "User cancelled the operation." + + success, error_message = subtensor._do_commit_weights( + wallet=wallet, + netuid=netuid, + commit_hash=commit_hash, + wait_for_inclusion=wait_for_inclusion, + wait_for_finalization=wait_for_finalization, + ) + + if success: + bittensor.logging.info("Successfully committed weights.") + return True, "Successfully committed weights." + else: + bittensor.logging.error(f"Failed to commit weights: {error_message}") + return False, error_message + + +def reveal_weights_extrinsic( + subtensor: "bittensor.subtensor", + wallet: "bittensor.wallet", + netuid: int, + uids: List[int], + weights: List[int], + version_key: int, + wait_for_inclusion: bool = False, + wait_for_finalization: bool = False, + prompt: bool = False, +) -> Tuple[bool, str]: + """ + Reveals the weights for a specific subnet on the Bittensor blockchain using the provided wallet. + This function is a wrapper around the `_do_reveal_weights` method, handling user prompts and error messages. 
+ Args: + subtensor (bittensor.subtensor): The subtensor instance used for blockchain interaction. + wallet (bittensor.wallet): The wallet associated with the neuron revealing the weights. + netuid (int): The unique identifier of the subnet. + uids (List[int]): List of neuron UIDs for which weights are being revealed. + weights (List[int]): List of weight values corresponding to each UID. + version_key (int): Version key for compatibility with the network. + wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. + wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. + prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. + Returns: + Tuple[bool, str]: ``True`` if the weight revelation is successful, False otherwise. And `msg`, a string + value describing the success or potential error. + This function provides a user-friendly interface for revealing weights on the Bittensor blockchain, ensuring proper + error handling and user interaction when required. + """ + if prompt: + if not input("Would you like to reveal weights? (y/n) ").lower() == "y": + return False, "User cancelled the operation." + + success, error_message = subtensor._do_reveal_weights( + wallet=wallet, + netuid=netuid, + uids=uids, + values=weights, + version_key=version_key, + wait_for_inclusion=wait_for_inclusion, + wait_for_finalization=wait_for_finalization, + ) + + if success: + bittensor.logging.info("Successfully revealed weights.") + return True, "Successfully revealed weights." + else: + bittensor.logging.error(f"Failed to reveal weights: {error_message}") + return False, error_message diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 737a3b556b..2ed618fd5f 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -40,7 +40,7 @@ import bittensor from bittensor.btlogging import logging as _logger -from bittensor.utils import torch +from bittensor.utils import torch, weight_utils from .chain_data import ( NeuronInfo, DelegateInfo, @@ -86,6 +86,10 @@ publish_metadata, get_metadata, ) +from .extrinsics.commit_weights import ( + commit_weights_extrinsic, + reveal_weights_extrinsic, +) from .extrinsics.set_weights import set_weights_extrinsic from .extrinsics.staking import add_stake_extrinsic, add_stake_multiple_extrinsic from .extrinsics.transfer import transfer_extrinsic @@ -899,6 +903,245 @@ def make_substrate_call_with_retry(): return make_substrate_call_with_retry() + ################## + # Commit Weights # + ################## + def commit_weights( + self, + wallet: "bittensor.wallet", + netuid: int, + uids: torch.Tensor, + weights: torch.Tensor, + wait_for_inclusion: bool = False, + wait_for_finalization: bool = False, + prompt: bool = False, + max_retries: int = 5, + ) -> Tuple[bool, str]: + """ + Commits a hash of the neuron's weights to the Bittensor blockchain using the provided wallet. + This action serves as a commitment or snapshot of the neuron's current weight distribution. + Args: + wallet (bittensor.wallet): The wallet associated with the neuron committing the weights. + netuid (int): The unique identifier of the subnet. + wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. + wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. + prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. 
+ max_retries (int, optional): The number of maximum attempts to commit weights. (Default: 5) + Returns: + Tuple[bool, str]: ``True`` if the weight commitment is successful, False otherwise. And `msg`, a string + value describing the success or potential error. + This function allows neurons to create a tamper-proof record of their weight distribution at a specific point in time, + enhancing transparency and accountability within the Bittensor network. + """ + uid = self.get_uid_for_hotkey_on_subnet(wallet.hotkey.ss58_address, netuid) + retries = 0 + success = False + message = "No attempt made. Perhaps it is too soon to commit weights!" + + # Generate the hash of the weights + commit_hash = weight_utils.generate_weight_hash( + who=wallet.hotkey.ss58_address, + netuid=netuid, + uids=uids.tolist(), + values=weights.tolist(), + version_key=0, + ) + + while retries < max_retries: + try: + success, message = commit_weights_extrinsic( + subtensor=self, + wallet=wallet, + netuid=netuid, + commit_hash=commit_hash, + wait_for_inclusion=wait_for_inclusion, + wait_for_finalization=wait_for_finalization, + prompt=prompt, + ) + if success: + break + except Exception as e: + bittensor.logging.error(f"Error committing weights: {e}") + finally: + retries += 1 + + return success, message + + def _do_commit_weights( + self, + wallet: "bittensor.wallet", + netuid: int, + commit_hash: str, + wait_for_inclusion: bool = False, + wait_for_finalization: bool = False, + ) -> Tuple[bool, Optional[str]]: + """ + Internal method to send a transaction to the Bittensor blockchain, committing the hash of a neuron's weights. + This method constructs and submits the transaction, handling retries and blockchain communication. + Args: + wallet (bittensor.wallet): The wallet associated with the neuron committing the weights. + netuid (int): The unique identifier of the subnet. + commit_hash (str): The hash of the neuron's weights to be committed. + wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. + wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. + Returns: + Tuple[bool, Optional[str]]: A tuple containing a success flag and an optional error message. + This method ensures that the weight commitment is securely recorded on the Bittensor blockchain, providing a verifiable + record of the neuron's weight distribution at a specific point in time. 
+ """ + + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) + def make_substrate_call_with_retry(): + call = self.substrate.compose_call( + call_module="SubtensorModule", + call_function="commit_weights", + call_params={ + "netuid": netuid, + "commit_hash": commit_hash, + }, + ) + extrinsic = self.substrate.create_signed_extrinsic( + call=call, + keypair=wallet.hotkey, + ) + response = self.substrate.submit_extrinsic( + extrinsic, + wait_for_inclusion=wait_for_inclusion, + wait_for_finalization=wait_for_finalization, + ) + + if not wait_for_finalization and not wait_for_inclusion: + return True, None + + response.process_events() + if response.is_success: + return True, None + else: + return False, response.error_message + + return make_substrate_call_with_retry() + + ################## + # Reveal Weights # + ################## + def reveal_weights( + self, + wallet: "bittensor.wallet", + netuid: int, + uids: torch.Tensor, + weights: torch.Tensor, + version_key: int, + wait_for_inclusion: bool = False, + wait_for_finalization: bool = False, + prompt: bool = False, + max_retries: int = 5, + ) -> Tuple[bool, str]: + """ + Reveals the weights for a specific subnet on the Bittensor blockchain using the provided wallet. + This action serves as a revelation of the neuron's previously committed weight distribution. + Args: + wallet (bittensor.wallet): The wallet associated with the neuron revealing the weights. + netuid (int): The unique identifier of the subnet. + uids (torch.Tensor): Tensor of neuron UIDs for which weights are being revealed. + weights (torch.Tensor): Tensor of weight values corresponding to each UID. + version_key (int): Version key for compatibility with the network. + wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. + wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. + prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. + max_retries (int, optional): The number of maximum attempts to reveal weights. (Default: 5) + Returns: + Tuple[bool, str]: ``True`` if the weight revelation is successful, False otherwise. And `msg`, a string + value describing the success or potential error. + This function allows neurons to reveal their previously committed weight distribution, ensuring transparency + and accountability within the Bittensor network. + """ + uid = self.get_uid_for_hotkey_on_subnet(wallet.hotkey.ss58_address, netuid) + retries = 0 + success = False + message = "No attempt made. Perhaps it is too soon to reveal weights!" + + while retries < max_retries: + try: + success, message = reveal_weights_extrinsic( + subtensor=self, + wallet=wallet, + netuid=netuid, + uids=uids.tolist(), + weights=weights.tolist(), + version_key=version_key, + wait_for_inclusion=wait_for_inclusion, + wait_for_finalization=wait_for_finalization, + prompt=prompt, + ) + if success: + break + except Exception as e: + bittensor.logging.error(f"Error revealing weights: {e}") + finally: + retries += 1 + + return success, message + + def _do_reveal_weights( + self, + wallet: "bittensor.wallet", + netuid: int, + uids: List[int], + values: List[int], + version_key: int, + wait_for_inclusion: bool = False, + wait_for_finalization: bool = False, + ) -> Tuple[bool, Optional[str]]: + """ + Internal method to send a transaction to the Bittensor blockchain, revealing the weights for a specific subnet. 
+ This method constructs and submits the transaction, handling retries and blockchain communication. + Args: + wallet (bittensor.wallet): The wallet associated with the neuron revealing the weights. + netuid (int): The unique identifier of the subnet. + uids (List[int]): List of neuron UIDs for which weights are being revealed. + values (List[int]): List of weight values corresponding to each UID. + version_key (int): Version key for compatibility with the network. + wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. + wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. + Returns: + Tuple[bool, Optional[str]]: A tuple containing a success flag and an optional error message. + This method ensures that the weight revelation is securely recorded on the Bittensor blockchain, providing transparency + and accountability for the neuron's weight distribution. + """ + + @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + def make_substrate_call_with_retry(): + call = self.substrate.compose_call( + call_module="SubtensorModule", + call_function="reveal_weights", + call_params={ + "netuid": netuid, + "uids": uids, + "values": values, + "version_key": version_key, + }, + ) + extrinsic = self.substrate.create_signed_extrinsic( + call=call, + keypair=wallet.hotkey, + ) + response = self.substrate.submit_extrinsic( + extrinsic, + wait_for_inclusion=wait_for_inclusion, + wait_for_finalization=wait_for_finalization, + ) + + if not wait_for_finalization and not wait_for_inclusion: + return True, None + + response.process_events() + if response.is_success: + return True, None + else: + return False, response.error_message + + return make_substrate_call_with_retry() + ################ # Registration # ################ diff --git a/bittensor/utils/weight_utils.py b/bittensor/utils/weight_utils.py index 9bd8606c9d..c78095008d 100644 --- a/bittensor/utils/weight_utils.py +++ b/bittensor/utils/weight_utils.py @@ -20,6 +20,7 @@ import numpy as np import bittensor +import hashlib from numpy.typing import NDArray from typing import Tuple, List, Union from bittensor.utils.registration import torch, use_torch, legacy_torch_api_compat @@ -341,3 +342,29 @@ def process_weights_for_netuid( bittensor.logging.debug("final_weights", normalized_weights) return non_zero_weight_uids, normalized_weights + + +def generate_weight_hash( + who: str, netuid: int, uids: List[int], values: List[int], version_key: int +) -> str: + """ + Generate a valid commit hash from the provided weights. + Args: + who (str): The account identifier. + netuid (int): The network unique identifier. + uids (List[int]): The list of UIDs. + values (List[int]): The list of weight values. + version_key (int): The version key. + Returns: + str: The generated commit hash. 
+ """ + # Create a tuple of the input parameters + data = (who, netuid, uids, values, version_key) + + # Generate Blake2b hash of the data tuple + blake2b_hash = hashlib.blake2b(str(data).encode(), digest_size=32).digest() + + # Convert the hash to hex string and add "0x" prefix + commit_hash = "0x" + blake2b_hash.hex() + + return commit_hash diff --git a/tests/integration_tests/test_cli_no_network.py b/tests/integration_tests/test_cli_no_network.py index 18eb26cd8e..6d6d24f58a 100644 --- a/tests/integration_tests/test_cli_no_network.py +++ b/tests/integration_tests/test_cli_no_network.py @@ -427,7 +427,9 @@ def test_command_no_args(self, _, __, patched_prompt_ask): "stakes", "roots", "wallets", + "weights", "st", + "wt", "su", ] # Skip duplicate aliases ] diff --git a/tests/integration_tests/test_subtensor_integration.py b/tests/integration_tests/test_subtensor_integration.py index 845a73ee7d..ea9f2d2822 100644 --- a/tests/integration_tests/test_subtensor_integration.py +++ b/tests/integration_tests/test_subtensor_integration.py @@ -17,20 +17,18 @@ # DEALINGS IN THE SOFTWARE. import random -import socket -import os import unittest from queue import Empty as QueueEmpty from unittest.mock import MagicMock, patch -from types import SimpleNamespace + +import pytest +import torch +from substrateinterface import Keypair import bittensor from bittensor.mock import MockSubtensor -import pytest from bittensor.utils.balance import Balance -from substrateinterface import Keypair from tests.helpers import ( - _get_mock_hotkey, _get_mock_coldkey, MockConsole, _get_mock_keypair, @@ -362,6 +360,216 @@ def test_set_weights_failed(self): ) assert fail == False + def test_commit_weights(self): + weights = torch.FloatTensor([0.1, 0.2, 0.3, 0.4]) + uids = torch.tensor([1, 2, 3, 4], dtype=torch.int64) + commit_hash = bittensor.utils.weight_utils.generate_weight_hash( + who=self.wallet.hotkey.ss58_address, + netuid=3, + uids=uids.tolist(), + values=weights.tolist(), + version_key=0, + ) + + self.subtensor.commit_weights = MagicMock( + return_value=(True, "Successfully committed weights.") + ) + self.subtensor._do_commit_weights = MagicMock(return_value=(True, None)) + + success, message = self.subtensor.commit_weights( + wallet=self.wallet, + netuid=3, + uids=uids, + weights=weights, + ) + assert success == True + assert message == "Successfully committed weights." + + def test_commit_weights_inclusion(self): + weights = torch.FloatTensor([0.1, 0.2, 0.3, 0.4]) + uids = torch.tensor([1, 2, 3, 4], dtype=torch.int64) + commit_hash = bittensor.utils.weight_utils.generate_weight_hash( + who=self.wallet.hotkey.ss58_address, + netuid=1, + uids=uids.tolist(), + values=weights.tolist(), + version_key=0, + ) + + self.subtensor._do_commit_weights = MagicMock(return_value=(True, None)) + self.subtensor.commit_weights = MagicMock( + return_value=(True, "Successfully committed weights.") + ) + + success, message = self.subtensor.commit_weights( + wallet=self.wallet, + netuid=1, + uids=uids, + weights=weights, + wait_for_inclusion=True, + ) + assert success == True + assert message == "Successfully committed weights." 
+ + def test_commit_weights_failed(self): + weights = torch.FloatTensor([0.1, 0.2, 0.3, 0.4]) + uids = torch.tensor([1, 2, 3, 4], dtype=torch.int64) + commit_hash = bittensor.utils.weight_utils.generate_weight_hash( + who=self.wallet.hotkey.ss58_address, + netuid=3, + uids=uids.tolist(), + values=weights.tolist(), + version_key=0, + ) + + self.subtensor._do_commit_weights = MagicMock( + return_value=(False, "Mock failure message") + ) + self.subtensor.commit_weights = MagicMock( + return_value=(False, "Mock failure message") + ) + + success, message = self.subtensor.commit_weights( + wallet=self.wallet, + netuid=3, + uids=uids, + weights=weights, + wait_for_inclusion=True, + ) + assert success == False + assert message == "Mock failure message" + + def test_reveal_weights(self): + weights = torch.FloatTensor([0.1, 0.2, 0.3, 0.4]) + uids = torch.tensor([1, 2, 3, 4], dtype=torch.int64) + + self.subtensor.reveal_weights = MagicMock( + return_value=(True, "Successfully revealed weights.") + ) + self.subtensor._do_reveal_weights = MagicMock(return_value=(True, None)) + + success, message = self.subtensor.reveal_weights( + wallet=self.wallet, netuid=3, uids=uids, weights=weights, version_key=0 + ) + assert success == True + assert message == "Successfully revealed weights." + + def test_reveal_weights_inclusion(self): + weights = torch.FloatTensor([0.1, 0.2, 0.3, 0.4]) + uids = torch.tensor([1, 2, 3, 4], dtype=torch.int64) + + self.subtensor._do_reveal_weights = MagicMock(return_value=(True, None)) + self.subtensor.reveal_weights = MagicMock( + return_value=(True, "Successfully revealed weights.") + ) + + success, message = self.subtensor.reveal_weights( + wallet=self.wallet, + netuid=1, + uids=uids, + weights=weights, + version_key=0, + wait_for_inclusion=True, + ) + assert success == True + assert message == "Successfully revealed weights." + + def test_reveal_weights_failed(self): + weights = torch.FloatTensor([0.1, 0.2, 0.3, 0.4]) + uids = torch.tensor([1, 2, 3, 4], dtype=torch.int64) + + self.subtensor._do_reveal_weights = MagicMock( + return_value=(False, "Mock failure message") + ) + self.subtensor.reveal_weights = MagicMock( + return_value=(False, "Mock failure message") + ) + + success, message = self.subtensor.reveal_weights( + wallet=self.wallet, + netuid=3, + uids=uids, + weights=weights, + version_key=0, + wait_for_inclusion=True, + ) + assert success == False + assert message == "Mock failure message" + + def test_commit_and_reveal_weights(self): + weights = torch.FloatTensor([0.1, 0.2, 0.3, 0.4]) + uids = torch.tensor([1, 2, 3, 4], dtype=torch.int64) + version_key = 0 + + # Mock the commit_weights and reveal_weights functions + self.subtensor.commit_weights = MagicMock( + return_value=(True, "Successfully committed weights.") + ) + self.subtensor._do_commit_weights = MagicMock(return_value=(True, None)) + self.subtensor.reveal_weights = MagicMock( + return_value=(True, "Successfully revealed weights.") + ) + self.subtensor._do_reveal_weights = MagicMock(return_value=(True, None)) + + # Commit weights + commit_success, commit_message = self.subtensor.commit_weights( + wallet=self.wallet, + netuid=3, + uids=uids, + weights=weights, + ) + assert commit_success == True + assert commit_message == "Successfully committed weights." 
+ + # Reveal weights + reveal_success, reveal_message = self.subtensor.reveal_weights( + wallet=self.wallet, + netuid=3, + uids=uids, + weights=weights, + version_key=version_key, + ) + assert reveal_success == True + assert reveal_message == "Successfully revealed weights." + + def test_commit_and_reveal_weights_inclusion(self): + weights = torch.FloatTensor([0.1, 0.2, 0.3, 0.4]) + uids = torch.tensor([1, 2, 3, 4], dtype=torch.int64) + version_key = 0 + + # Mock the commit_weights and reveal_weights functions + self.subtensor.commit_weights = MagicMock( + return_value=(True, "Successfully committed weights.") + ) + self.subtensor._do_commit_weights = MagicMock(return_value=(True, None)) + self.subtensor.reveal_weights = MagicMock( + return_value=(True, "Successfully revealed weights.") + ) + self.subtensor._do_reveal_weights = MagicMock(return_value=(True, None)) + + # Commit weights with wait_for_inclusion + commit_success, commit_message = self.subtensor.commit_weights( + wallet=self.wallet, + netuid=1, + uids=uids, + weights=weights, + wait_for_inclusion=True, + ) + assert commit_success == True + assert commit_message == "Successfully committed weights." + + # Reveal weights with wait_for_inclusion + reveal_success, reveal_message = self.subtensor.reveal_weights( + wallet=self.wallet, + netuid=1, + uids=uids, + weights=weights, + version_key=version_key, + wait_for_inclusion=True, + ) + assert reveal_success == True + assert reveal_message == "Successfully revealed weights." + def test_get_balance(self): fake_coldkey = _get_mock_coldkey(0) balance = self.subtensor.get_balance(address=fake_coldkey) From e80f9dc33c12224e85e18dddebe869c62e8f3f4b Mon Sep 17 00:00:00 2001 From: ibraheem-opentensor Date: Fri, 24 May 2024 15:25:53 -0700 Subject: [PATCH 037/116] chore - fix take message --- bittensor/commands/delegates.py | 4 +--- bittensor/extrinsics/set_weights.py | 1 - contrib/RELEASE_GUIDELINES.md | 1 + 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/bittensor/commands/delegates.py b/bittensor/commands/delegates.py index 01767cc236..782e81ac84 100644 --- a/bittensor/commands/delegates.py +++ b/bittensor/commands/delegates.py @@ -1172,9 +1172,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): new_take = float(new_take_str) if new_take > 0.18: - bittensor.__console__.print( - "ERROR: Take value should be in the range of 0 to 18%" - ) + bittensor.__console__.print("ERROR: Take value should be below 18%") return result: bool = subtensor.set_take( diff --git a/bittensor/extrinsics/set_weights.py b/bittensor/extrinsics/set_weights.py index 2a8c972aa3..5db0a1a7a9 100644 --- a/bittensor/extrinsics/set_weights.py +++ b/bittensor/extrinsics/set_weights.py @@ -129,7 +129,6 @@ def set_weights_extrinsic( return False, error_message except Exception as e: - # TODO( devs ): lets remove all of the bittensor.__console__ calls and replace with loguru. bittensor.__console__.print( ":cross_mark: [red]Failed[/red]: error:{}".format(e) ) diff --git a/contrib/RELEASE_GUIDELINES.md b/contrib/RELEASE_GUIDELINES.md index e2f7124751..d6bda7c860 100644 --- a/contrib/RELEASE_GUIDELINES.md +++ b/contrib/RELEASE_GUIDELINES.md @@ -14,6 +14,7 @@ If you are new in this role, ask for the proper setup you need to run this proce ## Process of release 1. Create a branch called `release/VERSION`, having VERSION with the version to release. +1. Make sure twine is installed: `pip install twine` 1. Within the release branch: 1. 
Update the version executing:`./scripts/release/versioning.sh --update UPDATE_TYPE` 1. **UPDATE_TYPE** could be *major*, *minor* or *patch*. From 5901f864df4d6d1f474efce459e3263022c9b8c2 Mon Sep 17 00:00:00 2001 From: ibraheem-opentensor Date: Fri, 24 May 2024 15:46:27 -0700 Subject: [PATCH 038/116] Improved err_msg --- bittensor/commands/delegates.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bittensor/commands/delegates.py b/bittensor/commands/delegates.py index 782e81ac84..1fd475785c 100644 --- a/bittensor/commands/delegates.py +++ b/bittensor/commands/delegates.py @@ -1172,7 +1172,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): new_take = float(new_take_str) if new_take > 0.18: - bittensor.__console__.print("ERROR: Take value should be below 18%") + bittensor.__console__.print("ERROR: Take value should not exceed 18%") return result: bool = subtensor.set_take( From 92b3b22cb4d6a6229b72b64b90a2cb48a658ffa3 Mon Sep 17 00:00:00 2001 From: opendansor Date: Fri, 24 May 2024 16:34:34 -0700 Subject: [PATCH 039/116] - remove torch - add e2e test - encode data using SCALE codec --- bittensor/chain_data.py | 6 + bittensor/cli.py | 12 +- bittensor/commands/network.py | 2 + bittensor/commands/weights.py | 61 ++++-- bittensor/subtensor.py | 56 ++++-- bittensor/utils/weight_utils.py | 31 ++- .../e2e_tests/subcommands/weights/__init__.py | 0 .../weights/test_commit_weights.py | 179 ++++++++++++++++++ .../integration_tests/test_cli_no_network.py | 2 +- .../test_subtensor_integration.py | 33 ++-- 10 files changed, 317 insertions(+), 65 deletions(-) create mode 100644 tests/e2e_tests/subcommands/weights/__init__.py create mode 100644 tests/e2e_tests/subcommands/weights/test_commit_weights.py diff --git a/bittensor/chain_data.py b/bittensor/chain_data.py index 548fa40ede..9140415506 100644 --- a/bittensor/chain_data.py +++ b/bittensor/chain_data.py @@ -177,6 +177,8 @@ ["max_validators", "Compact"], ["adjustment_alpha", "Compact"], ["difficulty", "Compact"], + ["commit_reveal_weights_interval", "Compact"], + ["commit_reveal_weights_enabled", "bool"], ], }, } @@ -1074,6 +1076,8 @@ class SubnetHyperparameters: max_validators: int adjustment_alpha: int difficulty: int + commit_reveal_weights_interval: int + commit_reveal_weights_enabled: bool @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> Optional["SubnetHyperparameters"]: @@ -1128,6 +1132,8 @@ def fix_decoded_values(cls, decoded: Dict) -> "SubnetHyperparameters": bonds_moving_avg=decoded["bonds_moving_avg"], adjustment_alpha=decoded["adjustment_alpha"], difficulty=decoded["difficulty"], + commit_reveal_weights_interval=decoded["commit_reveal_weights_interval"], + commit_reveal_weights_enabled=decoded["commit_reveal_weights_enabled"], ) def _to_parameter_dict_torch( diff --git a/bittensor/cli.py b/bittensor/cli.py index 49e188317c..931c5d8d14 100644 --- a/bittensor/cli.py +++ b/bittensor/cli.py @@ -95,9 +95,9 @@ "sudos": "sudo", "i": "info", "info": "info", - "weights": "weight", - "wt": "weight", - "weight": "weight", + "weights": "weights", + "wt": "weights", + "weight": "weights", } COMMANDS = { "subnets": { @@ -171,9 +171,9 @@ "remove": UnStakeCommand, }, }, - "weight": { - "name": "weight", - "aliases": ["wt", "weights"], + "weights": { + "name": "weights", + "aliases": ["wt", "weight"], "help": "Commands for managing weight for subnets.", "commands": { "commit": CommitWeightCommand, diff --git a/bittensor/commands/network.py b/bittensor/commands/network.py index 64fbd272f6..f20eac67a6 100644 --- 
a/bittensor/commands/network.py +++ b/bittensor/commands/network.py @@ -323,6 +323,8 @@ def add_args(parser: argparse.ArgumentParser): "kappa": "sudo_set_kappa", "difficulty": "sudo_set_difficulty", "bonds_moving_avg": "sudo_set_bonds_moving_average", + "commit_reveal_weights_interval": "sudo_set_commit_reveal_weights_interval", + "commit_reveal_weights_enabled": "sudo_set_commit_reveal_weights_enabled", } diff --git a/bittensor/commands/weights.py b/bittensor/commands/weights.py index 9499c60c95..f62680a682 100644 --- a/bittensor/commands/weights.py +++ b/bittensor/commands/weights.py @@ -1,9 +1,27 @@ +# The MIT License (MIT) +# Copyright © 2021 Yuma Rao +# Copyright © 2023 Opentensor Foundation + +# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated +# documentation files (the “Software”), to deal in the Software without restriction, including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, +# and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all copies or substantial portions of +# the Software. + +# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO +# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +# DEALINGS IN THE SOFTWARE. + import argparse import re -import torch +import numpy as np from rich.prompt import Prompt - +import bittensor.utils.weight_utils as weight_utils import bittensor from . 
import defaults @@ -55,21 +73,22 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): # Parse from string netuid = cli.config.netuid - uids = torch.tensor( + uids = np.array( list(map(int, re.split(r"[ ,]+", cli.config.uids))), - dtype=torch.int64 + dtype=np.int64 ) - weights = torch.tensor( - list(map(int, re.split(r"[ ,]+", cli.config.weights))), - dtype=torch.float32 + weights = np.array( + list(map(float, re.split(r"[ ,]+", cli.config.weights))), + dtype=np.float32 ) + weight_uids, weight_vals = weight_utils.convert_weights_and_uids_for_emit(uids=uids, weights=weights) # Run the commit weights operation success, message = subtensor.commit_weights( wallet=wallet, netuid=netuid, - uids=uids, - weights=weights, + uids=weight_uids, + weights=weight_vals, wait_for_inclusion=cli.config.wait_for_inclusion, wait_for_finalization=cli.config.wait_for_finalization, prompt=cli.config.prompt, @@ -105,7 +124,7 @@ def add_args(parser: argparse.ArgumentParser): "--prompt", dest="prompt", action="store_true", - default=True, + default=False, ) bittensor.wallet.add_args(parser) @@ -140,9 +159,7 @@ class RevealWeightCommand: def run(cli: "bittensor.cli"): r"""Reveal weights for a specific subnet.""" try: - subtensor: "bittensor.subtensor" = bittensor.subtensor( - config=cli.config, log_verbose=False - ) + subtensor: "bittensor.subtensor" = bittensor.subtensor(config=cli.config, log_verbose=False) RevealWeightCommand._run(cli, subtensor) finally: if "subtensor" in locals(): @@ -166,22 +183,24 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): # Parse from string netuid = cli.config.netuid - uids = torch.tensor( + version = bittensor.__version_as_int__ + uids = np.array( list(map(int, re.split(r"[ ,]+", cli.config.uids))), - dtype=torch.int64, + dtype=np.int64, ) - weights = torch.tensor( + weights = np.array( list(map(float, re.split(r"[ ,]+", cli.config.weights))), - dtype=torch.float32, + dtype=np.float32, ) + weight_uids, weight_vals = weight_utils.convert_weights_and_uids_for_emit(uids=uids, weights=weights) # Run the reveal weights operation. success, message = subtensor.reveal_weights( wallet=wallet, netuid=netuid, - uids=uids, - weights=weights, - version_key=0, + uids=weight_uids, + weights=weight_vals, + version_key=version, wait_for_inclusion=cli.config.wait_for_inclusion, wait_for_finalization=cli.config.wait_for_finalization, prompt=cli.config.prompt, @@ -216,7 +235,7 @@ def add_args(parser: argparse.ArgumentParser): "--prompt", dest="prompt", action="store_true", - default=True, + default=False, ) bittensor.wallet.add_args(parser) diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 2ed618fd5f..34d50bd9d0 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -910,8 +910,9 @@ def commit_weights( self, wallet: "bittensor.wallet", netuid: int, - uids: torch.Tensor, - weights: torch.Tensor, + uids: Union[NDArray[np.int64], list], + weights: Union[NDArray[np.float32], list], + version_key: int = bittensor.__version_as_int__, wait_for_inclusion: bool = False, wait_for_finalization: bool = False, prompt: bool = False, @@ -920,16 +921,22 @@ def commit_weights( """ Commits a hash of the neuron's weights to the Bittensor blockchain using the provided wallet. This action serves as a commitment or snapshot of the neuron's current weight distribution. + Args: wallet (bittensor.wallet): The wallet associated with the neuron committing the weights. netuid (int): The unique identifier of the subnet. 
+ uids (np.ndarray): NumPy array of neuron UIDs for which weights are being committed. + weights (np.ndarray): NumPy array of weight values corresponding to each UID. + version_key (int, optional): Version key for compatibility with the network. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. max_retries (int, optional): The number of maximum attempts to commit weights. (Default: 5) + Returns: Tuple[bool, str]: ``True`` if the weight commitment is successful, False otherwise. And `msg`, a string value describing the success or potential error. + This function allows neurons to create a tamper-proof record of their weight distribution at a specific point in time, enhancing transparency and accountability within the Bittensor network. """ @@ -938,15 +945,23 @@ def commit_weights( success = False message = "No attempt made. Perhaps it is too soon to commit weights!" + _logger.info( + "Committing weights with params: netuid={}, uids={}, weights={}, version_key={}".format( + netuid, uids, weights, version_key + ) + ) + # Generate the hash of the weights commit_hash = weight_utils.generate_weight_hash( who=wallet.hotkey.ss58_address, netuid=netuid, - uids=uids.tolist(), - values=weights.tolist(), - version_key=0, + uids=uids, + values=weights, + version_key=version_key, ) + _logger.info("Commit Hash: {}".format(commit_hash)) + while retries < max_retries: try: success, message = commit_weights_extrinsic( @@ -978,16 +993,19 @@ def _do_commit_weights( """ Internal method to send a transaction to the Bittensor blockchain, committing the hash of a neuron's weights. This method constructs and submits the transaction, handling retries and blockchain communication. + Args: wallet (bittensor.wallet): The wallet associated with the neuron committing the weights. netuid (int): The unique identifier of the subnet. commit_hash (str): The hash of the neuron's weights to be committed. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. + Returns: Tuple[bool, Optional[str]]: A tuple containing a success flag and an optional error message. - This method ensures that the weight commitment is securely recorded on the Bittensor blockchain, providing a verifiable - record of the neuron's weight distribution at a specific point in time. + + This method ensures that the weight commitment is securely recorded on the Bittensor blockchain, providing a + verifiable record of the neuron's weight distribution at a specific point in time. """ @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) @@ -1028,9 +1046,9 @@ def reveal_weights( self, wallet: "bittensor.wallet", netuid: int, - uids: torch.Tensor, - weights: torch.Tensor, - version_key: int, + uids: Union[NDArray[np.int64], list], + weights: Union[NDArray[np.float32], list], + version_key: int = bittensor.__version_as_int__, wait_for_inclusion: bool = False, wait_for_finalization: bool = False, prompt: bool = False, @@ -1039,19 +1057,22 @@ def reveal_weights( """ Reveals the weights for a specific subnet on the Bittensor blockchain using the provided wallet. This action serves as a revelation of the neuron's previously committed weight distribution. 
+ Args: wallet (bittensor.wallet): The wallet associated with the neuron revealing the weights. netuid (int): The unique identifier of the subnet. - uids (torch.Tensor): Tensor of neuron UIDs for which weights are being revealed. - weights (torch.Tensor): Tensor of weight values corresponding to each UID. - version_key (int): Version key for compatibility with the network. + uids (np.ndarray): NumPy array of neuron UIDs for which weights are being revealed. + weights (np.ndarray): NumPy array of weight values corresponding to each UID. + version_key (int, optional): Version key for compatibility with the network. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. max_retries (int, optional): The number of maximum attempts to reveal weights. (Default: 5) + Returns: Tuple[bool, str]: ``True`` if the weight revelation is successful, False otherwise. And `msg`, a string value describing the success or potential error. + This function allows neurons to reveal their previously committed weight distribution, ensuring transparency and accountability within the Bittensor network. """ @@ -1066,8 +1087,8 @@ def reveal_weights( subtensor=self, wallet=wallet, netuid=netuid, - uids=uids.tolist(), - weights=weights.tolist(), + uids=uids, + weights=weights, version_key=version_key, wait_for_inclusion=wait_for_inclusion, wait_for_finalization=wait_for_finalization, @@ -1095,6 +1116,7 @@ def _do_reveal_weights( """ Internal method to send a transaction to the Bittensor blockchain, revealing the weights for a specific subnet. This method constructs and submits the transaction, handling retries and blockchain communication. + Args: wallet (bittensor.wallet): The wallet associated with the neuron revealing the weights. netuid (int): The unique identifier of the subnet. @@ -1103,13 +1125,15 @@ def _do_reveal_weights( version_key (int): Version key for compatibility with the network. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. + Returns: Tuple[bool, Optional[str]]: A tuple containing a success flag and an optional error message. + This method ensures that the weight revelation is securely recorded on the Bittensor blockchain, providing transparency and accountability for the neuron's weight distribution. 
""" - @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): call = self.substrate.compose_call( call_module="SubtensorModule", diff --git a/bittensor/utils/weight_utils.py b/bittensor/utils/weight_utils.py index c78095008d..94262cafc9 100644 --- a/bittensor/utils/weight_utils.py +++ b/bittensor/utils/weight_utils.py @@ -1,4 +1,5 @@ -""" Conversion for weight between chain representation and np.array or torch.Tensor +""" +Conversion for weight between chain representation and np.array or torch.Tensor """ # The MIT License (MIT) @@ -21,6 +22,9 @@ import numpy as np import bittensor import hashlib +import struct +from scalecodec import ScaleBytes, U16, Vec +from substrateinterface import Keypair from numpy.typing import NDArray from typing import Tuple, List, Union from bittensor.utils.registration import torch, use_torch, legacy_torch_api_compat @@ -349,22 +353,39 @@ def generate_weight_hash( ) -> str: """ Generate a valid commit hash from the provided weights. + Args: who (str): The account identifier. netuid (int): The network unique identifier. uids (List[int]): The list of UIDs. values (List[int]): The list of weight values. version_key (int): The version key. + Returns: str: The generated commit hash. """ - # Create a tuple of the input parameters - data = (who, netuid, uids, values, version_key) + # Encode data using SCALE codec + the_who = ScaleBytes(Keypair(ss58_address=who).public_key) + the_netuid = ScaleBytes(netuid.to_bytes(2, "little")) + + vec_uids = Vec(data=None, sub_type="U16") + vec_uids.value = [U16(ScaleBytes(uid.to_bytes(2, "little"))) for uid in uids] + the_uids = ScaleBytes(vec_uids.encode().data) + + vec_values = Vec(data=None, sub_type="U16") + vec_values.value = [ + U16(ScaleBytes(bytearray(struct.pack(" 0, f"Invalid block number: {commit_block}" + + # Query the WeightCommitRevealInterval storage map + weight_commit_reveal_interval = subtensor.query_module( + module="SubtensorModule", name="WeightCommitRevealInterval", params=[1] + ) + interval = weight_commit_reveal_interval.value + assert interval > 0, "Invalid WeightCommitRevealInterval" + + # Wait until the reveal block range + current_block = subtensor.get_current_block() + reveal_block_start = (commit_block - (commit_block % interval)) + interval + while current_block < reveal_block_start: + time.sleep(1) # Wait for 1 second before checking the block number again + current_block = subtensor.get_current_block() + if current_block % 10 == 0: + print(f"Current Block: {current_block} Revealing at: {reveal_block_start}") + + # Configure the CLI arguments for the RevealWeightCommand + exec_command( + RevealWeightCommand, + [ + "wt", + "reveal", + "--no_prompt", + "--netuid", + "1", + "--uids", + str(uid), + "--weights", + str(weights), + "--subtensor.network", + "local", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + "--wallet.path", + "/tmp/btcli-wallet", + ], + ) + + # Query the Weights storage map + revealed_weights = subtensor.query_module( + module="SubtensorModule", name="Weights", params=[1, uid] # netuid and uid + ) + + # Assert that the revealed weights are set correctly + assert revealed_weights.value is not None, "Weight reveal not found in storage" + + uid_list = list(map(int, re.split(r"[ ,]+", str(uid)))) + uids = np.array(uid_list, dtype=np.int64) + weight_list = list(map(float, re.split(r"[ ,]+", str(weights)))) + weights_array = np.array(weight_list, dtype=np.float32) + 
weight_uids, expected_weights = weight_utils.convert_weights_and_uids_for_emit( + uids, weights_array + ) + assert ( + expected_weights[0] == revealed_weights.value[0][1] + ), f"Incorrect revealed weights. Expected: {expected_weights[0]}, Actual: {revealed_weights.value[0][1]}" diff --git a/tests/integration_tests/test_cli_no_network.py b/tests/integration_tests/test_cli_no_network.py index 6d6d24f58a..d2bc78f3aa 100644 --- a/tests/integration_tests/test_cli_no_network.py +++ b/tests/integration_tests/test_cli_no_network.py @@ -427,7 +427,7 @@ def test_command_no_args(self, _, __, patched_prompt_ask): "stakes", "roots", "wallets", - "weights", + "weight", "st", "wt", "su", diff --git a/tests/integration_tests/test_subtensor_integration.py b/tests/integration_tests/test_subtensor_integration.py index ea9f2d2822..1ff50be3bf 100644 --- a/tests/integration_tests/test_subtensor_integration.py +++ b/tests/integration_tests/test_subtensor_integration.py @@ -24,6 +24,7 @@ import pytest import torch from substrateinterface import Keypair +import numpy as np import bittensor from bittensor.mock import MockSubtensor @@ -361,8 +362,8 @@ def test_set_weights_failed(self): assert fail == False def test_commit_weights(self): - weights = torch.FloatTensor([0.1, 0.2, 0.3, 0.4]) - uids = torch.tensor([1, 2, 3, 4], dtype=torch.int64) + weights = np.array([0.1, 0.2, 0.3, 0.4], dtype=np.float32) + uids = np.array([1, 2, 3, 4], dtype=np.int64) commit_hash = bittensor.utils.weight_utils.generate_weight_hash( who=self.wallet.hotkey.ss58_address, netuid=3, @@ -386,8 +387,8 @@ def test_commit_weights(self): assert message == "Successfully committed weights." def test_commit_weights_inclusion(self): - weights = torch.FloatTensor([0.1, 0.2, 0.3, 0.4]) - uids = torch.tensor([1, 2, 3, 4], dtype=torch.int64) + weights = np.array([0.1, 0.2, 0.3, 0.4], dtype=np.float32) + uids = np.array([1, 2, 3, 4], dtype=np.int64) commit_hash = bittensor.utils.weight_utils.generate_weight_hash( who=self.wallet.hotkey.ss58_address, netuid=1, @@ -412,8 +413,8 @@ def test_commit_weights_inclusion(self): assert message == "Successfully committed weights." def test_commit_weights_failed(self): - weights = torch.FloatTensor([0.1, 0.2, 0.3, 0.4]) - uids = torch.tensor([1, 2, 3, 4], dtype=torch.int64) + weights = np.array([0.1, 0.2, 0.3, 0.4], dtype=np.float32) + uids = np.array([1, 2, 3, 4], dtype=np.int64) commit_hash = bittensor.utils.weight_utils.generate_weight_hash( who=self.wallet.hotkey.ss58_address, netuid=3, @@ -440,8 +441,8 @@ def test_commit_weights_failed(self): assert message == "Mock failure message" def test_reveal_weights(self): - weights = torch.FloatTensor([0.1, 0.2, 0.3, 0.4]) - uids = torch.tensor([1, 2, 3, 4], dtype=torch.int64) + weights = np.array([0.1, 0.2, 0.3, 0.4], dtype=np.float32) + uids = np.array([1, 2, 3, 4], dtype=np.int64) self.subtensor.reveal_weights = MagicMock( return_value=(True, "Successfully revealed weights.") @@ -455,8 +456,8 @@ def test_reveal_weights(self): assert message == "Successfully revealed weights." def test_reveal_weights_inclusion(self): - weights = torch.FloatTensor([0.1, 0.2, 0.3, 0.4]) - uids = torch.tensor([1, 2, 3, 4], dtype=torch.int64) + weights = np.array([0.1, 0.2, 0.3, 0.4], dtype=np.float32) + uids = np.array([1, 2, 3, 4], dtype=np.int64) self.subtensor._do_reveal_weights = MagicMock(return_value=(True, None)) self.subtensor.reveal_weights = MagicMock( @@ -475,8 +476,8 @@ def test_reveal_weights_inclusion(self): assert message == "Successfully revealed weights." 
def test_reveal_weights_failed(self): - weights = torch.FloatTensor([0.1, 0.2, 0.3, 0.4]) - uids = torch.tensor([1, 2, 3, 4], dtype=torch.int64) + weights = np.array([0.1, 0.2, 0.3, 0.4], dtype=np.float32) + uids = np.array([1, 2, 3, 4], dtype=np.int64) self.subtensor._do_reveal_weights = MagicMock( return_value=(False, "Mock failure message") @@ -497,8 +498,8 @@ def test_reveal_weights_failed(self): assert message == "Mock failure message" def test_commit_and_reveal_weights(self): - weights = torch.FloatTensor([0.1, 0.2, 0.3, 0.4]) - uids = torch.tensor([1, 2, 3, 4], dtype=torch.int64) + weights = np.array([0.1, 0.2, 0.3, 0.4], dtype=np.float32) + uids = np.array([1, 2, 3, 4], dtype=np.int64) version_key = 0 # Mock the commit_weights and reveal_weights functions @@ -533,8 +534,8 @@ def test_commit_and_reveal_weights(self): assert reveal_message == "Successfully revealed weights." def test_commit_and_reveal_weights_inclusion(self): - weights = torch.FloatTensor([0.1, 0.2, 0.3, 0.4]) - uids = torch.tensor([1, 2, 3, 4], dtype=torch.int64) + weights = np.array([0.1, 0.2, 0.3, 0.4], dtype=np.float32) + uids = np.array([1, 2, 3, 4], dtype=np.int64) version_key = 0 # Mock the commit_weights and reveal_weights functions From ad0cf0353766e9e18da8f8190975a8e5c11af083 Mon Sep 17 00:00:00 2001 From: opendansor Date: Fri, 24 May 2024 16:38:19 -0700 Subject: [PATCH 040/116] black reformat --- bittensor/commands/weights.py | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/bittensor/commands/weights.py b/bittensor/commands/weights.py index f62680a682..510ef3d414 100644 --- a/bittensor/commands/weights.py +++ b/bittensor/commands/weights.py @@ -49,7 +49,9 @@ class CommitWeightCommand: def run(cli: "bittensor.cli"): r"""Commit weights for a specific subnet.""" try: - subtensor: "bittensor.subtensor" = bittensor.subtensor(config=cli.config, log_verbose=False) + subtensor: "bittensor.subtensor" = bittensor.subtensor( + config=cli.config, log_verbose=False + ) CommitWeightCommand._run(cli, subtensor) finally: if "subtensor" in locals(): @@ -74,14 +76,14 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): # Parse from string netuid = cli.config.netuid uids = np.array( - list(map(int, re.split(r"[ ,]+", cli.config.uids))), - dtype=np.int64 + list(map(int, re.split(r"[ ,]+", cli.config.uids))), dtype=np.int64 ) weights = np.array( - list(map(float, re.split(r"[ ,]+", cli.config.weights))), - dtype=np.float32 + list(map(float, re.split(r"[ ,]+", cli.config.weights))), dtype=np.float32 + ) + weight_uids, weight_vals = weight_utils.convert_weights_and_uids_for_emit( + uids=uids, weights=weights ) - weight_uids, weight_vals = weight_utils.convert_weights_and_uids_for_emit(uids=uids, weights=weights) # Run the commit weights operation success, message = subtensor.commit_weights( @@ -159,7 +161,9 @@ class RevealWeightCommand: def run(cli: "bittensor.cli"): r"""Reveal weights for a specific subnet.""" try: - subtensor: "bittensor.subtensor" = bittensor.subtensor(config=cli.config, log_verbose=False) + subtensor: "bittensor.subtensor" = bittensor.subtensor( + config=cli.config, log_verbose=False + ) RevealWeightCommand._run(cli, subtensor) finally: if "subtensor" in locals(): @@ -192,7 +196,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): list(map(float, re.split(r"[ ,]+", cli.config.weights))), dtype=np.float32, ) - weight_uids, weight_vals = weight_utils.convert_weights_and_uids_for_emit(uids=uids, weights=weights) + weight_uids, 
weight_vals = weight_utils.convert_weights_and_uids_for_emit( + uids=uids, weights=weights + ) # Run the reveal weights operation. success, message = subtensor.reveal_weights( From 84ac53fcb6d3de4f8afd574d0c134fc2b5bffb10 Mon Sep 17 00:00:00 2001 From: opendansor Date: Fri, 24 May 2024 17:04:00 -0700 Subject: [PATCH 041/116] mypy reformat --- bittensor/subtensor.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index c29c6e95bf..5a297d938d 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -955,8 +955,8 @@ def commit_weights( commit_hash = weight_utils.generate_weight_hash( who=wallet.hotkey.ss58_address, netuid=netuid, - uids=uids, - values=weights, + uids=list(uids), + values=list(weights), version_key=version_key, ) @@ -1087,8 +1087,8 @@ def reveal_weights( subtensor=self, wallet=wallet, netuid=netuid, - uids=uids, - weights=weights, + uids=list(uids), + weights=list(weights), version_key=version_key, wait_for_inclusion=wait_for_inclusion, wait_for_finalization=wait_for_finalization, From a8ced82d5233cda7323dde8d123f9bfcd38c3d86 Mon Sep 17 00:00:00 2001 From: opendansor Date: Tue, 28 May 2024 12:34:24 -0700 Subject: [PATCH 042/116] Add a salt function for hashing, address PR comments. --- bittensor/commands/weights.py | 40 ++++++++++--- bittensor/extrinsics/commit_weights.py | 11 +++- bittensor/subtensor.py | 12 +++- bittensor/utils/weight_utils.py | 29 +++++++--- .../weights/test_commit_weights.py | 4 +- .../test_subtensor_integration.py | 58 +++++++++++++------ 6 files changed, 112 insertions(+), 42 deletions(-) diff --git a/bittensor/commands/weights.py b/bittensor/commands/weights.py index 510ef3d414..a8da82e171 100644 --- a/bittensor/commands/weights.py +++ b/bittensor/commands/weights.py @@ -17,14 +17,18 @@ # DEALINGS IN THE SOFTWARE. import argparse +import os import re import numpy as np -from rich.prompt import Prompt +from rich.prompt import Prompt, Confirm import bittensor.utils.weight_utils as weight_utils import bittensor from . import defaults +"""Module that encapsulates the CommitWeightCommand and the RevealWeightCommand. Used to commit and reveal weights +for a specific subnet on the Bittensor Network.""" + class CommitWeightCommand: """ @@ -76,21 +80,30 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): # Parse from string netuid = cli.config.netuid uids = np.array( - list(map(int, re.split(r"[ ,]+", cli.config.uids))), dtype=np.int64 + [int(x) for x in re.split(r"[ ,]+", cli.config.uids)], dtype=np.int64 ) weights = np.array( - list(map(float, re.split(r"[ ,]+", cli.config.weights))), dtype=np.float32 + [float(x) for x in re.split(r"[ ,]+", cli.config.weights)], dtype=np.float32 ) weight_uids, weight_vals = weight_utils.convert_weights_and_uids_for_emit( uids=uids, weights=weights ) + # Generate random salt + salt_length = 8 + salt = list(os.urandom(salt_length)) + + if not Confirm.ask(f"Have you recorded the [red]salt[/red]: [bold white]'{salt}'[/bold white]? It will be " + f"required to reveal weights."): + return False, "User cancelled the operation." 
+ # Run the commit weights operation success, message = subtensor.commit_weights( wallet=wallet, netuid=netuid, uids=weight_uids, weights=weight_vals, + salt=salt, wait_for_inclusion=cli.config.wait_for_inclusion, wait_for_finalization=cli.config.wait_for_finalization, prompt=cli.config.prompt, @@ -110,6 +123,7 @@ def add_args(parser: argparse.ArgumentParser): parser.add_argument("--netuid", dest="netuid", type=int, required=False) parser.add_argument("--uids", dest="uids", type=str, required=False) parser.add_argument("--weights", dest="weights", type=str, required=False) + parser.add_argument("--salt", dest="salt", type=str, required=False) parser.add_argument( "--wait-for-inclusion", dest="wait_for_inclusion", @@ -134,10 +148,10 @@ def add_args(parser: argparse.ArgumentParser): @staticmethod def check_config(config: "bittensor.config"): - if not config.is_set("wallet.name") and not config.no_prompt: + if not config.no_prompt and not config.is_set("wallet.name"): wallet_name = Prompt.ask("Enter wallet name", default=defaults.wallet.name) config.wallet.name = str(wallet_name) - if not config.is_set("wallet.hotkey") and not config.no_prompt: + if not config.no_prompt and not config.is_set("wallet.hotkey"): hotkey = Prompt.ask("Enter hotkey name", default=defaults.wallet.hotkey) config.wallet.hotkey = str(hotkey) @@ -151,8 +165,9 @@ class RevealWeightCommand: - ``--netuid`` (int): The netuid of the subnet for which weights are to be revealed. - ``--uids`` (str): Corresponding UIDs for the specified netuid, in comma-separated format. - ``--weights`` (str): Corresponding weights for the specified UIDs, in comma-separated format. + - ``--salt`` (str): Corresponding salt for the hash function, integers in comma-separated format. Example usage:: - $ btcli wt reveal --netuid 1 --uids 1,2,3,4 --weights 0.1,0.2,0.3,0.4 + $ btcli wt reveal --netuid 1 --uids 1,2,3,4 --weights 0.1,0.2,0.3,0.4 --salt 163,241,217,11,161,142,147,189 Note: This command is used to reveal weights for a specific subnet and requires the user to have the necessary permissions. 
""" @@ -185,17 +200,24 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): if not cli.config.is_set("weights"): cli.config.weights = Prompt.ask(f"Enter weights (comma-separated)") + if not cli.config.is_set("salt"): + cli.config.salt = Prompt.ask(f"Enter salt (comma-separated)") + # Parse from string netuid = cli.config.netuid version = bittensor.__version_as_int__ uids = np.array( - list(map(int, re.split(r"[ ,]+", cli.config.uids))), + [int(x) for x in re.split(r"[ ,]+", cli.config.uids)], dtype=np.int64, ) weights = np.array( - list(map(float, re.split(r"[ ,]+", cli.config.weights))), + [float(x) for x in re.split(r"[ ,]+", cli.config.weights)], dtype=np.float32, ) + salt = np.array( + [int(x) for x in re.split(r"[ ,]+", cli.config.salt)], + dtype=np.int64, + ) weight_uids, weight_vals = weight_utils.convert_weights_and_uids_for_emit( uids=uids, weights=weights ) @@ -206,6 +228,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): netuid=netuid, uids=weight_uids, weights=weight_vals, + salt=salt, version_key=version, wait_for_inclusion=cli.config.wait_for_inclusion, wait_for_finalization=cli.config.wait_for_finalization, @@ -225,6 +248,7 @@ def add_args(parser: argparse.ArgumentParser): parser.add_argument("--netuid", dest="netuid", type=int, required=False) parser.add_argument("--uids", dest="uids", type=str, required=False) parser.add_argument("--weights", dest="weights", type=str, required=False) + parser.add_argument("--salt", dest="salt", type=str, required=False) parser.add_argument( "--wait-for-inclusion", dest="wait_for_inclusion", diff --git a/bittensor/extrinsics/commit_weights.py b/bittensor/extrinsics/commit_weights.py index 59fd2f2776..669a4bdcc1 100644 --- a/bittensor/extrinsics/commit_weights.py +++ b/bittensor/extrinsics/commit_weights.py @@ -17,9 +17,12 @@ # DEALINGS IN THE SOFTWARE. from typing import Tuple, List +from rich.prompt import Confirm import bittensor +""" Module commit weights and reveal weights extrinsic. """ + def commit_weights_extrinsic( subtensor: "bittensor.subtensor", @@ -47,9 +50,8 @@ def commit_weights_extrinsic( This function provides a user-friendly interface for committing weights to the Bittensor blockchain, ensuring proper error handling and user interaction when required. """ - if prompt: - if not input("Would you like to commit weights? (y/n) ").lower() == "y": - return False, "User cancelled the operation." + if prompt and not Confirm.ask(f"Would you like to commit weights?"): + return False, "User cancelled the operation." success, error_message = subtensor._do_commit_weights( wallet=wallet, @@ -73,6 +75,7 @@ def reveal_weights_extrinsic( netuid: int, uids: List[int], weights: List[int], + salt: List[int], version_key: int, wait_for_inclusion: bool = False, wait_for_finalization: bool = False, @@ -87,6 +90,7 @@ def reveal_weights_extrinsic( netuid (int): The unique identifier of the subnet. uids (List[int]): List of neuron UIDs for which weights are being revealed. weights (List[int]): List of weight values corresponding to each UID. + salt (List[int]): List of salt values corresponding to the hash function. version_key (int): Version key for compatibility with the network. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. 
@@ -106,6 +110,7 @@ def reveal_weights_extrinsic( netuid=netuid, uids=uids, values=weights, + salt=salt, version_key=version_key, wait_for_inclusion=wait_for_inclusion, wait_for_finalization=wait_for_finalization, diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 5a297d938d..3d8ea92107 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -23,6 +23,7 @@ import argparse import copy import functools +import os import socket import time from typing import List, Dict, Union, Optional, Tuple, TypedDict, Any @@ -910,6 +911,7 @@ def commit_weights( self, wallet: "bittensor.wallet", netuid: int, + salt: List[int], uids: Union[NDArray[np.int64], list], weights: Union[NDArray[np.float32], list], version_key: int = bittensor.__version_as_int__, @@ -925,6 +927,7 @@ def commit_weights( Args: wallet (bittensor.wallet): The wallet associated with the neuron committing the weights. netuid (int): The unique identifier of the subnet. + salt (List[int]): list of randomly generated integers as salt to generated weighted hash. uids (np.ndarray): NumPy array of neuron UIDs for which weights are being committed. weights (np.ndarray): NumPy array of weight values corresponding to each UID. version_key (int, optional): Version key for compatibility with the network. @@ -953,10 +956,11 @@ def commit_weights( # Generate the hash of the weights commit_hash = weight_utils.generate_weight_hash( - who=wallet.hotkey.ss58_address, + address=wallet.hotkey.ss58_address, netuid=netuid, uids=list(uids), values=list(weights), + salt=salt, version_key=version_key, ) @@ -1048,6 +1052,7 @@ def reveal_weights( netuid: int, uids: Union[NDArray[np.int64], list], weights: Union[NDArray[np.float32], list], + salt: Union[NDArray[np.int64], list], version_key: int = bittensor.__version_as_int__, wait_for_inclusion: bool = False, wait_for_finalization: bool = False, @@ -1063,6 +1068,7 @@ def reveal_weights( netuid (int): The unique identifier of the subnet. uids (np.ndarray): NumPy array of neuron UIDs for which weights are being revealed. weights (np.ndarray): NumPy array of weight values corresponding to each UID. + salt (np.ndarray): NumPy array of salt values corresponding to the hash function. version_key (int, optional): Version key for compatibility with the network. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. @@ -1089,6 +1095,7 @@ def reveal_weights( netuid=netuid, uids=list(uids), weights=list(weights), + salt=list(salt), version_key=version_key, wait_for_inclusion=wait_for_inclusion, wait_for_finalization=wait_for_finalization, @@ -1109,6 +1116,7 @@ def _do_reveal_weights( netuid: int, uids: List[int], values: List[int], + salt: List[int], version_key: int, wait_for_inclusion: bool = False, wait_for_finalization: bool = False, @@ -1122,6 +1130,7 @@ def _do_reveal_weights( netuid (int): The unique identifier of the subnet. uids (List[int]): List of neuron UIDs for which weights are being revealed. values (List[int]): List of weight values corresponding to each UID. + salt (List[int]): List of salt values corresponding to the hash function. version_key (int): Version key for compatibility with the network. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. 
@@ -1142,6 +1151,7 @@ def make_substrate_call_with_retry(): "netuid": netuid, "uids": uids, "values": values, + "salt": salt, "version_key": version_key, }, ) diff --git a/bittensor/utils/weight_utils.py b/bittensor/utils/weight_utils.py index 4f4f513d82..f624aa8a96 100644 --- a/bittensor/utils/weight_utils.py +++ b/bittensor/utils/weight_utils.py @@ -1,6 +1,7 @@ """ Conversion for weight between chain representation and np.array or torch.Tensor """ +import os # The MIT License (MIT) # Copyright © 2021 Yuma Rao @@ -349,15 +350,21 @@ def process_weights_for_netuid( def generate_weight_hash( - who: str, netuid: int, uids: List[int], values: List[int], version_key: int + address: str, + netuid: int, + uids: List[int], + values: List[int], + version_key: int, + salt: List[int], ) -> str: """ Generate a valid commit hash from the provided weights. Args: - who (str): The account identifier. + address (str): The account identifier. Wallet ss58_address. netuid (int): The network unique identifier. uids (List[int]): The list of UIDs. + salt (List[int]): The salt to add to hash. values (List[int]): The list of weight values. version_key (int): The version key. @@ -365,25 +372,29 @@ def generate_weight_hash( str: The generated commit hash. """ # Encode data using SCALE codec - the_who = ScaleBytes(Keypair(ss58_address=who).public_key) - the_netuid = ScaleBytes(netuid.to_bytes(2, "little")) + wallet_address = ScaleBytes(Keypair(ss58_address=address).public_key) + netuid = ScaleBytes(netuid.to_bytes(2, "little")) vec_uids = Vec(data=None, sub_type="U16") vec_uids.value = [U16(ScaleBytes(uid.to_bytes(2, "little"))) for uid in uids] - the_uids = ScaleBytes(vec_uids.encode().data) + uids = ScaleBytes(vec_uids.encode().data) vec_values = Vec(data=None, sub_type="U16") vec_values.value = [ U16(ScaleBytes(bytearray(struct.pack(" Date: Tue, 28 May 2024 12:49:23 -0700 Subject: [PATCH 043/116] Replace prompt with Confirm.ask, run black. --- bittensor/commands/weights.py | 6 ++++-- bittensor/extrinsics/commit_weights.py | 6 +++--- tests/integration_tests/test_subtensor_integration.py | 2 +- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/bittensor/commands/weights.py b/bittensor/commands/weights.py index a8da82e171..16f50eb726 100644 --- a/bittensor/commands/weights.py +++ b/bittensor/commands/weights.py @@ -93,8 +93,10 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): salt_length = 8 salt = list(os.urandom(salt_length)) - if not Confirm.ask(f"Have you recorded the [red]salt[/red]: [bold white]'{salt}'[/bold white]? It will be " - f"required to reveal weights."): + if not Confirm.ask( + f"Have you recorded the [red]salt[/red]: [bold white]'{salt}'[/bold white]? It will be " + f"required to reveal weights." + ): return False, "User cancelled the operation." # Run the commit weights operation diff --git a/bittensor/extrinsics/commit_weights.py b/bittensor/extrinsics/commit_weights.py index 669a4bdcc1..ee1b0ef0d7 100644 --- a/bittensor/extrinsics/commit_weights.py +++ b/bittensor/extrinsics/commit_weights.py @@ -101,9 +101,9 @@ def reveal_weights_extrinsic( This function provides a user-friendly interface for revealing weights on the Bittensor blockchain, ensuring proper error handling and user interaction when required. """ - if prompt: - if not input("Would you like to reveal weights? (y/n) ").lower() == "y": - return False, "User cancelled the operation." + + if prompt and not Confirm.ask(f"Would you like to reveal weights?"): + return False, "User cancelled the operation." 
success, error_message = subtensor._do_reveal_weights( wallet=wallet, diff --git a/tests/integration_tests/test_subtensor_integration.py b/tests/integration_tests/test_subtensor_integration.py index 672f8437e6..311f58687b 100644 --- a/tests/integration_tests/test_subtensor_integration.py +++ b/tests/integration_tests/test_subtensor_integration.py @@ -373,7 +373,7 @@ def test_commit_weights(self): version_key=0, ) - self.subtensor.commit_weights = MagicMock( + self.subtensor.commit_weights = mocker.MagicMock( return_value=(True, "Successfully committed weights.") ) self.subtensor._do_commit_weights = MagicMock(return_value=(True, None)) From 4fe438b0b86214ed447710267193e29f44e1645a Mon Sep 17 00:00:00 2001 From: opendansor Date: Tue, 28 May 2024 13:02:17 -0700 Subject: [PATCH 044/116] Remove mocker. --- tests/integration_tests/test_subtensor_integration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration_tests/test_subtensor_integration.py b/tests/integration_tests/test_subtensor_integration.py index 311f58687b..672f8437e6 100644 --- a/tests/integration_tests/test_subtensor_integration.py +++ b/tests/integration_tests/test_subtensor_integration.py @@ -373,7 +373,7 @@ def test_commit_weights(self): version_key=0, ) - self.subtensor.commit_weights = mocker.MagicMock( + self.subtensor.commit_weights = MagicMock( return_value=(True, "Successfully committed weights.") ) self.subtensor._do_commit_weights = MagicMock(return_value=(True, None)) From 3ab1d8e46e0d101f1d92d788c88e8481524940d4 Mon Sep 17 00:00:00 2001 From: opendansor Date: Tue, 28 May 2024 13:15:33 -0700 Subject: [PATCH 045/116] Replace some final legacy Pydantic references. --- bittensor/synapse.py | 4 ++-- tests/unit_tests/test_axon.py | 2 +- tests/unit_tests/test_dendrite.py | 2 +- tests/unit_tests/test_synapse.py | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/bittensor/synapse.py b/bittensor/synapse.py index 05edd8bc2e..53c53f0083 100644 --- a/bittensor/synapse.py +++ b/bittensor/synapse.py @@ -702,7 +702,7 @@ def body_hash(self) -> str: if required_hash_fields: instance_fields = self.model_dump() - # Preserve backward compatibility in which fields will added in .dict() order + # Preserve backward compatibility in which fields will added in .model_dump() order # instead of the order one from `self.required_hash_fields` required_hash_fields = [ field for field in instance_fields if field in required_hash_fields @@ -715,7 +715,7 @@ def body_hash(self) -> str: required_hash_fields = self.__class__.required_hash_fields if required_hash_fields: - instance_fields = instance_fields or self.dict() + instance_fields = instance_fields or self.model_dump() for field in required_hash_fields: hashes.append(bittensor.utils.hash(str(instance_fields[field]))) diff --git a/tests/unit_tests/test_axon.py b/tests/unit_tests/test_axon.py index cc2eb8824b..ec0d9f5e53 100644 --- a/tests/unit_tests/test_axon.py +++ b/tests/unit_tests/test_axon.py @@ -452,7 +452,7 @@ class SynapseHTTPClient(TestClient): def post_synapse(self, synapse: Synapse): return self.post( f"/{synapse.__class__.__name__}", - json=synapse.dict(), + json=synapse.model_dump(), headers={"computed_body_hash": synapse.body_hash}, ) diff --git a/tests/unit_tests/test_dendrite.py b/tests/unit_tests/test_dendrite.py index 36ccb2ecb2..c30ecc58fa 100644 --- a/tests/unit_tests/test_dendrite.py +++ b/tests/unit_tests/test_dendrite.py @@ -294,7 +294,7 @@ async def test_dendrite__call__success_response( input_synapse = SynapseDummy(input=1) 
expected_synapse = SynapseDummy( **( - input_synapse.dict() + input_synapse.model_dump() | dict( output=2, axon=TerminalInfo( diff --git a/tests/unit_tests/test_synapse.py b/tests/unit_tests/test_synapse.py index 37a0b76ec8..e28265dc24 100644 --- a/tests/unit_tests/test_synapse.py +++ b/tests/unit_tests/test_synapse.py @@ -189,7 +189,7 @@ def test_body_hash_override(): def test_default_instance_fields_dict_consistency(): synapse_instance = bittensor.Synapse() - assert synapse_instance.dict() == { + assert synapse_instance.model_dump() == { "name": "Synapse", "timeout": 12.0, "total_size": 0, From d7be84373b1eb531892a3c1575bbe9f74334dd6b Mon Sep 17 00:00:00 2001 From: opendansor Date: Tue, 28 May 2024 13:20:15 -0700 Subject: [PATCH 046/116] Flake8 --- bittensor/subtensor.py | 1 - bittensor/utils/weight_utils.py | 1 - 2 files changed, 2 deletions(-) diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 3d8ea92107..49ed50b6f1 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -23,7 +23,6 @@ import argparse import copy import functools -import os import socket import time from typing import List, Dict, Union, Optional, Tuple, TypedDict, Any diff --git a/bittensor/utils/weight_utils.py b/bittensor/utils/weight_utils.py index f624aa8a96..d8f6dd0035 100644 --- a/bittensor/utils/weight_utils.py +++ b/bittensor/utils/weight_utils.py @@ -1,7 +1,6 @@ """ Conversion for weight between chain representation and np.array or torch.Tensor """ -import os # The MIT License (MIT) # Copyright © 2021 Yuma Rao From 668fd29bd505a1c6fc3f78dc6f17c5d45efffcb0 Mon Sep 17 00:00:00 2001 From: opendansor Date: Tue, 28 May 2024 14:23:41 -0700 Subject: [PATCH 047/116] E2E Test --- bittensor/subtensor.py | 12 ++++++------ bittensor/utils/weight_utils.py | 13 +++++++------ 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 49ed50b6f1..5cc1d5da06 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -56,6 +56,10 @@ custom_rpc_type_registry, ) from .errors import IdentityError, NominationError, StakeError, TakeError +from .extrinsics.commit_weights import ( + commit_weights_extrinsic, + reveal_weights_extrinsic, +) from .extrinsics.delegation import ( delegate_extrinsic, nominate_extrinsic, @@ -86,10 +90,6 @@ publish_metadata, get_metadata, ) -from .extrinsics.commit_weights import ( - commit_weights_extrinsic, - reveal_weights_extrinsic, -) from .extrinsics.set_weights import set_weights_extrinsic from .extrinsics.staking import add_stake_extrinsic, add_stake_multiple_extrinsic from .extrinsics.transfer import transfer_extrinsic @@ -103,8 +103,8 @@ ) from .utils.balance import Balance from .utils.registration import POWSolution -from .utils.subtensor import get_subtensor_errors from .utils.registration import legacy_torch_api_compat +from .utils.subtensor import get_subtensor_errors KEY_NONCE: Dict[str, int] = {} @@ -912,7 +912,7 @@ def commit_weights( netuid: int, salt: List[int], uids: Union[NDArray[np.int64], list], - weights: Union[NDArray[np.float32], list], + weights: Union[NDArray[np.int64], list], version_key: int = bittensor.__version_as_int__, wait_for_inclusion: bool = False, wait_for_finalization: bool = False, diff --git a/bittensor/utils/weight_utils.py b/bittensor/utils/weight_utils.py index d8f6dd0035..c7e25cf6a5 100644 --- a/bittensor/utils/weight_utils.py +++ b/bittensor/utils/weight_utils.py @@ -19,14 +19,15 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 
SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. -import numpy as np -import bittensor import hashlib -import struct +from typing import Tuple, List, Union + +import numpy as np +from numpy.typing import NDArray from scalecodec import ScaleBytes, U16, Vec from substrateinterface import Keypair -from numpy.typing import NDArray -from typing import Tuple, List, Union + +import bittensor from bittensor.utils.registration import torch, use_torch, legacy_torch_api_compat U32_MAX = 4294967295 @@ -380,7 +381,7 @@ def generate_weight_hash( vec_values = Vec(data=None, sub_type="U16") vec_values.value = [ - U16(ScaleBytes(bytearray(struct.pack(" Date: Tue, 28 May 2024 14:29:09 -0700 Subject: [PATCH 048/116] Float -> int for np --- bittensor/commands/weights.py | 9 +++++---- bittensor/extrinsics/commit_weights.py | 5 +++-- bittensor/subtensor.py | 2 +- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/bittensor/commands/weights.py b/bittensor/commands/weights.py index 16f50eb726..9df3241bc0 100644 --- a/bittensor/commands/weights.py +++ b/bittensor/commands/weights.py @@ -16,6 +16,10 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. +"""Module that encapsulates the CommitWeightCommand and the RevealWeightCommand. Used to commit and reveal weights +for a specific subnet on the Bittensor Network.""" + + import argparse import os import re @@ -24,10 +28,7 @@ from rich.prompt import Prompt, Confirm import bittensor.utils.weight_utils as weight_utils import bittensor -from . import defaults - -"""Module that encapsulates the CommitWeightCommand and the RevealWeightCommand. Used to commit and reveal weights -for a specific subnet on the Bittensor Network.""" +from . import defaults # type: ignore class CommitWeightCommand: diff --git a/bittensor/extrinsics/commit_weights.py b/bittensor/extrinsics/commit_weights.py index ee1b0ef0d7..a27e1941ba 100644 --- a/bittensor/extrinsics/commit_weights.py +++ b/bittensor/extrinsics/commit_weights.py @@ -16,13 +16,14 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. +""" Module commit weights and reveal weights extrinsic. """ + from typing import Tuple, List + from rich.prompt import Confirm import bittensor -""" Module commit weights and reveal weights extrinsic. """ - def commit_weights_extrinsic( subtensor: "bittensor.subtensor", diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 5cc1d5da06..ff48a50288 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -1050,7 +1050,7 @@ def reveal_weights( wallet: "bittensor.wallet", netuid: int, uids: Union[NDArray[np.int64], list], - weights: Union[NDArray[np.float32], list], + weights: Union[NDArray[np.int64], list], salt: Union[NDArray[np.int64], list], version_key: int = bittensor.__version_as_int__, wait_for_inclusion: bool = False, From 80ec4d6965fdd2f3a3a38eafd8bc9741d71ebc64 Mon Sep 17 00:00:00 2001 From: opendansor Date: Tue, 28 May 2024 14:48:39 -0700 Subject: [PATCH 049/116] Fix test. Normalize weights before passing as params. 
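For context, "normalize" here means converting the float weights into the u16 (uid, value) pairs that the on-chain hash expects, via the same helper the CLI path already uses. A minimal illustrative sketch (example values only, not part of this change set):

    import numpy as np
    from bittensor.utils import weight_utils

    uids = np.array([1, 2, 3, 4], dtype=np.int64)
    weights = np.array([0.1, 0.2, 0.3, 0.4], dtype=np.float32)

    # Returns plain integer lists (uids, vals) suitable for
    # generate_weight_hash / commit_weights instead of raw floats.
    weight_uids, weight_vals = weight_utils.convert_weights_and_uids_for_emit(
        uids=uids, weights=weights
    )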
--- .../test_subtensor_integration.py | 28 ++++++++++++++----- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/tests/integration_tests/test_subtensor_integration.py b/tests/integration_tests/test_subtensor_integration.py index 672f8437e6..8849aeb9db 100644 --- a/tests/integration_tests/test_subtensor_integration.py +++ b/tests/integration_tests/test_subtensor_integration.py @@ -21,12 +21,13 @@ from queue import Empty as QueueEmpty from unittest.mock import MagicMock, patch +import numpy as np import pytest from substrateinterface import Keypair -import numpy as np import bittensor from bittensor.mock import MockSubtensor +from bittensor.utils import weight_utils from bittensor.utils.balance import Balance from tests.helpers import ( _get_mock_coldkey, @@ -364,11 +365,14 @@ def test_commit_weights(self): weights = np.array([0.1, 0.2, 0.3, 0.4], dtype=np.float32) uids = np.array([1, 2, 3, 4], dtype=np.int64) salt = np.array([1, 2, 3, 4, 5, 6, 7, 8], dtype=np.int64) + weight_uids, weight_vals = weight_utils.convert_weights_and_uids_for_emit( + uids=uids, weights=weights + ) commit_hash = bittensor.utils.weight_utils.generate_weight_hash( address=self.wallet.hotkey.ss58_address, netuid=3, - uids=uids.tolist(), - values=weights.tolist(), + uids=weight_uids, + values=weight_vals, salt=salt.tolist(), version_key=0, ) @@ -388,11 +392,16 @@ def test_commit_weights_inclusion(self): weights = np.array([0.1, 0.2, 0.3, 0.4], dtype=np.float32) uids = np.array([1, 2, 3, 4], dtype=np.int64) salt = np.array([1, 2, 3, 4, 5, 6, 7, 8], dtype=np.int64) + + weight_uids, weight_vals = weight_utils.convert_weights_and_uids_for_emit( + uids=uids, weights=weights + ) + commit_hash = bittensor.utils.weight_utils.generate_weight_hash( address=self.wallet.hotkey.ss58_address, netuid=1, - uids=uids.tolist(), - values=weights.tolist(), + uids=weight_uids, + values=weight_vals, salt=salt.tolist(), version_key=0, ) @@ -417,11 +426,16 @@ def test_commit_weights_failed(self): weights = np.array([0.1, 0.2, 0.3, 0.4], dtype=np.float32) uids = np.array([1, 2, 3, 4], dtype=np.int64) salt = np.array([1, 2, 3, 4, 5, 6, 7, 8], dtype=np.int64) + + weight_uids, weight_vals = weight_utils.convert_weights_and_uids_for_emit( + uids=uids, weights=weights + ) + commit_hash = bittensor.utils.weight_utils.generate_weight_hash( address=self.wallet.hotkey.ss58_address, netuid=3, - uids=uids.tolist(), - values=weights.tolist(), + uids=weight_uids, + values=weight_vals, salt=salt.tolist(), version_key=0, ) From c0ca5da839dd12d3dc2d1141c18768354c7d896b Mon Sep 17 00:00:00 2001 From: opendansor Date: Tue, 28 May 2024 14:57:04 -0700 Subject: [PATCH 050/116] Fix test. Normalize weights before passing as params. Set byte length to 2. 
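Each salt entry is encoded as a SCALE U16 inside generate_weight_hash, so it must be packed as exactly 2 little-endian bytes; the previous 3-byte packing does not match the 2-byte U16 layout. Rough illustration (example salt only):

    salt = [18, 179, 107, 0, 165, 211, 141, 197]
    # U16 expects 2 bytes per element; 3 bytes per element does not round-trip.
    encoded = [s.to_bytes(2, "little") for s in salt]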
--- bittensor/utils/weight_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bittensor/utils/weight_utils.py b/bittensor/utils/weight_utils.py index c7e25cf6a5..42f0a88380 100644 --- a/bittensor/utils/weight_utils.py +++ b/bittensor/utils/weight_utils.py @@ -388,7 +388,7 @@ def generate_weight_hash( version_key = ScaleBytes(version_key.to_bytes(8, "little")) vec_salt = Vec(data=None, sub_type="U16") - vec_salt.value = [U16(ScaleBytes(salts.to_bytes(3, "little"))) for salts in salt] + vec_salt.value = [U16(ScaleBytes(salts.to_bytes(2, "little"))) for salts in salt] salt = ScaleBytes(vec_salt.encode().data) data = wallet_address + netuid + uids + values + salt + version_key From 47f032ecde708067c6f2382fc584f7c9c9c6e5ab Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Wed, 29 May 2024 19:45:05 +0200 Subject: [PATCH 051/116] Clean up the imports in commands/stake.py --- bittensor/commands/stake.py | 35 +++++++++++++---------------------- 1 file changed, 13 insertions(+), 22 deletions(-) diff --git a/bittensor/commands/stake.py b/bittensor/commands/stake.py index 9a28cd4f13..8fe80b606a 100644 --- a/bittensor/commands/stake.py +++ b/bittensor/commands/stake.py @@ -15,14 +15,22 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. -import sys import argparse -import bittensor -from tqdm import tqdm +import os +import sys +from typing import List, Union, Optional, Dict, Tuple + from rich.prompt import Confirm, Prompt +from rich.table import Table +from tqdm import tqdm + +import bittensor from bittensor.utils.balance import Balance -from typing import List, Union, Optional, Dict, Tuple -from .utils import get_hotkey_wallets_for_wallet +from .utils import ( + get_hotkey_wallets_for_wallet, + get_delegates_details, + DelegatesDetails, +) from . import defaults console = bittensor.__console__ @@ -291,23 +299,6 @@ def add_args(cls, parser: argparse.ArgumentParser): bittensor.subtensor.add_args(stake_parser) -### Stake list. -import argparse -import bittensor -from tqdm import tqdm -from rich.table import Table -from rich.prompt import Prompt -from typing import Dict, Union, List, Tuple -from .utils import get_delegates_details, DelegatesDetails -from . import defaults - -console = bittensor.__console__ - -import os -import bittensor -from typing import List, Tuple, Optional, Dict - - def _get_coldkey_wallets_for_path(path: str) -> List["bittensor.wallet"]: try: wallet_names = next(os.walk(os.path.expanduser(path)))[1] From 6fc11152782afac93f10cbf97448f9496fe3b0c5 Mon Sep 17 00:00:00 2001 From: opendansor Date: Wed, 29 May 2024 11:58:11 -0700 Subject: [PATCH 052/116] Fix E2E test for Commit/Reveal with Salt flag. --- bittensor/commands/weights.py | 27 ++++++++++++------- .../weights/test_commit_weights.py | 17 ++++++++---- 2 files changed, 29 insertions(+), 15 deletions(-) diff --git a/bittensor/commands/weights.py b/bittensor/commands/weights.py index 9df3241bc0..19989c94f3 100644 --- a/bittensor/commands/weights.py +++ b/bittensor/commands/weights.py @@ -26,8 +26,9 @@ import numpy as np from rich.prompt import Prompt, Confirm -import bittensor.utils.weight_utils as weight_utils + import bittensor +import bittensor.utils.weight_utils as weight_utils from . 
import defaults # type: ignore @@ -90,15 +91,21 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): uids=uids, weights=weights ) - # Generate random salt - salt_length = 8 - salt = list(os.urandom(salt_length)) - - if not Confirm.ask( - f"Have you recorded the [red]salt[/red]: [bold white]'{salt}'[/bold white]? It will be " - f"required to reveal weights." - ): - return False, "User cancelled the operation." + if not cli.config.is_set("salt"): + # Generate random salt + salt_length = 8 + salt = list(os.urandom(salt_length)) + + if not Confirm.ask( + f"Have you recorded the [red]salt[/red]: [bold white]'{salt}'[/bold white]? It will be " + f"required to reveal weights." + ): + return False, "User cancelled the operation." + else: + salt = np.array( + [int(x) for x in re.split(r"[ ,]+", cli.config.salt)], + dtype=np.int64, + ).tolist() # Run the commit weights operation success, message = subtensor.commit_weights( diff --git a/tests/e2e_tests/subcommands/weights/test_commit_weights.py b/tests/e2e_tests/subcommands/weights/test_commit_weights.py index 1dc33b6092..d22efde267 100644 --- a/tests/e2e_tests/subcommands/weights/test_commit_weights.py +++ b/tests/e2e_tests/subcommands/weights/test_commit_weights.py @@ -1,3 +1,10 @@ +import re +import time + +import numpy as np + +import bittensor +import bittensor.utils.weight_utils as weight_utils from bittensor.commands import ( RegisterCommand, StakeCommand, @@ -5,12 +12,7 @@ CommitWeightCommand, RevealWeightCommand, ) -import bittensor from tests.e2e_tests.utils import setup_wallet -import time -import bittensor.utils.weight_utils as weight_utils -import re -import numpy as np def test_commit_and_reveal_weights(local_chain): @@ -21,6 +23,7 @@ def test_commit_and_reveal_weights(local_chain): # define values weights = 0.1 uid = 0 + salt = "18, 179, 107, 0, 165, 211, 141, 197" # Verify subnet 1 created successfully assert local_chain.query("SubtensorModule", "NetworksAdded", [1]).serialize() @@ -101,6 +104,8 @@ def test_commit_and_reveal_weights(local_chain): str(uid), "--weights", str(weights), + "--salt", + str(salt), "--subtensor.network", "local", "--subtensor.chain_endpoint", @@ -150,6 +155,8 @@ def test_commit_and_reveal_weights(local_chain): str(uid), "--weights", str(weights), + "--salt", + str(salt), "--subtensor.network", "local", "--subtensor.chain_endpoint", From b26a3c78504ed9fdcb768c606d58698c4c12ce0b Mon Sep 17 00:00:00 2001 From: Roman Date: Wed, 29 May 2024 14:34:51 -0700 Subject: [PATCH 053/116] `bittensor.chain_data.py` module refactoring. Removed unused code. --- bittensor/chain_data.py | 413 ++++++------------ bittensor/commands/overview.py | 2 +- bittensor/mock/subtensor_mock.py | 2 +- bittensor/subtensor.py | 14 +- .../test_subtensor_integration.py | 4 +- 5 files changed, 139 insertions(+), 296 deletions(-) diff --git a/bittensor/chain_data.py b/bittensor/chain_data.py index 9140415506..49f92e5ce6 100644 --- a/bittensor/chain_data.py +++ b/bittensor/chain_data.py @@ -14,17 +14,24 @@ # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. -import bittensor + +""" +This module provides data structures and functions for working with the Bittensor network, +including neuron and subnet information, SCALE encoding/decoding, and custom RPC type registry. 
+""" + import json -from enum import Enum from dataclasses import dataclass, asdict -from scalecodec.types import GenericCall +from enum import Enum from typing import List, Tuple, Dict, Optional, Any, TypedDict, Union + from scalecodec.base import RuntimeConfiguration, ScaleBytes from scalecodec.type_registry import load_type_registry_preset +from scalecodec.types import GenericCall from scalecodec.utils.ss58 import ss58_encode -from .utils import networking as net, U16_MAX, U16_NORMALIZED_FLOAT +import bittensor +from .utils import networking as net, RAOPERTAO, U16_NORMALIZED_FLOAT from .utils.balance import Balance from .utils.registration import torch, use_torch @@ -200,18 +207,16 @@ class AxonInfo: @property def is_serving(self) -> bool: """True if the endpoint is serving.""" - if self.ip == "0.0.0.0": - return False - else: - return True + return self.ip != "0.0.0.0" def ip_str(self) -> str: """Return the whole IP as string""" return net.ip__str__(self.ip_type, self.ip, self.port) def __eq__(self, other: "AxonInfo"): - if other == None: + if other is None: return False + if ( self.version == other.version and self.ip == other.ip @@ -221,8 +226,8 @@ def __eq__(self, other: "AxonInfo"): and self.hotkey == other.hotkey ): return True - else: - return False + + return False def __str__(self): return "AxonInfo( {}, {}, {}, {} )".format( @@ -241,10 +246,23 @@ def to_string(self) -> str: return AxonInfo(0, "", 0, 0, "", "").to_string() @classmethod - def from_string(cls, s: str) -> "AxonInfo": - """Creates an AxonInfo object from its string representation using JSON.""" + def from_string(cls, json_string: str) -> "AxonInfo": + """ + Creates an AxonInfo object from its string representation using JSON. + + Args: + json_string (str): The JSON string representation of the AxonInfo object. + + Returns: + AxonInfo: An instance of AxonInfo created from the JSON string. If decoding fails, returns a default AxonInfo object with default values. + + Raises: + json.JSONDecodeError: If there is an error in decoding the JSON string. + TypeError: If there is a type error when creating the AxonInfo object. + ValueError: If there is a value error when creating the AxonInfo object. + """ try: - data = json.loads(s) + data = json.loads(json_string) return cls(**data) except json.JSONDecodeError as e: bittensor.logging.error(f"Error decoding JSON: {e}") @@ -256,7 +274,15 @@ def from_string(cls, s: str) -> "AxonInfo": @classmethod def from_neuron_info(cls, neuron_info: dict) -> "AxonInfo": - """Converts a dictionary to an axon_info object.""" + """ + Converts a dictionary to an AxonInfo object. + + Args: + neuron_info (dict): A dictionary containing the neuron information. + + Returns: + instance (AxonInfo): An instance of AxonInfo created from the dictionary. 
+ """ return cls( version=neuron_info["axon_info"]["version"], ip=net.int_to_ip(int(neuron_info["axon_info"]["ip"])), @@ -266,33 +292,14 @@ def from_neuron_info(cls, neuron_info: dict) -> "AxonInfo": coldkey=neuron_info["coldkey"], ) - def _to_parameter_dict( - self, return_type: str - ) -> Union[dict[str, Union[int, str]], "torch.nn.ParameterDict"]: - if return_type == "torch": - return torch.nn.ParameterDict(self.__dict__) - else: - return self.__dict__ - def to_parameter_dict( self, ) -> Union[dict[str, Union[int, str]], "torch.nn.ParameterDict"]: - """Returns a torch tensor or dict of the subnet info, depending on the USE_TORCH flag set""" + """Returns a torch tensor or dict of the subnet info, depending on the USE_TORCH flag set.""" if use_torch(): - return self._to_parameter_dict("torch") - else: - return self._to_parameter_dict("numpy") - - @classmethod - def _from_parameter_dict( - cls, - parameter_dict: Union[dict[str, Any], "torch.nn.ParameterDict"], - return_type: str, - ) -> "AxonInfo": - if return_type == "torch": - return cls(**dict(parameter_dict)) + return torch.nn.ParameterDict(self.__dict__) else: - return cls(**parameter_dict) + return self.__dict__ @classmethod def from_parameter_dict( @@ -300,9 +307,9 @@ def from_parameter_dict( ) -> "AxonInfo": """Returns an axon_info object from a torch parameter_dict or a parameter dict.""" if use_torch(): - return cls._from_parameter_dict(parameter_dict, "torch") + return cls(**dict(parameter_dict)) else: - return cls._from_parameter_dict(parameter_dict, "numpy") + return cls(**parameter_dict) class ChainDataType(Enum): @@ -316,18 +323,24 @@ class ChainDataType(Enum): SubnetHyperparameters = 8 -# Constants -RAOPERTAO = 1e9 -U16_MAX = 65535 -U64_MAX = 18446744073709551615 - - def from_scale_encoding( - input: Union[List[int], bytes, ScaleBytes], + input_: Union[List[int], bytes, ScaleBytes], type_name: ChainDataType, is_vec: bool = False, is_option: bool = False, ) -> Optional[Dict]: + """ + Decodes input_ data from SCALE encoding based on the specified type name and modifiers. + + Args: + input_ (Union[List[int], bytes, ScaleBytes]): The input_ data to decode. + type_name (ChainDataType): The type of data being decoded. + is_vec (bool, optional): Whether the data is a vector of the specified type. Default is ``False``. + is_option (bool, optional): Whether the data is an optional value of the specified type. Default is ``False``. + + Returns: + Optional[Dict]: The decoded data as a dictionary, or ``None`` if the decoding fails. 
+ """ type_string = type_name.name if type_name == ChainDataType.DelegatedInfo: # DelegatedInfo is a tuple of (DelegateInfo, Compact) @@ -337,22 +350,22 @@ def from_scale_encoding( if is_vec: type_string = f"Vec<{type_string}>" - return from_scale_encoding_using_type_string(input, type_string) + return from_scale_encoding_using_type_string(input_, type_string) def from_scale_encoding_using_type_string( - input: Union[List[int], bytes, ScaleBytes], type_string: str + input_: Union[List[int], bytes, ScaleBytes], type_string: str ) -> Optional[Dict]: - if isinstance(input, ScaleBytes): - as_scale_bytes = input + if isinstance(input_, ScaleBytes): + as_scale_bytes = input_ else: - if isinstance(input, list) and all([isinstance(i, int) for i in input]): - vec_u8 = input + if isinstance(input_, list) and all([isinstance(i, int) for i in input_]): + vec_u8 = input_ as_bytes = bytes(vec_u8) - elif isinstance(input, bytes): - as_bytes = input + elif isinstance(input_, bytes): + as_bytes = input_ else: - raise TypeError("input must be a List[int], bytes, or ScaleBytes") + raise TypeError("input_ must be a List[int], bytes, or ScaleBytes") as_scale_bytes = ScaleBytes(as_bytes) @@ -368,9 +381,7 @@ def from_scale_encoding_using_type_string( # Dataclasses for chain data. @dataclass class NeuronInfo: - r""" - Dataclass for neuron metadata. - """ + """Dataclass for neuron metadata.""" hotkey: str coldkey: str @@ -399,7 +410,7 @@ class NeuronInfo: @classmethod def fix_decoded_values(cls, neuron_info_decoded: Any) -> "NeuronInfo": - r"""Fixes the values of the NeuronInfo object.""" + """Fixes the values of the NeuronInfo object.""" neuron_info_decoded["hotkey"] = ss58_encode( neuron_info_decoded["hotkey"], bittensor.__ss58_format__ ) @@ -445,26 +456,23 @@ def fix_decoded_values(cls, neuron_info_decoded: Any) -> "NeuronInfo": neuron_info_decoded["axon_info"] = AxonInfo.from_neuron_info( neuron_info_decoded ) - return cls(**neuron_info_decoded) @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> "NeuronInfo": - r"""Returns a NeuronInfo object from a ``vec_u8``.""" + """Returns a NeuronInfo object from a ``vec_u8``.""" if len(vec_u8) == 0: - return NeuronInfo._null_neuron() + return NeuronInfo.get_null_neuron() decoded = from_scale_encoding(vec_u8, ChainDataType.NeuronInfo) if decoded is None: - return NeuronInfo._null_neuron() - - decoded = NeuronInfo.fix_decoded_values(decoded) + return NeuronInfo.get_null_neuron() - return decoded + return NeuronInfo.fix_decoded_values(decoded) @classmethod def list_from_vec_u8(cls, vec_u8: List[int]) -> List["NeuronInfo"]: - r"""Returns a list of NeuronInfo objects from a ``vec_u8``.""" + """Returns a list of NeuronInfo objects from a ``vec_u8``""" decoded_list = from_scale_encoding( vec_u8, ChainDataType.NeuronInfo, is_vec=True @@ -478,7 +486,7 @@ def list_from_vec_u8(cls, vec_u8: List[int]) -> List["NeuronInfo"]: return decoded_list @staticmethod - def _null_neuron() -> "NeuronInfo": + def get_null_neuron() -> "NeuronInfo": neuron = NeuronInfo( uid=0, netuid=0, @@ -519,34 +527,10 @@ def from_weights_bonds_and_neuron_lite( return cls(**n_dict) - @staticmethod - def _neuron_dict_to_namespace(neuron_dict) -> "NeuronInfo": - # TODO: Legacy: remove? 
- if neuron_dict["hotkey"] == "5C4hrfjw9DjXZTzV3MwzrrAr9P1MJhSrvWGWqi1eSuyUpnhM": - return NeuronInfo._null_neuron() - else: - neuron = NeuronInfo(**neuron_dict) - neuron.stake_dict = { - hk: Balance.from_rao(stake) for hk, stake in neuron.stake.items() - } - neuron.stake = Balance.from_rao(neuron.total_stake) - neuron.total_stake = neuron.stake - neuron.rank = neuron.rank / U16_MAX - neuron.trust = neuron.trust / U16_MAX - neuron.consensus = neuron.consensus / U16_MAX - neuron.validator_trust = neuron.validator_trust / U16_MAX - neuron.incentive = neuron.incentive / U16_MAX - neuron.dividends = neuron.dividends / U16_MAX - neuron.emission = neuron.emission / RAOPERTAO - - return neuron - @dataclass class NeuronInfoLite: - r""" - Dataclass for neuron metadata, but without the weights and bonds. - """ + """Dataclass for neuron metadata, but without the weights and bonds.""" hotkey: str coldkey: str @@ -566,16 +550,14 @@ class NeuronInfoLite: dividends: float last_update: int validator_permit: bool - # weights: List[List[int]] - # bonds: List[List[int]] No weights or bonds in lite version - prometheus_info: "PrometheusInfo" + prometheus_info: Optional["PrometheusInfo"] axon_info: "axon_info" pruning_score: int is_null: bool = False @classmethod def fix_decoded_values(cls, neuron_info_decoded: Any) -> "NeuronInfoLite": - r"""Fixes the values of the NeuronInfoLite object.""" + """Fixes the values of the NeuronInfoLite object.""" neuron_info_decoded["hotkey"] = ss58_encode( neuron_info_decoded["hotkey"], bittensor.__ss58_format__ ) @@ -591,9 +573,6 @@ def fix_decoded_values(cls, neuron_info_decoded: Any) -> "NeuronInfoLite": neuron_info_decoded["stake_dict"] = stake_dict neuron_info_decoded["stake"] = sum(stake_dict.values()) neuron_info_decoded["total_stake"] = neuron_info_decoded["stake"] - # Don't need weights and bonds in lite version - # neuron_info_decoded['weights'] = [[int(weight[0]), int(weight[1])] for weight in neuron_info_decoded['weights']] - # neuron_info_decoded['bonds'] = [[int(bond[0]), int(bond[1])] for bond in neuron_info_decoded['bonds']] neuron_info_decoded["rank"] = U16_NORMALIZED_FLOAT(neuron_info_decoded["rank"]) neuron_info_decoded["emission"] = neuron_info_decoded["emission"] / RAOPERTAO neuron_info_decoded["incentive"] = U16_NORMALIZED_FLOAT( @@ -621,21 +600,19 @@ def fix_decoded_values(cls, neuron_info_decoded: Any) -> "NeuronInfoLite": @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> "NeuronInfoLite": - r"""Returns a NeuronInfoLite object from a ``vec_u8``.""" + """Returns a NeuronInfoLite object from a ``vec_u8``.""" if len(vec_u8) == 0: - return NeuronInfoLite._null_neuron() + return NeuronInfoLite.get_null_neuron() decoded = from_scale_encoding(vec_u8, ChainDataType.NeuronInfoLite) if decoded is None: - return NeuronInfoLite._null_neuron() + return NeuronInfoLite.get_null_neuron() - decoded = NeuronInfoLite.fix_decoded_values(decoded) - - return decoded + return NeuronInfoLite.fix_decoded_values(decoded) @classmethod def list_from_vec_u8(cls, vec_u8: List[int]) -> List["NeuronInfoLite"]: - r"""Returns a list of NeuronInfoLite objects from a ``vec_u8``.""" + """Returns a list of NeuronInfoLite objects from a ``vec_u8``.""" decoded_list = from_scale_encoding( vec_u8, ChainDataType.NeuronInfoLite, is_vec=True @@ -649,7 +626,7 @@ def list_from_vec_u8(cls, vec_u8: List[int]) -> List["NeuronInfoLite"]: return decoded_list @staticmethod - def _null_neuron() -> "NeuronInfoLite": + def get_null_neuron() -> "NeuronInfoLite": neuron = NeuronInfoLite( uid=0, netuid=0, 
@@ -666,8 +643,6 @@ def _null_neuron() -> "NeuronInfoLite": dividends=0, last_update=0, validator_permit=False, - # weights = [], // No weights or bonds in lite version - # bonds = [], prometheus_info=None, axon_info=None, is_null=True, @@ -677,34 +652,10 @@ def _null_neuron() -> "NeuronInfoLite": ) return neuron - @staticmethod - def _neuron_dict_to_namespace(neuron_dict) -> "NeuronInfoLite": - # TODO: Legacy: remove? - if neuron_dict["hotkey"] == "5C4hrfjw9DjXZTzV3MwzrrAr9P1MJhSrvWGWqi1eSuyUpnhM": - return NeuronInfoLite._null_neuron() - else: - neuron = NeuronInfoLite(**neuron_dict) - neuron.stake = Balance.from_rao(neuron.total_stake) - neuron.stake_dict = { - hk: Balance.from_rao(stake) for hk, stake in neuron.stake.items() - } - neuron.total_stake = neuron.stake - neuron.rank = neuron.rank / U16_MAX - neuron.trust = neuron.trust / U16_MAX - neuron.consensus = neuron.consensus / U16_MAX - neuron.validator_trust = neuron.validator_trust / U16_MAX - neuron.incentive = neuron.incentive / U16_MAX - neuron.dividends = neuron.dividends / U16_MAX - neuron.emission = neuron.emission / RAOPERTAO - - return neuron - @dataclass class PrometheusInfo: - r""" - Dataclass for prometheus info. - """ + """Dataclass for prometheus info.""" block: int version: int @@ -714,7 +665,7 @@ class PrometheusInfo: @classmethod def fix_decoded_values(cls, prometheus_info_decoded: Dict) -> "PrometheusInfo": - r"""Returns a PrometheusInfo object from a prometheus_info_decoded dictionary.""" + """Returns a PrometheusInfo object from a prometheus_info_decoded dictionary.""" prometheus_info_decoded["ip"] = net.int_to_ip( int(prometheus_info_decoded["ip"]) ) @@ -736,7 +687,6 @@ class DelegateInfoLite: validator_permits (list[int]): List of subnets that the delegate is allowed to validate on. return_per_1000 (int): Return per 1000 TAO, for the delegate over a day. total_daily_return (int): Total daily return of the delegate. - """ delegate_ss58: str # Hotkey of delegate @@ -753,7 +703,7 @@ class DelegateInfoLite: @dataclass class DelegateInfo: - r""" + """ Dataclass for delegate information. For a lighter version of this class, see :func:`DelegateInfoLite`. 
Args: @@ -785,7 +735,7 @@ class DelegateInfo: @classmethod def fix_decoded_values(cls, decoded: Any) -> "DelegateInfo": - r"""Fixes the decoded values.""" + """Fixes the decoded values.""" return cls( hotkey_ss58=ss58_encode( @@ -811,57 +761,47 @@ def fix_decoded_values(cls, decoded: Any) -> "DelegateInfo": @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> Optional["DelegateInfo"]: - r"""Returns a DelegateInfo object from a ``vec_u8``.""" + """Returns a DelegateInfo object from a ``vec_u8``.""" if len(vec_u8) == 0: return None decoded = from_scale_encoding(vec_u8, ChainDataType.DelegateInfo) - if decoded is None: return None - decoded = DelegateInfo.fix_decoded_values(decoded) - - return decoded + return DelegateInfo.fix_decoded_values(decoded) @classmethod def list_from_vec_u8(cls, vec_u8: List[int]) -> List["DelegateInfo"]: - r"""Returns a list of DelegateInfo objects from a ``vec_u8``.""" + """Returns a list of DelegateInfo objects from a ``vec_u8``.""" decoded = from_scale_encoding(vec_u8, ChainDataType.DelegateInfo, is_vec=True) if decoded is None: return [] - decoded = [DelegateInfo.fix_decoded_values(d) for d in decoded] - - return decoded + return [DelegateInfo.fix_decoded_values(d) for d in decoded] @classmethod def delegated_list_from_vec_u8( cls, vec_u8: List[int] ) -> List[Tuple["DelegateInfo", Balance]]: - r"""Returns a list of Tuples of DelegateInfo objects, and Balance, from a ``vec_u8``. + """Returns a list of Tuples of DelegateInfo objects, and Balance, from a ``vec_u8``. This is the list of delegates that the user has delegated to, and the amount of stake delegated. """ decoded = from_scale_encoding(vec_u8, ChainDataType.DelegatedInfo, is_vec=True) - if decoded is None: return [] - decoded = [ + return [ (DelegateInfo.fix_decoded_values(d), Balance.from_rao(s)) for d, s in decoded ] - return decoded - @dataclass class StakeInfo: - r""" - Dataclass for stake info. 
- """ + """Dataclass for stake info.""" hotkey_ss58: str # Hotkey address coldkey_ss58: str # Coldkey address @@ -869,8 +809,7 @@ class StakeInfo: @classmethod def fix_decoded_values(cls, decoded: Any) -> "StakeInfo": - r"""Fixes the decoded values.""" - + """Fixes the decoded values.""" return cls( hotkey_ss58=ss58_encode(decoded["hotkey"], bittensor.__ss58_format__), coldkey_ss58=ss58_encode(decoded["coldkey"], bittensor.__ss58_format__), @@ -879,60 +818,50 @@ def fix_decoded_values(cls, decoded: Any) -> "StakeInfo": @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> Optional["StakeInfo"]: - r"""Returns a StakeInfo object from a ``vec_u8``.""" + """Returns a StakeInfo object from a ``vec_u8``.""" if len(vec_u8) == 0: return None decoded = from_scale_encoding(vec_u8, ChainDataType.StakeInfo) - if decoded is None: return None - decoded = StakeInfo.fix_decoded_values(decoded) - - return decoded + return StakeInfo.fix_decoded_values(decoded) @classmethod def list_of_tuple_from_vec_u8( cls, vec_u8: List[int] ) -> Dict[str, List["StakeInfo"]]: - r"""Returns a list of StakeInfo objects from a ``vec_u8``.""" + """Returns a list of StakeInfo objects from a ``vec_u8``.""" decoded: Optional[ list[tuple[str, list[object]]] ] = from_scale_encoding_using_type_string( - input=vec_u8, type_string="Vec<(AccountId, Vec)>" + input_=vec_u8, type_string="Vec<(AccountId, Vec)>" ) if decoded is None: return {} - stake_map = { + return { ss58_encode(address=account_id, ss58_format=bittensor.__ss58_format__): [ StakeInfo.fix_decoded_values(d) for d in stake_info ] for account_id, stake_info in decoded } - return stake_map - @classmethod def list_from_vec_u8(cls, vec_u8: List[int]) -> List["StakeInfo"]: - r"""Returns a list of StakeInfo objects from a ``vec_u8``.""" + """Returns a list of StakeInfo objects from a ``vec_u8``.""" decoded = from_scale_encoding(vec_u8, ChainDataType.StakeInfo, is_vec=True) - if decoded is None: return [] - decoded = [StakeInfo.fix_decoded_values(d) for d in decoded] - - return decoded + return [StakeInfo.fix_decoded_values(d) for d in decoded] @dataclass class SubnetInfo: - r""" - Dataclass for subnet info. 
- """ + """Dataclass for subnet info.""" netuid: int rho: int @@ -953,16 +882,14 @@ class SubnetInfo: emission_value: float burn: Balance owner_ss58: str - # adjustment_alpha: int @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> Optional["SubnetInfo"]: - r"""Returns a SubnetInfo object from a ``vec_u8``.""" + """Returns a SubnetInfo object from a ``vec_u8``.""" if len(vec_u8) == 0: return None decoded = from_scale_encoding(vec_u8, ChainDataType.SubnetInfo) - if decoded is None: return None @@ -978,13 +905,11 @@ def list_from_vec_u8(cls, vec_u8: List[int]) -> List["SubnetInfo"]: if decoded is None: return [] - decoded = [SubnetInfo.fix_decoded_values(d) for d in decoded] - - return decoded + return [SubnetInfo.fix_decoded_values(d) for d in decoded] @classmethod def fix_decoded_values(cls, decoded: Dict) -> "SubnetInfo": - r"""Returns a SubnetInfo object from a decoded SubnetInfo dictionary.""" + """Returns a SubnetInfo object from a decoded SubnetInfo dictionary.""" return SubnetInfo( netuid=decoded["netuid"], rho=decoded["rho"], @@ -1011,48 +936,26 @@ def fix_decoded_values(cls, decoded: Dict) -> "SubnetInfo": owner_ss58=ss58_encode(decoded["owner"], bittensor.__ss58_format__), ) - def _to_parameter_dict( - self, return_type: str - ) -> Union[dict[str, Any], "torch.nn.ParameterDict"]: - if return_type == "torch": - return torch.nn.ParameterDict(self.__dict__) - else: - return self.__dict__ - def to_parameter_dict(self) -> Union[dict[str, Any], "torch.nn.ParameterDict"]: """Returns a torch tensor or dict of the subnet info.""" if use_torch(): - return self._to_parameter_dict("torch") + return torch.nn.ParameterDict(self.__dict__) else: - return self._to_parameter_dict("numpy") - - @classmethod - def _from_parameter_dict_torch( - cls, parameter_dict: "torch.nn.ParameterDict" - ) -> "SubnetInfo": - """Returns a SubnetInfo object from a torch parameter_dict.""" - return cls(**dict(parameter_dict)) - - @classmethod - def _from_parameter_dict_numpy(cls, parameter_dict: dict[str, Any]) -> "SubnetInfo": - r"""Returns a SubnetInfo object from a parameter_dict.""" - return cls(**parameter_dict) + return self.__dict__ @classmethod def from_parameter_dict( cls, parameter_dict: Union[dict[str, Any], "torch.nn.ParameterDict"] ) -> "SubnetInfo": if use_torch(): - return cls._from_parameter_dict_torch(parameter_dict) + return cls(**dict(parameter_dict)) else: - return cls._from_parameter_dict_numpy(parameter_dict) + return cls(**parameter_dict) @dataclass class SubnetHyperparameters: - r""" - Dataclass for subnet hyperparameters. 
- """ + """Dataclass for subnet hyperparameters.""" rho: int kappa: int @@ -1081,12 +984,11 @@ class SubnetHyperparameters: @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> Optional["SubnetHyperparameters"]: - r"""Returns a SubnetHyperparameters object from a ``vec_u8``.""" + """Returns a SubnetHyperparameters object from a ``vec_u8``.""" if len(vec_u8) == 0: return None decoded = from_scale_encoding(vec_u8, ChainDataType.SubnetHyperparameters) - if decoded is None: return None @@ -1094,21 +996,18 @@ def from_vec_u8(cls, vec_u8: List[int]) -> Optional["SubnetHyperparameters"]: @classmethod def list_from_vec_u8(cls, vec_u8: List[int]) -> List["SubnetHyperparameters"]: - r"""Returns a list of SubnetHyperparameters objects from a ``vec_u8``.""" + """Returns a list of SubnetHyperparameters objects from a ``vec_u8``.""" decoded = from_scale_encoding( vec_u8, ChainDataType.SubnetHyperparameters, is_vec=True, is_option=True ) - if decoded is None: return [] - decoded = [SubnetHyperparameters.fix_decoded_values(d) for d in decoded] - - return decoded + return [SubnetHyperparameters.fix_decoded_values(d) for d in decoded] @classmethod def fix_decoded_values(cls, decoded: Dict) -> "SubnetHyperparameters": - r"""Returns a SubnetInfo object from a decoded SubnetInfo dictionary.""" + """Returns a SubnetInfo object from a decoded SubnetInfo dictionary.""" return SubnetHyperparameters( rho=decoded["rho"], kappa=decoded["kappa"], @@ -1136,59 +1035,35 @@ def fix_decoded_values(cls, decoded: Dict) -> "SubnetHyperparameters": commit_reveal_weights_enabled=decoded["commit_reveal_weights_enabled"], ) - def _to_parameter_dict_torch( - self, return_type: str - ) -> Union[dict[str, Union[int, float, bool]], "torch.nn.ParameterDict"]: - if return_type == "torch": - return torch.nn.ParameterDict(self.__dict__) - else: - return self.__dict__ - def to_parameter_dict( self, ) -> Union[dict[str, Union[int, float, bool]], "torch.nn.ParameterDict"]: """Returns a torch tensor or dict of the subnet hyperparameters.""" if use_torch(): - return self._to_parameter_dict_torch("torch") + return torch.nn.ParameterDict(self.__dict__) else: - return self._to_parameter_dict_torch("numpy") - - @classmethod - def _from_parameter_dict_torch( - cls, parameter_dict: "torch.nn.ParameterDict" - ) -> "SubnetHyperparameters": - """Returns a SubnetHyperparameters object from a torch parameter_dict.""" - return cls(**dict(parameter_dict)) - - @classmethod - def _from_parameter_dict_numpy( - cls, parameter_dict: dict[str, Any] - ) -> "SubnetHyperparameters": - """Returns a SubnetHyperparameters object from a parameter_dict.""" - return cls(**parameter_dict) + return self.__dict__ @classmethod def from_parameter_dict( cls, parameter_dict: Union[dict[str, Any], "torch.nn.ParameterDict"] ) -> "SubnetHyperparameters": if use_torch(): - return cls._from_parameter_dict_torch(parameter_dict) + return cls(**dict(parameter_dict)) else: - return cls._from_parameter_dict_numpy(parameter_dict) + return cls(**parameter_dict) @dataclass class IPInfo: - r""" - Dataclass for associated IP Info. 
- """ + """Dataclass for associated IP Info.""" ip: str ip_type: int protocol: int def encode(self) -> Dict[str, Any]: - r"""Returns a dictionary of the IPInfo object that can be encoded.""" + """Returns a dictionary of the IPInfo object that can be encoded.""" return { "ip": net.ip_to_int( self.ip @@ -1198,12 +1073,11 @@ def encode(self) -> Dict[str, Any]: @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> Optional["IPInfo"]: - r"""Returns a IPInfo object from a ``vec_u8``.""" + """Returns a IPInfo object from a ``vec_u8``.""" if len(vec_u8) == 0: return None decoded = from_scale_encoding(vec_u8, ChainDataType.IPInfo) - if decoded is None: return None @@ -1217,62 +1091,37 @@ def list_from_vec_u8(cls, vec_u8: List[int]) -> List["IPInfo"]: if decoded is None: return [] - decoded = [IPInfo.fix_decoded_values(d) for d in decoded] - - return decoded + return [IPInfo.fix_decoded_values(d) for d in decoded] @classmethod def fix_decoded_values(cls, decoded: Dict) -> "IPInfo": - r"""Returns a SubnetInfo object from a decoded IPInfo dictionary.""" + """Returns a SubnetInfo object from a decoded IPInfo dictionary.""" return IPInfo( - ip=bittensor.utils.networking.int_to_ip(decoded["ip"]), + ip=net.int_to_ip(decoded["ip"]), ip_type=decoded["ip_type_and_protocol"] >> 4, protocol=decoded["ip_type_and_protocol"] & 0xF, ) - def _to_parameter_dict( - self, return_type: str - ) -> Union[dict[str, Union[str, int]], "torch.nn.ParameterDict"]: - """Returns a torch tensor of the subnet info.""" - if return_type == "torch": - return torch.nn.ParameterDict(self.__dict__) - else: - return self.__dict__ - def to_parameter_dict( self, ) -> Union[dict[str, Union[str, int]], "torch.nn.ParameterDict"]: """Returns a torch tensor or dict of the subnet IP info.""" if use_torch(): - return self._to_parameter_dict("torch") + return torch.nn.ParameterDict(self.__dict__) else: - return self._to_parameter_dict("numpy") - - @classmethod - def _from_parameter_dict_torch( - cls, parameter_dict: "torch.nn.ParameterDict" - ) -> "IPInfo": - """Returns a IPInfo object from a torch parameter_dict.""" - return cls(**dict(parameter_dict)) - - @classmethod - def _from_parameter_dict_numpy(cls, parameter_dict: dict[str, Any]) -> "IPInfo": - """Returns a IPInfo object from a parameter_dict.""" - return cls(**parameter_dict) + return self.__dict__ @classmethod def from_parameter_dict( cls, parameter_dict: Union[dict[str, Any], "torch.nn.ParameterDict"] ) -> "IPInfo": if use_torch(): - return cls._from_parameter_dict_torch(parameter_dict) + return cls(**dict(parameter_dict)) else: - return cls._from_parameter_dict_numpy(parameter_dict) + return cls(**parameter_dict) # Senate / Proposal data - - class ProposalVoteData(TypedDict): index: int threshold: int diff --git a/bittensor/commands/overview.py b/bittensor/commands/overview.py index 477ad9f01a..b35fd85596 100644 --- a/bittensor/commands/overview.py +++ b/bittensor/commands/overview.py @@ -317,7 +317,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): de_registered_neurons = [] for hotkey_addr, our_stake in de_registered_stake: # Make a neuron info lite for this hotkey and coldkey. 
- de_registered_neuron = bittensor.NeuronInfoLite._null_neuron() + de_registered_neuron = bittensor.NeuronInfoLite.get_null_neuron() de_registered_neuron.hotkey = hotkey_addr de_registered_neuron.coldkey = ( coldkey_wallet.coldkeypub.ss58_address diff --git a/bittensor/mock/subtensor_mock.py b/bittensor/mock/subtensor_mock.py index b6be74095b..a0efe48d74 100644 --- a/bittensor/mock/subtensor_mock.py +++ b/bittensor/mock/subtensor_mock.py @@ -756,7 +756,7 @@ def neuron_for_uid( self, uid: int, netuid: int, block: Optional[int] = None ) -> Optional[NeuronInfo]: if uid is None: - return NeuronInfo._null_neuron() + return NeuronInfo.get_null_neuron() if block: if self.block_number < block: diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index ff48a50288..46f999eb97 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -942,7 +942,6 @@ def commit_weights( This function allows neurons to create a tamper-proof record of their weight distribution at a specific point in time, enhancing transparency and accountability within the Bittensor network. """ - uid = self.get_uid_for_hotkey_on_subnet(wallet.hotkey.ss58_address, netuid) retries = 0 success = False message = "No attempt made. Perhaps it is too soon to commit weights!" @@ -1081,7 +1080,6 @@ def reveal_weights( This function allows neurons to reveal their previously committed weight distribution, ensuring transparency and accountability within the Bittensor network. """ - uid = self.get_uid_for_hotkey_on_subnet(wallet.hotkey.ss58_address, netuid) retries = 0 success = False message = "No attempt made. Perhaps it is too soon to reveal weights!" @@ -4742,8 +4740,7 @@ def neuron_for_uid( subnet, offering insights into their roles in the network's consensus and validation mechanisms. """ if uid is None: - # TODO: fix `Access to a protected member _null_neuron of a class` error when chane_data.py refactoring. - return NeuronInfo._null_neuron() + return NeuronInfo.get_null_neuron() @retry(delay=1, tries=3, backoff=2, max_delay=4, logger=_logger) def make_substrate_call_with_retry(): @@ -4758,8 +4755,7 @@ def make_substrate_call_with_retry(): json_body = make_substrate_call_with_retry() if not (result := json_body.get("result", None)): - # TODO: fix `Access to a protected member _null_neuron of a class` error when chane_data.py refactoring. - return NeuronInfo._null_neuron() + return NeuronInfo.get_null_neuron() return NeuronInfo.from_vec_u8(result) @@ -4814,8 +4810,7 @@ def neuron_for_uid_lite( subnet without the need for comprehensive data retrieval. """ if uid is None: - # TODO: fix `Access to a protected member _null_neuron of a class` error when chane_data.py refactoring. - return NeuronInfoLite._null_neuron() + return NeuronInfoLite.get_null_neuron() hex_bytes_result = self.query_runtime_api( runtime_api="NeuronInfoRuntimeApi", @@ -4828,8 +4823,7 @@ def neuron_for_uid_lite( ) if hex_bytes_result is None: - # TODO: fix `Access to a protected member _null_neuron of a class` error when chane_data.py refactoring. 
- return NeuronInfoLite._null_neuron() + return NeuronInfoLite.get_null_neuron() if hex_bytes_result.startswith("0x"): bytes_result = bytes.fromhex(hex_bytes_result[2:]) diff --git a/tests/integration_tests/test_subtensor_integration.py b/tests/integration_tests/test_subtensor_integration.py index 8849aeb9db..e3661210bc 100644 --- a/tests/integration_tests/test_subtensor_integration.py +++ b/tests/integration_tests/test_subtensor_integration.py @@ -734,7 +734,7 @@ def is_registered_side_effect(*args, **kwargs): ) self.subtensor.get_neuron_for_pubkey_and_subnet = MagicMock( - return_value=bittensor.NeuronInfo._null_neuron() + return_value=bittensor.NeuronInfo.get_null_neuron() ) self.subtensor.is_hotkey_registered = MagicMock( side_effect=is_registered_side_effect @@ -816,7 +816,7 @@ class ExitEarly(Exception): # then should create a new pow and check if it is stale # then should enter substrate and exit early because of test self.subtensor.get_neuron_for_pubkey_and_subnet = MagicMock( - return_value=bittensor.NeuronInfo._null_neuron() + return_value=bittensor.NeuronInfo.get_null_neuron() ) with pytest.raises(ExitEarly): bittensor.subtensor.register(mock_subtensor_self, mock_wallet, netuid=3) From ad0829d18927a293d76cf47ed1152a291cd3ee53 Mon Sep 17 00:00:00 2001 From: Dany Gagnon Date: Fri, 31 May 2024 14:28:57 -0400 Subject: [PATCH 054/116] fix: ipv6 is packed with port --- bittensor/chain_data.py | 5 +++-- bittensor/utils/networking.py | 22 ++++++++++++++++++++++ tests/unit_tests/utils/test_networking.py | 8 ++++++++ 3 files changed, 33 insertions(+), 2 deletions(-) diff --git a/bittensor/chain_data.py b/bittensor/chain_data.py index 49f92e5ce6..94b131cdd9 100644 --- a/bittensor/chain_data.py +++ b/bittensor/chain_data.py @@ -283,10 +283,11 @@ def from_neuron_info(cls, neuron_info: dict) -> "AxonInfo": Returns: instance (AxonInfo): An instance of AxonInfo created from the dictionary. """ + ip, port = net.unpack_encoded_ip_port(neuron_info["axon_info"]["ip"], neuron_info["axon_info"]["port"]) return cls( version=neuron_info["axon_info"]["version"], - ip=net.int_to_ip(int(neuron_info["axon_info"]["ip"])), - port=neuron_info["axon_info"]["port"], + ip=ip, + port=port, ip_type=neuron_info["axon_info"]["ip_type"], hotkey=neuron_info["hotkey"], coldkey=neuron_info["coldkey"], diff --git a/bittensor/utils/networking.py b/bittensor/utils/networking.py index 9f1450af81..79d01ab6f3 100644 --- a/bittensor/utils/networking.py +++ b/bittensor/utils/networking.py @@ -44,6 +44,28 @@ def int_to_ip(int_val: int) -> str: return str(netaddr.IPAddress(int_val)) +def unpack_encoded_ip_port(ip_str: str, port: int) -> tuple: + r"""Unpacks an encoded IP and port if they are encoded together. + Args: + ip_str (:type:`str`, `required`): + The encoded IP address string. + port (:type:`int`, `required`): + The port number. + + Returns: + tuple: A tuple containing the IP address string and port number. + + Raises: + netaddr.core.AddrFormatError (Exception): + Raised when the passed IP string is not a valid IP int value. + """ + if ip_str < (1 << 128) + (1 << 16) and port == 0: + port = ip_str & 0xFFFF + ip = ip_str >> 16 + return int_to_ip(ip), port + return int_to_ip(ip), port + + def ip_to_int(str_val: str) -> int: r"""Maps an ip-string to a unique integer. 
arg: diff --git a/tests/unit_tests/utils/test_networking.py b/tests/unit_tests/utils/test_networking.py index 2037718578..5f7b445d77 100644 --- a/tests/unit_tests/utils/test_networking.py +++ b/tests/unit_tests/utils/test_networking.py @@ -25,6 +25,14 @@ def test_int_to_ip_range(): ) +def test_packed_ip_port(): + """Test packing and unpacking IP and port.""" + assert utils.networking.unpack_encoded_ip_port(184046647580618, 0) == ( + utils.networking.ip_to_int("167.99.179.13"), + 6090, + ) + + def test_int_to_ip4_max(): """Test converting integer to maximum IPv4 address.""" assert utils.networking.int_to_ip(4294967295) == "255.255.255.255" From 18ee92880de9c6c14e419b2d9a7f476d090a305f Mon Sep 17 00:00:00 2001 From: Dany Gagnon Date: Fri, 31 May 2024 14:35:50 -0400 Subject: [PATCH 055/116] refactor: black ran on chain_data --- bittensor/chain_data.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bittensor/chain_data.py b/bittensor/chain_data.py index 94b131cdd9..689f547dc3 100644 --- a/bittensor/chain_data.py +++ b/bittensor/chain_data.py @@ -283,7 +283,9 @@ def from_neuron_info(cls, neuron_info: dict) -> "AxonInfo": Returns: instance (AxonInfo): An instance of AxonInfo created from the dictionary. """ - ip, port = net.unpack_encoded_ip_port(neuron_info["axon_info"]["ip"], neuron_info["axon_info"]["port"]) + ip, port = net.unpack_encoded_ip_port( + neuron_info["axon_info"]["ip"], neuron_info["axon_info"]["port"] + ) return cls( version=neuron_info["axon_info"]["version"], ip=ip, From 2a9c2687e4aa6eeed110f22e88901d6962d8ceff Mon Sep 17 00:00:00 2001 From: Dany Gagnon Date: Fri, 31 May 2024 14:42:22 -0400 Subject: [PATCH 056/116] fix: test int --- tests/unit_tests/utils/test_networking.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit_tests/utils/test_networking.py b/tests/unit_tests/utils/test_networking.py index 5f7b445d77..00e84a0a97 100644 --- a/tests/unit_tests/utils/test_networking.py +++ b/tests/unit_tests/utils/test_networking.py @@ -28,7 +28,7 @@ def test_int_to_ip_range(): def test_packed_ip_port(): """Test packing and unpacking IP and port.""" assert utils.networking.unpack_encoded_ip_port(184046647580618, 0) == ( - utils.networking.ip_to_int("167.99.179.13"), + 2808328973, 6090, ) From cf2885bfb8ddb39ba4f2920427f2259906912505 Mon Sep 17 00:00:00 2001 From: Dany Gagnon Date: Fri, 31 May 2024 14:42:53 -0400 Subject: [PATCH 057/116] fix: test int --- tests/unit_tests/utils/test_networking.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit_tests/utils/test_networking.py b/tests/unit_tests/utils/test_networking.py index 00e84a0a97..6bc89d3f27 100644 --- a/tests/unit_tests/utils/test_networking.py +++ b/tests/unit_tests/utils/test_networking.py @@ -28,7 +28,7 @@ def test_int_to_ip_range(): def test_packed_ip_port(): """Test packing and unpacking IP and port.""" assert utils.networking.unpack_encoded_ip_port(184046647580618, 0) == ( - 2808328973, + "167.99.179.13", 6090, ) From 94a2505d07e16774285b2f18dd5002674cb1f831 Mon Sep 17 00:00:00 2001 From: Dany Gagnon Date: Fri, 31 May 2024 14:55:39 -0400 Subject: [PATCH 058/116] fix: small mistake typo --- bittensor/utils/networking.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bittensor/utils/networking.py b/bittensor/utils/networking.py index 79d01ab6f3..0352cdfaa8 100644 --- a/bittensor/utils/networking.py +++ b/bittensor/utils/networking.py @@ -63,7 +63,7 @@ def unpack_encoded_ip_port(ip_str: str, port: int) -> tuple: port = ip_str & 
0xFFFF ip = ip_str >> 16 return int_to_ip(ip), port - return int_to_ip(ip), port + return int_to_ip(ip_str), port def ip_to_int(str_val: str) -> int: From c7967ebeabde153d23c0c3fa5c1d5b9c63d2cccc Mon Sep 17 00:00:00 2001 From: opendansor Date: Fri, 31 May 2024 13:55:14 -0700 Subject: [PATCH 059/116] Fix return of ip version --- bittensor/axon.py | 3 +- tests/unit_tests/test_axon.py | 50 ++++++++++++++++++++++++++++++ tests/unit_tests/test_subtensor.py | 6 ++-- 3 files changed, 55 insertions(+), 4 deletions(-) diff --git a/bittensor/axon.py b/bittensor/axon.py index 476ed52db2..273cd79607 100644 --- a/bittensor/axon.py +++ b/bittensor/axon.py @@ -57,6 +57,7 @@ SynapseException, ) from bittensor.threadpool import PriorityThreadPoolExecutor +from bittensor.utils import networking class FastAPIThreadedServer(uvicorn.Server): @@ -393,7 +394,7 @@ def info(self) -> "bittensor.AxonInfo": return bittensor.AxonInfo( version=bittensor.__version_as_int__, ip=self.external_ip, - ip_type=4, + ip_type=networking.ip_version(self.external_ip), port=self.external_port, hotkey=self.wallet.hotkey.ss58_address, coldkey=self.wallet.coldkeypub.ss58_address, diff --git a/tests/unit_tests/test_axon.py b/tests/unit_tests/test_axon.py index ec0d9f5e53..36d0ba2d00 100644 --- a/tests/unit_tests/test_axon.py +++ b/tests/unit_tests/test_axon.py @@ -25,6 +25,7 @@ from unittest.mock import AsyncMock, MagicMock, patch # Third Party +import netaddr import pytest from starlette.requests import Request from fastapi.testclient import TestClient @@ -349,6 +350,55 @@ def test_to_string(info_return, expected_output, test_id): assert output == expected_output, f"Test ID: {test_id}" +@pytest.mark.parametrize( + "ip, port, expected_ip_type, test_id", + [ + # Happy path + ( + "127.0.0.1", + 8080, + 4, + "valid_ipv4", + ), + ( + "2001:0db8:85a3:0000:0000:8a2e:0370:7334", + 3030, + 6, + "valid_ipv6", + ), + ], +) +def test_valid_ipv4_and_ipv6_address(ip, port, expected_ip_type, test_id): + # Arrange + axon = Axon() + axon.ip = ip + axon.external_ip = ip + axon.port = port + + # Act + ip_type = axon.info().ip_type + + # Assert + assert ip_type == expected_ip_type, f"Test ID: {test_id}" + + +@pytest.mark.parametrize( + "ip, port, expected_exception", + [ + ( + "This Is not a valid address", + 65534, + netaddr.core.AddrFormatError, + ), + ], + ids=["failed to detect a valid IP " "address from %r"], +) +def test_invalid_ip_address(ip, port, expected_exception): + # Assert + with pytest.raises(expected_exception): + Axon(ip=ip, external_ip=ip, port=port).info() + + @pytest.mark.parametrize( "ip, port, ss58_address, started, forward_fns, expected_str, test_id", [ diff --git a/tests/unit_tests/test_subtensor.py b/tests/unit_tests/test_subtensor.py index 3709ec190d..c002aea66b 100644 --- a/tests/unit_tests/test_subtensor.py +++ b/tests/unit_tests/test_subtensor.py @@ -34,8 +34,8 @@ def test_serve_axon_with_external_ip_set(): - internal_ip: str = "this is an internal ip" - external_ip: str = "this is an external ip" + internal_ip: str = "192.0.2.146" + external_ip: str = "2001:0db8:85a3:0000:0000:8a2e:0370:7334" mock_serve_axon = MagicMock(return_value=True) @@ -76,7 +76,7 @@ def test_serve_axon_with_external_ip_set(): def test_serve_axon_with_external_port_set(): - external_ip: str = "this is an external ip" + external_ip: str = "2001:0db8:85a3:0000:0000:8a2e:0370:7334" internal_port: int = 1234 external_port: int = 5678 From 5a50496ecaf5b2363e30d49724e5d56ab20dba59 Mon Sep 17 00:00:00 2001 From: Maciej Urbanski Date: Sat, 1 Jun 2024 11:19:46 
+0200 Subject: [PATCH 060/116] configure github dependabot to check for security updates --- .github/dependabot.yml | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..adff4d0aab --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,8 @@ +version: 2 +updates: + - package-ecosystem: "pip" + directory: "" + file: "requirements/prod.txt" + schedule: + interval: "daily" + open-pull-requests-limit: 0 # Only security updates will be opened as PRs From a71fb4e88e14ea35dc145eb117f90403692c5960 Mon Sep 17 00:00:00 2001 From: Maciej Urbanski Date: Sat, 1 Jun 2024 11:31:18 +0200 Subject: [PATCH 061/116] make production dependencies less restrictive --- requirements/dev.txt | 2 ++ requirements/prod.txt | 31 ++++++++++++++----------------- 2 files changed, 16 insertions(+), 17 deletions(-) diff --git a/requirements/dev.txt b/requirements/dev.txt index 2fe7007484..f6cd2aef9d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -1,4 +1,6 @@ +black==23.7.0 pytest==7.2.0 +pytest-asyncio pytest-mock==3.12.0 pytest-split==0.8.0 pytest-xdist==3.0.2 diff --git a/requirements/prod.txt b/requirements/prod.txt index d5bbf44b87..2d9ecabab5 100644 --- a/requirements/prod.txt +++ b/requirements/prod.txt @@ -1,19 +1,18 @@ -aiohttp==3.9.0b0 -ansible==6.7.0 -ansible_vault==2.1.0 +aiohttp~=3.9 +ansible~=6.7 +ansible_vault~=2.1 backoff -black==23.7.0 -certifi==2024.2.2 -colorama==0.4.6 -cryptography==42.0.5 -ddt==1.6.0 +certifi~=2024.2.2 +colorama~=0.4.6 +cryptography~=42.0.5 +ddt~=1.6.0 eth-utils<2.3.0 fuzzywuzzy>=0.18.0 -fastapi==0.110.1 -munch==2.5.0 +fastapi~=0.110.1 +munch~=2.5.0 netaddr numpy -msgpack-numpy-opentensor==0.5.0 +msgpack-numpy-opentensor~=0.5.0 nest_asyncio packaging pycryptodome>=3.18.0,<4.0.0 @@ -21,17 +20,15 @@ pyyaml password_strength pydantic>=2.3, <3 PyNaCl>=1.3.0,<=1.5.0 -pytest-asyncio python-Levenshtein -python-statemachine==2.1.2 -pytest +python-statemachine~=2.1.2 retry requests rich scalecodec==1.2.7 # scalecodec should not be changed unless first verifying compatibility with the subtensor's monkeypatching of scalecodec.RuntimeConfiguration.get_decoder_class -shtab==1.6.5 -substrate-interface==1.7.5 +shtab~=1.6.5 +substrate-interface~=1.7.5 termcolor tqdm -uvicorn==0.22.0 +uvicorn<=0.30 wheel From 48fdee70682d82e9b48b75fb321cda0deb3d7bb3 Mon Sep 17 00:00:00 2001 From: Maciej Urbanski Date: Sat, 1 Jun 2024 14:49:58 +0200 Subject: [PATCH 062/116] fix check_compatibility.sh understanding of ~= --- scripts/check_compatibility.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/check_compatibility.sh b/scripts/check_compatibility.sh index 5f48f4cbb0..b9c89c24dd 100755 --- a/scripts/check_compatibility.sh +++ b/scripts/check_compatibility.sh @@ -22,7 +22,7 @@ check_compatibility() { continue fi - package_name=$(echo "$requirement" | awk -F'[!=<>]' '{print $1}' | awk -F'[' '{print $1}') # Strip off brackets + package_name=$(echo "$requirement" | awk -F'[!=<>~]' '{print $1}' | awk -F'[' '{print $1}') # Strip off brackets echo -n "Checking $package_name... 
" url="https://pypi.org/pypi/$package_name/json" From 672839e9ce82906d717b0a171895710326c8550b Mon Sep 17 00:00:00 2001 From: Maciej Urbanski Date: Sat, 1 Jun 2024 14:43:54 +0200 Subject: [PATCH 063/116] explicitly install pytest in E2E tests --- .github/workflows/e2e-subtensor-tests.yaml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/e2e-subtensor-tests.yaml b/.github/workflows/e2e-subtensor-tests.yaml index 896d2142f9..bea776f620 100644 --- a/.github/workflows/e2e-subtensor-tests.yaml +++ b/.github/workflows/e2e-subtensor-tests.yaml @@ -80,6 +80,5 @@ jobs: - name: Run tests run: | - python3 -m pip install -e . - python3 -m pip install torch + python3 -m pip install -e .[torch] pytest LOCALNET_SH_PATH="./subtensor/scripts/localnet.sh" pytest tests/e2e_tests/ -s From 8d5b9264c99caf1075aa319516e9e8e98db3938f Mon Sep 17 00:00:00 2001 From: Maciej Urbanski Date: Sat, 1 Jun 2024 15:16:52 +0200 Subject: [PATCH 064/116] replace black with ruff for formatting --- .circleci/config.yml | 52 ++++++++++++++++++++--------------------- contrib/CONTRIBUTING.md | 2 +- contrib/STYLE.md | 10 ++++---- requirements/dev.txt | 1 + 4 files changed, 34 insertions(+), 31 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 6bd5e47978..7ea959790e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -6,7 +6,7 @@ orbs: # coveralls: coveralls/coveralls@1.0.6 jobs: - black: + ruff: resource_class: small parameters: python-version: @@ -18,29 +18,29 @@ jobs: - checkout - restore_cache: - name: Restore cached black venv + name: Restore cached ruff venv keys: - - v2-pypi-py-black-<< parameters.python-version >> + - v2-pypi-py-ruff-<< parameters.python-version >> - run: - name: Update & Activate black venv + name: Update & Activate ruff venv command: | - python -m venv env/ - . env/bin/activate + python -m venv .venv + . .venv/bin/activate python -m pip install --upgrade pip - pip install black==23.7.0 + pip install ruff -c requirements/dev.txt - save_cache: - name: Save cached black venv + name: Save cached ruff venv paths: - - "env/" - key: v2-pypi-py-black-<< parameters.python-version >> + - ".venv/" + key: v2-pypi-py-ruff-<< parameters.python-version >> - run: - name: Black format check + name: Ruff format check command: | - . env/bin/activate - python -m black --exclude '(env|venv|.eggs)' --check . + . .venv/bin/activate + ruff format --diff . check_compatibility: parameters: @@ -85,8 +85,8 @@ jobs: - run: name: Update & Activate venv command: | - python -m venv env/ - . env/bin/activate + python -m venv .venv + . .venv/bin/activate python -m pip install --upgrade pip python -m pip install '.[dev]' @@ -99,20 +99,20 @@ jobs: - run: name: Install Bittensor command: | - . env/bin/activate + . .venv/bin/activate pip install -e '.[dev]' - run: name: Instantiate Mock Wallet command: | - . env/bin/activate + . .venv/bin/activate ./scripts/create_wallet.sh # TODO: Update test durations on different runs - run: name: Unit Tests command: | - . env/bin/activate + . .venv/bin/activate export PYTHONUNBUFFERED=1 pytest -n2 --reruns 3 --durations=0 --verbose --junitxml=test-results/unit_tests.xml \ --cov=. --cov-append --cov-config .coveragerc \ @@ -123,7 +123,7 @@ jobs: - run: name: Integration Tests command: | - . env/bin/activate + . .venv/bin/activate export PYTHONUNBUFFERED=1 pytest -n2 --reruns 3 --reruns-delay 15 --durations=0 --verbose --junitxml=test-results/integration_tests.xml \ --cov=. 
--cov-append --cov-config .coveragerc \ @@ -143,7 +143,7 @@ jobs: #- run: #name: Upload Coverage #command: | - #. env/bin/activate && coveralls + #. .venv/bin/activate && coveralls #env: #CI_NAME: circleci #CI_BUILD_NUMBER: $CIRCLE_BUILD_NUM @@ -173,8 +173,8 @@ jobs: - run: name: Update & Activate venv command: | - python -m venv env/ - . env/bin/activate + python -m venv .venv + . .venv/bin/activate python -m pip install --upgrade pip python -m pip install '.[dev]' pip install flake8 @@ -188,19 +188,19 @@ jobs: - run: name: Install Bittensor command: | - . env/bin/activate + . .venv/bin/activate pip install -e '.[dev]' - run: name: Lint with flake8 command: | - . env/bin/activate + . .venv/bin/activate python -m flake8 bittensor/ --count - run: name: Type check with mypy command: | - . env/bin/activate + . .venv/bin/activate python -m mypy --ignore-missing-imports bittensor/ unit-tests-all-python-versions: @@ -290,7 +290,7 @@ workflows: pr-requirements: jobs: - - black: + - ruff: python-version: "3.9.13" - build-and-test: matrix: diff --git a/contrib/CONTRIBUTING.md b/contrib/CONTRIBUTING.md index e1413d1099..f9f4ed5f34 100644 --- a/contrib/CONTRIBUTING.md +++ b/contrib/CONTRIBUTING.md @@ -76,7 +76,7 @@ You can contribute to Bittensor in one of two main ways (as well as many others) Here is a high-level summary: - Code consistency is crucial; adhere to established programming language conventions. -- Use `black` to format your Python code; it ensures readability and consistency. +- Use `ruff format .` to format your Python code; it ensures readability and consistency. - Write concise Git commit messages; summarize changes in ~50 characters. - Follow these six commit rules: - Atomic Commits: Focus on one task or fix per commit. diff --git a/contrib/STYLE.md b/contrib/STYLE.md index b7ac755fc0..7804359d22 100644 --- a/contrib/STYLE.md +++ b/contrib/STYLE.md @@ -58,15 +58,17 @@ Python's official style guide is PEP 8, which provides conventions for writing c #### More details -Use `black` to format your python code before commiting for consistency across such a large pool of contributors. Black's code [style](https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#code-style) ensures consistent and opinionated code formatting. It automatically formats your Python code according to the Black style guide, enhancing code readability and maintainability. +Use [`ruff` to format](https://docs.astral.sh/ruff/formatter/#the-ruff-formatter) your python code before commiting for consistency across such a large pool of contributors. +Black code [style](https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#code-style) ensures consistent and opinionated code formatting. +Ruff automatically formats your Python code according to the Black style guide, enhancing code readability and maintainability. -Key Features of Black: +Key Features of ruff & Black code style: - Consistency: Black enforces a single, consistent coding style across your project, eliminating style debates and allowing developers to focus on code logic. + Consistency: ruff enforces a single, consistent coding style across your project, eliminating style debates and allowing developers to focus on code logic. Readability: By applying a standard formatting style, Black improves code readability, making it easier to understand and collaborate on projects. - Automation: Black automates the code formatting process, saving time and effort. 
It eliminates the need for manual formatting and reduces the likelihood of inconsistencies. + Automation: ruff automates the code formatting process, saving time and effort. It eliminates the need for manual formatting and reduces the likelihood of inconsistencies. ### Naming Conventions diff --git a/requirements/dev.txt b/requirements/dev.txt index 2fe7007484..8fbae24589 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -13,5 +13,6 @@ types-retry==0.9.9.4 freezegun==1.5.0 torch>=1.13.1 httpx==0.27.0 +ruff==0.4.7 aioresponses==0.7.6 factory-boy==3.3.0 From 1b8068b04c6bd59d7193916ff78e945d55527479 Mon Sep 17 00:00:00 2001 From: Maciej Urbanski Date: Sat, 1 Jun 2024 15:21:19 +0200 Subject: [PATCH 065/116] apply `ruff format .` --- bittensor/axon.py | 3 +- bittensor/chain_data.py | 8 ++--- bittensor/commands/delegates.py | 32 +++++++++---------- bittensor/commands/inspect.py | 12 +++---- bittensor/commands/metagraph.py | 8 ++--- bittensor/commands/overview.py | 20 ++++++------ bittensor/commands/root.py | 2 +- bittensor/commands/senate.py | 12 +++---- bittensor/commands/stake.py | 12 +++---- bittensor/commands/weights.py | 1 - bittensor/extrinsics/commit_weights.py | 2 +- bittensor/metagraph.py | 12 ++++--- bittensor/mock/subtensor_mock.py | 22 ++++++------- bittensor/subtensor.py | 8 ++--- bittensor/utils/__init__.py | 6 ++-- bittensor/utils/networking.py | 3 +- bittensor/wallet.py | 21 ++++-------- .../weights/test_commit_weights.py | 4 ++- tests/integration_tests/test_cli.py | 4 +-- .../extrinsics/test_registration.py | 12 ++----- tests/unit_tests/extrinsics/test_root.py | 4 +-- tests/unit_tests/extrinsics/test_senate.py | 16 +++------- tests/unit_tests/extrinsics/test_staking.py | 8 ++--- tests/unit_tests/extrinsics/test_unstaking.py | 12 ++----- tests/unit_tests/test_subtensor.py | 8 +++-- tests/unit_tests/test_synapse.py | 12 +++---- tests/unit_tests/utils/test_balance.py | 4 +-- 27 files changed, 118 insertions(+), 150 deletions(-) diff --git a/bittensor/axon.py b/bittensor/axon.py index 476ed52db2..f02c45662a 100644 --- a/bittensor/axon.py +++ b/bittensor/axon.py @@ -1,5 +1,4 @@ -""" Create and initialize Axon, which services the forward and backward requests from other neurons. -""" +"""Create and initialize Axon, which services the forward and backward requests from other neurons.""" # The MIT License (MIT) # Copyright © 2021 Yuma Rao diff --git a/bittensor/chain_data.py b/bittensor/chain_data.py index 49f92e5ce6..4a9f98244c 100644 --- a/bittensor/chain_data.py +++ b/bittensor/chain_data.py @@ -833,10 +833,10 @@ def list_of_tuple_from_vec_u8( cls, vec_u8: List[int] ) -> Dict[str, List["StakeInfo"]]: """Returns a list of StakeInfo objects from a ``vec_u8``.""" - decoded: Optional[ - list[tuple[str, list[object]]] - ] = from_scale_encoding_using_type_string( - input_=vec_u8, type_string="Vec<(AccountId, Vec)>" + decoded: Optional[list[tuple[str, list[object]]]] = ( + from_scale_encoding_using_type_string( + input_=vec_u8, type_string="Vec<(AccountId, Vec)>" + ) ) if decoded is None: diff --git a/bittensor/commands/delegates.py b/bittensor/commands/delegates.py index 1fd475785c..344b2bfcea 100644 --- a/bittensor/commands/delegates.py +++ b/bittensor/commands/delegates.py @@ -83,9 +83,9 @@ def show_delegates_lite( the table in the console. 
""" - registered_delegate_info: Optional[ - Dict[str, DelegatesDetails] - ] = get_delegates_details(url=bittensor.__delegates_details_url__) + registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( + get_delegates_details(url=bittensor.__delegates_details_url__) + ) if registered_delegate_info is None: bittensor.__console__.print( ":warning:[yellow]Could not get delegate info from chain.[/yellow]" @@ -206,9 +206,9 @@ def show_delegates( for prev_delegate in prev_delegates: prev_delegates_dict[prev_delegate.hotkey_ss58] = prev_delegate - registered_delegate_info: Optional[ - Dict[str, DelegatesDetails] - ] = get_delegates_details(url=bittensor.__delegates_details_url__) + registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( + get_delegates_details(url=bittensor.__delegates_details_url__) + ) if registered_delegate_info is None: bittensor.__console__.print( ":warning:[yellow]Could not get delegate info from chain.[/yellow]" @@ -238,10 +238,10 @@ def show_delegates( "[overline white]NOMINATORS", justify="center", style="green", no_wrap=True ) table.add_column( - "[overline white]DELEGATE STAKE(\u03C4)", justify="right", no_wrap=True + "[overline white]DELEGATE STAKE(\u03c4)", justify="right", no_wrap=True ) table.add_column( - "[overline white]TOTAL STAKE(\u03C4)", + "[overline white]TOTAL STAKE(\u03c4)", justify="right", style="green", no_wrap=True, @@ -250,7 +250,7 @@ def show_delegates( table.add_column("[overline white]VPERMIT", justify="right", no_wrap=False) table.add_column("[overline white]TAKE", style="white", no_wrap=True) table.add_column( - "[overline white]NOMINATOR/(24h)/k\u03C4", style="green", justify="center" + "[overline white]NOMINATOR/(24h)/k\u03c4", style="green", justify="center" ) table.add_column("[overline white]DELEGATE/(24h)", style="green", justify="center") table.add_column("[overline white]Desc", style="rgb(50,163,219)") @@ -980,7 +980,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): style="bold green", ) table.add_column( - "[overline green]\u03C4/24h", + "[overline green]\u03c4/24h", footer_style="overline green", style="bold green", ) @@ -988,10 +988,10 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): "[overline white]NOMS", justify="center", style="green", no_wrap=True ) table.add_column( - "[overline white]OWNER STAKE(\u03C4)", justify="right", no_wrap=True + "[overline white]OWNER STAKE(\u03c4)", justify="right", no_wrap=True ) table.add_column( - "[overline white]TOTAL STAKE(\u03C4)", + "[overline white]TOTAL STAKE(\u03c4)", justify="right", style="green", no_wrap=True, @@ -1000,7 +1000,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): "[overline white]SUBNETS", justify="right", style="white", no_wrap=True ) table.add_column("[overline white]VPERMIT", justify="right", no_wrap=True) - table.add_column("[overline white]24h/k\u03C4", style="green", justify="center") + table.add_column("[overline white]24h/k\u03c4", style="green", justify="center") table.add_column("[overline white]Desc", style="rgb(50,163,219)") total_delegated = 0 @@ -1023,9 +1023,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): delegates.sort(key=lambda delegate: delegate[0].total_stake, reverse=True) total_delegated += sum(my_delegates.values()) - registered_delegate_info: Optional[ - DelegatesDetails - ] = get_delegates_details(url=bittensor.__delegates_details_url__) + registered_delegate_info: Optional[DelegatesDetails] = ( + 
get_delegates_details(url=bittensor.__delegates_details_url__) + ) if registered_delegate_info is None: bittensor.__console__.print( ":warning:[yellow]Could not get delegate info from chain.[/yellow]" diff --git a/bittensor/commands/inspect.py b/bittensor/commands/inspect.py index 76b015b774..4ef0e84c4e 100644 --- a/bittensor/commands/inspect.py +++ b/bittensor/commands/inspect.py @@ -138,9 +138,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): ) bittensor.logging.debug(f"Netuids to check: {netuids}") - registered_delegate_info: Optional[ - Dict[str, DelegatesDetails] - ] = get_delegates_details(url=bittensor.__delegates_details_url__) + registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( + get_delegates_details(url=bittensor.__delegates_details_url__) + ) if registered_delegate_info is None: bittensor.__console__.print( ":warning:[yellow]Could not get delegate info from chain.[/yellow]" @@ -181,9 +181,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): "[overline white]Emission", footer_style="overline white", style="green" ) for wallet in tqdm(wallets): - delegates: List[ - Tuple[bittensor.DelegateInfo, bittensor.Balance] - ] = subtensor.get_delegated(coldkey_ss58=wallet.coldkeypub.ss58_address) + delegates: List[Tuple[bittensor.DelegateInfo, bittensor.Balance]] = ( + subtensor.get_delegated(coldkey_ss58=wallet.coldkeypub.ss58_address) + ) if not wallet.coldkeypub_file.exists_on_device(): continue cold_balance = subtensor.get_balance(wallet.coldkeypub.ss58_address) diff --git a/bittensor/commands/metagraph.py b/bittensor/commands/metagraph.py index b6999fe553..1075f50d31 100644 --- a/bittensor/commands/metagraph.py +++ b/bittensor/commands/metagraph.py @@ -159,8 +159,8 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): style="yellow", ) table.add_column( - "[overline white]STAKE(\u03C4)", - "\u03C4{:.5f}".format(total_stake), + "[overline white]STAKE(\u03c4)", + "\u03c4{:.5f}".format(total_stake), footer_style="overline white", justify="right", style="green", @@ -207,8 +207,8 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): no_wrap=True, ) table.add_column( - "[overline white]EMISSION(\u03C1)", - "\u03C1{}".format(int(total_emission)), + "[overline white]EMISSION(\u03c1)", + "\u03c1{}".format(int(total_emission)), footer_style="overline white", justify="right", style="green", diff --git a/bittensor/commands/overview.py b/bittensor/commands/overview.py index b35fd85596..b572847e49 100644 --- a/bittensor/commands/overview.py +++ b/bittensor/commands/overview.py @@ -258,9 +258,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): for neuron in neuron_list: if neuron.hotkey in checked_hotkeys: continue - total_coldkey_stake_from_metagraph[ - neuron.coldkey - ] += neuron.stake_dict[neuron.coldkey] + total_coldkey_stake_from_metagraph[neuron.coldkey] += ( + neuron.stake_dict[neuron.coldkey] + ) checked_hotkeys.add(neuron.hotkey) alerts_table = Table(show_header=True, header_style="bold magenta") @@ -483,8 +483,8 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): ) if last_subnet: table.add_column( - "[overline white]STAKE(\u03C4)", - "\u03C4{:.5f}".format(total_stake), + "[overline white]STAKE(\u03c4)", + "\u03c4{:.5f}".format(total_stake), footer_style="overline white", justify="right", style="green", @@ -493,7 +493,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): else: # No footer for non-last subnet. 
table.add_column( - "[overline white]STAKE(\u03C4)", + "[overline white]STAKE(\u03c4)", justify="right", style="green", no_wrap=True, @@ -539,8 +539,8 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): no_wrap=True, ) table.add_column( - "[overline white]EMISSION(\u03C1)", - "\u03C1{:_}".format(total_emission), + "[overline white]EMISSION(\u03c1)", + "\u03c1{:_}".format(total_emission), footer_style="overline white", justify="right", style="green", @@ -603,7 +603,7 @@ def overview_sort_function(row): console.clear() - caption = "[italic][dim][white]Wallet balance: [green]\u03C4" + str( + caption = "[italic][dim][white]Wallet balance: [green]\u03c4" + str( total_balance.tao ) grid.add_row(Align(caption, vertical="middle", align="center")) @@ -613,7 +613,7 @@ def overview_sort_function(row): @staticmethod def _get_neurons_for_netuid( - args_tuple: Tuple["bittensor.Config", int, List[str]] + args_tuple: Tuple["bittensor.Config", int, List[str]], ) -> Tuple[int, List["bittensor.NeuronInfoLite"], Optional[str]]: subtensor_config, netuid, hot_wallets = args_tuple diff --git a/bittensor/commands/root.py b/bittensor/commands/root.py index a3658d03ea..5607921b19 100644 --- a/bittensor/commands/root.py +++ b/bittensor/commands/root.py @@ -173,7 +173,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): no_wrap=True, ) table.add_column( - "[overline white]STAKE(\u03C4)", + "[overline white]STAKE(\u03c4)", footer_style="overline white", justify="right", style="green", diff --git a/bittensor/commands/senate.py b/bittensor/commands/senate.py index c92290af89..03a73cde5b 100644 --- a/bittensor/commands/senate.py +++ b/bittensor/commands/senate.py @@ -211,9 +211,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): senate_members = subtensor.get_senate_members() proposals = subtensor.get_proposals() - registered_delegate_info: Optional[ - Dict[str, DelegatesDetails] - ] = get_delegates_details(url=bittensor.__delegates_details_url__) + registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( + get_delegates_details(url=bittensor.__delegates_details_url__) + ) table = Table(show_footer=False) table.title = ( @@ -342,9 +342,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): console.print(":cross_mark: [red]Failed[/red]: Proposal not found.") return - registered_delegate_info: Optional[ - Dict[str, DelegatesDetails] - ] = get_delegates_details(url=bittensor.__delegates_details_url__) + registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( + get_delegates_details(url=bittensor.__delegates_details_url__) + ) table = Table(show_footer=False) table.title = "[white]Votes for Proposal {}".format(proposal_hash) diff --git a/bittensor/commands/stake.py b/bittensor/commands/stake.py index 8fe80b606a..1bc2cf2786 100644 --- a/bittensor/commands/stake.py +++ b/bittensor/commands/stake.py @@ -381,9 +381,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): wallets = _get_coldkey_wallets_for_path(cli.config.wallet.path) else: wallets = [bittensor.wallet(config=cli.config)] - registered_delegate_info: Optional[ - Dict[str, DelegatesDetails] - ] = get_delegates_details(url=bittensor.__delegates_details_url__) + registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( + get_delegates_details(url=bittensor.__delegates_details_url__) + ) def get_stake_accounts( wallet, subtensor @@ -515,7 +515,7 @@ def get_all_wallet_accounts( ) table.add_column( "[overline white]Balance", - 
"\u03C4{:.5f}".format(total_balance), + "\u03c4{:.5f}".format(total_balance), footer_style="overline white", style="green", ) @@ -524,13 +524,13 @@ def get_all_wallet_accounts( ) table.add_column( "[overline white]Stake", - "\u03C4{:.5f}".format(total_stake), + "\u03c4{:.5f}".format(total_stake), footer_style="overline white", style="green", ) table.add_column( "[overline white]Rate", - "\u03C4{:.5f}/d".format(total_rate), + "\u03c4{:.5f}/d".format(total_rate), footer_style="overline white", style="green", ) diff --git a/bittensor/commands/weights.py b/bittensor/commands/weights.py index 19989c94f3..ac4d9dfc36 100644 --- a/bittensor/commands/weights.py +++ b/bittensor/commands/weights.py @@ -19,7 +19,6 @@ """Module that encapsulates the CommitWeightCommand and the RevealWeightCommand. Used to commit and reveal weights for a specific subnet on the Bittensor Network.""" - import argparse import os import re diff --git a/bittensor/extrinsics/commit_weights.py b/bittensor/extrinsics/commit_weights.py index a27e1941ba..2a526f5e96 100644 --- a/bittensor/extrinsics/commit_weights.py +++ b/bittensor/extrinsics/commit_weights.py @@ -16,7 +16,7 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. -""" Module commit weights and reveal weights extrinsic. """ +"""Module commit weights and reveal weights extrinsic.""" from typing import Tuple, List diff --git a/bittensor/metagraph.py b/bittensor/metagraph.py index 8bad4d6c78..8d7e97bcc0 100644 --- a/bittensor/metagraph.py +++ b/bittensor/metagraph.py @@ -618,7 +618,9 @@ def _set_weights_and_bonds(self, subtensor: Optional[bittensor.subtensor] = None # TODO: Check and test the computation of weights and bonds if self.netuid == 0: self.weights = self._process_root_weights( - [neuron.weights for neuron in self.neurons], "weights", subtensor # type: ignore + [neuron.weights for neuron in self.neurons], + "weights", + subtensor, # type: ignore ) else: self.weights = self._process_weights_or_bonds( @@ -659,16 +661,16 @@ def _process_weights_or_bonds( if attribute == "weights": data_array.append( bittensor.utils.weight_utils.convert_weight_uids_and_vals_to_tensor( - len(self.neurons), list(uids), list(values) # type: ignore + len(self.neurons), + list(uids), + list(values), # type: ignore ) ) else: data_array.append( bittensor.utils.weight_utils.convert_bond_uids_and_vals_to_tensor( # type: ignore len(self.neurons), list(uids), list(values) - ).astype( - np.float32 - ) + ).astype(np.float32) ) tensor_param: Union["torch.nn.Parameter", NDArray] = ( ( diff --git a/bittensor/mock/subtensor_mock.py b/bittensor/mock/subtensor_mock.py index a0efe48d74..30d58f22e0 100644 --- a/bittensor/mock/subtensor_mock.py +++ b/bittensor/mock/subtensor_mock.py @@ -430,9 +430,9 @@ def _register_neuron(self, netuid: int, hotkey: str, coldkey: str) -> int: subtensor_state["Active"][netuid][uid][self.block_number] = True subtensor_state["LastUpdate"][netuid][uid] = {} - subtensor_state["LastUpdate"][netuid][uid][ + subtensor_state["LastUpdate"][netuid][uid][self.block_number] = ( self.block_number - ] = self.block_number + ) subtensor_state["Rank"][netuid][uid] = {} subtensor_state["Rank"][netuid][uid][self.block_number] = 0.0 @@ -1064,9 +1064,9 @@ def _do_nominate( else: subtensor_state["Delegates"][hotkey_ss58] = {} - subtensor_state["Delegates"][hotkey_ss58][ - self.block_number - ] = 0.18 # Constant for now + subtensor_state["Delegates"][hotkey_ss58][self.block_number] = ( + 0.18 # Constant 
for now + ) return True @@ -1189,9 +1189,9 @@ def _do_stake( if not wallet.coldkeypub.ss58_address in stake_state[hotkey_ss58]: stake_state[hotkey_ss58][wallet.coldkeypub.ss58_address] = {} - stake_state[hotkey_ss58][wallet.coldkeypub.ss58_address][ - self.block_number - ] = amount.rao + stake_state[hotkey_ss58][wallet.coldkeypub.ss58_address][self.block_number] = ( + amount.rao + ) # Add to total_stake storage subtensor_state["TotalStake"][self.block_number] = ( @@ -1275,9 +1275,9 @@ def _do_unstake( total_hotkey_stake_state = subtensor_state["TotalHotkeyStake"] if not hotkey_ss58 in total_hotkey_stake_state: total_hotkey_stake_state[hotkey_ss58] = {} - total_hotkey_stake_state[hotkey_ss58][ - self.block_number - ] = 0 # Shouldn't happen + total_hotkey_stake_state[hotkey_ss58][self.block_number] = ( + 0 # Shouldn't happen + ) total_coldkey_stake_state = subtensor_state["TotalColdkeyStake"] if not wallet.coldkeypub.ss58_address in total_coldkey_stake_state: diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 46f999eb97..1b422ad276 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -20,6 +20,7 @@ The ``bittensor.subtensor`` module in Bittensor serves as a crucial interface for interacting with the Bittensor blockchain, facilitating a range of operations essential for the decentralized machine learning network. """ + import argparse import copy import functools @@ -3049,9 +3050,7 @@ def query_runtime_api( """ call_definition = bittensor.__type_registry__["runtime_api"][runtime_api][ # type: ignore "methods" # type: ignore - ][ - method - ] # type: ignore + ][method] # type: ignore json_result = self.state_call( method=f"{runtime_api}_{method}", @@ -4749,7 +4748,8 @@ def make_substrate_call_with_retry(): if block_hash: params = params + [block_hash] return self.substrate.rpc_request( - method="neuronInfo_getNeuron", params=params # custom rpc method + method="neuronInfo_getNeuron", + params=params, # custom rpc method ) json_body = make_substrate_call_with_retry() diff --git a/bittensor/utils/__init__.py b/bittensor/utils/__init__.py index 72d053ea7a..175094da87 100644 --- a/bittensor/utils/__init__.py +++ b/bittensor/utils/__init__.py @@ -207,9 +207,9 @@ def get_explorer_url_for_network( explorer_urls: Optional[Dict[str, str]] = {} # Will be None if the network is not known. i.e. not in network_map - explorer_root_urls: Optional[ - Dict[str, str] - ] = get_explorer_root_url_by_network_from_map(network, network_map) + explorer_root_urls: Optional[Dict[str, str]] = ( + get_explorer_root_url_by_network_from_map(network, network_map) + ) if explorer_root_urls != {}: # We are on a known network. diff --git a/bittensor/utils/networking.py b/bittensor/utils/networking.py index 9f1450af81..675b4e45b4 100644 --- a/bittensor/utils/networking.py +++ b/bittensor/utils/networking.py @@ -1,5 +1,4 @@ -""" Utils for handling local network with ip and ports. -""" +"""Utils for handling local network with ip and ports.""" # The MIT License (MIT) # Copyright © 2021-2022 Yuma Rao diff --git a/bittensor/wallet.py b/bittensor/wallet.py index 6ac808b12a..be6aa08c93 100644 --- a/bittensor/wallet.py +++ b/bittensor/wallet.py @@ -1,5 +1,4 @@ -""" Implementation of the wallet class, which manages balances with staking and transfer. Also manages hotkey and coldkey. -""" +"""Implementation of the wallet class, which manages balances with staking and transfer. 
Also manages hotkey and coldkey.""" # The MIT License (MIT) # Copyright © 2021 Yuma Rao @@ -676,8 +675,7 @@ def regenerate_coldkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": - ... + ) -> "wallet": ... @overload def regenerate_coldkey( @@ -686,8 +684,7 @@ def regenerate_coldkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": - ... + ) -> "wallet": ... @overload def regenerate_coldkey( @@ -696,8 +693,7 @@ def regenerate_coldkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": - ... + ) -> "wallet": ... def regenerate_coldkey( self, @@ -786,8 +782,7 @@ def regenerate_hotkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": - ... + ) -> "wallet": ... @overload def regenerate_hotkey( @@ -796,8 +791,7 @@ def regenerate_hotkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": - ... + ) -> "wallet": ... @overload def regenerate_hotkey( @@ -806,8 +800,7 @@ def regenerate_hotkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": - ... + ) -> "wallet": ... def regenerate_hotkey( self, diff --git a/tests/e2e_tests/subcommands/weights/test_commit_weights.py b/tests/e2e_tests/subcommands/weights/test_commit_weights.py index d22efde267..faed9d3925 100644 --- a/tests/e2e_tests/subcommands/weights/test_commit_weights.py +++ b/tests/e2e_tests/subcommands/weights/test_commit_weights.py @@ -168,7 +168,9 @@ def test_commit_and_reveal_weights(local_chain): # Query the Weights storage map revealed_weights = subtensor.query_module( - module="SubtensorModule", name="Weights", params=[1, uid] # netuid and uid + module="SubtensorModule", + name="Weights", + params=[1, uid], # netuid and uid ) # Assert that the revealed weights are set correctly diff --git a/tests/integration_tests/test_cli.py b/tests/integration_tests/test_cli.py index c20c905549..aa019c4178 100644 --- a/tests/integration_tests/test_cli.py +++ b/tests/integration_tests/test_cli.py @@ -2519,9 +2519,7 @@ def test_set_identity_command( "bittensor.wallet", return_value=mock_wallet ), patch("bittensor.__console__", MagicMock()), patch( "rich.prompt.Prompt.ask", side_effect=["y", "y"] - ), patch( - "sys.exit" - ) as mock_exit: + ), patch("sys.exit") as mock_exit: # Act if expected_exception: with pytest.raises(expected_exception) as exc_info: diff --git a/tests/unit_tests/extrinsics/test_registration.py b/tests/unit_tests/extrinsics/test_registration.py index ccae48452d..5a4d32dff6 100644 --- a/tests/unit_tests/extrinsics/test_registration.py +++ b/tests/unit_tests/extrinsics/test_registration.py @@ -269,9 +269,7 @@ def test_burned_register_extrinsic( return_value=(recycle_success, "Mock error message"), ), patch.object( mock_subtensor, "is_hotkey_registered", return_value=is_registered - ), patch( - "rich.prompt.Confirm.ask", return_value=prompt_response - ) as mock_confirm: + ), patch("rich.prompt.Confirm.ask", return_value=prompt_response) as mock_confirm: # Act result = burned_register_extrinsic( subtensor=mock_subtensor, wallet=mock_wallet, netuid=123, prompt=True @@ -313,9 +311,7 @@ def test_register_extrinsic_without_pow( mock_subtensor, "get_neuron_for_pubkey_and_subnet", return_value=MagicMock(is_null=neuron_is_null), - ), patch( - "rich.prompt.Confirm.ask", return_value=prompt_response - ), patch( + ), patch("rich.prompt.Confirm.ask", return_value=prompt_response), patch( 
"torch.cuda.is_available", return_value=cuda_available ): # Act @@ -374,9 +370,7 @@ def test_register_extrinsic_with_pow( mock_subtensor, "_do_pow_register", return_value=(registration_success, "key is already registered"), - ), patch( - "torch.cuda.is_available", return_value=cuda - ): + ), patch("torch.cuda.is_available", return_value=cuda): # Act if pow_success: mock_pow_solution.is_stale.return_value = pow_stale diff --git a/tests/unit_tests/extrinsics/test_root.py b/tests/unit_tests/extrinsics/test_root.py index 2bc1be632b..d3ae2c3973 100644 --- a/tests/unit_tests/extrinsics/test_root.py +++ b/tests/unit_tests/extrinsics/test_root.py @@ -189,9 +189,7 @@ def test_set_root_weights_extrinsic( mock_subtensor, "_do_set_weights", return_value=(expected_success, "Mock error") ), patch.object( mock_subtensor, "min_allowed_weights", return_value=0 - ), patch.object( - mock_subtensor, "max_weight_limit", return_value=1 - ), patch( + ), patch.object(mock_subtensor, "max_weight_limit", return_value=1), patch( "rich.prompt.Confirm.ask", return_value=user_response ) as mock_confirm: # Act diff --git a/tests/unit_tests/extrinsics/test_senate.py b/tests/unit_tests/extrinsics/test_senate.py index 8310fc38fa..66849efc5c 100644 --- a/tests/unit_tests/extrinsics/test_senate.py +++ b/tests/unit_tests/extrinsics/test_senate.py @@ -58,9 +58,7 @@ def test_register_senate_extrinsic( "bittensor.extrinsics.senate.Confirm.ask", return_value=not prompt ), patch("bittensor.extrinsics.senate.time.sleep"), patch.object( mock_subtensor.substrate, "compose_call" - ), patch.object( - mock_subtensor.substrate, "create_signed_extrinsic" - ), patch.object( + ), patch.object(mock_subtensor.substrate, "create_signed_extrinsic"), patch.object( mock_subtensor.substrate, "submit_extrinsic", return_value=MagicMock( @@ -153,9 +151,7 @@ def test_vote_senate_extrinsic( "bittensor.extrinsics.senate.Confirm.ask", return_value=not prompt ), patch("bittensor.extrinsics.senate.time.sleep"), patch.object( mock_subtensor.substrate, "compose_call" - ), patch.object( - mock_subtensor.substrate, "create_signed_extrinsic" - ), patch.object( + ), patch.object(mock_subtensor.substrate, "create_signed_extrinsic"), patch.object( mock_subtensor.substrate, "submit_extrinsic", return_value=MagicMock( @@ -219,9 +215,7 @@ def test_leave_senate_extrinsic( "bittensor.extrinsics.senate.Confirm.ask", return_value=not prompt ), patch("bittensor.extrinsics.senate.time.sleep"), patch.object( mock_subtensor.substrate, "compose_call" - ), patch.object( - mock_subtensor.substrate, "create_signed_extrinsic" - ), patch.object( + ), patch.object(mock_subtensor.substrate, "create_signed_extrinsic"), patch.object( mock_subtensor.substrate, "submit_extrinsic", return_value=MagicMock( @@ -229,9 +223,7 @@ def test_leave_senate_extrinsic( process_events=MagicMock(), error_message="error", ), - ), patch.object( - mock_wallet, "is_senate_member", return_value=is_registered - ): + ), patch.object(mock_wallet, "is_senate_member", return_value=is_registered): # Act result = leave_senate_extrinsic( subtensor=mock_subtensor, diff --git a/tests/unit_tests/extrinsics/test_staking.py b/tests/unit_tests/extrinsics/test_staking.py index 0a77ceb2c7..288e065f78 100644 --- a/tests/unit_tests/extrinsics/test_staking.py +++ b/tests/unit_tests/extrinsics/test_staking.py @@ -133,9 +133,7 @@ def test_add_stake_extrinsic( else mock_other_owner_wallet.coldkeypub.ss58_address, ), patch.object( mock_subtensor, "is_hotkey_delegate", return_value=hotkey_delegate - ), patch.object( - 
mock_subtensor, "get_delegate_take", return_value=0.01 - ), patch( + ), patch.object(mock_subtensor, "get_delegate_take", return_value=0.01), patch( "rich.prompt.Confirm.ask", return_value=user_accepts ) as mock_confirm: # Act @@ -506,9 +504,7 @@ def stake_side_effect(hotkey_ss58, *args, **kwargs): mock_subtensor, "_do_stake", side_effect=stake_side_effect ) as mock_do_stake, patch.object( mock_subtensor, "tx_rate_limit", return_value=0 - ), patch( - "rich.prompt.Confirm.ask", return_value=prompt_response - ) as mock_confirm: + ), patch("rich.prompt.Confirm.ask", return_value=prompt_response) as mock_confirm: # Act if exception: with pytest.raises(exception) as exc_info: diff --git a/tests/unit_tests/extrinsics/test_unstaking.py b/tests/unit_tests/extrinsics/test_unstaking.py index d6ff094dd8..6ad0a977e7 100644 --- a/tests/unit_tests/extrinsics/test_unstaking.py +++ b/tests/unit_tests/extrinsics/test_unstaking.py @@ -83,9 +83,7 @@ def test_unstake_extrinsic( mock_subtensor, "get_stake_for_coldkey_and_hotkey", return_value=mock_current_stake, - ), patch( - "rich.prompt.Confirm.ask", return_value=user_accepts - ) as mock_confirm: + ), patch("rich.prompt.Confirm.ask", return_value=user_accepts) as mock_confirm: result = unstake_extrinsic( subtensor=mock_subtensor, wallet=mock_wallet, @@ -291,15 +289,11 @@ def unstake_side_effect(hotkey_ss58, *args, **kwargs): side_effect=mock_get_minimum_required_stake, ), patch.object( mock_subtensor, "get_balance", return_value=Balance.from_tao(wallet_balance) - ), patch.object( - mock_subtensor, "tx_rate_limit", return_value=0 - ), patch.object( + ), patch.object(mock_subtensor, "tx_rate_limit", return_value=0), patch.object( mock_subtensor, "get_stake_for_coldkey_and_hotkey", return_value=mock_current_stake, - ), patch( - "rich.prompt.Confirm.ask", return_value=prompt_response - ) as mock_confirm: + ), patch("rich.prompt.Confirm.ask", return_value=prompt_response) as mock_confirm: # Act if exception: with pytest.raises(exception) as exc_info: diff --git a/tests/unit_tests/test_subtensor.py b/tests/unit_tests/test_subtensor.py index 3709ec190d..72ee3b2d23 100644 --- a/tests/unit_tests/test_subtensor.py +++ b/tests/unit_tests/test_subtensor.py @@ -1191,9 +1191,11 @@ def test_total_stake_no_block(mocker, subtensor): # Asserts assert result is not None subtensor.query_subtensor.assert_called_once_with("TotalStake", None) - spy_balance_from_rao.assert_called_once_with( - subtensor.query_subtensor.return_value.value - ), + ( + spy_balance_from_rao.assert_called_once_with( + subtensor.query_subtensor.return_value.value + ), + ) # `serving_rate_limit` method tests diff --git a/tests/unit_tests/test_synapse.py b/tests/unit_tests/test_synapse.py index e28265dc24..b0ce4f1325 100644 --- a/tests/unit_tests/test_synapse.py +++ b/tests/unit_tests/test_synapse.py @@ -131,12 +131,12 @@ class Test(bittensor.Synapse): c: Optional[int] # Required, carried through headers, cannot be None d: Optional[list[int]] # Required, carried though headers, cannot be None e: list[int] # Carried through headers - f: Optional[ - int - ] = None # Not Required, Not carried through headers, can be None - g: Optional[ - list[int] - ] = None # Not Required, Not carried though headers, can be None + f: Optional[int] = ( + None # Not Required, Not carried through headers, can be None + ) + g: Optional[list[int]] = ( + None # Not Required, Not carried though headers, can be None + ) # Create an instance of the custom Synapse subclass synapse = Test( diff --git 
a/tests/unit_tests/utils/test_balance.py b/tests/unit_tests/utils/test_balance.py index 129af42f01..b99bc111f2 100644 --- a/tests/unit_tests/utils/test_balance.py +++ b/tests/unit_tests/utils/test_balance.py @@ -209,8 +209,8 @@ def test_balance_mul(balance: Union[int, float], balance2: Union[int, float]): prod_ = balance_ * balance2_ assert isinstance(prod_, Balance) - assert prod_.rao == pytest.approx( - rao_ * rao2_, 9 + assert ( + prod_.rao == pytest.approx(rao_ * rao2_, 9) ), f"{balance_} * {balance2_} == {prod_.rao} != {rao_} * {balance2} == {rao_ * balance2}" From 6b40c6dbb1fe795de5c03e60be64841da30eebeb Mon Sep 17 00:00:00 2001 From: Maciej Urbanski Date: Tue, 4 Jun 2024 15:01:11 +0200 Subject: [PATCH 066/116] deprecate nest_asyncio use --- bittensor/__init__.py | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/bittensor/__init__.py b/bittensor/__init__.py index 8ec8019728..21b160da82 100644 --- a/bittensor/__init__.py +++ b/bittensor/__init__.py @@ -16,15 +16,28 @@ # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. +import os +import warnings from rich.console import Console from rich.traceback import install -# Install and apply nest asyncio to allow the async functions -# to run in a .ipynb -import nest_asyncio -nest_asyncio.apply() +if (NEST_ASYNCIO_ENV := os.getenv("NEST_ASYNCIO")) in ("1", None): + if NEST_ASYNCIO_ENV is None: + warnings.warn( + "NEST_ASYNCIO implicitly set to '1'. In the future, the default value will be '0'." + "If you use `nest_asyncio` make sure to add it explicitly to your project dependencies," + "as it will be removed from `bittensor` package dependencies in the future." + "To silence this warning, explicitly set the environment variable, e.g. `export NEST_ASYNCIO=0`.", + DeprecationWarning, + ) + # Install and apply nest asyncio to allow the async functions + # to run in a .ipynb + import nest_asyncio + + nest_asyncio.apply() + # Bittensor code and protocol version. __version__ = "7.0.0" From ae607365d709f00909ef824cbcd0922c82efbd51 Mon Sep 17 00:00:00 2001 From: ibraheem-opentensor Date: Tue, 4 Jun 2024 09:40:14 -0700 Subject: [PATCH 067/116] Feat: Nonce implementation improved --- bittensor/axon.py | 50 +++++++++++++++++++++++++++++++++++-------- bittensor/dendrite.py | 21 ++++++++++++++++-- 2 files changed, 60 insertions(+), 11 deletions(-) diff --git a/bittensor/axon.py b/bittensor/axon.py index f02c45662a..a377860518 100644 --- a/bittensor/axon.py +++ b/bittensor/axon.py @@ -24,6 +24,7 @@ import contextlib import copy import inspect +import ntplib import json import os import threading @@ -896,16 +897,39 @@ async def default_verify(self, synapse: bittensor.Synapse): # If we don't have a nonce stored, ensure that the nonce falls within # a reasonable delta. 
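        # The reworked check below derives `current_time` from an NTP query and falls
        # back to time.time_ns() if the request fails. For dendrites reporting
        # version >= 720, a nonce for a previously unseen endpoint must be newer than
        # `current_time - allowedDelta`, and every nonce must be strictly greater than
        # the last one stored for that endpoint; older dendrites keep the previous
        # last-stored-nonce comparison only.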
- if ( - self.nonces.get(endpoint_key) is None - and synapse.dendrite.nonce <= time.time_ns() - allowedDelta - ): - raise Exception("Nonce is too old") - if ( - self.nonces.get(endpoint_key) is not None + try: + ntp_client = ntplib.NTPClient() + response = ntp_client.request('pool.ntp.org') + current_time = int(response.tx_time * 1e9) # Convert to nanoseconds + except Exception as e: + print(f"Error fetching NTP time: {e}") + # Fallback to local time if NTP fails + current_time = time.time_ns() + + if synapse.dendrite.nonce is None: + raise Exception("Missing Nonce") + + # Updated nonce using NTP implementated at v7.2 + if synapse.dendrite.version >= 720: + # If we don't have a nonce stored, ensure that the nonce falls within + # a reasonable delta. + if ( + self.nonces.get(endpoint_key) is None + and synapse.dendrite.nonce <= current_time - allowedDelta + ): + raise Exception("Nonce is too old") + if ( + self.nonces.get(endpoint_key) is not None + and synapse.dendrite.nonce <= self.nonces[endpoint_key] + ): + raise Exception("Nonce is too old") + else: + if ( + endpoint_key in self.nonces.keys() + and self.nonces[endpoint_key] is not None and synapse.dendrite.nonce <= self.nonces[endpoint_key] ): - raise Exception("Nonce is too old") + raise Exception("Nonce is too small") if not keypair.verify(message, synapse.dendrite.signature): raise Exception( @@ -1168,13 +1192,21 @@ async def preprocess(self, request: Request) -> bittensor.Synapse: f"Improperly formatted request. Could not parse headers {request.headers} into synapse of type {request_name}." ) synapse.name = request_name + try: + ntp_client = ntplib.NTPClient() + response = ntp_client.request('pool.ntp.org') + current_time = int(response.tx_time * 1e9) # Convert to nanoseconds + except Exception as e: + print(f"Error fetching NTP time: {e}") + # Fallback to local time if NTP fails + current_time = time.time_ns() # Fills the local axon information into the synapse. 
synapse.axon.__dict__.update( { "version": str(bittensor.__version_as_int__), "uuid": str(self.axon.uuid), - "nonce": f"{time.time_ns()}", + "nonce": current_time, "status_code": 100, } ) diff --git a/bittensor/dendrite.py b/bittensor/dendrite.py index 47a3ba6f95..b31dc16887 100644 --- a/bittensor/dendrite.py +++ b/bittensor/dendrite.py @@ -24,6 +24,7 @@ import time import aiohttp import bittensor +import ntplib from typing import Optional, List, Union, AsyncGenerator, Any from bittensor.utils.registration import torch, use_torch @@ -632,6 +633,15 @@ async def call_stream( else: yield synapse + def get_ntp_time_ns(self): + try: + response = self.ntp_client.request('pool.ntp.org') + return int(response.tx_time * 1e9) # Convert to nanoseconds + except Exception as e: + print(f"Error fetching NTP time: {e}") + # Fallback to local t + return time.time_ns() + def preprocess_synapse_for_request( self, target_axon_info: bittensor.AxonInfo, @@ -653,12 +663,19 @@ def preprocess_synapse_for_request( """ # Set the timeout for the synapse synapse.timeout = timeout - + try: + ntp_client = ntplib.NTPClient() + response = ntp_client.request('pool.ntp.org') + current_time = int(response.tx_time * 1e9) # Convert to nanoseconds + except Exception as e: + bittensor.logging.debug(f"Error fetching NTP time: {e}, using system UNIX time") + # Fallback to local time if NTP fails + current_time = time.time_ns() # Build the Dendrite headers using the local system's details synapse.dendrite = bittensor.TerminalInfo( ip=self.external_ip, version=bittensor.__version_as_int__, - nonce=time.time_ns(), + nonce=current_time, uuid=self.uuid, hotkey=self.keypair.ss58_address, ) From e26be7747a4b568a9adf216f98689b9288eebc72 Mon Sep 17 00:00:00 2001 From: ibraheem-opentensor Date: Tue, 4 Jun 2024 09:41:21 -0700 Subject: [PATCH 068/116] Removed dupe check --- bittensor/axon.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/bittensor/axon.py b/bittensor/axon.py index a377860518..d5a8c0600e 100644 --- a/bittensor/axon.py +++ b/bittensor/axon.py @@ -905,9 +905,6 @@ async def default_verify(self, synapse: bittensor.Synapse): print(f"Error fetching NTP time: {e}") # Fallback to local time if NTP fails current_time = time.time_ns() - - if synapse.dendrite.nonce is None: - raise Exception("Missing Nonce") # Updated nonce using NTP implementated at v7.2 if synapse.dendrite.version >= 720: From 200620810deb67c4d68a3e70a2d12d81cd540da6 Mon Sep 17 00:00:00 2001 From: ibraheem-opentensor Date: Tue, 4 Jun 2024 09:44:40 -0700 Subject: [PATCH 069/116] Removed unused func --- bittensor/dendrite.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/bittensor/dendrite.py b/bittensor/dendrite.py index b31dc16887..0a8a3401e3 100644 --- a/bittensor/dendrite.py +++ b/bittensor/dendrite.py @@ -632,15 +632,6 @@ async def call_stream( yield synapse.deserialize() else: yield synapse - - def get_ntp_time_ns(self): - try: - response = self.ntp_client.request('pool.ntp.org') - return int(response.tx_time * 1e9) # Convert to nanoseconds - except Exception as e: - print(f"Error fetching NTP time: {e}") - # Fallback to local t - return time.time_ns() def preprocess_synapse_for_request( self, From 6f92b352679fd7a37517edf55ebd8a69e1adaf33 Mon Sep 17 00:00:00 2001 From: Gus Date: Tue, 4 Jun 2024 15:40:24 -0400 Subject: [PATCH 070/116] feat: adds BittensorNTPClient --- bittensor/axon.py | 20 ++++++++++---------- bittensor/dendrite.py | 13 ++++++++----- bittensor/utils/networking.py | 15 +++++++++++++++ tests/unit_tests/utils/test_networking.py | 7 
+++++++ 4 files changed, 40 insertions(+), 15 deletions(-) diff --git a/bittensor/axon.py b/bittensor/axon.py index d5a8c0600e..9e956475a1 100644 --- a/bittensor/axon.py +++ b/bittensor/axon.py @@ -24,7 +24,6 @@ import contextlib import copy import inspect -import ntplib import json import os import threading @@ -57,6 +56,7 @@ SynapseException, ) from bittensor.threadpool import PriorityThreadPoolExecutor +from bittensor.utils.networking import BittensorNTPClient class FastAPIThreadedServer(uvicorn.Server): @@ -898,14 +898,14 @@ async def default_verify(self, synapse: bittensor.Synapse): # If we don't have a nonce stored, ensure that the nonce falls within # a reasonable delta. try: - ntp_client = ntplib.NTPClient() - response = ntp_client.request('pool.ntp.org') + ntp_client = BittensorNTPClient() + response = ntp_client.request("pool.ntp.org") current_time = int(response.tx_time * 1e9) # Convert to nanoseconds except Exception as e: print(f"Error fetching NTP time: {e}") # Fallback to local time if NTP fails current_time = time.time_ns() - + # Updated nonce using NTP implementated at v7.2 if synapse.dendrite.version >= 720: # If we don't have a nonce stored, ensure that the nonce falls within @@ -922,10 +922,10 @@ async def default_verify(self, synapse: bittensor.Synapse): raise Exception("Nonce is too old") else: if ( - endpoint_key in self.nonces.keys() - and self.nonces[endpoint_key] is not None - and synapse.dendrite.nonce <= self.nonces[endpoint_key] - ): + endpoint_key in self.nonces.keys() + and self.nonces[endpoint_key] is not None + and synapse.dendrite.nonce <= self.nonces[endpoint_key] + ): raise Exception("Nonce is too small") if not keypair.verify(message, synapse.dendrite.signature): @@ -1190,8 +1190,8 @@ async def preprocess(self, request: Request) -> bittensor.Synapse: ) synapse.name = request_name try: - ntp_client = ntplib.NTPClient() - response = ntp_client.request('pool.ntp.org') + ntp_client = BittensorNTPClient() + response = ntp_client.request("pool.ntp.org") current_time = int(response.tx_time * 1e9) # Convert to nanoseconds except Exception as e: print(f"Error fetching NTP time: {e}") diff --git a/bittensor/dendrite.py b/bittensor/dendrite.py index 0a8a3401e3..5d5a39c8bd 100644 --- a/bittensor/dendrite.py +++ b/bittensor/dendrite.py @@ -23,9 +23,10 @@ import uuid import time import aiohttp + import bittensor -import ntplib from typing import Optional, List, Union, AsyncGenerator, Any +from bittensor.utils.networking import BittensorNTPClient from bittensor.utils.registration import torch, use_torch @@ -632,7 +633,7 @@ async def call_stream( yield synapse.deserialize() else: yield synapse - + def preprocess_synapse_for_request( self, target_axon_info: bittensor.AxonInfo, @@ -655,11 +656,13 @@ def preprocess_synapse_for_request( # Set the timeout for the synapse synapse.timeout = timeout try: - ntp_client = ntplib.NTPClient() - response = ntp_client.request('pool.ntp.org') + ntp_client = BittensorNTPClient() + response = ntp_client.request("pool.ntp.org") current_time = int(response.tx_time * 1e9) # Convert to nanoseconds except Exception as e: - bittensor.logging.debug(f"Error fetching NTP time: {e}, using system UNIX time") + bittensor.logging.debug( + f"Error fetching NTP time: {e}, using system UNIX time" + ) # Fallback to local time if NTP fails current_time = time.time_ns() # Build the Dendrite headers using the local system's details diff --git a/bittensor/utils/networking.py b/bittensor/utils/networking.py index 0437aff80f..d843f822ed 100644 --- 
a/bittensor/utils/networking.py +++ b/bittensor/utils/networking.py @@ -19,10 +19,14 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. +# Standard Lib import os import urllib import json import netaddr + +# 3rd party +import ntplib import requests @@ -190,3 +194,14 @@ def get_formatted_ws_endpoint_url(endpoint_url: str) -> str: endpoint_url = "ws://{}".format(endpoint_url) return endpoint_url + + +class BittensorNTPClient: + """NTP singleton client""" + + _instance = None + + def __new__(cls): + if cls._instance is None: + cls._instance = ntplib.NTPClient() + return cls._instance diff --git a/tests/unit_tests/utils/test_networking.py b/tests/unit_tests/utils/test_networking.py index 6bc89d3f27..eaf174fa3e 100644 --- a/tests/unit_tests/utils/test_networking.py +++ b/tests/unit_tests/utils/test_networking.py @@ -173,3 +173,10 @@ def urlopen(self): def test_format(url: str, expected: str): """Test formatting WebSocket endpoint URL.""" assert utils.networking.get_formatted_ws_endpoint_url(url) == expected + + +def test_bt_ntp_client(): + client_1 = utils.networking.BittensorNTPClient() + client_2 = utils.networking.BittensorNTPClient() + + assert client_1 == client_2 From 0676d8e9d965f6c32dc4e7d9f147dc1dedb0cbc5 Mon Sep 17 00:00:00 2001 From: Maciej Urbanski Date: Tue, 4 Jun 2024 22:13:16 +0200 Subject: [PATCH 071/116] replace black with currently used ruff in check_pre_submit.sh --- scripts/check_pre_submit.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/check_pre_submit.sh b/scripts/check_pre_submit.sh index 7ea7c37f41..4dbe7747f6 100755 --- a/scripts/check_pre_submit.sh +++ b/scripts/check_pre_submit.sh @@ -1,8 +1,8 @@ #!/bin/bash -# black checks formating -echo ">>> Run the pre-submit format check with \`black .\`." -python3 -m black --exclude '(env|venv|.eggs|.git)' . +# ruff checks formating +echo ">>> Run the pre-submit format check with \`ruff format .\`." +ruff format . echo ">>> Run the pre-submit format check with \`mypy\`." 
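For reference, the same pinned ruff release (ruff==0.4.7 in requirements/dev.txt) also supports non-mutating invocations, which a CI job would typically prefer over rewriting files in place. This is a usage sketch only and assumes ruff's standard CLI flags rather than anything introduced by this patch:

    # Assumed ruff 0.4.7 behaviour; verify against the pinned version.
    ruff format --check .   # exit non-zero if any file would be reformatted
    ruff check .            # run the linter without applying fixes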
From 9ebdc0b6124aebacf9ea604ca2e71d82c41c1935 Mon Sep 17 00:00:00 2001 From: Maciej Urbanski Date: Tue, 4 Jun 2024 22:14:31 +0200 Subject: [PATCH 072/116] delete `black` from dep apple_m1_environment.yml since it is no longer a dependency --- scripts/environments/apple_m1_environment.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/environments/apple_m1_environment.yml b/scripts/environments/apple_m1_environment.yml index c18da7236a..a3712e267c 100644 --- a/scripts/environments/apple_m1_environment.yml +++ b/scripts/environments/apple_m1_environment.yml @@ -13,7 +13,6 @@ dependencies: - backcall=0.2.0=pyh9f0ad1d_0 - backports=1.0=pyhd8ed1ab_3 - backports.functools_lru_cache=1.6.4=pyhd8ed1ab_0 - - black=23.7.0=py310hbe9552e_1 - beautifulsoup4=4.12.2=pyha770c72_0 - bleach=6.0.0=pyhd8ed1ab_0 - brotli=1.0.9=h1a8c8d9_8 From 8d7b2017b8e52cadb9d6229d58501bc78c3ef5ab Mon Sep 17 00:00:00 2001 From: Roman Date: Tue, 4 Jun 2024 14:00:29 -0700 Subject: [PATCH 073/116] Wrapper for presenting the error in a human-readable view (https://github.com/opentensor/bittensor/issues/1859) --- bittensor/extrinsics/commit_weights.py | 3 +- bittensor/extrinsics/delegation.py | 14 ++--- bittensor/extrinsics/network.py | 56 +++++++++++-------- bittensor/extrinsics/prometheus.py | 11 ++-- bittensor/extrinsics/registration.py | 30 +++++----- bittensor/extrinsics/root.py | 8 +-- bittensor/extrinsics/senate.py | 19 +++---- bittensor/extrinsics/serving.py | 33 ++++++----- bittensor/extrinsics/set_weights.py | 12 ++-- bittensor/extrinsics/staking.py | 4 +- bittensor/extrinsics/transfer.py | 6 +- bittensor/extrinsics/unstaking.py | 12 ++-- bittensor/subtensor.py | 35 ++++++------ bittensor/utils/__init__.py | 34 ++++++++--- tests/unit_tests/extrinsics/test_init.py | 49 ++++++++++++++++ .../extrinsics/test_registration.py | 2 +- tests/unit_tests/extrinsics/test_serving.py | 2 +- 17 files changed, 200 insertions(+), 130 deletions(-) create mode 100644 tests/unit_tests/extrinsics/test_init.py diff --git a/bittensor/extrinsics/commit_weights.py b/bittensor/extrinsics/commit_weights.py index 2a526f5e96..fd617f3e3a 100644 --- a/bittensor/extrinsics/commit_weights.py +++ b/bittensor/extrinsics/commit_weights.py @@ -23,6 +23,7 @@ from rich.prompt import Confirm import bittensor +from bittensor.utils import format_error_message def commit_weights_extrinsic( @@ -67,7 +68,7 @@ def commit_weights_extrinsic( return True, "Successfully committed weights." else: bittensor.logging.error(f"Failed to commit weights: {error_message}") - return False, error_message + return False, format_error_message(error_message) def reveal_weights_extrinsic( diff --git a/bittensor/extrinsics/delegation.py b/bittensor/extrinsics/delegation.py index 9583b80a76..54bdb5273c 100644 --- a/bittensor/extrinsics/delegation.py +++ b/bittensor/extrinsics/delegation.py @@ -69,7 +69,7 @@ def nominate_extrinsic( wait_for_finalization=wait_for_finalization, ) - if success == True: + if success is True: bittensor.__console__.print( ":white_heavy_check_mark: [green]Finalized[/green]" ) @@ -138,7 +138,7 @@ def delegate_extrinsic( ) # Convert to bittensor.Balance - if amount == None: + if amount is None: # Stake it all. staking_balance = bittensor.Balance.from_tao(my_prev_coldkey_balance.tao) elif not isinstance(amount, bittensor.Balance): @@ -184,7 +184,7 @@ def delegate_extrinsic( wait_for_finalization=wait_for_finalization, ) - if staking_response == True: # If we successfully staked. + if staking_response is True: # If we successfully staked. 
# We only wait here if we expect finalization. if not wait_for_finalization and not wait_for_inclusion: return True @@ -273,7 +273,7 @@ def undelegate_extrinsic( ) # Convert to bittensor.Balance - if amount == None: + if amount is None: # Stake it all. unstaking_balance = bittensor.Balance.from_tao(my_prev_delegated_stake.tao) @@ -315,7 +315,7 @@ def undelegate_extrinsic( wait_for_finalization=wait_for_finalization, ) - if staking_response == True: # If we successfully staked. + if staking_response is True: # If we successfully staked. # We only wait here if we expect finalization. if not wait_for_finalization and not wait_for_inclusion: return True @@ -403,7 +403,7 @@ def decrease_take_extrinsic( wait_for_finalization=wait_for_finalization, ) - if success == True: + if success is True: bittensor.__console__.print( ":white_heavy_check_mark: [green]Finalized[/green]" ) @@ -463,7 +463,7 @@ def increase_take_extrinsic( wait_for_finalization=wait_for_finalization, ) - if success == True: + if success is True: bittensor.__console__.print( ":white_heavy_check_mark: [green]Finalized[/green]" ) diff --git a/bittensor/extrinsics/network.py b/bittensor/extrinsics/network.py index 3e0c3d8661..c03e5cf77b 100644 --- a/bittensor/extrinsics/network.py +++ b/bittensor/extrinsics/network.py @@ -15,11 +15,39 @@ # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. + import time -import bittensor +import substrateinterface from rich.prompt import Confirm +import bittensor +from bittensor.utils import format_error_message +from ..commands.network import HYPERPARAMS + + +def _find_event_attributes_in_extrinsic_receipt( + response: "substrateinterface.base.ExtrinsicReceipt", event_name: str +) -> list: + """ + Searches for the attributes of a specified event within an extrinsic receipt. + + Args: + response (substrateinterface.base.ExtrinsicReceipt): The receipt of the extrinsic to be searched. + event_name (str): The name of the event to search for. + + Returns: + list: A list of attributes for the specified event. Returns [-1] if the event is not found. 
+ """ + for event in response.triggered_events: + # Access the event details + event_details = event.value["event"] + # Check if the event_id is 'NetworkAdded' + if event_details["event_id"] == event_name: + # Once found, you can access the attributes of the event_name + return event_details["attributes"] + return [-1] + def register_subnetwork_extrinsic( subtensor: "bittensor.subtensor", @@ -86,15 +114,13 @@ def register_subnetwork_extrinsic( response.process_events() if not response.is_success: bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format( - response.error_message - ) + f":cross_mark: [red]Failed[/red]: {format_error_message(response.error_message)}" ) time.sleep(0.5) # Successful registration, final check for membership else: - attributes = find_event_attributes_in_extrinsic_receipt( + attributes = _find_event_attributes_in_extrinsic_receipt( response, "NetworkAdded" ) bittensor.__console__.print( @@ -103,20 +129,6 @@ def register_subnetwork_extrinsic( return True -def find_event_attributes_in_extrinsic_receipt(response, event_name) -> list: - for event in response.triggered_events: - # Access the event details - event_details = event.value["event"] - # Check if the event_id is 'NetworkAdded' - if event_details["event_id"] == event_name: - # Once found, you can access the attributes of the event_name - return event_details["attributes"] - return [-1] - - -from ..commands.network import HYPERPARAMS - - def set_hyperparameter_extrinsic( subtensor: "bittensor.subtensor", wallet: "bittensor.wallet", @@ -158,7 +170,7 @@ def set_hyperparameter_extrinsic( wallet.coldkey # unlock coldkey extrinsic = HYPERPARAMS.get(parameter) - if extrinsic == None: + if extrinsic is None: bittensor.__console__.print( ":cross_mark: [red]Invalid hyperparameter specified.[/red]" ) @@ -198,9 +210,7 @@ def set_hyperparameter_extrinsic( response.process_events() if not response.is_success: bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format( - response.error_message - ) + f":cross_mark: [red]Failed[/red]: {format_error_message(response.error_message)}" ) time.sleep(0.5) diff --git a/bittensor/extrinsics/prometheus.py b/bittensor/extrinsics/prometheus.py index 350817e11f..97f7c17714 100644 --- a/bittensor/extrinsics/prometheus.py +++ b/bittensor/extrinsics/prometheus.py @@ -15,6 +15,7 @@ # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. 
+ import bittensor import json @@ -54,7 +55,7 @@ def prometheus_extrinsic( """ # ---- Get external ip ---- - if ip == None: + if ip is None: try: external_ip = net.get_external_ip() bittensor.__console__.print( @@ -125,7 +126,7 @@ def prometheus_extrinsic( ) if wait_for_inclusion or wait_for_finalization: - if success == True: + if success is True: bittensor.__console__.print( ":white_heavy_check_mark: [green]Served prometheus[/green]\n [bold white]{}[/bold white]".format( json.dumps(call_params, indent=4, sort_keys=True) @@ -133,11 +134,7 @@ def prometheus_extrinsic( ) return True else: - bittensor.__console__.print( - ":cross_mark: [green]Failed to serve prometheus[/green] error: {}".format( - err - ) - ) + bittensor.__console__.print(f":cross_mark: [red]Failed[/red]: {err}") return False else: return True diff --git a/bittensor/extrinsics/registration.py b/bittensor/extrinsics/registration.py index 879214ad92..c7534c131a 100644 --- a/bittensor/extrinsics/registration.py +++ b/bittensor/extrinsics/registration.py @@ -16,10 +16,13 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. -import bittensor import time -from rich.prompt import Confirm from typing import List, Union, Optional, Tuple + +from rich.prompt import Confirm + +import bittensor +from bittensor.utils import format_error_message from bittensor.utils.registration import ( POWSolution, create_pow, @@ -171,16 +174,17 @@ def register_extrinsic( ) success, err_msg = result - if success != True or success == False: - if "key is already registered" in err_msg: - # Error meant that the key is already registered. + if not success: + # Look error here + # https://github.com/opentensor/subtensor/blob/development/pallets/subtensor/src/errors.rs + if "HotKeyAlreadyRegisteredInSubNet" in err_msg: bittensor.__console__.print( f":white_heavy_check_mark: [green]Already Registered on [bold]subnet:{netuid}[/bold][/green]" ) return True bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format(err_msg) + f":cross_mark: [red]Failed[/red]: {err_msg}" ) time.sleep(0.5) @@ -290,10 +294,8 @@ def burned_register_extrinsic( wait_for_finalization=wait_for_finalization, ) - if success != True or success == False: - bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format(err_msg) - ) + if not success: + bittensor.__console__.print(f":cross_mark: [red]Failed[/red]: {err_msg}") time.sleep(0.5) return False # Successful registration, final check for neuron and pubkey @@ -454,7 +456,7 @@ def run_faucet_extrinsic( response.process_events() if not response.is_success: bittensor.__console__.print( - f":cross_mark: [red]Failed[/red]: Error: {response.error_message}" + f":cross_mark: [red]Failed[/red]: {format_error_message(response.error_message)}" ) if attempts == max_allowed_attempts: raise MaxAttemptsException @@ -506,10 +508,8 @@ def swap_hotkey_extrinsic( wait_for_finalization=wait_for_finalization, ) - if success != True or success == False: - bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format(err_msg) - ) + if not success: + bittensor.__console__.print(f":cross_mark: [red]Failed[/red]: {err_msg}") time.sleep(0.5) return False diff --git a/bittensor/extrinsics/root.py b/bittensor/extrinsics/root.py index 2cb11bbd69..2e1e4d3dab 100644 --- a/bittensor/extrinsics/root.py +++ b/bittensor/extrinsics/root.py @@ -77,10 +77,8 @@ def root_register_extrinsic( 
wait_for_finalization=wait_for_finalization, ) - if success != True or success == False: - bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format(err_msg) - ) + if not success: + bittensor.__console__.print(f":cross_mark: [red]Failed[/red]: {err_msg}") time.sleep(0.5) # Successful registration, final check for neuron and pubkey @@ -205,7 +203,7 @@ def set_root_weights_extrinsic( return True else: bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format(error_message) + f":cross_mark: [red]Failed[/red]: {error_message}" ) bittensor.logging.warning( prefix="Set weights", diff --git a/bittensor/extrinsics/senate.py b/bittensor/extrinsics/senate.py index 233a78d614..043233996c 100644 --- a/bittensor/extrinsics/senate.py +++ b/bittensor/extrinsics/senate.py @@ -16,12 +16,13 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. -# Imports -import bittensor - import time + from rich.prompt import Confirm +import bittensor +from bittensor.utils import format_error_message + def register_senate_extrinsic( subtensor: "bittensor.subtensor", @@ -78,9 +79,7 @@ def register_senate_extrinsic( response.process_events() if not response.is_success: bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format( - response.error_message - ) + f":cross_mark: [red]Failed[/red]:{format_error_message(response.error_message)}" ) time.sleep(0.5) @@ -155,9 +154,7 @@ def leave_senate_extrinsic( response.process_events() if not response.is_success: bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format( - response.error_message - ) + f":cross_mark: [red]Failed[/red]: {format_error_message(response.error_message)}" ) time.sleep(0.5) @@ -240,9 +237,7 @@ def vote_senate_extrinsic( response.process_events() if not response.is_success: bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format( - response.error_message - ) + f":cross_mark: [red]Failed[/red]: {format_error_message(response.error_message)}" ) time.sleep(0.5) diff --git a/bittensor/extrinsics/serving.py b/bittensor/extrinsics/serving.py index 1aefa091ad..bba5367de1 100644 --- a/bittensor/extrinsics/serving.py +++ b/bittensor/extrinsics/serving.py @@ -15,10 +15,16 @@ # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. 
+ import json +from typing import Optional + +from retry import retry +from rich.prompt import Confirm + import bittensor import bittensor.utils.networking as net -from rich.prompt import Confirm +from bittensor.utils import format_error_message from ..errors import MetadataError @@ -123,15 +129,13 @@ def serve_extrinsic( ) if wait_for_inclusion or wait_for_finalization: - if success == True: + if success is True: bittensor.logging.debug( f"Axon served with: AxonInfo({wallet.hotkey.ss58_address},{ip}:{port}) on {subtensor.network}:{netuid} " ) return True else: - bittensor.logging.debug( - f"Axon failed to served with error: {error_message} " - ) + bittensor.logging.error(f"Failed: {error_message}") return False else: return True @@ -167,7 +171,7 @@ def serve_axon_extrinsic( external_port = axon.external_port # ---- Get external ip ---- - if axon.external_ip == None: + if axon.external_ip is None: try: external_ip = net.get_external_ip() bittensor.__console__.print( @@ -204,7 +208,7 @@ def publish_metadata( subtensor: "bittensor.subtensor", wallet: "bittensor.wallet", netuid: int, - type: str, + data_type: str, data: bytes, wait_for_inclusion: bool = False, wait_for_finalization: bool = True, @@ -219,7 +223,7 @@ def publish_metadata( The wallet object used for authentication in the transaction. netuid (int): Network UID on which the metadata is to be published. - type (str): + data_type (str): The data type of the information being submitted. It should be one of the following: ``'Sha256'``, ``'Blake256'``, ``'Keccak256'``, or ``'Raw0-128'``. This specifies the format or hashing algorithm used for the data. data (str): The actual metadata content to be published. This should be formatted or hashed according to the ``type`` specified. (Note: max ``str`` length is 128 bytes) @@ -243,7 +247,10 @@ def publish_metadata( call = substrate.compose_call( call_module="Commitments", call_function="set_commitment", - call_params={"netuid": netuid, "info": {"fields": [[{f"{type}": data}]]}}, + call_params={ + "netuid": netuid, + "info": {"fields": [[{f"{data_type}": data}]]}, + }, ) extrinsic = substrate.create_signed_extrinsic(call=call, keypair=wallet.hotkey) @@ -259,11 +266,7 @@ def publish_metadata( if response.is_success: return True else: - raise MetadataError(response.error_message) - - -from retry import retry -from typing import Optional + raise MetadataError(format_error_message(response.error_message)) def get_metadata(self, netuid: int, hotkey: str, block: Optional[int] = None) -> str: @@ -274,7 +277,7 @@ def make_substrate_call_with_retry(): module="Commitments", storage_function="CommitmentOf", params=[netuid, hotkey], - block_hash=None if block == None else substrate.get_block_hash(block), + block_hash=None if block is None else substrate.get_block_hash(block), ) commit_data = make_substrate_call_with_retry() diff --git a/bittensor/extrinsics/set_weights.py b/bittensor/extrinsics/set_weights.py index 5db0a1a7a9..dc3052d0a0 100644 --- a/bittensor/extrinsics/set_weights.py +++ b/bittensor/extrinsics/set_weights.py @@ -44,7 +44,7 @@ def set_weights_extrinsic( r"""Sets the given weights and values on chain for wallet hotkey account. Args: - subtensor_endpoint (bittensor.subtensor): + subtensor (bittensor.subtensor): Subtensor endpoint to use. wallet (bittensor.wallet): Bittensor wallet object. @@ -109,7 +109,7 @@ def set_weights_extrinsic( if not wait_for_finalization and not wait_for_inclusion: return True, "Not waiting for finalization or inclusion." 
- if success == True: + if success is True: bittensor.__console__.print( ":white_heavy_check_mark: [green]Finalized[/green]" ) @@ -119,12 +119,10 @@ def set_weights_extrinsic( ) return True, "Successfully set weights and Finalized." else: - bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format(error_message) - ) - bittensor.logging.warning( + bittensor.logging.error( + msg=error_message, prefix="Set weights", - suffix="Failed: " + str(error_message), + suffix="Failed: ", ) return False, error_message diff --git a/bittensor/extrinsics/staking.py b/bittensor/extrinsics/staking.py index f3249a8b1c..44a509eae8 100644 --- a/bittensor/extrinsics/staking.py +++ b/bittensor/extrinsics/staking.py @@ -92,7 +92,7 @@ def add_stake_extrinsic( ) # Convert to bittensor.Balance - if amount == None: + if amount is None: # Stake it all. staking_balance = bittensor.Balance.from_tao(old_balance.tao) elif not isinstance(amount, bittensor.Balance): @@ -148,7 +148,7 @@ def add_stake_extrinsic( wait_for_finalization=wait_for_finalization, ) - if staking_response == True: # If we successfully staked. + if staking_response is True: # If we successfully staked. # We only wait here if we expect finalization. if not wait_for_finalization and not wait_for_inclusion: return True diff --git a/bittensor/extrinsics/transfer.py b/bittensor/extrinsics/transfer.py index ae09803199..91ef3237eb 100644 --- a/bittensor/extrinsics/transfer.py +++ b/bittensor/extrinsics/transfer.py @@ -130,7 +130,7 @@ def transfer_extrinsic( explorer_urls = bittensor.utils.get_explorer_url_for_network( subtensor.network, block_hash, bittensor.__network_explorer_map__ ) - if explorer_urls != {}: + if explorer_urls != {} and explorer_urls: bittensor.__console__.print( "[green]Opentensor Explorer Link: {}[/green]".format( explorer_urls.get("opentensor") @@ -142,9 +142,7 @@ def transfer_extrinsic( ) ) else: - bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: error:{}".format(err_msg) - ) + bittensor.__console__.print(f":cross_mark: [red]Failed[/red]: {err_msg}") if success: with bittensor.__console__.status(":satellite: Checking Balance..."): diff --git a/bittensor/extrinsics/unstaking.py b/bittensor/extrinsics/unstaking.py index 6046124f40..cf47b07928 100644 --- a/bittensor/extrinsics/unstaking.py +++ b/bittensor/extrinsics/unstaking.py @@ -142,7 +142,7 @@ def unstake_extrinsic( ) # Convert to bittensor.Balance - if amount == None: + if amount is None: # Unstake it all. unstaking_balance = old_stake elif not isinstance(amount, bittensor.Balance): @@ -189,7 +189,7 @@ def unstake_extrinsic( wait_for_finalization=wait_for_finalization, ) - if staking_response == True: # If we successfully unstaked. + if staking_response is True: # If we successfully unstaked. # We only wait here if we expect finalization. if not wait_for_finalization and not wait_for_inclusion: return True @@ -221,7 +221,7 @@ def unstake_extrinsic( return True else: bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: Error unknown." + ":cross_mark: [red]Failed[/red]: Unknown Error." ) return False @@ -318,7 +318,7 @@ def unstake_multiple_extrinsic( zip(hotkey_ss58s, amounts, old_stakes) ): # Covert to bittensor.Balance - if amount == None: + if amount is None: # Unstake it all. unstaking_balance = old_stake elif not isinstance(amount, bittensor.Balance): @@ -365,7 +365,7 @@ def unstake_multiple_extrinsic( wait_for_finalization=wait_for_finalization, ) - if staking_response == True: # If we successfully unstaked. 
+ if staking_response is True: # If we successfully unstaked. # We only wait here if we expect finalization. if idx < len(hotkey_ss58s) - 1: @@ -405,7 +405,7 @@ def unstake_multiple_extrinsic( successful_unstakes += 1 else: bittensor.__console__.print( - ":cross_mark: [red]Failed[/red]: Error unknown." + ":cross_mark: [red]Failed[/red]: Unknown Error." ) continue diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 1b422ad276..d309a79504 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -41,7 +41,7 @@ import bittensor from bittensor.btlogging import logging as _logger -from bittensor.utils import torch, weight_utils +from bittensor.utils import torch, weight_utils, format_error_message from .chain_data import ( NeuronInfo, DelegateInfo, @@ -774,6 +774,7 @@ def send_extrinsic( ############### # Set Weights # ############### + # TODO: still needed? Can't find any usage of this method. def set_weights( self, wallet: "bittensor.wallet", @@ -900,7 +901,7 @@ def make_substrate_call_with_retry(): if response.is_success: return True, "Successfully set weights." else: - return False, response.error_message + return False, format_error_message(response.error_message) return make_substrate_call_with_retry() @@ -1170,7 +1171,7 @@ def make_substrate_call_with_retry(): if response.is_success: return True, None else: - return False, response.error_message + return False, format_error_message(response.error_message) return make_substrate_call_with_retry() @@ -1425,7 +1426,7 @@ def make_substrate_call_with_retry(): # process if registration successful, try again if pow is still valid response.process_events() if not response.is_success: - return False, response.error_message + return False, format_error_message(response.error_message) # Successful registration else: return True, None @@ -1482,7 +1483,7 @@ def make_substrate_call_with_retry(): # process if registration successful, try again if pow is still valid response.process_events() if not response.is_success: - return False, response.error_message + return False, format_error_message(response.error_message) # Successful registration else: return True, None @@ -1538,7 +1539,7 @@ def make_substrate_call_with_retry(): # process if registration successful, try again if pow is still valid response.process_events() if not response.is_success: - return False, response.error_message + return False, format_error_message(response.error_message) # Successful registration else: return True, None @@ -1691,7 +1692,7 @@ def make_substrate_call_with_retry(): block_hash = response.block_hash return True, block_hash, None else: - return False, None, response.error_message + return False, None, format_error_message(response.error_message) return make_substrate_call_with_retry() @@ -1922,7 +1923,7 @@ def make_substrate_call_with_retry(): if response.is_success: return True, None else: - return False, response.error_message + return False, format_error_message(response.error_message) else: return True, None @@ -1984,7 +1985,7 @@ def make_substrate_call_with_retry(): if response.is_success: return True, None else: - return False, response.error_message + return False, format_error_message(response.error_message) else: return True, None @@ -2165,7 +2166,7 @@ def make_substrate_call_with_retry(): if response.is_success: return True else: - raise StakeError(response.error_message) + raise StakeError(format_error_message(response.error_message)) return make_substrate_call_with_retry() @@ -2292,7 +2293,7 @@ def make_substrate_call_with_retry(): 
                if response.is_success:
                    return True
                else:
-                    raise StakeError(response.error_message)
+                    raise StakeError(format_error_message(response.error_message))

        return make_substrate_call_with_retry()

@@ -2608,7 +2609,7 @@ def make_substrate_call_with_retry():
                # process if registration successful, try again if pow is still valid
                response.process_events()
                if not response.is_success:
-                    return False, response.error_message
+                    return False, format_error_message(response.error_message)
                # Successful registration
                else:
                    return True, None
@@ -5093,7 +5094,7 @@ def make_substrate_call_with_retry():
                if response.is_success:
                    return True
                else:
-                    raise StakeError(response.error_message)
+                    raise StakeError(format_error_message(response.error_message))

        return make_substrate_call_with_retry()

@@ -5147,7 +5148,7 @@ def make_substrate_call_with_retry():
                if response.is_success:
                    return True
                else:
-                    raise StakeError(response.error_message)
+                    raise StakeError(format_error_message(response.error_message))

        return make_substrate_call_with_retry()

@@ -5194,7 +5195,7 @@ def make_substrate_call_with_retry():
                if response.is_success:
                    return True
                else:
-                    raise NominationError(response.error_message)
+                    raise NominationError(format_error_message(response.error_message))

        return make_substrate_call_with_retry()

@@ -5249,7 +5250,7 @@ def make_substrate_call_with_retry():
                if response.is_success:
                    return True
                else:
-                    raise TakeError(response.error_message)
+                    raise TakeError(format_error_message(response.error_message))

        return make_substrate_call_with_retry()

@@ -5304,7 +5305,7 @@ def make_substrate_call_with_retry():
                if response.is_success:
                    return True
                else:
-                    raise TakeError(response.error_message)
+                    raise TakeError(format_error_message(response.error_message))

        return make_substrate_call_with_retry()

diff --git a/bittensor/utils/__init__.py b/bittensor/utils/__init__.py
index 175094da87..700a656131 100644
--- a/bittensor/utils/__init__.py
+++ b/bittensor/utils/__init__.py
@@ -1,7 +1,6 @@
 # The MIT License (MIT)
 # Copyright © 2022 Opentensor Foundation
 # Copyright © 2023 Opentensor Technologies Inc
-import os

 # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
 # documentation files (the “Software”), to deal in the Software without restriction, including without limitation
@@ -17,17 +16,16 @@
 # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 # DEALINGS IN THE SOFTWARE.

+import hashlib
 from typing import Callable, List, Dict, Literal, Tuple

-import bittensor
-import hashlib
-import requests
-import scalecodec
 import numpy as np
+import scalecodec

-from .wallet_utils import *  # noqa F401
-from .version import version_checking, check_version, VersionCheckError
+import bittensor
 from .registration import torch, use_torch
+from .version import version_checking, check_version, VersionCheckError
+from .wallet_utils import *  # noqa F401

 RAOPERTAO = 1e9
 U16_MAX = 65535
@@ -260,3 +258,25 @@ def hash(content, encoding="utf-8"):

     # Produce the hash
     return sha3.hexdigest()
+
+
+def format_error_message(error_message: dict) -> str:
+    """
+    Formats an error message from the Subtensor error information for use in extrinsics.
+
+    Args:
+        error_message (dict): A dictionary containing the error information from Subtensor.
+
+    Returns:
+        str: A formatted error message string.
+    """
+    err_type = "UnknownType"
+    err_name = "UnknownError"
+    err_description = "Unknown Description"
+
+    if isinstance(error_message, dict):
+        err_type = error_message.get("type", err_type)
+        err_name = error_message.get("name", err_name)
+        err_docs = error_message.get("docs", [])
+        err_description = err_docs[0] if len(err_docs) > 0 else err_description
+    return f"Subtensor returned `{err_name} ({err_type})` error. This means: `{err_description}`"
diff --git a/tests/unit_tests/extrinsics/test_init.py b/tests/unit_tests/extrinsics/test_init.py
new file mode 100644
index 0000000000..8e3caaf900
--- /dev/null
+++ b/tests/unit_tests/extrinsics/test_init.py
@@ -0,0 +1,49 @@
+"""Tests for bittensor/extrinsics/__init__ module."""
+
+from bittensor.utils import format_error_message
+
+
+def test_format_error_message_with_right_error_message():
+    # Prep
+    fake_error_message = {
+        "type": "SomeType",
+        "name": "SomeErrorName",
+        "docs": ["Some error description."],
+    }
+
+    # Call
+    result = format_error_message(fake_error_message)
+
+    # Assertions
+
+    assert "SomeType" in result
+    assert "SomeErrorName" in result
+    assert "Some error description." in result
+
+
+def test_format_error_message_with_empty_error_message():
+    # Prep
+    fake_error_message = {}
+
+    # Call
+    result = format_error_message(fake_error_message)
+
+    # Assertions
+
+    assert "UnknownType" in result
+    assert "UnknownError" in result
+    assert "Unknown Description" in result
+
+
+def test_format_error_message_with_wrong_type_error_message():
+    # Prep
+    fake_error_message = None
+
+    # Call
+    result = format_error_message(fake_error_message)
+
+    # Assertions
+
+    assert "UnknownType" in result
+    assert "UnknownError" in result
+    assert "Unknown Description" in result
diff --git a/tests/unit_tests/extrinsics/test_registration.py b/tests/unit_tests/extrinsics/test_registration.py
index 5a4d32dff6..49805f0cf4 100644
--- a/tests/unit_tests/extrinsics/test_registration.py
+++ b/tests/unit_tests/extrinsics/test_registration.py
@@ -369,7 +369,7 @@ def test_register_extrinsic_with_pow(
     ), patch.object(
         mock_subtensor,
         "_do_pow_register",
-        return_value=(registration_success, "key is already registered"),
+        return_value=(registration_success, "HotKeyAlreadyRegisteredInSubNet"),
     ), patch("torch.cuda.is_available", return_value=cuda):
         # Act
         if pow_success:
diff --git a/tests/unit_tests/extrinsics/test_serving.py b/tests/unit_tests/extrinsics/test_serving.py
index bf975e195a..7aa3ebf5b4 100644
--- a/tests/unit_tests/extrinsics/test_serving.py
+++ b/tests/unit_tests/extrinsics/test_serving.py
@@ -365,7 +365,7 @@ def test_publish_metadata(
             subtensor=mock_subtensor,
             wallet=mock_wallet,
             netuid=net_uid,
-            type=type_u,
+            data_type=type_u,
             data=data,
             wait_for_inclusion=wait_for_inclusion,
             wait_for_finalization=wait_for_finalization,

From 2b2ed667d84579f0b02bfe649a8339944e060da8 Mon Sep 17 00:00:00 2001
From: Roman
Date: Tue, 4 Jun 2024 17:26:46 -0700
Subject: [PATCH 074/116] Set test timeout in 'config.yaml'

---
 .circleci/config.yml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 7ea959790e..a4d07de74b 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -108,9 +108,9 @@ jobs:
            . .venv/bin/activate
            ./scripts/create_wallet.sh

-      # TODO: Update test durations on different runs
      - run:
          name: Unit Tests
+          no_output_timeout: 20m
          command: |
            . .venv/bin/activate
            export PYTHONUNBUFFERED=1
@@ -122,6 +122,7 @@

      - run:
          name: Integration Tests
+          no_output_timeout: 30m
          command: |
            . 
.venv/bin/activate export PYTHONUNBUFFERED=1 From f6d50e94df3dd4d7c39a764d1f9cdbe2a3a3ac92 Mon Sep 17 00:00:00 2001 From: Cameron Fairchild Date: Tue, 4 Jun 2024 20:41:43 -0400 Subject: [PATCH 075/116] revert d88a743 --- bittensor/chain_data.py | 7 ++----- bittensor/utils/networking.py | 22 ---------------------- tests/unit_tests/utils/test_networking.py | 8 -------- 3 files changed, 2 insertions(+), 35 deletions(-) diff --git a/bittensor/chain_data.py b/bittensor/chain_data.py index 6edc7116cc..4a9f98244c 100644 --- a/bittensor/chain_data.py +++ b/bittensor/chain_data.py @@ -283,13 +283,10 @@ def from_neuron_info(cls, neuron_info: dict) -> "AxonInfo": Returns: instance (AxonInfo): An instance of AxonInfo created from the dictionary. """ - ip, port = net.unpack_encoded_ip_port( - neuron_info["axon_info"]["ip"], neuron_info["axon_info"]["port"] - ) return cls( version=neuron_info["axon_info"]["version"], - ip=ip, - port=port, + ip=net.int_to_ip(int(neuron_info["axon_info"]["ip"])), + port=neuron_info["axon_info"]["port"], ip_type=neuron_info["axon_info"]["ip_type"], hotkey=neuron_info["hotkey"], coldkey=neuron_info["coldkey"], diff --git a/bittensor/utils/networking.py b/bittensor/utils/networking.py index 0437aff80f..675b4e45b4 100644 --- a/bittensor/utils/networking.py +++ b/bittensor/utils/networking.py @@ -43,28 +43,6 @@ def int_to_ip(int_val: int) -> str: return str(netaddr.IPAddress(int_val)) -def unpack_encoded_ip_port(ip_str: str, port: int) -> tuple: - r"""Unpacks an encoded IP and port if they are encoded together. - Args: - ip_str (:type:`str`, `required`): - The encoded IP address string. - port (:type:`int`, `required`): - The port number. - - Returns: - tuple: A tuple containing the IP address string and port number. - - Raises: - netaddr.core.AddrFormatError (Exception): - Raised when the passed IP string is not a valid IP int value. - """ - if ip_str < (1 << 128) + (1 << 16) and port == 0: - port = ip_str & 0xFFFF - ip = ip_str >> 16 - return int_to_ip(ip), port - return int_to_ip(ip_str), port - - def ip_to_int(str_val: str) -> int: r"""Maps an ip-string to a unique integer. 
arg: diff --git a/tests/unit_tests/utils/test_networking.py b/tests/unit_tests/utils/test_networking.py index 6bc89d3f27..2037718578 100644 --- a/tests/unit_tests/utils/test_networking.py +++ b/tests/unit_tests/utils/test_networking.py @@ -25,14 +25,6 @@ def test_int_to_ip_range(): ) -def test_packed_ip_port(): - """Test packing and unpacking IP and port.""" - assert utils.networking.unpack_encoded_ip_port(184046647580618, 0) == ( - "167.99.179.13", - 6090, - ) - - def test_int_to_ip4_max(): """Test converting integer to maximum IPv4 address.""" assert utils.networking.int_to_ip(4294967295) == "255.255.255.255" From da883e1ebfe534af76b55d70024a53d54dbbd19f Mon Sep 17 00:00:00 2001 From: ibraheem-opentensor Date: Wed, 5 Jun 2024 11:14:30 -0700 Subject: [PATCH 076/116] Added constants and retries --- bittensor/axon.py | 32 ++++++++------------------------ bittensor/constants.py | 20 ++++++++++++++++++++ bittensor/dendrite.py | 25 +++++++++++++------------ bittensor/utils/networking.py | 25 +++++++++++++++++++++++++ 4 files changed, 66 insertions(+), 36 deletions(-) create mode 100644 bittensor/constants.py diff --git a/bittensor/axon.py b/bittensor/axon.py index 9280ef6eed..b9241f2d16 100644 --- a/bittensor/axon.py +++ b/bittensor/axon.py @@ -55,6 +55,7 @@ PostProcessException, SynapseException, ) +from bittensor.constants import ALLOWED_DELTA from bittensor.threadpool import PriorityThreadPoolExecutor from bittensor.utils import networking from bittensor.utils.networking import BittensorNTPClient @@ -343,12 +344,12 @@ def __init__( self.port = self.config.axon.port self.external_ip = ( self.config.axon.external_ip - if self.config.axon.external_ip != None + if self.config.axon.external_ip is not None else bittensor.utils.networking.get_external_ip() ) self.external_port = ( self.config.axon.external_port - if self.config.axon.external_port != None + if self.config.axon.external_port is not None else self.config.axon.port ) self.full_address = str(self.config.axon.ip) + ":" + str(self.config.axon.port) @@ -889,23 +890,13 @@ async def default_verify(self, synapse: bittensor.Synapse): # Build the unique endpoint key. endpoint_key = f"{synapse.dendrite.hotkey}:{synapse.dendrite.uuid}" - # Check the nonce from the endpoint key with 4 second delta - allowedDelta = 4000000000 - # Requests must have nonces to be safe from replays if synapse.dendrite.nonce is None: raise Exception("Missing Nonce") # If we don't have a nonce stored, ensure that the nonce falls within # a reasonable delta. - try: - ntp_client = BittensorNTPClient() - response = ntp_client.request("pool.ntp.org") - current_time = int(response.tx_time * 1e9) # Convert to nanoseconds - except Exception as e: - print(f"Error fetching NTP time: {e}") - # Fallback to local time if NTP fails - current_time = time.time_ns() + current_time = BittensorNTPClient.get_current_ntp_time() # Updated nonce using NTP implementated at v7.2 if synapse.dendrite.version >= 720: @@ -913,7 +904,7 @@ async def default_verify(self, synapse: bittensor.Synapse): # a reasonable delta. if ( self.nonces.get(endpoint_key) is None - and synapse.dendrite.nonce <= current_time - allowedDelta + and synapse.dendrite.nonce <= current_time - ALLOWED_DELTA ): raise Exception("Nonce is too old") if ( @@ -1170,7 +1161,7 @@ async def preprocess(self, request: Request) -> bittensor.Synapse: # Extracts the request name from the URL path. try: request_name = request.url.path.split("/")[1] - except: + except Exception: raise InvalidRequestNameError( f"Improperly formatted request. 
Could not parser request {request.url.path}." ) @@ -1185,19 +1176,12 @@ async def preprocess(self, request: Request) -> bittensor.Synapse: try: synapse = request_synapse.from_headers(request.headers) # type: ignore - except Exception as e: + except Exception: raise SynapseParsingError( f"Improperly formatted request. Could not parse headers {request.headers} into synapse of type {request_name}." ) synapse.name = request_name - try: - ntp_client = BittensorNTPClient() - response = ntp_client.request("pool.ntp.org") - current_time = int(response.tx_time * 1e9) # Convert to nanoseconds - except Exception as e: - print(f"Error fetching NTP time: {e}") - # Fallback to local time if NTP fails - current_time = time.time_ns() + current_time = BittensorNTPClient.get_current_ntp_time() # Fills the local axon information into the synapse. synapse.axon.__dict__.update( diff --git a/bittensor/constants.py b/bittensor/constants.py new file mode 100644 index 0000000000..0cf78dac95 --- /dev/null +++ b/bittensor/constants.py @@ -0,0 +1,20 @@ +# The MIT License (MIT) +# Copyright © 2023 OpenTensor Foundation + +# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated +# documentation files (the “Software”), to deal in the Software without restriction, including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, +# and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all copies or substantial portions of +# the Software. + +# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO +# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +# DEALINGS IN THE SOFTWARE. 
+ + +ALLOWED_DELTA = 4000000000 # Delta of 4 seconds for nonce validation +NTP_POOL_RETRIES = 2 diff --git a/bittensor/dendrite.py b/bittensor/dendrite.py index 5d5a39c8bd..d405ae2214 100644 --- a/bittensor/dendrite.py +++ b/bittensor/dendrite.py @@ -313,7 +313,7 @@ def query( try: loop = asyncio.get_event_loop() result = loop.run_until_complete(self.forward(*args, **kwargs)) - except: + except Exception: new_loop = asyncio.new_event_loop() asyncio.set_event_loop(new_loop) result = loop.run_until_complete(self.forward(*args, **kwargs)) @@ -655,16 +655,17 @@ def preprocess_synapse_for_request( """ # Set the timeout for the synapse synapse.timeout = timeout - try: - ntp_client = BittensorNTPClient() - response = ntp_client.request("pool.ntp.org") - current_time = int(response.tx_time * 1e9) # Convert to nanoseconds - except Exception as e: - bittensor.logging.debug( - f"Error fetching NTP time: {e}, using system UNIX time" - ) - # Fallback to local time if NTP fails - current_time = time.time_ns() + current_time = BittensorNTPClient.get_current_ntp_time() + # try: + # ntp_client = BittensorNTPClient() + # response = ntp_client.request("pool.ntp.org") + # current_time = int(response.tx_time * 1e9) # Convert to nanoseconds + # except Exception as e: + # bittensor.logging.debug( + # f"Error fetching NTP time: {e}, using system UNIX time" + # ) + # # Fallback to local time if NTP fails + # current_time = time.time_ns() # Build the Dendrite headers using the local system's details synapse.dendrite = bittensor.TerminalInfo( ip=self.external_ip, @@ -716,7 +717,7 @@ def process_server_response( # Set the attribute in the local synapse from the corresponding # attribute in the server synapse setattr(local_synapse, key, getattr(server_synapse, key)) - except: + except Exception: # Ignore errors during attribute setting pass else: diff --git a/bittensor/utils/networking.py b/bittensor/utils/networking.py index d843f822ed..e40764ec5d 100644 --- a/bittensor/utils/networking.py +++ b/bittensor/utils/networking.py @@ -20,10 +20,13 @@ # DEALINGS IN THE SOFTWARE. 
# Standard Lib +import bittensor import os import urllib +import time import json import netaddr +from bittensor.constants import NTP_POOL_RETRIES # 3rd party import ntplib @@ -205,3 +208,25 @@ def __new__(cls): if cls._instance is None: cls._instance = ntplib.NTPClient() return cls._instance + + @staticmethod + def get_current_ntp_time(retries: int = NTP_POOL_RETRIES) -> int: + ntp_servers = ["0.pool.ntp.org", "1.pool.ntp.org", "2.pool.ntp.org"] + attempts = 0 + while attempts < retries: + server = ntp_servers[attempts % len(ntp_servers)] + try: + ntp_client = BittensorNTPClient() + response = ntp_client.request(server) + current_time = int(response.tx_time * 1e9) # Convert to nanoseconds + return current_time + except Exception as e: + attempts += 1 + bittensor.logging.error( + f"Attempt {attempts} - Error fetching NTP time: {e}" + ) + # Fallback to local time if all retries fail + bittensor.logging.error( + "All retries failed, using system UNIX time" + ) + return time.time_ns() From 460a9824f2cf488fce9fd4ca4c854f24dac1c18c Mon Sep 17 00:00:00 2001 From: ibraheem-opentensor Date: Wed, 5 Jun 2024 11:19:14 -0700 Subject: [PATCH 077/116] Nonces: cleanup and fix logging levels --- bittensor/axon.py | 2 +- bittensor/dendrite.py | 11 ----------- bittensor/utils/networking.py | 6 +++--- 3 files changed, 4 insertions(+), 15 deletions(-) diff --git a/bittensor/axon.py b/bittensor/axon.py index b9241f2d16..ac8b511e4b 100644 --- a/bittensor/axon.py +++ b/bittensor/axon.py @@ -896,12 +896,12 @@ async def default_verify(self, synapse: bittensor.Synapse): # If we don't have a nonce stored, ensure that the nonce falls within # a reasonable delta. - current_time = BittensorNTPClient.get_current_ntp_time() # Updated nonce using NTP implementated at v7.2 if synapse.dendrite.version >= 720: # If we don't have a nonce stored, ensure that the nonce falls within # a reasonable delta. 
+ current_time = BittensorNTPClient.get_current_ntp_time() if ( self.nonces.get(endpoint_key) is None and synapse.dendrite.nonce <= current_time - ALLOWED_DELTA diff --git a/bittensor/dendrite.py b/bittensor/dendrite.py index d405ae2214..65219e5044 100644 --- a/bittensor/dendrite.py +++ b/bittensor/dendrite.py @@ -656,17 +656,6 @@ def preprocess_synapse_for_request( # Set the timeout for the synapse synapse.timeout = timeout current_time = BittensorNTPClient.get_current_ntp_time() - # try: - # ntp_client = BittensorNTPClient() - # response = ntp_client.request("pool.ntp.org") - # current_time = int(response.tx_time * 1e9) # Convert to nanoseconds - # except Exception as e: - # bittensor.logging.debug( - # f"Error fetching NTP time: {e}, using system UNIX time" - # ) - # # Fallback to local time if NTP fails - # current_time = time.time_ns() - # Build the Dendrite headers using the local system's details synapse.dendrite = bittensor.TerminalInfo( ip=self.external_ip, version=bittensor.__version_as_int__, diff --git a/bittensor/utils/networking.py b/bittensor/utils/networking.py index e40764ec5d..40ae75a71c 100644 --- a/bittensor/utils/networking.py +++ b/bittensor/utils/networking.py @@ -222,11 +222,11 @@ def get_current_ntp_time(retries: int = NTP_POOL_RETRIES) -> int: return current_time except Exception as e: attempts += 1 - bittensor.logging.error( + bittensor.logging.info( f"Attempt {attempts} - Error fetching NTP time: {e}" ) # Fallback to local time if all retries fail - bittensor.logging.error( - "All retries failed, using system UNIX time" + bittensor.logging.warning( + "All NTP retries failed, using system UNIX time" ) return time.time_ns() From f633f9d987fee7adba35f7a4a52a47dba9e0a38b Mon Sep 17 00:00:00 2001 From: ibraheem-opentensor Date: Wed, 5 Jun 2024 11:30:34 -0700 Subject: [PATCH 078/116] Added ntplib in requirements --- requirements/prod.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements/prod.txt b/requirements/prod.txt index 2d9ecabab5..34cb8eba20 100644 --- a/requirements/prod.txt +++ b/requirements/prod.txt @@ -11,6 +11,7 @@ fuzzywuzzy>=0.18.0 fastapi~=0.110.1 munch~=2.5.0 netaddr +ntplib==0.4.0 numpy msgpack-numpy-opentensor~=0.5.0 nest_asyncio From 45dc30ce966952be30f85e6905152178eec16b51 Mon Sep 17 00:00:00 2001 From: ibraheem-opentensor Date: Wed, 5 Jun 2024 11:34:25 -0700 Subject: [PATCH 079/116] Fixes ruff checks --- bittensor/constants.py | 2 +- bittensor/utils/networking.py | 4 +--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/bittensor/constants.py b/bittensor/constants.py index 0cf78dac95..8b1dcfc364 100644 --- a/bittensor/constants.py +++ b/bittensor/constants.py @@ -16,5 +16,5 @@ # DEALINGS IN THE SOFTWARE. 
-ALLOWED_DELTA = 4000000000 # Delta of 4 seconds for nonce validation +ALLOWED_DELTA = 4000000000 # Delta of 4 seconds for nonce validation NTP_POOL_RETRIES = 2 diff --git a/bittensor/utils/networking.py b/bittensor/utils/networking.py index 40ae75a71c..ae91d710f1 100644 --- a/bittensor/utils/networking.py +++ b/bittensor/utils/networking.py @@ -226,7 +226,5 @@ def get_current_ntp_time(retries: int = NTP_POOL_RETRIES) -> int: f"Attempt {attempts} - Error fetching NTP time: {e}" ) # Fallback to local time if all retries fail - bittensor.logging.warning( - "All NTP retries failed, using system UNIX time" - ) + bittensor.logging.warning("All NTP retries failed, using system UNIX time") return time.time_ns() From 4d82b513ff2f1d30979190e9c0b28a5449ca8b44 Mon Sep 17 00:00:00 2001 From: ibraheem-opentensor Date: Wed, 5 Jun 2024 14:58:14 -0700 Subject: [PATCH 080/116] Implementing improvements --- bittensor/axon.py | 4 ++-- bittensor/constants.py | 4 +++- bittensor/dendrite.py | 3 ++- bittensor/utils/networking.py | 5 ++--- 4 files changed, 9 insertions(+), 7 deletions(-) diff --git a/bittensor/axon.py b/bittensor/axon.py index ac8b511e4b..a2bb2a83cb 100644 --- a/bittensor/axon.py +++ b/bittensor/axon.py @@ -55,7 +55,7 @@ PostProcessException, SynapseException, ) -from bittensor.constants import ALLOWED_DELTA +from bittensor.constants import ALLOWED_DELTA, V_7_2_0 from bittensor.threadpool import PriorityThreadPoolExecutor from bittensor.utils import networking from bittensor.utils.networking import BittensorNTPClient @@ -898,7 +898,7 @@ async def default_verify(self, synapse: bittensor.Synapse): # a reasonable delta. # Updated nonce using NTP implementated at v7.2 - if synapse.dendrite.version >= 720: + if synapse.dendrite.version >= V_7_2_0: # If we don't have a nonce stored, ensure that the nonce falls within # a reasonable delta. current_time = BittensorNTPClient.get_current_ntp_time() diff --git a/bittensor/constants.py b/bittensor/constants.py index 8b1dcfc364..77b994cba8 100644 --- a/bittensor/constants.py +++ b/bittensor/constants.py @@ -16,5 +16,7 @@ # DEALINGS IN THE SOFTWARE. 
-ALLOWED_DELTA = 4000000000 # Delta of 4 seconds for nonce validation +ALLOWED_DELTA = 5000000000 # Delta of 5 seconds for nonce validation NTP_POOL_RETRIES = 2 +NTP_SERVERS = ["0.pool.ntp.org", "1.pool.ntp.org", "2.pool.ntp.org"] +V_7_2_0 = 720 diff --git a/bittensor/dendrite.py b/bittensor/dendrite.py index 65219e5044..9b2cfe11e4 100644 --- a/bittensor/dendrite.py +++ b/bittensor/dendrite.py @@ -706,7 +706,8 @@ def process_server_response( # Set the attribute in the local synapse from the corresponding # attribute in the server synapse setattr(local_synapse, key, getattr(server_synapse, key)) - except Exception: + except Exception as e: + bittensor.logging.info(f"Ignoring error when setting attribute: {e}") # Ignore errors during attribute setting pass else: diff --git a/bittensor/utils/networking.py b/bittensor/utils/networking.py index 5982341503..4f2d335dc8 100644 --- a/bittensor/utils/networking.py +++ b/bittensor/utils/networking.py @@ -26,7 +26,7 @@ import time import json import netaddr -from bittensor.constants import NTP_POOL_RETRIES +from bittensor.constants import NTP_POOL_RETRIES, NTP_SERVERS # 3rd party import ntplib @@ -189,10 +189,9 @@ def __new__(cls): @staticmethod def get_current_ntp_time(retries: int = NTP_POOL_RETRIES) -> int: - ntp_servers = ["0.pool.ntp.org", "1.pool.ntp.org", "2.pool.ntp.org"] attempts = 0 while attempts < retries: - server = ntp_servers[attempts % len(ntp_servers)] + server = NTP_SERVERS[attempts % len(NTP_SERVERS)] try: ntp_client = BittensorNTPClient() response = ntp_client.request(server) From b412b801be8a0939a9e3d64017aed1b1fe6c5129 Mon Sep 17 00:00:00 2001 From: ibraheem-opentensor Date: Wed, 5 Jun 2024 15:05:44 -0700 Subject: [PATCH 081/116] Ruff: Updated format --- bittensor/dendrite.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bittensor/dendrite.py b/bittensor/dendrite.py index 9b2cfe11e4..f5c925cd88 100644 --- a/bittensor/dendrite.py +++ b/bittensor/dendrite.py @@ -707,7 +707,9 @@ def process_server_response( # attribute in the server synapse setattr(local_synapse, key, getattr(server_synapse, key)) except Exception as e: - bittensor.logging.info(f"Ignoring error when setting attribute: {e}") + bittensor.logging.info( + f"Ignoring error when setting attribute: {e}" + ) # Ignore errors during attribute setting pass else: From 4d65d525d36547dac5948e96bf1ca9ef70aa342f Mon Sep 17 00:00:00 2001 From: opendansor Date: Wed, 5 Jun 2024 16:27:34 -0700 Subject: [PATCH 082/116] Add an E2E test for Axon. Create a local subnet and add axon and check its properties. 
--- tests/e2e_tests/conftest.py | 19 +++- .../e2e_tests/multistep/test_last_tx_block.py | 2 +- tests/e2e_tests/test_parameter_validation.py | 101 ++++++++++++++++++ tests/e2e_tests/utils.py | 43 +++++++- tests/unit_tests/test_dendrite.py | 4 +- 5 files changed, 160 insertions(+), 9 deletions(-) create mode 100644 tests/e2e_tests/test_parameter_validation.py diff --git a/tests/e2e_tests/conftest.py b/tests/e2e_tests/conftest.py index 2300eafc77..a3e5d830ef 100644 --- a/tests/e2e_tests/conftest.py +++ b/tests/e2e_tests/conftest.py @@ -1,13 +1,19 @@ +import logging import os +import re +import shlex import signal -from substrateinterface import SubstrateInterface -import pytest import subprocess -import logging -import shlex -import re import time +import pytest +from substrateinterface import SubstrateInterface + +from tests.e2e_tests.utils import ( + clone_or_update_templates, + install_templates, +) + logging.basicConfig(level=logging.INFO) @@ -22,6 +28,9 @@ def local_chain(): logging.warning("LOCALNET_SH_PATH env variable is not set, e2e test skipped.") pytest.skip("LOCALNET_SH_PATH environment variable is not set.") + templates_dir = clone_or_update_templates() + install_templates(templates_dir) + # Start new node process cmds = shlex.split(script_path) process = subprocess.Popen( diff --git a/tests/e2e_tests/multistep/test_last_tx_block.py b/tests/e2e_tests/multistep/test_last_tx_block.py index 0d1796f5d8..8f38165934 100644 --- a/tests/e2e_tests/multistep/test_last_tx_block.py +++ b/tests/e2e_tests/multistep/test_last_tx_block.py @@ -35,7 +35,7 @@ def test_takes(local_chain): ).serialize() == 0 ) - exec_command(RegisterCommand, ["s", "register", "--neduid", "1"]) + exec_command(RegisterCommand, ["s", "register", "--netuid", "1"]) exec_command(NominateCommand, ["root", "nominate"]) assert ( local_chain.query( diff --git a/tests/e2e_tests/test_parameter_validation.py b/tests/e2e_tests/test_parameter_validation.py new file mode 100644 index 0000000000..b4a272f709 --- /dev/null +++ b/tests/e2e_tests/test_parameter_validation.py @@ -0,0 +1,101 @@ +import subprocess +import sys +import time + +import bittensor +from bittensor.commands import ( + RegisterCommand, + RegisterSubnetworkCommand, +) +from tests.e2e_tests.utils import ( + setup_wallet, + uninstall_templates, + template_path, + repo_name, +) + + +def test_parameter_validation_from_subtensor(local_chain): + # Register root as Alice + alice_keypair, exec_command, wallet_path = setup_wallet("//Alice") + exec_command(RegisterSubnetworkCommand, ["s", "create"]) + + # define values + uid = 0 + + # Verify subnet 1 created successfully + assert local_chain.query("SubtensorModule", "NetworksAdded", [1]).serialize() + + # Register a neuron to the subnet + exec_command( + RegisterCommand, + [ + "s", + "register", + "--netuid", + "1", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--wallet.path", + wallet_path, + "--subtensor.network", + "local", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + "--no_prompt", + ], + ) + + # Create a test wallet and set the coldkey, coldkeypub, and hotkey + # wallet = bittensor.wallet(path="/tmp/btcli-wallet") + # wallet.set_coldkey(keypair=alice_keypair, encrypt=False, overwrite=True) + # wallet.set_coldkeypub(keypair=alice_keypair, encrypt=False, overwrite=True) + # wallet.set_hotkey(keypair=alice_keypair, encrypt=False, overwrite=True) + + metagraph = bittensor.metagraph(netuid=1, network="ws://localhost:9945") + subtensor = bittensor.subtensor(network="ws://localhost:9945") + + 
# validate miner with stake, ip, hotkey + new_axon = metagraph.axons[0] + assert new_axon.hotkey == alice_keypair.ss58_address + assert new_axon.coldkey == alice_keypair.ss58_address + assert new_axon.ip == "0.0.0.0" + assert new_axon.port == 0 + assert new_axon.ip_type == 0 + + # register miner + # "python neurons/miner.py --netuid 1 --subtensor.chain_endpoint ws://localhost:9945 --wallet.name wallet.name --wallet.hotkey wallet.hotkey.ss58_address" + + process = subprocess.Popen( + [ + sys.executable, + f"{template_path}/{repo_name}/neurons/miner.py", + "--no_prompt", + "--netuid", + "1", + "--subtensor.network", + "local", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + "--wallet.path", + wallet_path, + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + ], + shell=True, + stdout=subprocess.PIPE, + ) + + try: + outs, errs = process.communicate(timeout=15) + time.sleep(15) + except subprocess.TimeoutExpired: + process.kill() + # validate miner with new ip + new_axon = metagraph.axons[0] + + uninstall_templates(local_chain.templates_dir) diff --git a/tests/e2e_tests/utils.py b/tests/e2e_tests/utils.py index 3ad789dd6d..30cf4fcee2 100644 --- a/tests/e2e_tests/utils.py +++ b/tests/e2e_tests/utils.py @@ -1,7 +1,14 @@ +import os +import shutil +import subprocess +import sys from substrateinterface import Keypair from typing import List import bittensor +template_path = os.getcwd() + "/neurons/" +repo_name = "templates repository" + def setup_wallet(uri: str): keypair = Keypair.create_from_uri(uri) @@ -29,4 +36,38 @@ def exec_command(command, extra_args: List[str]): cli_instance = bittensor.cli(config) command.run(cli_instance) - return (keypair, exec_command) + return keypair, exec_command, wallet_path + + +def clone_or_update_templates(): + install_dir = template_path + repo_mapping = { + repo_name: "https://github.com/opentensor/bittensor-subnet-template.git", + } + os.makedirs(install_dir, exist_ok=True) + os.chdir(install_dir) + + for repo, git_link in repo_mapping.items(): + if not os.path.exists(repo): + print(f"\033[94mCloning {repo}...\033[0m") + subprocess.run(["git", "clone", git_link, repo], check=True) + else: + print(f"\033[94mUpdating {repo}...\033[0m") + os.chdir(repo) + subprocess.run(["git", "pull"], check=True) + os.chdir("..") + + return install_dir + repo_name + "/" + + +def install_templates(install_dir): + subprocess.check_call([sys.executable, "-m", "pip", "install", install_dir]) + + +def uninstall_templates(install_dir): + # uninstall templates + subprocess.check_call( + [sys.executable, "-m", "pip", "uninstall", "bittensor_subnet_template", "-y"] + ) + # delete everything in directory + shutil.rmtree(install_dir) diff --git a/tests/unit_tests/test_dendrite.py b/tests/unit_tests/test_dendrite.py index c30ecc58fa..0505247728 100644 --- a/tests/unit_tests/test_dendrite.py +++ b/tests/unit_tests/test_dendrite.py @@ -93,7 +93,7 @@ def test_close(dendrite_obj, setup_axon): # Query the axon to open a session dendrite_obj.query(axon, SynapseDummy(input=1)) # Session should be automatically closed after query - assert dendrite_obj._session == None + assert dendrite_obj._session is None @pytest.mark.asyncio @@ -103,7 +103,7 @@ async def test_aclose(dendrite_obj, setup_axon): async with dendrite_obj: resp = await dendrite_obj([axon], SynapseDummy(input=1), deserialize=False) # Close should automatically be called on the session after context manager scope - assert dendrite_obj._session == None + assert dendrite_obj._session is None class AsyncMock(Mock): 
From a6a7c861921397acb51d750209d37cf6ac423e99 Mon Sep 17 00:00:00 2001 From: ibraheem-opentensor Date: Wed, 5 Jun 2024 17:25:10 -0700 Subject: [PATCH 083/116] Ruff format --- tests/unit_tests/test_subtensor.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/unit_tests/test_subtensor.py b/tests/unit_tests/test_subtensor.py index 75aed94312..4f4ef29077 100644 --- a/tests/unit_tests/test_subtensor.py +++ b/tests/unit_tests/test_subtensor.py @@ -565,6 +565,7 @@ def test_hyperparameter_normalization( else: assert float(norm_value) == 0.0, f"Failed zero value test for {param_name}" + ########################### # Account functions tests # ########################### From 67c4721868d6bbbbb1017933374e11057bf640a5 Mon Sep 17 00:00:00 2001 From: ibraheem-opentensor Date: Wed, 5 Jun 2024 17:46:23 -0700 Subject: [PATCH 084/116] Fix linter and type errors --- bittensor/axon.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/bittensor/axon.py b/bittensor/axon.py index a2bb2a83cb..a3861263ba 100644 --- a/bittensor/axon.py +++ b/bittensor/axon.py @@ -898,7 +898,10 @@ async def default_verify(self, synapse: bittensor.Synapse): # a reasonable delta. # Updated nonce using NTP implementated at v7.2 - if synapse.dendrite.version >= V_7_2_0: + if ( + synapse.dendrite.version is not None + and synapse.dendrite.version >= V_7_2_0 + ): # If we don't have a nonce stored, ensure that the nonce falls within # a reasonable delta. current_time = BittensorNTPClient.get_current_ntp_time() From bfc9f664913f1ff65bc86dcaebb70b2e8d8bf643 Mon Sep 17 00:00:00 2001 From: opendansor Date: Wed, 5 Jun 2024 21:53:21 -0700 Subject: [PATCH 085/116] Update with async function to spin up the axon instance. --- tests/e2e_tests/test_parameter_validation.py | 75 +++++++++++--------- tests/e2e_tests/utils.py | 2 + 2 files changed, 42 insertions(+), 35 deletions(-) diff --git a/tests/e2e_tests/test_parameter_validation.py b/tests/e2e_tests/test_parameter_validation.py index b4a272f709..15e6b7fa23 100644 --- a/tests/e2e_tests/test_parameter_validation.py +++ b/tests/e2e_tests/test_parameter_validation.py @@ -1,8 +1,10 @@ -import subprocess +import asyncio import sys -import time + +import pytest import bittensor +from bittensor.utils import networking from bittensor.commands import ( RegisterCommand, RegisterSubnetworkCommand, @@ -15,14 +17,12 @@ ) -def test_parameter_validation_from_subtensor(local_chain): +@pytest.mark.asyncio +async def test_parameter_validation_from_subtensor(local_chain, updated_axon=None): # Register root as Alice alice_keypair, exec_command, wallet_path = setup_wallet("//Alice") exec_command(RegisterSubnetworkCommand, ["s", "create"]) - # define values - uid = 0 - # Verify subnet 1 created successfully assert local_chain.query("SubtensorModule", "NetworksAdded", [1]).serialize() @@ -48,30 +48,24 @@ def test_parameter_validation_from_subtensor(local_chain): ], ) - # Create a test wallet and set the coldkey, coldkeypub, and hotkey - # wallet = bittensor.wallet(path="/tmp/btcli-wallet") - # wallet.set_coldkey(keypair=alice_keypair, encrypt=False, overwrite=True) - # wallet.set_coldkeypub(keypair=alice_keypair, encrypt=False, overwrite=True) - # wallet.set_hotkey(keypair=alice_keypair, encrypt=False, overwrite=True) - metagraph = bittensor.metagraph(netuid=1, network="ws://localhost:9945") - subtensor = bittensor.subtensor(network="ws://localhost:9945") - # validate miner with stake, ip, hotkey - new_axon = metagraph.axons[0] - assert new_axon.hotkey == alice_keypair.ss58_address - assert 
new_axon.coldkey == alice_keypair.ss58_address - assert new_axon.ip == "0.0.0.0" - assert new_axon.port == 0 - assert new_axon.ip_type == 0 + # validate one miner with ip of none + old_axon = metagraph.axons[0] + + assert len(metagraph.axons) == 1 + assert old_axon.hotkey == alice_keypair.ss58_address + assert old_axon.coldkey == alice_keypair.ss58_address + assert old_axon.ip == "0.0.0.0" + assert old_axon.port == 0 + assert old_axon.ip_type == 0 # register miner # "python neurons/miner.py --netuid 1 --subtensor.chain_endpoint ws://localhost:9945 --wallet.name wallet.name --wallet.hotkey wallet.hotkey.ss58_address" - - process = subprocess.Popen( + cmd = " ".join( [ - sys.executable, - f"{template_path}/{repo_name}/neurons/miner.py", + f"{sys.executable}", + f'"{template_path}{repo_name}/neurons/miner.py"', "--no_prompt", "--netuid", "1", @@ -85,17 +79,28 @@ def test_parameter_validation_from_subtensor(local_chain): "default", "--wallet.hotkey", "default", - ], - shell=True, - stdout=subprocess.PIPE, + ] + ) + + await asyncio.create_subprocess_shell( + cmd, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, ) + await asyncio.sleep( + 5 + ) # wait for 5 seconds for the metagraph to refresh with latest data + + # refresh metagraph + metagraph = bittensor.metagraph(netuid=1, network="ws://localhost:9945") + updated_axon = metagraph.axons[0] + external_ip = networking.get_external_ip() - try: - outs, errs = process.communicate(timeout=15) - time.sleep(15) - except subprocess.TimeoutExpired: - process.kill() - # validate miner with new ip - new_axon = metagraph.axons[0] + assert len(metagraph.axons) == 1 + assert updated_axon.ip == external_ip + assert updated_axon.ip_type == networking.ip_version(external_ip) + assert updated_axon.port == 8091 + assert updated_axon.hotkey == alice_keypair.ss58_address + assert updated_axon.coldkey == alice_keypair.ss58_address - uninstall_templates(local_chain.templates_dir) + uninstall_templates(template_path) diff --git a/tests/e2e_tests/utils.py b/tests/e2e_tests/utils.py index 30cf4fcee2..feafe1d962 100644 --- a/tests/e2e_tests/utils.py +++ b/tests/e2e_tests/utils.py @@ -2,6 +2,8 @@ import shutil import subprocess import sys + +import requests from substrateinterface import Keypair from typing import List import bittensor From 958d12b90aba00de13109fdf86586df08cbfc6d7 Mon Sep 17 00:00:00 2001 From: opendansor Date: Wed, 5 Jun 2024 22:25:26 -0700 Subject: [PATCH 086/116] Move uninstall_templates to conftest.py. Address extra parm in other e2e tests. 
--- tests/e2e_tests/conftest.py | 14 +++++++++++--- tests/e2e_tests/multistep/test_last_tx_block.py | 4 ++-- .../{ => multistep}/test_parameter_validation.py | 3 --- .../delegation/test_set_delegate_take.py | 4 ++-- .../e2e_tests/subcommands/wallet/test_transfer.py | 2 +- .../subcommands/weights/test_commit_weights.py | 8 ++++---- 6 files changed, 20 insertions(+), 15 deletions(-) rename tests/e2e_tests/{ => multistep}/test_parameter_validation.py (97%) diff --git a/tests/e2e_tests/conftest.py b/tests/e2e_tests/conftest.py index a3e5d830ef..48429e27a6 100644 --- a/tests/e2e_tests/conftest.py +++ b/tests/e2e_tests/conftest.py @@ -12,6 +12,8 @@ from tests.e2e_tests.utils import ( clone_or_update_templates, install_templates, + uninstall_templates, + template_path, ) logging.basicConfig(level=logging.INFO) @@ -28,9 +30,6 @@ def local_chain(): logging.warning("LOCALNET_SH_PATH env variable is not set, e2e test skipped.") pytest.skip("LOCALNET_SH_PATH environment variable is not set.") - templates_dir = clone_or_update_templates() - install_templates(templates_dir) - # Start new node process cmds = shlex.split(script_path) process = subprocess.Popen( @@ -40,6 +39,11 @@ def local_chain(): # Pattern match indicates node is compiled and ready pattern = re.compile(r"Successfully ran block step\.") + # install neuron templates + logging.info("downloading and installing neuron templates from github") + templates_dir = clone_or_update_templates() + install_templates(templates_dir) + def wait_for_node_start(process, pattern): for line in process.stdout: print(line.strip()) @@ -64,3 +68,7 @@ def wait_for_node_start(process, pattern): # Ensure the process has terminated process.wait() + + # uninstall templates + logging.info("uninstalling neuron templates") + uninstall_templates(template_path) diff --git a/tests/e2e_tests/multistep/test_last_tx_block.py b/tests/e2e_tests/multistep/test_last_tx_block.py index 8f38165934..b97d54f8fa 100644 --- a/tests/e2e_tests/multistep/test_last_tx_block.py +++ b/tests/e2e_tests/multistep/test_last_tx_block.py @@ -9,7 +9,7 @@ # https://discord.com/channels/799672011265015819/1176889736636407808/1236057424134144152 def test_takes(local_chain): # Register root as Alice - (keypair, exec_command) = setup_wallet("//Alice") + keypair, exec_command, wallet_path = setup_wallet("//Alice") exec_command(RootRegisterCommand, ["root", "register"]) # Create subnet 1 and verify created successfully @@ -21,7 +21,7 @@ def test_takes(local_chain): assert local_chain.query("SubtensorModule", "NetworksAdded", [1]).serialize() # Register and nominate Bob - (keypair, exec_command) = setup_wallet("//Bob") + keypair, exec_command, wallet_path = setup_wallet("//Bob") assert ( local_chain.query( "SubtensorModule", "LastTxBlock", [keypair.ss58_address] diff --git a/tests/e2e_tests/test_parameter_validation.py b/tests/e2e_tests/multistep/test_parameter_validation.py similarity index 97% rename from tests/e2e_tests/test_parameter_validation.py rename to tests/e2e_tests/multistep/test_parameter_validation.py index 15e6b7fa23..d548a5fa02 100644 --- a/tests/e2e_tests/test_parameter_validation.py +++ b/tests/e2e_tests/multistep/test_parameter_validation.py @@ -11,7 +11,6 @@ ) from tests.e2e_tests.utils import ( setup_wallet, - uninstall_templates, template_path, repo_name, ) @@ -102,5 +101,3 @@ async def test_parameter_validation_from_subtensor(local_chain, updated_axon=Non assert updated_axon.port == 8091 assert updated_axon.hotkey == alice_keypair.ss58_address assert updated_axon.coldkey == 
alice_keypair.ss58_address - - uninstall_templates(template_path) diff --git a/tests/e2e_tests/subcommands/delegation/test_set_delegate_take.py b/tests/e2e_tests/subcommands/delegation/test_set_delegate_take.py index 0453576332..cc7b1b5744 100644 --- a/tests/e2e_tests/subcommands/delegation/test_set_delegate_take.py +++ b/tests/e2e_tests/subcommands/delegation/test_set_delegate_take.py @@ -8,7 +8,7 @@ def test_set_delegate_increase_take(local_chain): # Register root as Alice - (keypair, exec_command) = setup_wallet("//Alice") + keypair, exec_command, wallet_path = setup_wallet("//Alice") exec_command(RootRegisterCommand, ["root", "register"]) # Create subnet 1 and verify created successfully @@ -20,7 +20,7 @@ def test_set_delegate_increase_take(local_chain): assert local_chain.query("SubtensorModule", "NetworksAdded", [1]).serialize() # Register and nominate Bob - (keypair, exec_command) = setup_wallet("//Bob") + keypair, exec_command, wallet_path = setup_wallet("//Bob") assert ( local_chain.query( "SubtensorModule", "LastTxBlock", [keypair.ss58_address] diff --git a/tests/e2e_tests/subcommands/wallet/test_transfer.py b/tests/e2e_tests/subcommands/wallet/test_transfer.py index de8052e027..5b491b3f0d 100644 --- a/tests/e2e_tests/subcommands/wallet/test_transfer.py +++ b/tests/e2e_tests/subcommands/wallet/test_transfer.py @@ -5,7 +5,7 @@ # Example test using the local_chain fixture def test_transfer(local_chain): - (keypair, exec_command) = setup_wallet("//Alice") + keypair, exec_command, wallet_path = setup_wallet("//Alice") acc_before = local_chain.query("System", "Account", [keypair.ss58_address]) exec_command( diff --git a/tests/e2e_tests/subcommands/weights/test_commit_weights.py b/tests/e2e_tests/subcommands/weights/test_commit_weights.py index faed9d3925..ad2ecb7b42 100644 --- a/tests/e2e_tests/subcommands/weights/test_commit_weights.py +++ b/tests/e2e_tests/subcommands/weights/test_commit_weights.py @@ -17,7 +17,7 @@ def test_commit_and_reveal_weights(local_chain): # Register root as Alice - (alice_keypair, exec_command) = setup_wallet("//Alice") + keypair, exec_command, wallet_path = setup_wallet("//Alice") exec_command(RegisterSubnetworkCommand, ["s", "create"]) # define values @@ -36,9 +36,9 @@ def test_commit_and_reveal_weights(local_chain): # Create a test wallet and set the coldkey, coldkeypub, and hotkey wallet = bittensor.wallet(path="/tmp/btcli-wallet") - wallet.set_coldkey(keypair=alice_keypair, encrypt=False, overwrite=True) - wallet.set_coldkeypub(keypair=alice_keypair, encrypt=False, overwrite=True) - wallet.set_hotkey(keypair=alice_keypair, encrypt=False, overwrite=True) + wallet.set_coldkey(keypair=keypair, encrypt=False, overwrite=True) + wallet.set_coldkeypub(keypair=keypair, encrypt=False, overwrite=True) + wallet.set_hotkey(keypair=keypair, encrypt=False, overwrite=True) # Stake to become to top neuron after the first epoch exec_command( From 9f0816a32d2013577879a91c1a04d4c5ec65ca17 Mon Sep 17 00:00:00 2001 From: opendansor Date: Wed, 5 Jun 2024 22:40:10 -0700 Subject: [PATCH 087/116] Rename to test axon --- .../multistep/{test_parameter_validation.py => test_axon.py} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename tests/e2e_tests/multistep/{test_parameter_validation.py => test_axon.py} (97%) diff --git a/tests/e2e_tests/multistep/test_parameter_validation.py b/tests/e2e_tests/multistep/test_axon.py similarity index 97% rename from tests/e2e_tests/multistep/test_parameter_validation.py rename to tests/e2e_tests/multistep/test_axon.py index 
d548a5fa02..770bed17e5 100644 --- a/tests/e2e_tests/multistep/test_parameter_validation.py +++ b/tests/e2e_tests/multistep/test_axon.py @@ -17,7 +17,7 @@ @pytest.mark.asyncio -async def test_parameter_validation_from_subtensor(local_chain, updated_axon=None): +async def test_axon(local_chain): # Register root as Alice alice_keypair, exec_command, wallet_path = setup_wallet("//Alice") exec_command(RegisterSubnetworkCommand, ["s", "create"]) From e8f32bfd9be86179a8902fb2b584f9c84530512b Mon Sep 17 00:00:00 2001 From: opendansor Date: Wed, 5 Jun 2024 23:03:56 -0700 Subject: [PATCH 088/116] Add version to pytest-asyncio --- requirements/dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/dev.txt b/requirements/dev.txt index d5e774702e..6cc94e2679 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -1,6 +1,6 @@ black==23.7.0 pytest==7.2.0 -pytest-asyncio +pytest-asyncio==0.23.7 pytest-mock==3.12.0 pytest-split==0.8.0 pytest-xdist==3.0.2 From 689ec43e9afb1554e3021216fde92eb439429a15 Mon Sep 17 00:00:00 2001 From: opendansor Date: Thu, 6 Jun 2024 08:59:02 -0700 Subject: [PATCH 089/116] Update e2e-subtensor-tests.yaml --- .github/workflows/e2e-subtensor-tests.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/e2e-subtensor-tests.yaml b/.github/workflows/e2e-subtensor-tests.yaml index bea776f620..1d3d6bb5ce 100644 --- a/.github/workflows/e2e-subtensor-tests.yaml +++ b/.github/workflows/e2e-subtensor-tests.yaml @@ -76,9 +76,9 @@ jobs: - name: Setup subtensor repo working-directory: ${{ github.workspace }}/subtensor - run: git checkout development + run: git checkout testnet - name: Run tests run: | - python3 -m pip install -e .[torch] pytest - LOCALNET_SH_PATH="./subtensor/scripts/localnet.sh" pytest tests/e2e_tests/ -s + python3 -m pip install -e .[dev] pytest + LOCALNET_SH_PATH="${{ github.workspace }}/subtensor/scripts/localnet.sh" pytest tests/e2e_tests/ -s From a836be0c8d432cf0512fce045e8e2ecbbaf2f407 Mon Sep 17 00:00:00 2001 From: ibraheem-opentensor Date: Thu, 6 Jun 2024 12:52:57 -0700 Subject: [PATCH 090/116] Replace NTP with system time --- bittensor/axon.py | 7 ++--- bittensor/dendrite.py | 4 +-- bittensor/utils/networking.py | 31 ----------------------- requirements/prod.txt | 1 - tests/unit_tests/utils/test_networking.py | 7 ----- 5 files changed, 3 insertions(+), 47 deletions(-) diff --git a/bittensor/axon.py b/bittensor/axon.py index a3861263ba..0a836d9c08 100644 --- a/bittensor/axon.py +++ b/bittensor/axon.py @@ -58,7 +58,6 @@ from bittensor.constants import ALLOWED_DELTA, V_7_2_0 from bittensor.threadpool import PriorityThreadPoolExecutor from bittensor.utils import networking -from bittensor.utils.networking import BittensorNTPClient class FastAPIThreadedServer(uvicorn.Server): @@ -904,10 +903,9 @@ async def default_verify(self, synapse: bittensor.Synapse): ): # If we don't have a nonce stored, ensure that the nonce falls within # a reasonable delta. - current_time = BittensorNTPClient.get_current_ntp_time() if ( self.nonces.get(endpoint_key) is None - and synapse.dendrite.nonce <= current_time - ALLOWED_DELTA + and synapse.dendrite.nonce <= time.time_ns() - ALLOWED_DELTA ): raise Exception("Nonce is too old") if ( @@ -1184,14 +1182,13 @@ async def preprocess(self, request: Request) -> bittensor.Synapse: f"Improperly formatted request. Could not parse headers {request.headers} into synapse of type {request_name}." 
) synapse.name = request_name - current_time = BittensorNTPClient.get_current_ntp_time() # Fills the local axon information into the synapse. synapse.axon.__dict__.update( { "version": str(bittensor.__version_as_int__), "uuid": str(self.axon.uuid), - "nonce": current_time, + "nonce": time.time_ns(), "status_code": 100, } ) diff --git a/bittensor/dendrite.py b/bittensor/dendrite.py index f5c925cd88..dca513e0b2 100644 --- a/bittensor/dendrite.py +++ b/bittensor/dendrite.py @@ -26,7 +26,6 @@ import bittensor from typing import Optional, List, Union, AsyncGenerator, Any -from bittensor.utils.networking import BittensorNTPClient from bittensor.utils.registration import torch, use_torch @@ -655,11 +654,10 @@ def preprocess_synapse_for_request( """ # Set the timeout for the synapse synapse.timeout = timeout - current_time = BittensorNTPClient.get_current_ntp_time() synapse.dendrite = bittensor.TerminalInfo( ip=self.external_ip, version=bittensor.__version_as_int__, - nonce=current_time, + nonce=time.time_ns(), uuid=self.uuid, hotkey=self.keypair.ss58_address, ) diff --git a/bittensor/utils/networking.py b/bittensor/utils/networking.py index 4f2d335dc8..e9f2288878 100644 --- a/bittensor/utils/networking.py +++ b/bittensor/utils/networking.py @@ -29,7 +29,6 @@ from bittensor.constants import NTP_POOL_RETRIES, NTP_SERVERS # 3rd party -import ntplib import requests @@ -175,33 +174,3 @@ def get_formatted_ws_endpoint_url(endpoint_url: str) -> str: endpoint_url = "ws://{}".format(endpoint_url) return endpoint_url - - -class BittensorNTPClient: - """NTP singleton client""" - - _instance = None - - def __new__(cls): - if cls._instance is None: - cls._instance = ntplib.NTPClient() - return cls._instance - - @staticmethod - def get_current_ntp_time(retries: int = NTP_POOL_RETRIES) -> int: - attempts = 0 - while attempts < retries: - server = NTP_SERVERS[attempts % len(NTP_SERVERS)] - try: - ntp_client = BittensorNTPClient() - response = ntp_client.request(server) - current_time = int(response.tx_time * 1e9) # Convert to nanoseconds - return current_time - except Exception as e: - attempts += 1 - bittensor.logging.info( - f"Attempt {attempts} - Error fetching NTP time: {e}" - ) - # Fallback to local time if all retries fail - bittensor.logging.warning("All NTP retries failed, using system UNIX time") - return time.time_ns() diff --git a/requirements/prod.txt b/requirements/prod.txt index 34cb8eba20..2d9ecabab5 100644 --- a/requirements/prod.txt +++ b/requirements/prod.txt @@ -11,7 +11,6 @@ fuzzywuzzy>=0.18.0 fastapi~=0.110.1 munch~=2.5.0 netaddr -ntplib==0.4.0 numpy msgpack-numpy-opentensor~=0.5.0 nest_asyncio diff --git a/tests/unit_tests/utils/test_networking.py b/tests/unit_tests/utils/test_networking.py index 4b626bf9b6..2037718578 100644 --- a/tests/unit_tests/utils/test_networking.py +++ b/tests/unit_tests/utils/test_networking.py @@ -165,10 +165,3 @@ def urlopen(self): def test_format(url: str, expected: str): """Test formatting WebSocket endpoint URL.""" assert utils.networking.get_formatted_ws_endpoint_url(url) == expected - - -def test_bt_ntp_client(): - client_1 = utils.networking.BittensorNTPClient() - client_2 = utils.networking.BittensorNTPClient() - - assert client_1 == client_2 From 9e7efa0d6831f2aefbd8e17a1e2f9953aebe16bb Mon Sep 17 00:00:00 2001 From: ibraheem-opentensor Date: Thu, 6 Jun 2024 12:55:04 -0700 Subject: [PATCH 091/116] Updated constants --- bittensor/constants.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/bittensor/constants.py b/bittensor/constants.py 
index 77b994cba8..770612bcbe 100644 --- a/bittensor/constants.py +++ b/bittensor/constants.py @@ -16,7 +16,5 @@ # DEALINGS IN THE SOFTWARE. -ALLOWED_DELTA = 5000000000 # Delta of 5 seconds for nonce validation -NTP_POOL_RETRIES = 2 -NTP_SERVERS = ["0.pool.ntp.org", "1.pool.ntp.org", "2.pool.ntp.org"] +ALLOWED_DELTA = 4000000000 # Delta of 4 seconds for nonce validation V_7_2_0 = 720 From 8ef6cfee6a161c62aaf96a7fed6fa1468219fc7c Mon Sep 17 00:00:00 2001 From: ibraheem-opentensor Date: Thu, 6 Jun 2024 13:08:58 -0700 Subject: [PATCH 092/116] Cleanup --- bittensor/utils/networking.py | 1 - 1 file changed, 1 deletion(-) diff --git a/bittensor/utils/networking.py b/bittensor/utils/networking.py index e9f2288878..ce19d61d19 100644 --- a/bittensor/utils/networking.py +++ b/bittensor/utils/networking.py @@ -26,7 +26,6 @@ import time import json import netaddr -from bittensor.constants import NTP_POOL_RETRIES, NTP_SERVERS # 3rd party import requests From 551e3baee2970780cde750fce47686d35e45ef1d Mon Sep 17 00:00:00 2001 From: opendansor Date: Thu, 6 Jun 2024 21:50:04 -0700 Subject: [PATCH 093/116] Add E2E faucet test --- .../subcommands/wallet/test_faucet.py | 72 +++++++++++++++++++ tests/e2e_tests/utils.py | 5 +- 2 files changed, 75 insertions(+), 2 deletions(-) create mode 100644 tests/e2e_tests/subcommands/wallet/test_faucet.py diff --git a/tests/e2e_tests/subcommands/wallet/test_faucet.py b/tests/e2e_tests/subcommands/wallet/test_faucet.py new file mode 100644 index 0000000000..5fbd4ec768 --- /dev/null +++ b/tests/e2e_tests/subcommands/wallet/test_faucet.py @@ -0,0 +1,72 @@ +import pytest + +import bittensor +from bittensor import logging +from bittensor.commands import ( + RegisterCommand, + RegisterSubnetworkCommand, + RunFaucetCommand, +) +from tests.e2e_tests.utils import ( + setup_wallet, +) + + +@pytest.mark.asyncio +async def test_faucet(local_chain): + # Register root as Alice + alice_keypair, exec_command, wallet_path = setup_wallet("//Alice") + exec_command(RegisterSubnetworkCommand, ["s", "create"]) + + # Verify subnet 1 created successfully + assert local_chain.query("SubtensorModule", "NetworksAdded", [1]).serialize() + + # Register a neuron to the subnet + exec_command( + RegisterCommand, + [ + "s", + "register", + "--netuid", + "1", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--subtensor.network", + "local", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + "--no_prompt", + ], + ) + + subtensor = bittensor.subtensor(network="ws://localhost:9945") + + alice_wallet_balance = subtensor.get_balance(alice_keypair.ss58_address) + # verify current balance + assert alice_wallet_balance.tao == 998999.0 + + # run faucet 3 times + for i in range(3): + logging.info(f"running faucet for the {i}th time.") + print(f"running faucet for the {i}th time.") + exec_command( + RunFaucetCommand, + [ + "wallet", + "faucet", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + ], + ) + + subtensor = bittensor.subtensor(network="ws://localhost:9945") + + alice_wallet_balance = subtensor.get_balance(alice_keypair.ss58_address) + # verify balance increase + assert alice_wallet_balance.tao == 999899.0 diff --git a/tests/e2e_tests/utils.py b/tests/e2e_tests/utils.py index feafe1d962..f960135a09 100644 --- a/tests/e2e_tests/utils.py +++ b/tests/e2e_tests/utils.py @@ -3,9 +3,10 @@ import subprocess import sys -import requests -from substrateinterface import Keypair from typing import List + +from 
substrateinterface import Keypair + import bittensor template_path = os.getcwd() + "/neurons/" From fc47ce904f33898d5466495b62bdbafbaea36bae Mon Sep 17 00:00:00 2001 From: opendansor Date: Thu, 6 Jun 2024 23:06:04 -0700 Subject: [PATCH 094/116] Import keypair from bittensor --- tests/e2e_tests/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/e2e_tests/utils.py b/tests/e2e_tests/utils.py index f960135a09..4b485e3bd9 100644 --- a/tests/e2e_tests/utils.py +++ b/tests/e2e_tests/utils.py @@ -5,7 +5,7 @@ from typing import List -from substrateinterface import Keypair +from bittensor import Keypair import bittensor From 5c1d9da7b1f83eb06bf3bf4db92bbe32797751da Mon Sep 17 00:00:00 2001 From: opendansor Date: Fri, 7 Jun 2024 00:44:52 -0700 Subject: [PATCH 095/116] E2E test for Validators/Dendrite --- tests/e2e_tests/multistep/test_dendrite.py | 181 +++++++++++++++++++++ 1 file changed, 181 insertions(+) create mode 100644 tests/e2e_tests/multistep/test_dendrite.py diff --git a/tests/e2e_tests/multistep/test_dendrite.py b/tests/e2e_tests/multistep/test_dendrite.py new file mode 100644 index 0000000000..48b27e5bcd --- /dev/null +++ b/tests/e2e_tests/multistep/test_dendrite.py @@ -0,0 +1,181 @@ +import asyncio +import logging +import sys +import time + +import pytest + +import bittensor +from bittensor.commands import ( + RegisterCommand, + RegisterSubnetworkCommand, + StakeCommand, + RootRegisterCommand, + RootSetBoostCommand, +) +from tests.e2e_tests.utils import ( + setup_wallet, + template_path, + repo_name, +) + +logging.basicConfig(level=logging.INFO) + + +@pytest.mark.asyncio +async def test_dendrite(local_chain): + # Register root as Alice - the subnet owner + alice_keypair, exec_command, wallet_path = setup_wallet("//Alice") + exec_command(RegisterSubnetworkCommand, ["s", "create"]) + + # Verify subnet 1 created successfully + assert local_chain.query("SubtensorModule", "NetworksAdded", [1]).serialize() + + bob_keypair, exec_command, wallet_path = setup_wallet("//Bob") + + # Register a neuron to the subnet + exec_command( + RegisterCommand, + [ + "s", + "register", + "--netuid", + "1", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--subtensor.network", + "local", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + "--no_prompt", + ], + ) + + metagraph = bittensor.metagraph(netuid=1, network="ws://localhost:9945") + subtensor = bittensor.subtensor(network="ws://localhost:9945") + + # assert one neuron is Bob + assert len(subtensor.neurons(netuid=1)) == 1 + neuron = metagraph.neurons[0] + assert neuron.hotkey == bob_keypair.ss58_address + assert neuron.coldkey == bob_keypair.ss58_address + + # assert stake is 0 + assert neuron.stake.tao == 0 + + # Stake to become to top neuron after the first epoch + exec_command( + StakeCommand, + [ + "stake", + "add", + "--amount", + "10000", + ], + ) + + # refresh metagraph + metagraph = bittensor.metagraph(netuid=1, network="ws://localhost:9945") + neuron = metagraph.neurons[0] + # assert stake is 10000 + assert neuron.stake.tao == 9999.999999 + + # assert neuron is not validator + assert neuron.active is True + assert neuron.validator_permit is False + assert neuron.validator_trust == 0.0 + assert neuron.pruning_score == 0 + + # register validator from template + cmd = " ".join( + [ + f"{sys.executable}", + f'"{template_path}{repo_name}/neurons/validator.py"', + "--no_prompt", + "--netuid", + "1", + "--subtensor.network", + "local", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + 
"--wallet.path", + wallet_path, + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + ] + ) + + # run validator in the background + await asyncio.create_subprocess_shell( + cmd, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + await asyncio.sleep( + 5 + ) # wait for 5 seconds for the metagraph and subtensor to refresh with latest data + + # register validator with root network + exec_command( + RootRegisterCommand, + [ + "root", + "register", + "--netuid", + "1", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + ], + ) + + exec_command( + RootSetBoostCommand, + [ + "root", + "boost", + "--netuid", + "1", + "--increase", + "1", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + ], + ) + # get current block, wait until 360 blocks pass (subnet tempo) + interval = 360 + current_block = subtensor.get_current_block() + next_tempo_block_start = (current_block - (current_block % interval)) + interval + while current_block < next_tempo_block_start: + time.sleep(1) # Wait for 1 second before checking the block number again + current_block = subtensor.get_current_block() + if current_block % 10 == 0: + print( + f"Current Block: {current_block} Next tempo at: {next_tempo_block_start}" + ) + logging.info( + f"Current Block: {current_block} Next tempo at: {next_tempo_block_start}" + ) + + # refresh metagraph + metagraph = bittensor.metagraph(netuid=1, network="ws://localhost:9945") + + # refresh validator neuron + neuron = metagraph.neurons[0] + + assert len(metagraph.neurons) == 1 + assert neuron.active is True + assert neuron.validator_permit is True + assert neuron.hotkey == bob_keypair.ss58_address + assert neuron.coldkey == bob_keypair.ss58_address From dfe083b10f68af426ab88649c0c3862eb1d59fcf Mon Sep 17 00:00:00 2001 From: opendansor Date: Fri, 7 Jun 2024 01:07:33 -0700 Subject: [PATCH 096/116] Add a 5-second delay between faucet calls. --- tests/e2e_tests/subcommands/wallet/test_faucet.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/e2e_tests/subcommands/wallet/test_faucet.py b/tests/e2e_tests/subcommands/wallet/test_faucet.py index 5fbd4ec768..13be94467b 100644 --- a/tests/e2e_tests/subcommands/wallet/test_faucet.py +++ b/tests/e2e_tests/subcommands/wallet/test_faucet.py @@ -1,3 +1,5 @@ +import asyncio + import pytest import bittensor @@ -64,6 +66,7 @@ async def test_faucet(local_chain): "ws://localhost:9945", ], ) + await asyncio.sleep(5) subtensor = bittensor.subtensor(network="ws://localhost:9945") From 0491062c13f866e54c82399db49bef41c940c8f9 Mon Sep 17 00:00:00 2001 From: opendansor Date: Fri, 7 Jun 2024 09:50:53 -0700 Subject: [PATCH 097/116] Remove async, switch wallet to Bob, add try-catch and check for higher balance. 
--- .../subcommands/wallet/test_faucet.py | 60 ++++++++++--------- 1 file changed, 32 insertions(+), 28 deletions(-) diff --git a/tests/e2e_tests/subcommands/wallet/test_faucet.py b/tests/e2e_tests/subcommands/wallet/test_faucet.py index 13be94467b..bac3741ad1 100644 --- a/tests/e2e_tests/subcommands/wallet/test_faucet.py +++ b/tests/e2e_tests/subcommands/wallet/test_faucet.py @@ -1,6 +1,4 @@ -import asyncio - -import pytest +import time import bittensor from bittensor import logging @@ -14,10 +12,9 @@ ) -@pytest.mark.asyncio -async def test_faucet(local_chain): +def test_faucet(local_chain): # Register root as Alice - alice_keypair, exec_command, wallet_path = setup_wallet("//Alice") + keypair, exec_command, wallet_path = setup_wallet("//Bob") exec_command(RegisterSubnetworkCommand, ["s", "create"]) # Verify subnet 1 created successfully @@ -45,31 +42,38 @@ async def test_faucet(local_chain): subtensor = bittensor.subtensor(network="ws://localhost:9945") - alice_wallet_balance = subtensor.get_balance(alice_keypair.ss58_address) # verify current balance - assert alice_wallet_balance.tao == 998999.0 + wallet_balance = subtensor.get_balance(keypair.ss58_address) + assert wallet_balance.tao == 998999.0 - # run faucet 3 times - for i in range(3): - logging.info(f"running faucet for the {i}th time.") - print(f"running faucet for the {i}th time.") - exec_command( - RunFaucetCommand, - [ - "wallet", - "faucet", - "--wallet.name", - "default", - "--wallet.hotkey", - "default", - "--subtensor.chain_endpoint", - "ws://localhost:9945", - ], - ) - await asyncio.sleep(5) + # run faucet 5 times + for i in range(5): + logging.info(f"faucet run #:{i+1}") + try: + exec_command( + RunFaucetCommand, + [ + "wallet", + "faucet", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + ], + ) + logging.info( + f"wallet balance is {subtensor.get_balance(keypair.ss58_address).tao} tao" + ) + except Exception: + logging.warning( + "Block not generated fast enough to be within 3 block seconds window." 
+ ) + time.sleep(1) subtensor = bittensor.subtensor(network="ws://localhost:9945") - alice_wallet_balance = subtensor.get_balance(alice_keypair.ss58_address) + new_wallet_balance = subtensor.get_balance(keypair.ss58_address) # verify balance increase - assert alice_wallet_balance.tao == 999899.0 + assert wallet_balance.tao < new_wallet_balance From 89bda9a2dd6caa4dae0ad38cc9dd72a88a89da98 Mon Sep 17 00:00:00 2001 From: opendansor Date: Fri, 7 Jun 2024 13:47:24 -0700 Subject: [PATCH 098/116] Add Params, switch wallet to Alice, add try-catch for System Exit --- tests/e2e_tests/conftest.py | 10 ++++++++-- .../subcommands/wallet/test_faucet.py | 18 +++++++++++------- 2 files changed, 19 insertions(+), 9 deletions(-) diff --git a/tests/e2e_tests/conftest.py b/tests/e2e_tests/conftest.py index 48429e27a6..c30c3f9692 100644 --- a/tests/e2e_tests/conftest.py +++ b/tests/e2e_tests/conftest.py @@ -21,7 +21,8 @@ # Fixture for setting up and tearing down a localnet.sh chain between tests @pytest.fixture(scope="function") -def local_chain(): +def local_chain(request): + param = request.param if hasattr(request, "param") else None # Get the environment variable for the script path script_path = os.getenv("LOCALNET_SH_PATH") @@ -30,8 +31,13 @@ def local_chain(): logging.warning("LOCALNET_SH_PATH env variable is not set, e2e test skipped.") pytest.skip("LOCALNET_SH_PATH environment variable is not set.") + # Check if param is None, and handle it accordingly + if param is None: + args = "" + else: + args = f"fast_blocks={param}" + cmds = shlex.split(f"{script_path} {args}") # Start new node process - cmds = shlex.split(script_path) process = subprocess.Popen( cmds, stdout=subprocess.PIPE, text=True, preexec_fn=os.setsid ) diff --git a/tests/e2e_tests/subcommands/wallet/test_faucet.py b/tests/e2e_tests/subcommands/wallet/test_faucet.py index bac3741ad1..14df31ccd9 100644 --- a/tests/e2e_tests/subcommands/wallet/test_faucet.py +++ b/tests/e2e_tests/subcommands/wallet/test_faucet.py @@ -1,4 +1,4 @@ -import time +import pytest import bittensor from bittensor import logging @@ -12,9 +12,10 @@ ) +@pytest.mark.parametrize("local_chain", [False], indirect=True) def test_faucet(local_chain): # Register root as Alice - keypair, exec_command, wallet_path = setup_wallet("//Bob") + keypair, exec_command, wallet_path = setup_wallet("//Alice") exec_command(RegisterSubnetworkCommand, ["s", "create"]) # Verify subnet 1 created successfully @@ -46,8 +47,8 @@ def test_faucet(local_chain): wallet_balance = subtensor.get_balance(keypair.ss58_address) assert wallet_balance.tao == 998999.0 - # run faucet 5 times - for i in range(5): + # run faucet 3 times + for i in range(3): logging.info(f"faucet run #:{i+1}") try: exec_command( @@ -66,14 +67,17 @@ def test_faucet(local_chain): logging.info( f"wallet balance is {subtensor.get_balance(keypair.ss58_address).tao} tao" ) - except Exception: + except SystemExit as e: logging.warning( "Block not generated fast enough to be within 3 block seconds window." 
) - time.sleep(1) + # Handle the SystemExit exception + assert e.code == 1 # Assert that the exit code is 1 + except Exception as e: + logging.warning(f"Unexpected exception occurred on faucet: {e}") subtensor = bittensor.subtensor(network="ws://localhost:9945") new_wallet_balance = subtensor.get_balance(keypair.ss58_address) # verify balance increase - assert wallet_balance.tao < new_wallet_balance + assert wallet_balance.tao < new_wallet_balance.tao From e5f2fe8bc1f96c304e2a1665a147bf1892c5a5e5 Mon Sep 17 00:00:00 2001 From: opendansor Date: Fri, 7 Jun 2024 14:10:37 -0700 Subject: [PATCH 099/116] Add assert to assert exact amount of tao dispensed. --- tests/e2e_tests/subcommands/wallet/test_faucet.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/e2e_tests/subcommands/wallet/test_faucet.py b/tests/e2e_tests/subcommands/wallet/test_faucet.py index 14df31ccd9..9e7632ed3f 100644 --- a/tests/e2e_tests/subcommands/wallet/test_faucet.py +++ b/tests/e2e_tests/subcommands/wallet/test_faucet.py @@ -81,3 +81,4 @@ def test_faucet(local_chain): new_wallet_balance = subtensor.get_balance(keypair.ss58_address) # verify balance increase assert wallet_balance.tao < new_wallet_balance.tao + assert new_wallet_balance.tao == 999899.0 # after 3 runs we should see an increase of 900 tao From 1c8b4f4c574f11be88c800584c8c7e0c536103a2 Mon Sep 17 00:00:00 2001 From: opendansor Date: Fri, 7 Jun 2024 14:13:35 -0700 Subject: [PATCH 100/116] Clean up --- tests/e2e_tests/conftest.py | 7 +++---- tests/e2e_tests/subcommands/wallet/test_faucet.py | 4 +++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/tests/e2e_tests/conftest.py b/tests/e2e_tests/conftest.py index c30c3f9692..7afb6b448f 100644 --- a/tests/e2e_tests/conftest.py +++ b/tests/e2e_tests/conftest.py @@ -32,10 +32,9 @@ def local_chain(request): pytest.skip("LOCALNET_SH_PATH environment variable is not set.") # Check if param is None, and handle it accordingly - if param is None: - args = "" - else: - args = f"fast_blocks={param}" + args = "" if param is None else f"fast_blocks={param}" + + # compile commands to send to process cmds = shlex.split(f"{script_path} {args}") # Start new node process process = subprocess.Popen( diff --git a/tests/e2e_tests/subcommands/wallet/test_faucet.py b/tests/e2e_tests/subcommands/wallet/test_faucet.py index 9e7632ed3f..0e647387b6 100644 --- a/tests/e2e_tests/subcommands/wallet/test_faucet.py +++ b/tests/e2e_tests/subcommands/wallet/test_faucet.py @@ -81,4 +81,6 @@ def test_faucet(local_chain): new_wallet_balance = subtensor.get_balance(keypair.ss58_address) # verify balance increase assert wallet_balance.tao < new_wallet_balance.tao - assert new_wallet_balance.tao == 999899.0 # after 3 runs we should see an increase of 900 tao + assert ( + new_wallet_balance.tao == 999899.0 + ) # after 3 runs we should see an increase of 900 tao From 11645e670b4a5a5ddce2bcbec1ae19f083566d83 Mon Sep 17 00:00:00 2001 From: Maciej Urbanski Date: Sat, 8 Jun 2024 14:53:16 +0200 Subject: [PATCH 101/116] fix __version_as_int__ for >10 minor/patch release vers (resolves #1982) --- bittensor/__init__.py | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/bittensor/__init__.py b/bittensor/__init__.py index 21b160da82..30273a9d26 100644 --- a/bittensor/__init__.py +++ b/bittensor/__init__.py @@ -42,12 +42,15 @@ # Bittensor code and protocol version. 
__version__ = "7.0.0" -version_split = __version__.split(".") -__version_as_int__: int = ( - (100 * int(version_split[0])) - + (10 * int(version_split[1])) - + (1 * int(version_split[2])) +_version_split = __version__.split(".") +__version_info__ = tuple(map(int, _version_split)) +_version_int_base = 1000 +assert max(__version_info__) < _version_int_base + +__version_as_int__: int = sum( + e * (_version_int_base**i) for i, e in enumerate(reversed(__version_info__)) ) +assert __version_as_int__ < 2**31 # fits in int32 __new_signature_version__ = 360 # Rich console. @@ -58,6 +61,16 @@ install(show_locals=False) +def __getattr__(name): + if name == "version_split": + warnings.warn( + "version_split is deprecated and will be removed in future versions. Use __version__ instead.", + DeprecationWarning, + ) + return _version_split + raise AttributeError(f"module {__name__} has no attribute {name}") + + def turn_console_off(): global __use_console__ global __console__ From 03f3f94c941928b9591c2c1917e34836ff36fb91 Mon Sep 17 00:00:00 2001 From: Maciej Urbanski Date: Tue, 11 Jun 2024 13:55:04 +0200 Subject: [PATCH 102/116] replace map call with generator expression --- bittensor/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bittensor/__init__.py b/bittensor/__init__.py index 30273a9d26..f9200a271c 100644 --- a/bittensor/__init__.py +++ b/bittensor/__init__.py @@ -43,7 +43,7 @@ __version__ = "7.0.0" _version_split = __version__.split(".") -__version_info__ = tuple(map(int, _version_split)) +__version_info__ = tuple(int(part) for part in _version_split) _version_int_base = 1000 assert max(__version_info__) < _version_int_base From dc6daf8319a2e93376ad5aaf4533facabab57d1f Mon Sep 17 00:00:00 2001 From: ibraheem-opentensor Date: Tue, 11 Jun 2024 10:27:44 -0700 Subject: [PATCH 103/116] Nonce: Included timeout and fixed versioning --- bittensor/axon.py | 3 ++- bittensor/constants.py | 2 +- bittensor/utils/networking.py | 2 -- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/bittensor/axon.py b/bittensor/axon.py index 0a836d9c08..948d65e31f 100644 --- a/bittensor/axon.py +++ b/bittensor/axon.py @@ -905,7 +905,8 @@ async def default_verify(self, synapse: bittensor.Synapse): # a reasonable delta. if ( self.nonces.get(endpoint_key) is None - and synapse.dendrite.nonce <= time.time_ns() - ALLOWED_DELTA + and synapse.dendrite.nonce + <= time.time_ns() - ALLOWED_DELTA - (synapse.timeout or 0) ): raise Exception("Nonce is too old") if ( diff --git a/bittensor/constants.py b/bittensor/constants.py index 770612bcbe..2b52cfd4bd 100644 --- a/bittensor/constants.py +++ b/bittensor/constants.py @@ -17,4 +17,4 @@ ALLOWED_DELTA = 4000000000 # Delta of 4 seconds for nonce validation -V_7_2_0 = 720 +V_7_2_0 = 7002000 diff --git a/bittensor/utils/networking.py b/bittensor/utils/networking.py index ce19d61d19..4d1af585c3 100644 --- a/bittensor/utils/networking.py +++ b/bittensor/utils/networking.py @@ -20,10 +20,8 @@ # DEALINGS IN THE SOFTWARE. 
# Standard Lib -import bittensor import os import urllib -import time import json import netaddr From d5ab67c5b81cba5041dd8f70282bc6bc17e1dd67 Mon Sep 17 00:00:00 2001 From: opendansor Date: Tue, 11 Jun 2024 13:22:39 -0700 Subject: [PATCH 104/116] Test Incentive E2E --- tests/e2e_tests/multistep/test_incentive.py | 287 ++++++++++++++++++++ 1 file changed, 287 insertions(+) create mode 100644 tests/e2e_tests/multistep/test_incentive.py diff --git a/tests/e2e_tests/multistep/test_incentive.py b/tests/e2e_tests/multistep/test_incentive.py new file mode 100644 index 0000000000..837cb694b7 --- /dev/null +++ b/tests/e2e_tests/multistep/test_incentive.py @@ -0,0 +1,287 @@ +import asyncio +import logging +import sys +import time + +import pytest + +import bittensor +from bittensor.commands import ( + RegisterCommand, + RegisterSubnetworkCommand, + StakeCommand, + RootRegisterCommand, + RootSetBoostCommand, +) +from tests.e2e_tests.utils import ( + setup_wallet, + template_path, + repo_name, +) + +logging.basicConfig(level=logging.INFO) + + +@pytest.mark.asyncio +async def test_incentive(local_chain): + # Register root as Alice - the subnet owner and validator + alice_keypair, alice_exec_command, alice_wallet_path = setup_wallet("//Alice") + alice_exec_command(RegisterSubnetworkCommand, ["s", "create"]) + # Verify subnet 1 created successfully + assert local_chain.query("SubtensorModule", "NetworksAdded", [1]).serialize() + + # Register Bob as miner + bob_keypair, bob_exec_command, bob_wallet_path = setup_wallet("//Bob") + + # Register Alice as neuron to the subnet + alice_exec_command( + RegisterCommand, + [ + "s", + "register", + "--netuid", + "1", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--wallet.path", + alice_wallet_path, + "--subtensor.network", + "local", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + "--no_prompt", + ], + ) + + # Register Bob as neuron to the subnet + bob_exec_command( + RegisterCommand, + [ + "s", + "register", + "--netuid", + "1", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--subtensor.network", + "local", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + "--no_prompt", + ], + ) + + subtensor = bittensor.subtensor(network="ws://localhost:9945") + # assert two neurons are in network + assert len(subtensor.neurons(netuid=1)) == 2 + + # Alice to stake to become to top neuron after the first epoch + alice_exec_command( + StakeCommand, + [ + "stake", + "add", + "--amount", + "10000", + ], + ) + + # register Bob as miner + cmd = " ".join( + [ + f"{sys.executable}", + f'"{template_path}{repo_name}/neurons/miner.py"', + "--no_prompt", + "--netuid", + "1", + "--subtensor.network", + "local", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + "--wallet.path", + bob_wallet_path, + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--logging.trace", + ] + ) + + miner_process = await asyncio.create_subprocess_shell( + cmd, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + + # Function to write output to the log file + async def miner_write_output(stream): + log_file = "miner.log" + with open(log_file, "a") as f: + while True: + line = await stream.readline() + if not line: + break + f.write(line.decode()) + f.flush() + + # Create tasks to read stdout and stderr concurrently + asyncio.create_task(miner_write_output(miner_process.stdout)) + asyncio.create_task(miner_write_output(miner_process.stderr)) + + await asyncio.sleep( + 5 + ) # wait for 5 seconds for the 
metagraph to refresh with latest data + + # register Alice as validator + cmd = " ".join( + [ + f"{sys.executable}", + f'"{template_path}{repo_name}/neurons/validator.py"', + "--no_prompt", + "--netuid", + "1", + "--subtensor.network", + "local", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + "--wallet.path", + alice_wallet_path, + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--logging.trace", + ] + ) + # run validator in the background + + validator_process = await asyncio.create_subprocess_shell( + cmd, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + + # Function to write output to the log file + async def validator_write_output(stream): + log_file = "validator.log" + with open(log_file, "a") as f: + while True: + line = await stream.readline() + if not line: + break + f.write(line.decode()) + f.flush() + + # Create tasks to read stdout and stderr concurrently + asyncio.create_task(validator_write_output(validator_process.stdout)) + asyncio.create_task(validator_write_output(validator_process.stderr)) + + await asyncio.sleep( + 5 + ) # wait for 5 seconds for the metagraph and subtensor to refresh with latest data + + # register validator with root network + alice_exec_command( + RootRegisterCommand, + [ + "root", + "register", + "--netuid", + "1", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + ], + ) + + alice_exec_command( + RootSetBoostCommand, + [ + "root", + "boost", + "--netuid", + "1", + "--increase", + "100", + "--wallet.name", + "default", + "--wallet.hotkey", + "default", + "--subtensor.chain_endpoint", + "ws://localhost:9945", + ], + ) + + # get latest metagraph + metagraph = bittensor.metagraph(netuid=1, network="ws://localhost:9945") + + # get current emissions + bob_neuron = metagraph.neurons[1] + assert bob_neuron.incentive == 0 + assert bob_neuron.consensus == 0 + assert bob_neuron.rank == 0 + assert bob_neuron.trust == 0 + + alice_neuron = metagraph.neurons[0] + assert alice_neuron.validator_permit is False + assert alice_neuron.dividends == 0 + assert alice_neuron.stake.tao == 9999.999999 + assert alice_neuron.validator_trust == 0 + + # wait until 360 blocks pass (subnet tempo) + wait_epoch(360, subtensor) + + # for some reason the weights do not get set through the template. Set weight manually. 
+ alice_wallet = bittensor.wallet() + alice_wallet._hotkey = alice_keypair + subtensor._do_set_weights( + wallet=alice_wallet, + uids=[1], + vals=[65535], + netuid=1, + version_key=0, + wait_for_inclusion=True, + wait_for_finalization=True, + ) + + # wait epoch until weight go into effect + wait_epoch(360, subtensor) + + # refresh metagraph + metagraph = bittensor.metagraph(netuid=1, network="ws://localhost:9945") + + # get current emissions and validate that Alice has gotten tao + bob_neuron = metagraph.neurons[1] + assert bob_neuron.incentive == 1 + assert bob_neuron.consensus == 1 + assert bob_neuron.rank == 1 + assert bob_neuron.trust == 1 + + alice_neuron = metagraph.neurons[0] + assert alice_neuron.validator_permit is True + assert alice_neuron.dividends == 1 + assert alice_neuron.stake.tao == 9999.999999 + assert alice_neuron.validator_trust == 1 + + +def wait_epoch(interval, subtensor): + current_block = subtensor.get_current_block() + next_tempo_block_start = (current_block - (current_block % interval)) + interval + while current_block < next_tempo_block_start: + time.sleep(1) # Wait for 1 second before checking the block number again + current_block = subtensor.get_current_block() + if current_block % 10 == 0: + print( + f"Current Block: {current_block} Next tempo at: {next_tempo_block_start}" + ) + logging.info( + f"Current Block: {current_block} Next tempo at: {next_tempo_block_start}" + ) From 0cad50177521472b536a5ca47126c38062681e0c Mon Sep 17 00:00:00 2001 From: opendansor Date: Tue, 11 Jun 2024 13:51:44 -0700 Subject: [PATCH 105/116] Update with comments --- tests/e2e_tests/multistep/test_axon.py | 13 +++++++++++++ tests/e2e_tests/multistep/test_dendrite.py | 10 ++++++++++ tests/e2e_tests/multistep/test_incentive.py | 19 +++++++++++++++++++ .../weights/test_commit_weights.py | 14 +++++++++++++- 4 files changed, 55 insertions(+), 1 deletion(-) diff --git a/tests/e2e_tests/multistep/test_axon.py b/tests/e2e_tests/multistep/test_axon.py index 770bed17e5..f23fb4da7b 100644 --- a/tests/e2e_tests/multistep/test_axon.py +++ b/tests/e2e_tests/multistep/test_axon.py @@ -15,6 +15,19 @@ repo_name, ) +""" +Test the axon mechanism. + +Verify that: +* axon is registered on network as a miner +* ip +* type +* port + +are set correctly, and that the miner is currently running + +""" + @pytest.mark.asyncio async def test_axon(local_chain): diff --git a/tests/e2e_tests/multistep/test_dendrite.py b/tests/e2e_tests/multistep/test_dendrite.py index 48b27e5bcd..8f7336e1de 100644 --- a/tests/e2e_tests/multistep/test_dendrite.py +++ b/tests/e2e_tests/multistep/test_dendrite.py @@ -21,6 +21,16 @@ logging.basicConfig(level=logging.INFO) +""" +Test the dendrites mechanism. + +Verify that: +* dendrite is registered on network as a validator +* stake successfully +* validator permit is set + +""" + @pytest.mark.asyncio async def test_dendrite(local_chain): diff --git a/tests/e2e_tests/multistep/test_incentive.py b/tests/e2e_tests/multistep/test_incentive.py index 837cb694b7..d4605faa6a 100644 --- a/tests/e2e_tests/multistep/test_incentive.py +++ b/tests/e2e_tests/multistep/test_incentive.py @@ -21,6 +21,25 @@ logging.basicConfig(level=logging.INFO) +""" +Test the incentive mechanism. + +Verify that for the miner: +* trust +* rank +* consensus +* incentive +are updated with proper values after an epoch has passed. + +For the validator verify that: +* validator_permit +* validator_trust +* dividends +* stake +are updated with proper values after an epoch has passed. 
+ +""" + @pytest.mark.asyncio async def test_incentive(local_chain): diff --git a/tests/e2e_tests/subcommands/weights/test_commit_weights.py b/tests/e2e_tests/subcommands/weights/test_commit_weights.py index ad2ecb7b42..039fed3632 100644 --- a/tests/e2e_tests/subcommands/weights/test_commit_weights.py +++ b/tests/e2e_tests/subcommands/weights/test_commit_weights.py @@ -14,6 +14,18 @@ ) from tests.e2e_tests.utils import setup_wallet +""" +Test the Commit/Reveal weights mechanism. + +Verify that: +* Weights are commited +* weights are hashed with salt +--- after an epoch --- +* weights are un-hashed with salt +* weights are properly revealed + +""" + def test_commit_and_reveal_weights(local_chain): # Register root as Alice @@ -184,5 +196,5 @@ def test_commit_and_reveal_weights(local_chain): uids, weights_array ) assert ( - expected_weights[0] == revealed_weights.value[0][1] + expected_weights[0] == revealed_weights.value[0][1] ), f"Incorrect revealed weights. Expected: {expected_weights[0]}, Actual: {revealed_weights.value[0][1]}" From 760e7591ea13ade26100341d611e5c1fecec0292 Mon Sep 17 00:00:00 2001 From: opendansor Date: Tue, 11 Jun 2024 13:52:36 -0700 Subject: [PATCH 106/116] Update with comments --- tests/e2e_tests/subcommands/weights/test_commit_weights.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/e2e_tests/subcommands/weights/test_commit_weights.py b/tests/e2e_tests/subcommands/weights/test_commit_weights.py index 039fed3632..53eec95da4 100644 --- a/tests/e2e_tests/subcommands/weights/test_commit_weights.py +++ b/tests/e2e_tests/subcommands/weights/test_commit_weights.py @@ -196,5 +196,5 @@ def test_commit_and_reveal_weights(local_chain): uids, weights_array ) assert ( - expected_weights[0] == revealed_weights.value[0][1] + expected_weights[0] == revealed_weights.value[0][1] ), f"Incorrect revealed weights. Expected: {expected_weights[0]}, Actual: {revealed_weights.value[0][1]}" From 0c8449fae75fff1191a76d73b5e97027cbe61aed Mon Sep 17 00:00:00 2001 From: Gus Date: Wed, 12 Jun 2024 15:17:48 -0400 Subject: [PATCH 107/116] chore: changelog --- CHANGELOG.md | 202 ++++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 190 insertions(+), 12 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b61cbdff07..4cefdb9339 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,183 @@ # Changelog +## 7.2.0 / 2024-06-12 + +## What's Changed +* less verbose handled synapse exceptions by @mjurbanski-reef in https://github.com/opentensor/bittensor/pull/1928 +* Clean up the imports in commands/stake.py by @thewhaleking in https://github.com/opentensor/bittensor/pull/1951 +* Fix E2E test for Commit/Reveal with Salt flag by @opendansor in https://github.com/opentensor/bittensor/pull/1952 +* `bittensor.chain_data.py` module refactoring. 
by @RomanCh-OT in https://github.com/opentensor/bittensor/pull/1955 +* ci: e2e tests by @orriin in https://github.com/opentensor/bittensor/pull/1915 +* Dependency cleanup by @mjurbanski-reef in https://github.com/opentensor/bittensor/pull/1967 +* replace `black` with `ruff` by @mjurbanski-reef in https://github.com/opentensor/bittensor/pull/1968 +* Fix: IP is packed with port by @dgagn in https://github.com/opentensor/bittensor/pull/1962 +* Fix return of ip version by @opendansor in https://github.com/opentensor/bittensor/pull/1964 +* post-black to ruff migration cleanup by @mjurbanski-reef in https://github.com/opentensor/bittensor/pull/1979 +* Revert Axon IP decoding changes by @camfairchild in https://github.com/opentensor/bittensor/pull/1981 +* A wrapper for presenting extrinsics errors in a human-readable form. by @RomanCh-OT in https://github.com/opentensor/bittensor/pull/1980 +* Feat: Added normalized hyperparams by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/1891 +* deprecate nest_asyncio use by @mjurbanski-reef in https://github.com/opentensor/bittensor/pull/1974 +* Add e2e test for axon by @opendansor in https://github.com/opentensor/bittensor/pull/1984 +* Dendrite E2E test by @opendansor in https://github.com/opentensor/bittensor/pull/1988 +* fix __version_as_int__ for >10 minor/patch release vers (resolves #1982) by @mjurbanski-reef in https://github.com/opentensor/bittensor/pull/1993 +* Test Incentive E2E by @opendansor in https://github.com/opentensor/bittensor/pull/2002 +* Add E2E faucet test by @opendansor in https://github.com/opentensor/bittensor/pull/1987 + +**Full Changelog**: https://github.com/opentensor/bittensor/compare/v7.1.1...v7.2.0 + + +## 7.1.1 / 2024-06-11 + +## What's Changed +* commit_reveal_weights_enabled argument parsing hotfix by @camfairchild in https://github.com/opentensor/bittensor/pull/2003 + +**Full Changelog**: https://github.com/opentensor/bittensor/compare/v7.1.0...v7.1.1 + +## 7.1.0 / 2024-06-05 + +## What's Changed +* Added _do_set_root_weights by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/1838 +* Release/7.0.1 by @gus-opentensor in https://github.com/opentensor/bittensor/pull/1963 + +**Full Changelog**: https://github.com/opentensor/bittensor/compare/v7.0.1...v7.1.0 + +## 7.0.1 / 2024-05-31 + +## What's Changed +* Release/7.0.0 by @gus-opentensor in https://github.com/opentensor/bittensor/pull/1899 +* Fix return of ip version. by @opendansor in https://github.com/opentensor/bittensor/pull/1961 +* Fix trigger use_torch() by @renesweet24 https://github.com/opentensor/bittensor/pull/1960 + +## New Contributors +* @renesweet24 made their first contribution in https://github.com/opentensor/bittensor/pull/1960 + +**Full Changelog**: https://github.com/opentensor/bittensor/compare/v7.0.0...v7.0.1 + +## 7.0.0 / 2024-05-29 + +## What's Changed +* replace torch with numpy by @andreea-popescu-reef in https://github.com/opentensor/bittensor/pull/1777 +* Fix broken link in contrib/RELEASE_GUIDELINES #1821 by @thewhaleking in https://github.com/opentensor/bittensor/pull/1823 +* Tests: Added coverage for set_weights by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/1825 +* Remove irrelevant call to get_delegates method. 
by @RomanCh-OT in https://github.com/opentensor/bittensor/pull/1826 +* Support for string mnemonic thru cli when regenerating coldkeys by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/1815 +* Logging: Added _primary_loggers by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/1797 +* Add in check for minimum stake for unstaking by @thewhaleking in https://github.com/opentensor/bittensor/pull/1832 +* Cache get_decoder_class by @thewhaleking in https://github.com/opentensor/bittensor/pull/1834 +* Warmfix/change decoder cacheing by @thewhaleking in https://github.com/opentensor/bittensor/pull/1842 +* Fix typo in warmfix by @thewhaleking in https://github.com/opentensor/bittensor/pull/1844 +* Add the command btcli root list_delegates_lite to handle the Delegate… by @RomanCh-OT in https://github.com/opentensor/bittensor/pull/1840 +* Change: console.error => console.print by @thewhaleking in https://github.com/opentensor/bittensor/pull/1849 +* Small fix with receiving delegates based on a 4-hour archive block by @RomanCh-OT in https://github.com/opentensor/bittensor/pull/1854 +* Replace torch with numpy by @sepehr-opentensor in https://github.com/opentensor/bittensor/pull/1786 +* Versioning: Enforcement for eth-utils by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/1852 +* Versioning: Dependencies for FastAPI for Apple M's by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/1855 +* Retrieving error types from the metadata of the Substrate palette SubtensorModule for the btcli console (logic) by @RomanCh-OT in https://github.com/opentensor/bittensor/pull/1862 +* Add version check caching, fix version comparison by @olzhasar-reef in https://github.com/opentensor/bittensor/pull/1835 +* Tests: Added coverage for root.py by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/1877 +* Tests: Added coverage for network.py by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/1879 +* Tests: extends coverage for overview cmd part 1 by @gus-opentensor in https://github.com/opentensor/bittensor/pull/1873 +* Tests: Added coverage for Unstaking by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/1878 +* Tests: Added coverage for staking by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/1837 +* Tests: Added coverage for Delegation by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/1874 +* Updated error message and a test typo. by @thewhaleking in https://github.com/opentensor/bittensor/pull/1871 +* fix: deprecated usage of `Balances::transfer` method by @orriin in https://github.com/opentensor/bittensor/pull/1886 +* Fix Type Annotation by @opendansor in https://github.com/opentensor/bittensor/pull/1895 +* Docstrings updates for list delegate lite feature by @rajkaramchedu in https://github.com/opentensor/bittensor/pull/1890 +* Add Pre-commit Checker in scripts. Helps reduce CI calls. by @RomanCh-OT in https://github.com/opentensor/bittensor/pull/1893 +* fix get_coldkey_password_from_environment resolving wrong password by @mjurbanski-reef in https://github.com/opentensor/bittensor/pull/1843 +* Drop python 3.8 support by @mjurbanski-reef in https://github.com/opentensor/bittensor/pull/1892 +* feat: Refactor phase 2 overview cmd & add test cov. 
Adds factories by @gus-opentensor in https://github.com/opentensor/bittensor/pull/1887 +* Add setting delegate take by @gztensor in https://github.com/opentensor/bittensor/pull/1903 +* E2E Test Patterns by @orriin in https://github.com/opentensor/bittensor/pull/1885 +* chore: correct method types by @distributedstatemachine in https://github.com/opentensor/bittensor/pull/1907 +* bittensor.btlogging refactoring by @RomanCh-OT in https://github.com/opentensor/bittensor/pull/1896 +* Part 1 for refactoring bittensor/subtensor.py by @RomanCh-OT in https://github.com/opentensor/bittensor/pull/1911 +* Update: Pydantic V2 by @opendansor in https://github.com/opentensor/bittensor/pull/1889 +* Add back compatibility with torch by @thewhaleking in https://github.com/opentensor/bittensor/pull/1904 +* Release/6.12.2 by @gus-opentensor in https://github.com/opentensor/bittensor/pull/1910 +* Chore: Updated dev requirements by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/1946 + +## New Contributors +* @andreea-popescu-reef made their first contribution in https://github.com/opentensor/bittensor/pull/1777 +* @thewhaleking made their first contribution in https://github.com/opentensor/bittensor/pull/1823 +* @RomanCh-OT made their first contribution in https://github.com/opentensor/bittensor/pull/1826 +* @olzhasar-reef made their first contribution in https://github.com/opentensor/bittensor/pull/1835 +* @orriin made their first contribution in https://github.com/opentensor/bittensor/pull/1886 +* @opendansor made their first contribution in https://github.com/opentensor/bittensor/pull/1895 + +**Full Changelog**: https://github.com/opentensor/bittensor/compare/v6.12.2...v7.0.0 + +## 6.12.2 / 2024-05-20 + +## What's Changed +* Add setting delegate take +* fix: deprecated transfer method usage + +**Full Changelog**: https://github.com/opentensor/bittensor/compare/v6.12.1...54eee604c00ac4f04a31d5d7bc663124731a34d8 + + +## 6.12.1 / 2024-05-17 + +## What's Changed +* Hotfix if the subnet UID is not in the Subnets + + +**Full Changelog**: https://github.com/opentensor/bittensor/compare/v6.12.0...fd2442db8bb8aad55ced2ac3b748b04ebdc73292 + + + +## 6.12.0 / 2024-04-29 + +## What's Changed +* Tests: Axon to_string patch import by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/1785 +* Tests: Extends coverage on Serving extrinsics methods by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/1783 +* Fix: CVE-2024-24762 FastAPI by @gus-opentensor in https://github.com/opentensor/bittensor/pull/1800 +* Fix: CVE-2024-26130 | vulnerability cryptography by @gus-opentensor in https://github.com/opentensor/bittensor/pull/1801 +* fix PR templates by @mjurbanski-reef in https://github.com/opentensor/bittensor/pull/1778 +* Fix: SNYK-PYTHON-CERTIFI-5805047 | Vulnerability Certifi by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/1816 +* Tests: Extends test coverage on Registration methods by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/1814 +* Fix: Wallet overwrite functionality by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/1802 + + +**Full Changelog**: https://github.com/opentensor/bittensor/compare/v6.11.0...v6.12.0 + +## 6.11.0 / 2024-04-11 + +## What's Changed +* Tests: Adds coverage to subtensor help method & determine_chain_endpoint_and_network by @gus-opentensor in https://github.com/opentensor/bittensor/pull/1761 +* [bug fix] Fix import json by @camfairchild in 
https://github.com/opentensor/bittensor/pull/1759 +* Remove context management for substrate in subtensor by @sepehr-opentensor in https://github.com/opentensor/bittensor/pull/1766 +* Tests: Extends coverage on axon methods by @gus-opentensor in https://github.com/opentensor/bittensor/pull/1769 +* Revert nonce implementation fix by @ifrit98 in https://github.com/opentensor/bittensor/pull/1774 +* remove tests from package distribution by @mjurbanski-reef in https://github.com/opentensor/bittensor/pull/1779 +* Tests: Extends test coverage on Senate methods by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/1781 + +## New Contributors +* @mjurbanski-reef made their first contribution in https://github.com/opentensor/bittensor/pull/1779 +* @ibraheem-opentensor made their first contribution in https://github.com/opentensor/bittensor/pull/1781 + +**Full Changelog**: https://github.com/opentensor/bittensor/compare/v6.10.1...v6.11.0 +## 6.10.1 / 2024-04-05 +## What's Changed +* Revert nonce implementation fix #1774: Breaking change needs to telegraphed in next release. + +## 6.10.0 / 2024-03-25 + +## What's Changed +* handle req args by parsing and raising by @ifrit98 in https://github.com/opentensor/bittensor/pull/1733 +* Replace wildcard imports with specific imports by @brueningf in https://github.com/opentensor/bittensor/pull/1724 +* Logging Refactor by @sepehr-opentensor in https://github.com/opentensor/bittensor/pull/1751 +* Update DEBUGGING.md by @e-gons in https://github.com/opentensor/bittensor/pull/1755 +* fix: nonce implementation by @GentikSolm in https://github.com/opentensor/bittensor/pull/1754 + +## New Contributors +* @sepehr-opentensor made their first contribution in https://github.com/opentensor/bittensor/pull/1751 +* @e-gons made their first contribution in https://github.com/opentensor/bittensor/pull/1755 +* @GentikSolm made their first contribution in https://github.com/opentensor/bittensor/pull/1754 + +**Full Changelog**: https://github.com/opentensor/bittensor/compare/v6.9.3...v6.10.0 + ## 6.9.3 / 2024-03-12 ## What's Changed @@ -39,8 +217,8 @@ * Expands type checking to subtensor by @gus-opentensor in https://github.com/opentensor/bittensor/pull/1731 * Feature: Synapse passing type check by @gus-opentensor in https://github.com/opentensor/bittensor/pull/1725 * bump req for security vulnerability in crpytography by @ifrit98 in https://github.com/opentensor/bittensor/pull/1718 -* Fix: proper association with wallet dir and coldkey addr #1739 by @gus-opentensor & @sepehr-opentensor -* Fixed event lookup on new network added #1741 by @shibshib +* Fix: proper association with wallet dir and coldkey addr #1739 by @gus-opentensor & @sepehr-opentensor +* Fixed event lookup on new network added #1741 by @shibshib **Full Changelog**: https://github.com/opentensor/bittensor/compare/v6.8.2...v6.9.0 @@ -72,11 +250,11 @@ * logging off switch by @ifrit98 in https://github.com/opentensor/bittensor/pull/1704 * Extrinsic update by @ifrit98 in https://github.com/opentensor/bittensor/pull/1703 * Bittensor shared request layer by @ifrit98 in https://github.com/opentensor/bittensor/pull/1698 -* Add no_prompt argument to help printout in https://github.com/opentensor/bittensor/pull/1707 -* Adds mypi typechecking to circleci by @gus-opentensor in https://github.com/opentensor/bittensor/pull/1705 +* Add no_prompt argument to help printout in https://github.com/opentensor/bittensor/pull/1707 +* Adds mypi typechecking to circleci by @gus-opentensor in 
https://github.com/opentensor/bittensor/pull/1705 * Remove set weights ttl now that we have a better extrinsic method by @ifrit98 -* Bug fix in overview command for dereg stake with outdated `stake_info` object fields by @ifrit98 in https://github.com/opentensor/bittensor/pull/1712 -* Moves mock wallet creation to temp dir by @gus-opentensor in https://github.com/opentensor/bittensor/pull/1711 +* Bug fix in overview command for dereg stake with outdated `stake_info` object fields by @ifrit98 in https://github.com/opentensor/bittensor/pull/1712 +* Moves mock wallet creation to temp dir by @gus-opentensor in https://github.com/opentensor/bittensor/pull/1711 **Full Changelog**: https://github.com/opentensor/bittensor/compare/v6.7.2...v6.8.0 @@ -639,7 +817,7 @@ - allow set synapse All using flag - add test - use dot get - + - [Feature] Mark registration threads as daemons (https://github.com/opentensor/bittensor/pull/998) - make solver processes daemons @@ -689,9 +867,9 @@ ## 3.4.0 / 2022-10-13 ## What's Changed -* Parameters update by @Eugene-hu #936 -* Bittensor Generate by @unconst #941 -* Prometheus by @unconst #928 +* Parameters update by @Eugene-hu #936 +* Bittensor Generate by @unconst #941 +* Prometheus by @unconst #928 * [Tooling][Release] Adding release script by @eduardogr in https://github.com/opentensor/bittensor/pull/948 @@ -750,7 +928,7 @@ ## 3.3.0 / 2022-08-16 ### CUDA registration -This release adds the ability to complete the registration using a CUDA-capable device. +This release adds the ability to complete the registration using a CUDA-capable device. See https://github.com/opentensor/cubit/releases/tag/v1.0.5 for the required `cubit` v1.0.5 release Also a few bug fixes for the CLI @@ -783,5 +961,5 @@ This release refactors the registration code for CPU registration to improve sol ### Synapse update -## +## From 6d39b1297134d5df8cbc1278fa32faf558fdd655 Mon Sep 17 00:00:00 2001 From: Gus Date: Wed, 12 Jun 2024 16:15:38 -0400 Subject: [PATCH 108/116] chore: bump version --- VERSION | 2 +- bittensor/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/VERSION b/VERSION index 66ce77b7ea..0ee843cc60 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -7.0.0 +7.2.0 diff --git a/bittensor/__init__.py b/bittensor/__init__.py index f9200a271c..036b98339e 100644 --- a/bittensor/__init__.py +++ b/bittensor/__init__.py @@ -40,7 +40,7 @@ # Bittensor code and protocol version. -__version__ = "7.0.0" +__version__ = "7.2.0" _version_split = __version__.split(".") __version_info__ = tuple(int(part) for part in _version_split) From 5db7b4c5ac36503d28d45cbed74735f57b59b9a1 Mon Sep 17 00:00:00 2001 From: Gus Date: Wed, 12 Jun 2024 16:28:34 -0400 Subject: [PATCH 109/116] chore: ruff --- bittensor/commands/delegates.py | 2 ++ bittensor/commands/weights.py | 1 - 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/bittensor/commands/delegates.py b/bittensor/commands/delegates.py index b904ace773..4d03b289e4 100644 --- a/bittensor/commands/delegates.py +++ b/bittensor/commands/delegates.py @@ -44,6 +44,7 @@ def _get_coldkey_wallets_for_path(path: str) -> List["bittensor.wallet"]: console = bittensor.__console__ + def show_delegates_lite( delegates_lite: List["bittensor.DelegateInfoLite"], width: Optional[int] = None ): @@ -149,6 +150,7 @@ def show_delegates_lite( ) bittensor.__console__.print(table) + # Uses rich console to pretty print a table of delegates. 
def show_delegates( delegates: List["bittensor.DelegateInfo"], diff --git a/bittensor/commands/weights.py b/bittensor/commands/weights.py index 19989c94f3..ac4d9dfc36 100644 --- a/bittensor/commands/weights.py +++ b/bittensor/commands/weights.py @@ -19,7 +19,6 @@ """Module that encapsulates the CommitWeightCommand and the RevealWeightCommand. Used to commit and reveal weights for a specific subnet on the Bittensor Network.""" - import argparse import os import re From 365e4d7de5ad473f882e4f14578931c40a66b3e8 Mon Sep 17 00:00:00 2001 From: Gus Date: Wed, 12 Jun 2024 16:45:17 -0400 Subject: [PATCH 110/116] fix: import --- bittensor/subtensor.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index cfb69a6072..b065fc97f9 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -43,6 +43,7 @@ from bittensor.btlogging import logging as _logger from bittensor.utils import torch, weight_utils, format_error_message from .chain_data import ( + DelegateInfoLite, NeuronInfo, DelegateInfo, PrometheusInfo, @@ -3050,7 +3051,9 @@ def query_runtime_api( """ call_definition = bittensor.__type_registry__["runtime_api"][runtime_api][ # type: ignore "methods" # type: ignore - ][method] # type: ignore + ][ + method + ] # type: ignore json_result = self.state_call( method=f"{runtime_api}_{method}", @@ -4269,9 +4272,9 @@ def make_substrate_call_with_retry(encoded_hotkey_: List[int]): return self.substrate.rpc_request( method="delegateInfo_getDelegate", # custom rpc method - params=[encoded_hotkey_, block_hash] - if block_hash - else [encoded_hotkey_], + params=( + [encoded_hotkey_, block_hash] if block_hash else [encoded_hotkey_] + ), ) encoded_hotkey = ss58_to_vec_u8(hotkey_ss58) @@ -4373,9 +4376,9 @@ def make_substrate_call_with_retry(encoded_coldkey_: List[int]): return self.substrate.rpc_request( method="delegateInfo_getDelegated", - params=[block_hash, encoded_coldkey_] - if block_hash - else [encoded_coldkey_], + params=( + [block_hash, encoded_coldkey_] if block_hash else [encoded_coldkey_] + ), ) encoded_coldkey = ss58_to_vec_u8(coldkey_ss58) From 6e27dc6d53fdac6ec7406c7c808fc2a7c5e09c27 Mon Sep 17 00:00:00 2001 From: Gus Date: Wed, 12 Jun 2024 16:51:28 -0400 Subject: [PATCH 111/116] fix: adds _do_set_root_weights --- bittensor/subtensor.py | 67 +++++++++++++++++++++++++++++++++ bittensor/utils/weight_utils.py | 1 + 2 files changed, 68 insertions(+) diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index b065fc97f9..9c7b2526da 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -2659,6 +2659,73 @@ def root_set_weights( prompt=prompt, ) + def _do_set_root_weights( + self, + wallet: "bittensor.wallet", + uids: List[int], + vals: List[int], + netuid: int = 0, + version_key: int = bittensor.__version_as_int__, + wait_for_inclusion: bool = False, + wait_for_finalization: bool = False, + ) -> Tuple[bool, Optional[str]]: # (success, error_message) + """ + Internal method to send a transaction to the Bittensor blockchain, setting weights + for specified neurons on root. This method constructs and submits the transaction, handling + retries and blockchain communication. + + Args: + wallet (bittensor.wallet): The wallet associated with the neuron setting the weights. + uids (List[int]): List of neuron UIDs for which weights are being set. + vals (List[int]): List of weight values corresponding to each UID. + netuid (int): Unique identifier for the network. 
+ version_key (int, optional): Version key for compatibility with the network. + wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. + wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. + + Returns: + Tuple[bool, Optional[str]]: A tuple containing a success flag and an optional error message. + + This method is vital for the dynamic weighting mechanism in Bittensor, where neurons adjust their + trust in other neurons based on observed performance and contributions on the root network. + """ + + @retry(delay=2, tries=3, backoff=2, max_delay=4, logger=_logger) + def make_substrate_call_with_retry(): + call = self.substrate.compose_call( + call_module="SubtensorModule", + call_function="set_root_weights", + call_params={ + "dests": uids, + "weights": vals, + "netuid": netuid, + "version_key": version_key, + "hotkey": wallet.hotkey.ss58_address, + }, + ) + # Period dictates how long the extrinsic will stay as part of waiting pool + extrinsic = self.substrate.create_signed_extrinsic( + call=call, + keypair=wallet.coldkey, + era={"period": 5}, + ) + response = self.substrate.submit_extrinsic( + extrinsic, + wait_for_inclusion=wait_for_inclusion, + wait_for_finalization=wait_for_finalization, + ) + # We only wait here if we expect finalization. + if not wait_for_finalization and not wait_for_inclusion: + return True, "Not waiting for finalziation or inclusion." + + response.process_events() + if response.is_success: + return True, "Successfully set weights." + else: + return False, response.error_message + + return make_substrate_call_with_retry() + ################## # Registry Calls # ################## diff --git a/bittensor/utils/weight_utils.py b/bittensor/utils/weight_utils.py index 89dee57ee8..de26d98c02 100644 --- a/bittensor/utils/weight_utils.py +++ b/bittensor/utils/weight_utils.py @@ -20,6 +20,7 @@ # DEALINGS IN THE SOFTWARE. import hashlib +import logging from typing import Tuple, List, Union import numpy as np From 913c04ed8d71f6d339421347f54240547b3fa120 Mon Sep 17 00:00:00 2001 From: Gus Date: Wed, 12 Jun 2024 17:01:49 -0400 Subject: [PATCH 112/116] chore: ruff --- bittensor/subtensor.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 9c7b2526da..0fffa1cc7e 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -3118,9 +3118,7 @@ def query_runtime_api( """ call_definition = bittensor.__type_registry__["runtime_api"][runtime_api][ # type: ignore "methods" # type: ignore - ][ - method - ] # type: ignore + ][method] # type: ignore json_result = self.state_call( method=f"{runtime_api}_{method}", From ae1a5d040cb08b00ff4156f8211e39289d0a010f Mon Sep 17 00:00:00 2001 From: ibraheem-opentensor Date: Wed, 12 Jun 2024 14:34:54 -0700 Subject: [PATCH 113/116] Added missing nonce implementation --- bittensor/axon.py | 1 - bittensor/dendrite.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/bittensor/axon.py b/bittensor/axon.py index 948d65e31f..ca06335307 100644 --- a/bittensor/axon.py +++ b/bittensor/axon.py @@ -896,7 +896,6 @@ async def default_verify(self, synapse: bittensor.Synapse): # If we don't have a nonce stored, ensure that the nonce falls within # a reasonable delta. 
- # Updated nonce using NTP implementated at v7.2 if ( synapse.dendrite.version is not None and synapse.dendrite.version >= V_7_2_0 diff --git a/bittensor/dendrite.py b/bittensor/dendrite.py index 476fee8645..dca513e0b2 100644 --- a/bittensor/dendrite.py +++ b/bittensor/dendrite.py @@ -657,7 +657,7 @@ def preprocess_synapse_for_request( synapse.dendrite = bittensor.TerminalInfo( ip=self.external_ip, version=bittensor.__version_as_int__, - nonce=time.monotonic_ns(), + nonce=time.time_ns(), uuid=self.uuid, hotkey=self.keypair.ss58_address, ) From 60bd3673e55e1dfd2f3f0dab0ce12e9fe33d31c8 Mon Sep 17 00:00:00 2001 From: Gus Date: Wed, 12 Jun 2024 18:48:57 -0400 Subject: [PATCH 114/116] chore: update changelog --- CHANGELOG.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 39052b4977..00ad50947b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,8 +10,6 @@ * ci: e2e tests by @orriin in https://github.com/opentensor/bittensor/pull/1915 * Dependency cleanup by @mjurbanski-reef in https://github.com/opentensor/bittensor/pull/1967 * replace `black` with `ruff` by @mjurbanski-reef in https://github.com/opentensor/bittensor/pull/1968 -* Fix: IP is packed with port by @dgagn in https://github.com/opentensor/bittensor/pull/1962 -* Fix return of ip version by @opendansor in https://github.com/opentensor/bittensor/pull/1964 * post-black to ruff migration cleanup by @mjurbanski-reef in https://github.com/opentensor/bittensor/pull/1979 * Revert Axon IP decoding changes by @camfairchild in https://github.com/opentensor/bittensor/pull/1981 * A wrapper for presenting extrinsics errors in a human-readable form. by @RomanCh-OT in https://github.com/opentensor/bittensor/pull/1980 From 51afab89f853764df6d72601783fbd57ce60b0c9 Mon Sep 17 00:00:00 2001 From: Gus Date: Wed, 12 Jun 2024 19:29:15 -0400 Subject: [PATCH 115/116] chore: update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 00ad50947b..7cb2964f6d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,7 @@ * fix __version_as_int__ for >10 minor/patch release vers (resolves #1982) by @mjurbanski-reef in https://github.com/opentensor/bittensor/pull/1993 * Test Incentive E2E by @opendansor in https://github.com/opentensor/bittensor/pull/2002 * Add E2E faucet test by @opendansor in https://github.com/opentensor/bittensor/pull/1987 +* Allow unstake below network min by @camfairchild in https://github.com/opentensor/bittensor/pull/2016 **Full Changelog**: https://github.com/opentensor/bittensor/compare/v7.1.1...v7.2.0 From ea83f43f11527e2826a2d653c43e3662cb49a51e Mon Sep 17 00:00:00 2001 From: gus-opentensor <158077861+gus-opentensor@users.noreply.github.com> Date: Wed, 12 Jun 2024 19:27:00 -0400 Subject: [PATCH 116/116] Merge pull request #2016 from opentensor/fix/allow-unstake-below-network-min [Fix] Allow unstake below network min --- bittensor/extrinsics/registration.py | 4 + bittensor/extrinsics/staking.py | 50 +++- bittensor/extrinsics/unstaking.py | 41 ++- tests/e2e_tests/multistep/test_dendrite.py | 2 +- tests/e2e_tests/multistep/test_incentive.py | 4 +- .../weights/test_commit_weights.py | 2 +- tests/integration_tests/test_cli.py | 252 +++++++++++++++--- tests/unit_tests/extrinsics/test_staking.py | 18 +- tests/unit_tests/extrinsics/test_unstaking.py | 20 +- 9 files changed, 319 insertions(+), 74 deletions(-) diff --git a/bittensor/extrinsics/registration.py b/bittensor/extrinsics/registration.py index 887dae8345..e82add8383 100644 --- 
a/bittensor/extrinsics/registration.py +++ b/bittensor/extrinsics/registration.py @@ -462,6 +462,8 @@ def run_faucet_extrinsic( if attempts == max_allowed_attempts: raise MaxAttemptsException attempts += 1 + # Wait a bit before trying again + time.sleep(1) # Successful registration else: @@ -473,6 +475,8 @@ def run_faucet_extrinsic( if successes == 3: raise MaxSuccessException + + attempts = 1 # Reset attempts on success successes += 1 except KeyboardInterrupt: diff --git a/bittensor/extrinsics/staking.py b/bittensor/extrinsics/staking.py index 44a509eae8..298bb1f0d3 100644 --- a/bittensor/extrinsics/staking.py +++ b/bittensor/extrinsics/staking.py @@ -19,10 +19,34 @@ import bittensor from rich.prompt import Confirm from time import sleep -from typing import List, Union, Optional +from typing import List, Union, Optional, Tuple from bittensor.utils.balance import Balance +def _check_threshold_amount( + subtensor: "bittensor.subtensor", stake_balance: Balance +) -> Tuple[bool, Balance]: + """ + Checks if the new stake balance will be above the minimum required stake threshold. + + Args: + stake_balance (Balance): + the balance to check for threshold limits. + + Returns: + success, threshold (bool, Balance): + ``true`` if the staking balance is above the threshold, or ``false`` if the + staking balance is below the threshold. + The threshold balance required to stake. + """ + min_req_stake: Balance = subtensor.get_minimum_required_stake() + + if min_req_stake > stake_balance: + return False, min_req_stake + else: + return True, min_req_stake + + def add_stake_extrinsic( subtensor: "bittensor.subtensor", wallet: "bittensor.wallet", @@ -91,6 +115,9 @@ def add_stake_extrinsic( coldkey_ss58=wallet.coldkeypub.ss58_address, hotkey_ss58=hotkey_ss58 ) + # Grab the existential deposit. + existential_deposit = subtensor.get_existential_deposit() + # Convert to bittensor.Balance if amount is None: # Stake it all. @@ -100,9 +127,10 @@ def add_stake_extrinsic( else: staking_balance = amount - # Remove existential balance to keep key alive. - if staking_balance > bittensor.Balance.from_rao(1000): - staking_balance = staking_balance - bittensor.Balance.from_rao(1000) + # Leave existential balance to keep key alive. + if staking_balance > old_balance - existential_deposit: + # If we are staking all, we need to leave at least the existential deposit. + staking_balance = old_balance - existential_deposit else: staking_balance = staking_balance @@ -115,6 +143,18 @@ def add_stake_extrinsic( ) return False + # If nominating, we need to check if the new stake balance will be above the minimum required stake threshold. + if not own_hotkey: + new_stake_balance = old_stake + staking_balance + is_above_threshold, threshold = _check_threshold_amount( + subtensor, new_stake_balance + ) + if not is_above_threshold: + bittensor.__console__.print( + f":cross_mark: [red]New stake balance of {new_stake_balance} is below the minimum required nomination stake threshold {threshold}.[/red]" + ) + return False + # Ask before moving on. 
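Stripped of the Balance bookkeeping, the staking path above enforces two limits: never spend the existential deposit, and never leave a nomination below the network's minimum stake. A simplified sketch in plain TAO floats (the function name, error handling, and numbers are illustrative, not the extrinsic's real interface):

def plan_stake_amount(
    balance: float,
    old_stake: float,
    requested: float,
    existential_deposit: float,
    min_required_stake: float,
    own_hotkey: bool,
) -> float:
    # Cap the stake so the coldkey keeps at least the existential deposit.
    staking = min(requested, balance - existential_deposit)
    if staking <= 0:
        raise ValueError("not enough balance to stake")
    # Nominating someone else's hotkey must leave the resulting stake at or
    # above the network minimum; staking to your own hotkey is exempt.
    if not own_hotkey and (old_stake + staking) < min_required_stake:
        raise ValueError("resulting nomination stake below the network minimum")
    return staking

With illustrative numbers, plan_stake_amount(1.0, 0.0, 0.05, 0.0001, 0.1, own_hotkey=False) raises, which is roughly the rejection path the new nomination-threshold check introduces.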
if prompt: if not own_hotkey: @@ -167,7 +207,7 @@ def add_stake_extrinsic( block = subtensor.get_current_block() new_stake = subtensor.get_stake_for_coldkey_and_hotkey( coldkey_ss58=wallet.coldkeypub.ss58_address, - hotkey_ss58=wallet.hotkey.ss58_address, + hotkey_ss58=hotkey_ss58, block=block, ) # Get current stake diff --git a/bittensor/extrinsics/unstaking.py b/bittensor/extrinsics/unstaking.py index cf47b07928..105bb145b9 100644 --- a/bittensor/extrinsics/unstaking.py +++ b/bittensor/extrinsics/unstaking.py @@ -72,13 +72,13 @@ def __do_remove_stake_single( def check_threshold_amount( - subtensor: "bittensor.subtensor", unstaking_balance: Balance + subtensor: "bittensor.subtensor", stake_balance: Balance ) -> bool: """ - Checks if the unstaking amount is above the threshold or 0 + Checks if the remaining stake balance is above the minimum required stake threshold. Args: - unstaking_balance (Balance): + stake_balance (Balance): the balance to check for threshold limits. Returns: @@ -88,9 +88,9 @@ def check_threshold_amount( """ min_req_stake: Balance = subtensor.get_minimum_required_stake() - if min_req_stake > unstaking_balance > 0: + if min_req_stake > stake_balance > 0: bittensor.__console__.print( - f":cross_mark: [red]Unstaking balance of {unstaking_balance} less than minimum of {min_req_stake} TAO[/red]" + f":cross_mark: [yellow]Remaining stake balance of {stake_balance} less than minimum of {min_req_stake} TAO[/yellow]" ) return False else: @@ -141,6 +141,9 @@ def unstake_extrinsic( coldkey_ss58=wallet.coldkeypub.ss58_address, hotkey_ss58=hotkey_ss58 ) + hotkey_owner = subtensor.get_hotkey_owner(hotkey_ss58) + own_hotkey: bool = wallet.coldkeypub.ss58_address == hotkey_owner + # Convert to bittensor.Balance if amount is None: # Unstake it all. @@ -160,10 +163,14 @@ def unstake_extrinsic( ) return False - if not check_threshold_amount( - subtensor=subtensor, unstaking_balance=unstaking_balance + # If nomination stake, check threshold. + if not own_hotkey and not check_threshold_amount( + subtensor=subtensor, stake_balance=(stake_on_uid - unstaking_balance) ): - return False + bittensor.__console__.print( + f":warning: [yellow]This action will unstake the entire staked balance![/yellow]" + ) + unstaking_balance = stake_on_uid # Ask before moving on. if prompt: @@ -300,6 +307,7 @@ def unstake_multiple_extrinsic( wallet.coldkey old_stakes = [] + own_hotkeys = [] with bittensor.__console__.status( ":satellite: Syncing with chain: [white]{}[/white] ...".format( subtensor.network @@ -313,9 +321,12 @@ def unstake_multiple_extrinsic( ) # Get stake on hotkey. old_stakes.append(old_stake) # None if not registered. + hotkey_owner = subtensor.get_hotkey_owner(hotkey_ss58) + own_hotkeys.append(wallet.coldkeypub.ss58_address == hotkey_owner) + successful_unstakes = 0 - for idx, (hotkey_ss58, amount, old_stake) in enumerate( - zip(hotkey_ss58s, amounts, old_stakes) + for idx, (hotkey_ss58, amount, old_stake, own_hotkey) in enumerate( + zip(hotkey_ss58s, amounts, old_stakes, own_hotkeys) ): # Covert to bittensor.Balance if amount is None: @@ -336,10 +347,14 @@ def unstake_multiple_extrinsic( ) continue - if not check_threshold_amount( - subtensor=subtensor, unstaking_balance=unstaking_balance + # If nomination stake, check threshold. 
+ if not own_hotkey and not check_threshold_amount( + subtensor=subtensor, stake_balance=(stake_on_uid - unstaking_balance) ): - return False + bittensor.__console__.print( + f":warning: [yellow]This action will unstake the entire staked balance![/yellow]" + ) + unstaking_balance = stake_on_uid # Ask before moving on. if prompt: diff --git a/tests/e2e_tests/multistep/test_dendrite.py b/tests/e2e_tests/multistep/test_dendrite.py index 8f7336e1de..6abde7464d 100644 --- a/tests/e2e_tests/multistep/test_dendrite.py +++ b/tests/e2e_tests/multistep/test_dendrite.py @@ -90,7 +90,7 @@ async def test_dendrite(local_chain): metagraph = bittensor.metagraph(netuid=1, network="ws://localhost:9945") neuron = metagraph.neurons[0] # assert stake is 10000 - assert neuron.stake.tao == 9999.999999 + assert neuron.stake.tao == 10_000.0 # assert neuron is not validator assert neuron.active is True diff --git a/tests/e2e_tests/multistep/test_incentive.py b/tests/e2e_tests/multistep/test_incentive.py index d4605faa6a..ea5809dd7f 100644 --- a/tests/e2e_tests/multistep/test_incentive.py +++ b/tests/e2e_tests/multistep/test_incentive.py @@ -252,7 +252,7 @@ async def validator_write_output(stream): alice_neuron = metagraph.neurons[0] assert alice_neuron.validator_permit is False assert alice_neuron.dividends == 0 - assert alice_neuron.stake.tao == 9999.999999 + assert alice_neuron.stake.tao == 10_000.0 assert alice_neuron.validator_trust == 0 # wait until 360 blocks pass (subnet tempo) @@ -287,7 +287,7 @@ async def validator_write_output(stream): alice_neuron = metagraph.neurons[0] assert alice_neuron.validator_permit is True assert alice_neuron.dividends == 1 - assert alice_neuron.stake.tao == 9999.999999 + assert alice_neuron.stake.tao == 10_000.0 assert alice_neuron.validator_trust == 1 diff --git a/tests/e2e_tests/subcommands/weights/test_commit_weights.py b/tests/e2e_tests/subcommands/weights/test_commit_weights.py index 368775204f..4c719b0ebd 100644 --- a/tests/e2e_tests/subcommands/weights/test_commit_weights.py +++ b/tests/e2e_tests/subcommands/weights/test_commit_weights.py @@ -63,7 +63,7 @@ def test_commit_and_reveal_weights(local_chain): "--wallet.path", "/tmp/btcli-wallet2", "--amount", - "999998998", + "100000", ], ) diff --git a/tests/integration_tests/test_cli.py b/tests/integration_tests/test_cli.py index aa019c4178..6fe1acf3bc 100644 --- a/tests/integration_tests/test_cli.py +++ b/tests/integration_tests/test_cli.py @@ -782,72 +782,123 @@ def test_unstake_with_thresholds(self, _): config.no_prompt = True # as the minimum required stake may change, this method allows us to dynamically # update the amount in the mock without updating the tests - config.amount = Balance.from_rao(_subtensor_mock.min_required_stake() - 1) - config.wallet.name = "fake_wallet" - config.hotkeys = ["hk0", "hk1", "hk2"] + min_stake: Balance = _subtensor_mock.get_minimum_required_stake() + # Must be a float + config.amount = min_stake.tao # Unstake below the minimum required stake + wallet_names = ["w0", "w1", "w2"] config.all_hotkeys = False # Notice no max_stake specified mock_stakes: Dict[str, Balance] = { - "hk0": Balance.from_float(10.0), - "hk1": Balance.from_float(11.1), - "hk2": Balance.from_float(12.2), + "w0": 2 * min_stake - 1, # remaining stake will be below the threshold + "w1": 2 * min_stake - 2, + "w2": 2 * min_stake - 5, } - mock_coldkey_kp = _get_mock_keypair(0, self.id()) - mock_wallets = [ SimpleNamespace( - name=config.wallet.name, - coldkey=mock_coldkey_kp, - coldkeypub=mock_coldkey_kp, - hotkey_str=hk, - 
hotkey=_get_mock_keypair(idx + 100, self.id()), + name=wallet_name, + coldkey=_get_mock_keypair(idx, self.id()), + coldkeypub=_get_mock_keypair(idx, self.id()), + hotkey_str="hk{}".format(idx), # doesn't matter + hotkey=_get_mock_keypair(idx + 100, self.id()), # doesn't matter ) - for idx, hk in enumerate(config.hotkeys) + for idx, wallet_name in enumerate(wallet_names) ] - # Register mock wallets and give them stakes + delegate_hotkey = mock_wallets[0].hotkey.ss58_address - for wallet in mock_wallets: - _ = _subtensor_mock.force_register_neuron( - netuid=1, - hotkey=wallet.hotkey.ss58_address, - coldkey=wallet.coldkey.ss58_address, - stake=mock_stakes[wallet.hotkey_str].rao, - ) + # Register mock neuron, only for w0 + _ = _subtensor_mock.force_register_neuron( + netuid=1, + hotkey=delegate_hotkey, + coldkey=mock_wallets[0].coldkey.ss58_address, + stake=mock_stakes["w0"], + ) - cli = bittensor.cli(config) + # Become a delegate + _ = _subtensor_mock.nominate( + wallet=mock_wallets[0], + ) + + # Stake to the delegate with the other coldkeys + for wallet in mock_wallets[1:]: + # Give balance + _ = _subtensor_mock.force_set_balance( + ss58_address=wallet.coldkeypub.ss58_address, + balance=( + mock_stakes[wallet.name] + _subtensor_mock.get_existential_deposit() + ).tao + + 1.0, + ) + _ = _subtensor_mock.add_stake( + wallet=wallet, + hotkey_ss58=delegate_hotkey, + amount=mock_stakes[wallet.name], + ) def mock_get_wallet(*args, **kwargs): - if kwargs.get("hotkey"): + if kwargs.get("config") and kwargs["config"].get("wallet"): for wallet in mock_wallets: - if wallet.hotkey_str == kwargs.get("hotkey"): + if wallet.name == kwargs["config"].wallet.name: return wallet - else: - return mock_wallets[0] with patch("bittensor.wallet") as mock_create_wallet: mock_create_wallet.side_effect = mock_get_wallet - # Check stakes before unstaking for wallet in mock_wallets: + # Check stakes before unstaking stake = _subtensor_mock.get_stake_for_coldkey_and_hotkey( - hotkey_ss58=wallet.hotkey.ss58_address, + hotkey_ss58=delegate_hotkey, coldkey_ss58=wallet.coldkey.ss58_address, ) - self.assertEqual(stake.rao, mock_stakes[wallet.hotkey_str].rao) + self.assertEqual(stake.rao, mock_stakes[wallet.name].rao) - cli.run() + config.wallet.name = wallet.name + config.hotkey_ss58address = delegate_hotkey # Single unstake - # Check stakes after unstaking - for wallet in mock_wallets: - stake = _subtensor_mock.get_stake_for_coldkey_and_hotkey( - hotkey_ss58=wallet.hotkey.ss58_address, - coldkey_ss58=wallet.coldkey.ss58_address, - ) - # because the amount is less than the threshold, none of these should unstake - self.assertEqual(stake.tao, mock_stakes[wallet.hotkey_str].tao) + cli = bittensor.cli(config) + with patch.object(_subtensor_mock, "_do_unstake") as mock_unstake: + with patch( + "bittensor.__console__.print" + ) as mock_print: # Catch console print + cli.run() + + # Filter for console print calls + console_prints = [ + call[0][0] for call in mock_print.call_args_list + ] + minimum_print = filter( + lambda x: "less than minimum of" in x, console_prints + ) + + unstake_calls = mock_unstake.call_args_list + self.assertEqual(len(unstake_calls), 1) # Only one unstake call + + _, kwargs = unstake_calls[0] + # Verify delegate was unstaked + self.assertEqual(kwargs["hotkey_ss58"], delegate_hotkey) + self.assertEqual(kwargs["wallet"].name, wallet.name) + + if wallet.name == "w0": + # This wallet owns the delegate + # Should unstake specified amount + self.assertEqual( + kwargs["amount"], bittensor.Balance(config.amount) + ) + 
# No warning for w0 + self.assertRaises( + StopIteration, next, minimum_print + ) # No warning for w0 + else: + # Should unstake *all* the stake + staked = mock_stakes[wallet.name] + self.assertEqual(kwargs["amount"], staked) + + # Check warning was printed + _ = next( + minimum_print + ) # Doesn't raise, so the warning was printed def test_unstake_all(self, _): config = self.config @@ -1671,6 +1722,129 @@ def mock_get_wallet(*args, **kwargs): ) self.assertAlmostEqual(balance.tao, mock_balance.tao, places=4) + def test_stake_with_thresholds(self, _): + config = self.config + config.command = "stake" + config.subcommand = "add" + config.no_prompt = True + + min_stake: Balance = _subtensor_mock.get_minimum_required_stake() + # Must be a float + wallet_names = ["w0", "w1", "w2"] + config.all_hotkeys = False + # Notice no max_stake specified + + mock_stakes: Dict[str, Balance] = { + "w0": min_stake - 1, # new stake will be below the threshold + "w1": min_stake - 2, + "w2": min_stake - 5, + } + + mock_wallets = [ + SimpleNamespace( + name=wallet_name, + coldkey=_get_mock_keypair(idx, self.id()), + coldkeypub=_get_mock_keypair(idx, self.id()), + hotkey_str="hk{}".format(idx), # doesn't matter + hotkey=_get_mock_keypair(idx + 100, self.id()), # doesn't matter + ) + for idx, wallet_name in enumerate(wallet_names) + ] + + delegate_hotkey = mock_wallets[0].hotkey.ss58_address + + # Register mock neuron, only for w0 + _ = _subtensor_mock.force_register_neuron( + netuid=1, + hotkey=delegate_hotkey, + coldkey=mock_wallets[0].coldkey.ss58_address, + balance=(mock_stakes["w0"] + _subtensor_mock.get_existential_deposit()).tao + + 1.0, + ) # No stake, but enough balance + + # Become a delegate + _ = _subtensor_mock.nominate( + wallet=mock_wallets[0], + ) + + # Give enough balance + for wallet in mock_wallets[1:]: + # Give balance + _ = _subtensor_mock.force_set_balance( + ss58_address=wallet.coldkeypub.ss58_address, + balance=( + mock_stakes[wallet.name] + _subtensor_mock.get_existential_deposit() + ).tao + + 1.0, + ) + + def mock_get_wallet(*args, **kwargs): + if kwargs.get("config") and kwargs["config"].get("wallet"): + for wallet in mock_wallets: + if wallet.name == kwargs["config"].wallet.name: + return wallet + + with patch("bittensor.wallet") as mock_create_wallet: + mock_create_wallet.side_effect = mock_get_wallet + + for wallet in mock_wallets: + # Check balances and stakes before staking + stake = _subtensor_mock.get_stake_for_coldkey_and_hotkey( + hotkey_ss58=delegate_hotkey, + coldkey_ss58=wallet.coldkey.ss58_address, + ) + self.assertEqual(stake.rao, 0) # No stake + + balance = _subtensor_mock.get_balance( + address=wallet.coldkeypub.ss58_address + ) + self.assertGreaterEqual( + balance, mock_stakes[wallet.name] + ) # Enough balance + + config.wallet.name = wallet.name + config.wallet.hotkey = delegate_hotkey # Single stake + config.amount = mock_stakes[ + wallet.name + ].tao # Stake an amount below the threshold + + cli = bittensor.cli(config) + with patch.object(_subtensor_mock, "_do_stake") as mock_stake: + with patch( + "bittensor.__console__.print" + ) as mock_print: # Catch console print + cli.run() + + # Filter for console print calls + console_prints = [ + call[0][0] for call in mock_print.call_args_list + ] + minimum_print = filter( + lambda x: "below the minimum required" in x, console_prints + ) + + if wallet.name == "w0": + # This wallet owns the delegate + stake_calls = mock_stake.call_args_list + # Can stake below the threshold + self.assertEqual(len(stake_calls), 1) + + _, kwargs 
= stake_calls[0] + + # Should stake specified amount + self.assertEqual( + kwargs["amount"], bittensor.Balance(config.amount) + ) + # No error for w0 + self.assertRaises( + StopIteration, next, minimum_print + ) # No warning for w0 + else: + # Should not call stake + self.assertEqual(len(mock_stake.call_args_list), 0) + # Should print error + self.assertIsNotNone(next(minimum_print)) + def test_nominate(self, _): config = self.config config.command = "root" diff --git a/tests/unit_tests/extrinsics/test_staking.py b/tests/unit_tests/extrinsics/test_staking.py index 288e065f78..c3b888520b 100644 --- a/tests/unit_tests/extrinsics/test_staking.py +++ b/tests/unit_tests/extrinsics/test_staking.py @@ -114,9 +114,6 @@ def test_add_stake_extrinsic( else amount ) - if staking_balance > bittensor.Balance.from_rao(1000): - staking_balance = staking_balance - bittensor.Balance.from_rao(1000) - with patch.object( mock_subtensor, "_do_stake", return_value=expected_success ) as mock_add_stake, patch.object( @@ -135,7 +132,20 @@ def test_add_stake_extrinsic( mock_subtensor, "is_hotkey_delegate", return_value=hotkey_delegate ), patch.object(mock_subtensor, "get_delegate_take", return_value=0.01), patch( "rich.prompt.Confirm.ask", return_value=user_accepts - ) as mock_confirm: + ) as mock_confirm, patch.object( + mock_subtensor, + "get_minimum_required_stake", + return_value=bittensor.Balance.from_tao(0.01), + ), patch.object( + mock_subtensor, + "get_existential_deposit", + return_value=bittensor.Balance.from_rao(100_000), + ): + mock_balance = mock_subtensor.get_balance() + existential_deposit = mock_subtensor.get_existential_deposit() + if staking_balance > mock_balance - existential_deposit: + staking_balance = mock_balance - existential_deposit + # Act if not hotkey_owner and not hotkey_delegate: with pytest.raises(exception): diff --git a/tests/unit_tests/extrinsics/test_unstaking.py b/tests/unit_tests/extrinsics/test_unstaking.py index 6ad0a977e7..0fa6ba84c4 100644 --- a/tests/unit_tests/extrinsics/test_unstaking.py +++ b/tests/unit_tests/extrinsics/test_unstaking.py @@ -39,8 +39,8 @@ def mock_get_minimum_required_stake(): ("5FHneW46...", 10.0, True, True, True, False, False, False), # Not enough stake to unstake ("5FHneW46...", 1000.0, True, True, False, None, False, False), - # Unsuccessful - unstake threshold not reached - (None, 0.01, True, True, False, None, False, False), + # Successful - unstake threshold not reached + (None, 0.01, True, True, False, None, True, True), # Successful unstaking all (None, None, False, False, False, None, True, True), # Failure - unstaking failed @@ -51,7 +51,7 @@ def mock_get_minimum_required_stake(): "successful-with-prompt", "failure-prompt-declined", "failure-not-enough-stake", - "failure-threshold-not-reached", + "success-threshold-not-reached", "success-unstake-all", "failure-unstake-failed", ], @@ -166,18 +166,20 @@ def test_unstake_extrinsic( None, None, ), - # Unsuccessful unstake - threshold not reached + # Successful unstake - new stake below threshold ( ["5FHneW46..."], - [0.01], + [ + 100 - mock_get_minimum_required_stake() + 0.01 + ], # New stake just below threshold 100, True, True, False, True, - [None], - False, - 0, + [True], + True, # Sucessful unstake + 1, None, None, ), @@ -247,7 +249,7 @@ def test_unstake_extrinsic( "partial-success-one-fail", "success-no-hotkey", "failure-not-enough-stake", - "failure-threshold-not-reached", + "success-threshold-not-reached", "failure-prompt-declined", "failure-type-error-hotkeys", 
"failure-value-error-amounts",