Skip to content

Commit

Permalink
docs for BaseNeuron
Browse files Browse the repository at this point in the history
  • Loading branch information
mccrindlebrian committed Dec 18, 2024
1 parent 01be35c commit 1d8336e
Showing 1 changed file with 114 additions and 8 deletions.
122 changes: 114 additions & 8 deletions atom/base/neuron.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,21 +15,46 @@

class BaseNeuron(ABC):
"""
Base class for Bittensor miners. This class is abstract and should be inherited by a subclass. It contains the core logic for all neurons; validators and miners.
Base class for Bittensor miners and validators.
In addition to creating a wallet, subtensor, and metagraph, this class also handles the synchronization of the network state via a basic checkpointing mechanism based on epoch length.
This class provides the core functionality for all neurons in the Bittensor network.
It handles wallet management, network synchronization, and state management through
a basic checkpointing mechanism based on epoch length.
Attributes:
subtensor (bt.subtensor): Interface to the Bittensor blockchain
wallet (bt.wallet): Wallet containing cryptographic keys
metagraph (bt.metagraph): Network state information
"""

@classmethod
def check_config(cls, config: "bt.Config"):
    """
    Validate a neuron configuration object.

    Delegates to the module-level ``check_config`` helper.

    Args:
        config (bt.Config): The configuration to validate.
    """
    check_config(cls, config)

@classmethod
def add_args(cls, parser):
    """
    Register neuron-specific command line arguments.

    Delegates to the module-level ``add_args`` helper.

    Args:
        parser: Argument parser that receives the new arguments.
    """
    add_args(cls, parser)

@classmethod
def config(cls):
    """
    Build the default configuration for this neuron class.

    Returns:
        bt.Config: Default configuration produced by the module-level
        ``config`` helper.
    """
    return config(cls)

subtensor: "bt.subtensor"
Expand All @@ -39,10 +64,22 @@ def config(cls):
@property
@abstractmethod
def spec_version(self):
    """
    Specification version of this neuron implementation.

    Abstract: every subclass must override this property.

    Returns:
        The version identifier for the neuron implementation.
    """
    ...

@property
def block(self):
    """
    Current block number on the Bittensor chain.

    Returns:
        int: The latest block number, obtained via ``ttl_get_block``.
    """
    return ttl_get_block(self)

def __init__(self, config=None):
Expand Down Expand Up @@ -99,7 +136,13 @@ def run(self):

def sync(self):
"""
Wrapper for synchronizing the state of the network for the given miner or validator.
Synchronizes the neuron's state with the network.
Performs the following tasks:
1. Verifies registration status
2. Syncs metagraph if necessary
3. Sets weights if conditions are met
4. Saves current state
"""
# Ensure miner or validator hotkey is still registered on the network.
self.check_registered()
Expand All @@ -114,6 +157,12 @@ def sync(self):
self.save_state()

def check_registered(self):
"""
Verifies that the neuron's hotkey is registered on the network.
Raises:
SystemExit: If the hotkey is not registered on the specified subnet
"""
# --- Check for registration.
if self.config.mock:
return
Expand All @@ -130,13 +179,22 @@ def check_registered(self):

def should_sync_metagraph(self):
    """
    Decide whether the metagraph needs to be re-synced.

    Returns:
        bool: True when more blocks than
        ``config.neuron.metagraph_resync_length`` have elapsed since
        this neuron's last metagraph update, False otherwise.
    """
    blocks_elapsed = self.block - self.metagraph.last_update[self.uid]
    return blocks_elapsed > self.config.neuron.metagraph_resync_length

def should_set_weights(self) -> bool:
"""
Determines if the neuron should set weights on the network.
Returns:
bool: True if weights should be set, False otherwise
"""
# Don't set weights on initialization.
if self.step == 0:
return False
Expand All @@ -156,8 +214,9 @@ def should_set_weights(self) -> bool:

def run_in_background_thread(self):
"""
Starts the operations in a separate background thread.
This is useful for non-blocking operations.
Starts the neuron's operations in a background thread.
The thread runs as a daemon, allowing the program to exit when the main thread ends.
"""
if not self.is_running:
bt.logging.debug("Starting in background thread.")
Expand All @@ -169,7 +228,9 @@ def run_in_background_thread(self):

def stop_run_thread(self):
"""
Stops the operations that are running in the background thread.
Stops the neuron's background operations.
Attempts to gracefully stop the background thread with a 5-second timeout.
"""
if self.is_running:
bt.logging.debug("Stopping in background thread.")
Expand All @@ -179,22 +240,67 @@ def stop_run_thread(self):
bt.logging.debug("Stopped")

def __enter__(self):
    """
    Enter the synchronous context manager.

    Subclasses must implement this, e.g. by starting the neuron's run
    loop or background thread.

    Raises:
        NotImplementedError: Always, in this base class.
    """
    raise NotImplementedError

async def __aenter__(self):
    """
    Enter the asynchronous context manager.

    Raises:
        NotImplementedError: Always; subclasses must provide an
        implementation.
    """
    raise NotImplementedError

def __exit__(self, exc_type, exc_value, traceback):
    """
    Exit the synchronous context manager.

    Args:
        exc_type: Exception class raised inside the managed block, if any.
        exc_value: Exception instance raised, if any.
        traceback: Traceback of the exception, if any.

    Raises:
        NotImplementedError: Always; subclasses must provide an
        implementation.
    """
    raise NotImplementedError

async def __aexit__(self, exc_type, exc_value, traceback):
    """
    Exit the asynchronous context manager.

    Args:
        exc_type: Exception class raised inside the managed block, if any.
        exc_value: Exception instance raised, if any.
        traceback: Traceback of the exception, if any.

    Raises:
        NotImplementedError: Always; subclasses must provide an
        implementation.
    """
    raise NotImplementedError

def save_state(self):
    """
    Persist the neuron's current state.

    No-op in the base class; subclasses may override this to save model
    checkpoints or other state information.
    """

def load_state(self):
    """
    Restore previously saved neuron state.

    The base implementation only emits a warning; subclasses should
    override it to load model checkpoints or other useful data.
    """
    message = (
        "load_state() not implemented for this neuron. You can implement this function to load model checkpoints or other useful data."
    )
    bt.logging.warning(message)

0 comments on commit 1d8336e

Please sign in to comment.