diff --git a/requirements.txt b/requirements.txt index 196e1382..4bb31c08 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,4 +4,5 @@ rich>=13 pytest>=8 torch>=2 numpy>=1 -setuptools>=68 \ No newline at end of file +setuptools>=68 +git+https://github.com/opentensor/bittensor.git@ca653c34c833edf591d917b9d78c6123ec07ca6e#egg=bittensor \ No newline at end of file diff --git a/template/base/miner.py b/template/base/miner.py index 1f9f3fa8..b9d02681 100644 --- a/template/base/miner.py +++ b/template/base/miner.py @@ -190,7 +190,7 @@ def __exit__(self, exc_type, exc_value, traceback): def resync_metagraph(self): """Resyncs the metagraph and updates the hotkeys and moving averages based on the new metagraph.""" - bt.logging.info("resync_metagraph()") + # bt.logging.info("resync_metagraph()") # Sync the metagraph. self.metagraph.sync(subtensor=self.subtensor) diff --git a/template/base/neuron.py b/template/base/neuron.py index 156c435a..894d5513 100644 --- a/template/base/neuron.py +++ b/template/base/neuron.py @@ -173,11 +173,13 @@ def should_set_weights(self) -> bool: ) # don't set weights if you're a miner def save_state(self): - bt.logging.warning( - "save_state() not implemented for this neuron. You can implement this function to save model checkpoints or other useful data." - ) + pass + # bt.logging.warning( + # "save_state() not implemented for this neuron. You can implement this function to save model checkpoints or other useful data." + # ) def load_state(self): - bt.logging.warning( - "load_state() not implemented for this neuron. You can implement this function to load model checkpoints or other useful data." - ) + pass + # bt.logging.warning( + # "load_state() not implemented for this neuron. You can implement this function to load model checkpoints or other useful data." 
+ # ) diff --git a/template/base/utils/weight_utils.py b/template/base/utils/weight_utils.py index 51e0872d..8207a8ea 100644 --- a/template/base/utils/weight_utils.py +++ b/template/base/utils/weight_utils.py @@ -150,7 +150,7 @@ def process_weights_for_netuid( tuple[Any, ndarray], ]: bittensor.logging.debug("process_weights_for_netuid()") - bittensor.logging.debug("weights", weights) + bittensor.logging.debug(f"weights: {str(weights)}") bittensor.logging.debug("netuid", netuid) bittensor.logging.debug("subtensor", subtensor) bittensor.logging.debug("metagraph", metagraph) @@ -169,8 +169,8 @@ def process_weights_for_netuid( min_allowed_weights = subtensor.min_allowed_weights(netuid=netuid) max_weight_limit = subtensor.max_weight_limit(netuid=netuid) bittensor.logging.debug("quantile", quantile) - bittensor.logging.debug("min_allowed_weights", min_allowed_weights) - bittensor.logging.debug("max_weight_limit", max_weight_limit) + bittensor.logging.debug(f"min_allowed_weights: {str(min_allowed_weights)}") + bittensor.logging.debug(f"max_weight_limit: {str(max_weight_limit)}") # Find all non zero weights. 
non_zero_weight_idx = np.argwhere(weights > 0).squeeze() @@ -180,7 +180,7 @@ if non_zero_weights.size == 0 or metagraph.n < min_allowed_weights: bittensor.logging.warning("No non-zero weights returning all ones.") final_weights = np.ones(metagraph.n) / metagraph.n - bittensor.logging.debug("final_weights", final_weights) + bittensor.logging.debug(f"final_weights: {str(final_weights)}") return np.arange(len(final_weights)), final_weights elif non_zero_weights.size < min_allowed_weights: @@ -191,13 +191,13 @@ np.ones(metagraph.n) * 1e-5 ) # creating minimum even non-zero weights weights[non_zero_weight_idx] += non_zero_weights - bittensor.logging.debug("final_weights", weights) + bittensor.logging.debug(f"final_weights: {str(weights)}") normalized_weights = normalize_max_weight( x=weights, limit=max_weight_limit ) return np.arange(len(normalized_weights)), normalized_weights - bittensor.logging.debug("non_zero_weights", non_zero_weights) + bittensor.logging.debug(f"non_zero_weights: {str(non_zero_weights)}") # Compute the exclude quantile and find the weights in the lowest quantile max_exclude = max(0, len(non_zero_weights) - min_allowed_weights) / len( @@ -214,13 +214,13 @@ lowest_quantile <= non_zero_weights ] non_zero_weights = non_zero_weights[lowest_quantile <= non_zero_weights] - bittensor.logging.debug("non_zero_weight_uids", non_zero_weight_uids) - bittensor.logging.debug("non_zero_weights", non_zero_weights) + bittensor.logging.debug(f"non_zero_weight_uids: {str(non_zero_weight_uids)}") + bittensor.logging.debug(f"non_zero_weights: {str(non_zero_weights)}") # Normalize weights and return. 
normalized_weights = normalize_max_weight( x=non_zero_weights, limit=max_weight_limit ) - bittensor.logging.debug("final_weights", normalized_weights) + bittensor.logging.debug(f"final_weights: {str(normalized_weights)}") return non_zero_weight_uids, normalized_weights diff --git a/template/base/validator.py b/template/base/validator.py index 9f6f22ab..6ae04210 100644 --- a/template/base/validator.py +++ b/template/base/validator.py @@ -242,7 +242,7 @@ def set_weights(self): # Compute raw_weights safely raw_weights = self.scores / norm - bt.logging.debug("raw_weights", raw_weights) + # bt.logging.debug("raw_weights", raw_weights) bt.logging.debug("raw_weight_uids", str(self.metagraph.uids.tolist())) # Process the raw weights to final_weights via subtensor limitations. ( @@ -255,8 +255,8 @@ def set_weights(self): subtensor=self.subtensor, metagraph=self.metagraph, ) - bt.logging.debug("processed_weights", processed_weights) - bt.logging.debug("processed_weight_uids", processed_weight_uids) + bt.logging.debug(f"processed_weights {str(processed_weights)}") + bt.logging.debug(f"processed_weight_uids {str(processed_weight_uids)}") # Convert to uint16 weights and uids. ( @@ -265,8 +265,8 @@ ) = convert_weights_and_uids_for_emit( uids=processed_weight_uids, weights=processed_weights ) - bt.logging.debug("uint_weights", uint_weights) - bt.logging.debug("uint_uids", uint_uids) + bt.logging.debug(f"uint_weights: {str(uint_weights)}") + bt.logging.debug(f"uint_uids: {str(uint_uids)}") # Set the weights on chain via our subtensor connection. result, msg = self.subtensor.set_weights( @@ -285,7 +285,7 @@ def set_weights(self): def resync_metagraph(self): """Resyncs the metagraph and updates the hotkeys and moving averages based on the new metagraph.""" - bt.logging.info("resync_metagraph()") + # bt.logging.info("resync_metagraph()") # Copies state of metagraph before syncing. previous_metagraph = copy.deepcopy(self.metagraph)