
Commit 34d9560

Merge pull request #389 from macrocosm-os/staging
Staging
2 parents 8dcf8c5 + 0ab9b77

15 files changed (+612, -350 lines)

.gitignore (+1)

@@ -192,3 +192,4 @@ tests/mock_data
 
 db
 local-gjp
+*.ipynb

README.md (+1 -1)

@@ -111,7 +111,7 @@ When the simulations finally converge (ΔE/t < threshold), they produce the form
 ## Requirements
 Protein folding utilizes an open-source package called [OpenMM](https://openmm.org). To run, you will need:
 1. A Linux-based machine
-2. At least 1 CUDA-compatible GPU. We recommend an RXT 4090.
+2. At least 1 CUDA-compatible GPU. We recommend an RTX 4090.
 3. Conda Distribution (we recommend [Miniconda](https://docs.anaconda.com/miniconda/)). Using conda is an [OpenMM requirement](http://docs.openmm.org/latest/userguide/application/01_getting_started.html#installing-openmm).
 
 For more information regarding recommended hardware specifications, look at [min_compute.yml](./min_compute.yml)
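
Since the README's requirement list hinges on OpenMM seeing a CUDA-capable GPU, a quick post-install sanity check can help. The snippet below is not part of the repository; it is a hedged example against OpenMM's public `Platform` API.

```python
# Environment sanity check (illustrative, not from the repo): confirm that the
# CUDA platform is visible to OpenMM, matching the README's GPU requirement.
from openmm import Platform

names = [Platform.getPlatform(i).getName() for i in range(Platform.getNumPlatforms())]
print("OpenMM platforms:", names)
if "CUDA" not in names:
    raise RuntimeError("No CUDA platform found; check GPU drivers and the CUDA toolkit.")
```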

folding/__init__.py (+1 -1)

@@ -1,4 +1,4 @@
-__version__ = "2.2.0"
+__version__ = "2.3.0"
 version_split = __version__.split(".")
 __spec_version__ = (
     (10000 * int(version_split[0]))
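
The hunk cuts off after the first term of `__spec_version__`. For orientation, here is a sketch of the full expression under the common major/minor/patch weighting used by many Bittensor subnets; the second and third terms are assumptions, since they fall outside the diff.

```python
# Sketch only: the minor and patch terms are assumed, as the diff truncates here.
__version__ = "2.3.0"
version_split = __version__.split(".")
__spec_version__ = (
    (10000 * int(version_split[0]))  # major
    + (100 * int(version_split[1]))  # minor (assumed weight)
    + (1 * int(version_split[2]))    # patch (assumed weight)
)
# For 2.3.0 this would yield 20000 + 300 + 0 = 20300.
```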

folding/base/miner.py (+11 -51)

@@ -7,7 +7,7 @@
 import bittensor as bt
 
 from folding.base.neuron import BaseNeuron
-from folding.protocol import PingSynapse
+from folding.protocol import PingSynapse, ParticipationSynapse
 from folding.utils.config import add_miner_args
 from folding.utils.logger import logger
 

@@ -49,6 +49,8 @@ def __init__(self, config=None):
             priority_fn=self.priority,
         ).attach(
             forward_fn=self.ping_forward,  # not sure if we need blacklist on this.
+        ).attach(
+            forward_fn=self.participation_forward,
         )
         logger.info(f"Axon created: {self.axon}")
 

@@ -75,59 +77,16 @@ def ping_forward(self, synapse: PingSynapse):
         logger.success("Telling validator you can serve ✅")
         return synapse
 
-    def run(self):
-        """
-        Initiates and manages the main loop for the miner on the Bittensor network. The main loop handles graceful shutdown on keyboard interrupts and logs unforeseen errors.
-
-        This function performs the following primary tasks:
-        1. Check for registration on the Bittensor network.
-        2. Starts the miner's axon, making it active on the network.
-        3. Periodically resynchronizes with the chain; updating the metagraph with the latest network state and setting weights.
-
-        The miner continues its operations until `should_exit` is set to True or an external interruption occurs.
-        During each epoch of its operation, the miner waits for new blocks on the Bittensor network, updates its
-        knowledge of the network (metagraph), and sets its weights. This process ensures the miner remains active
-        and up-to-date with the network's latest state.
+    def participation_forward(self, synapse: ParticipationSynapse):
+        """Respond to the validator with the necessary information about participating in a specified job
 
-        Note:
-            - The function leverages the global configurations set during the initialization of the miner.
-            - The miner's axon serves as its interface to the Bittensor network, handling incoming and outgoing requests.
-
-        Raises:
-            KeyboardInterrupt: If the miner is stopped by a manual interruption.
-            Exception: For unforeseen errors during the miner's operation, which are logged for diagnosis.
+        Args:
+            self (ParticipationSynapse): must attach "is_participating"
         """
+        pass
 
-        # Check that miner is registered on the network.
-        self.sync()
-
-        # Serve passes the axon information to the network + netuid we are hosting on.
-        # This will auto-update if the axon port of external ip have changed.
-        logger.info(
-            f"Serving miner axon {self.axon} on network: {self.config.subtensor.chain_endpoint} with netuid: {self.config.netuid}"
-        )
-        self.axon.serve(netuid=self.config.netuid, subtensor=self.subtensor)
-
-        # Start starts the miner's axon, making it active on the network.
-        self.axon.start()
-
-        logger.info(f"Miner starting at block: {self.block}")
-
-        # This loop maintains the miner's operations until intentionally stopped.
-        try:
-            while not self.should_exit:
-                time.sleep(10)
-                self.sync()
-
-        # If someone intentionally stops the miner, it'll safely terminate operations.
-        except KeyboardInterrupt:
-            self.axon.stop()
-            logger.success("Miner killed by keyboard interrupt.")
-            exit()
-
-        # In case of unforeseen errors, the miner will log the error and continue operations.
-        except Exception as e:
-            logger.error(traceback.format_exc())
+    def run(self):
+        pass
 
     def run_in_background_thread(self):
         """

@@ -159,6 +118,7 @@ def __enter__(self):
         This method facilitates the use of the miner in a 'with' statement.
         """
         self.run_in_background_thread()
+
         return self
 
     def __exit__(self, exc_type, exc_value, traceback):
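
The new `participation_forward` handler lands as a stub (`pass`); only the docstring hints that the response must set `is_participating`. Below is a minimal sketch of how a miner might eventually fill it in, assuming `ParticipationSynapse` also carries a `job_id` field and that the miner tracks its active jobs in a `self.current_job_ids` set (both assumptions, not part of this commit).

```python
# Hypothetical sketch, not from the repository: `job_id` and `self.current_job_ids`
# are assumed names; only `is_participating` is mentioned in the committed docstring.
def participation_forward(self, synapse: ParticipationSynapse) -> ParticipationSynapse:
    """Tell the validator whether this miner is working on the specified job."""
    job_id = getattr(synapse, "job_id", None)  # assumed field on the synapse
    synapse.is_participating = job_id is not None and job_id in self.current_job_ids
    return synapse
```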

folding/base/neuron.py (+2 -1)

@@ -142,7 +142,8 @@ def remove_wandb_id(self, pdb_id: str):
         write_pkl(self.wandb_ids, f"{self.config.neuron.full_path}/wandb_ids.pkl", "wb")
 
     @abstractmethod
-    async def forward(self, synapse: bt.Synapse) -> bt.Synapse: ...
+    async def forward(self, synapse: bt.Synapse) -> bt.Synapse:
+        ...
 
     def sync(self):
         """
