Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
66 changes: 1 addition & 65 deletions peercrawler.py
Original file line number Diff line number Diff line change
Expand Up @@ -321,55 +321,9 @@ def parse_args():
help='run peer crawler as a service')
parser.add_argument('-l', '--nolisten', action='store_true', default=False,
help='listen to incoming connections')
parser.add_argument('-p', '--port', type=int, default=7070,
help='tcp port number to listen on in service mode')
return parser.parse_args()


class peer_service_header:
    """Fixed-size header sent by the peer service ahead of the peer list.

    Wire layout, all integers big-endian:
      4 bytes  magic b'PEER'
      1 byte   network id
      3 bytes  protocol version
      8 bytes  good peer count
      8 bytes  total peer count
    100 bytes  software version string
    """

    # total serialised length in bytes: 4 + 1 + 3 + 8 + 8 + 100
    size = 124

    def __init__(self, net_id, good_peers, total_peers, software_ver="devel", protocol_ver=3):
        assert (isinstance(net_id, network_id))
        assert (isinstance(software_ver, str))
        self.magic = b'PEER'
        self.net_id = net_id
        self.good_peers = good_peers
        self.total_peers = total_peers
        self.software_ver = software_ver
        self.protocol_ver = protocol_ver

    def serialise(self):
        """Return the 124-byte wire representation of this header."""
        parts = [
            self.magic,
            self.net_id.id.to_bytes(1, "big"),
            self.protocol_ver.to_bytes(3, "big"),
            self.good_peers.to_bytes(8, "big"),
            self.total_peers.to_bytes(8, "big"),
            string_to_bytes(self.software_ver, 100),
        ]
        return b''.join(parts)

    @classmethod
    def parse(cls, data):
        """Decode a 124-byte buffer produced by serialise() into a header object."""
        assert (len(data) == peer_service_header.size)
        assert (data[0:4] == b'PEER')
        net = network_id(data[4])
        proto = int.from_bytes(data[5:8], "big")
        good = int.from_bytes(data[8:16], "big")
        total = int.from_bytes(data[16:24], "big")
        # NOTE(review): the 100-byte field is decoded as-is; if string_to_bytes
        # NUL-pads, the padding survives the round-trip — confirm against callers.
        sw = data[24:].decode("utf-8")
        return peer_service_header(
            net_id=net,
            protocol_ver=proto,
            good_peers=good,
            total_peers=total,
            software_ver=sw
        )

    def __str__(self):
        return ('NetID: %s\n'
                'GoodPeers: %s\n'
                'TotalPeers: %s\n'
                'ProtoVers: %s\n'
                'SwVers: %s') % (self.net_id, self.good_peers,
                                 self.total_peers, self.protocol_ver,
                                 self.software_ver)


class peer_crawler_thread(threading.Thread):
def __init__(self, ctx, forever, delay, verbosity=0):
threading.Thread.__init__(self, daemon=True)
Expand All @@ -390,24 +344,6 @@ def spawn_peer_crawler_thread(ctx, forever, delay, verbosity):
return t


def run_peer_service_forever(peerman, addr='', port=7070):
    """Serve the crawler's peer list over TCP until the process exits.

    Each accepted connection receives a serialised peer_service_header
    (current good/total peer counts) followed by the jsonpickle-encoded
    peer list, then the connection is closed.

    peerman: peer manager whose peers are published (only read here)
    addr: interface to bind; '' binds all interfaces
    port: TCP port to listen on
    """
    with socket.socket(socket.AF_INET6, socket.SOCK_STREAM) as s:
        # dual-stack: accept IPv4-mapped as well as native IPv6 clients
        s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        s.bind((addr, port))
        s.listen()

        while True:
            conn, _client_addr = s.accept()
            with conn:
                # Fix: a single misbehaving client (send timeout, connection
                # reset) used to raise out of the loop and kill the whole
                # service; now it only drops that connection.
                try:
                    conn.settimeout(10)
                    hdr = peer_service_header(peerman.ctx["net_id"], peerman.count_good_peers(), peerman.count_peers())
                    data = hdr.serialise()
                    json_list = jsonpickle.encode(peerman.get_peers_as_list())
                    data += json_list.encode()
                    conn.sendall(data)
                except OSError:
                    # socket.timeout is a subclass of OSError; keep serving
                    continue


def get_peers_from_service(ctx, addr='::ffff:78.46.80.199'):
with socket.socket(socket.AF_INET6, socket.SOCK_STREAM) as s:
s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
Expand Down Expand Up @@ -532,7 +468,7 @@ def main():
if args.service:
Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The whole concept of peercrawler service needs to be removed

verbosity = args.verbosity if (args.verbosity is not None) else 0
crawler_thread = spawn_peer_crawler_thread(ctx, True, args.delay, verbosity)
run_peer_service_forever(crawler_thread.peerman, port=args.port)
Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The port command line argument can be deleted too

crawler_thread.join()
else:
verbosity = args.verbosity if (args.verbosity is not None) else 1
peerman = peer_manager(ctx, listen=(not args.nolisten), verbosity=verbosity)
Expand Down