pull/20/head
quadrismegistus 4 years ago
parent eb448c5a28
commit 95bf840926

@ -225,7 +225,7 @@ class MainApp(MDApp):
def log(self, *args):
    """Join all positional arguments into one space-separated line and emit
    it at DEBUG level via this app's logger.

    Keeps the committed behavior of appending a trailing newline; the diff
    residue that left TWO consecutive debug calls (logging every line twice)
    is collapsed into the single, newer call.
    """
    line = ' '.join(str(x) for x in args)
    self.logger.debug(line + '\n')
def __init__(self, **kwargs):
super().__init__(**kwargs)

@ -162,7 +162,7 @@
orientation: "vertical"
padding: "20dp"
size_hint: (None, None)
# size:('400sp','800sp')
size:('500sp','800sp')
# adaptive_height: True
pos_hint: {"center_x": .5, "center_y": .5}
md_bg_color: rgb(*COLOR_CARD)

@ -13,7 +13,7 @@ class SpiderCrawl:
"""
Crawl the network and look for given 160-bit keys.
"""
def __init__(self, protocol, node, peers, ksize, alpha, log=print):
    """
    Create a new C{SpiderCrawl}er.

    Args:
        protocol: the protocol instance used to issue RPCs.
        node: the Node representing the 160-bit key we are looking for.
        peers: list of Nodes that provide the entry point for the network.
        ksize: the value for k based on the paper.
        alpha: the value for alpha based on the paper.
        log: callable used for diagnostics (defaults to ``print``).
    """
    # NOTE(review): the assignments of protocol/ksize/alpha fall in a hunk
    # gap in the diff; they are reconstructed from their uses below
    # (self.ksize at the NodeHeap call, self.alpha in _find) — confirm
    # against the full file.
    self.protocol = protocol
    self.ksize = ksize
    self.alpha = alpha
    self.node = node
    self.nearest = NodeHeap(self.node, self.ksize)
    self.last_ids_crawled = []
    # BUG FIX: the previous revision called self.log(...) and only assigned
    # self.log = log afterwards, raising AttributeError on every
    # construction. Assign first, then log.
    self.log = log
    self.log("creating spider with peers: %s" % peers)
    self.nearest.push(peers)
async def _find(self, rpcmethod):
"""
@ -51,7 +52,7 @@ class SpiderCrawl:
yet queried
4. repeat, unless nearest list has all been queried, then ur done
"""
log.info("crawling network with nearest: %s", str(tuple(self.nearest)))
self.log("crawling network with nearest: %s" % str(tuple(self.nearest)))
count = self.alpha
if self.nearest.get_ids() == self.last_ids_crawled:
count = len(self.nearest)
@ -69,8 +70,9 @@ class SpiderCrawl:
class ValueSpiderCrawl(SpiderCrawl):
def __init__(self, protocol, node, peers, ksize, alpha, log=print):
    """
    Spider that crawls the network looking for the value of a single key.

    Same arguments as SpiderCrawl; ``log`` is the diagnostics callable
    (defaults to ``print``).
    """
    # Assign self.log BEFORE delegating: the base __init__ may call
    # self.log before (re)assigning the attribute itself, so this pre-set
    # keeps construction safe regardless of the base-class ordering.
    self.log = log
    SpiderCrawl.__init__(self, protocol, node, peers, ksize, alpha, log=log)
    # keep track of the single nearest node without value - per
    # section 2.3 so we can set the key there if found
    self.nearest_without_value = NodeHeap(self.node, 1)
@ -115,8 +117,8 @@ class ValueSpiderCrawl(SpiderCrawl):
"""
value_counts = Counter(values)
if len(value_counts) != 1:
log.warning("Got multiple values for key %i: %s",
self.node.long_id, str(values))
self.log("Got multiple values for key %i: %s" %
(self.node.long_id, str(values)) )
value = value_counts.most_common(1)[0][0]
peer = self.nearest_without_value.popleft()
@ -126,6 +128,11 @@ class ValueSpiderCrawl(SpiderCrawl):
class NodeSpiderCrawl(SpiderCrawl):
def __init__(self, *args, **kwargs):
    """Accept the same arguments as SpiderCrawl plus a ``log`` keyword.

    BUG FIX: the previous version read ``log`` out of the kwargs but did
    NOT forward it to the base class; SpiderCrawl.__init__ then reassigned
    self.log to its default, silently discarding the injected logger.
    Forward everything instead.
    """
    # Pre-set self.log because the base __init__ may call it before
    # assigning the attribute.
    self.log = kwargs.get('log', print)
    super().__init__(*args, **kwargs)
async def find(self):
"""
Find the closest nodes.

@ -58,18 +58,14 @@ class Server:
# return [asyncio.create_task(self.set_digest(k,v)) for k,v in self.storage.items()]
def __repr__(self):
    """Return a one-line status summary: stored-key count and neighbor count.

    The verbose multi-line report (transport/protocol/loops) and the
    formatted neighbor list were dropped upstream; only the summary
    line survives. Avoids shadowing the builtin ``repr`` and drops the
    ``neighbors`` string that was computed but never used.
    """
    neighbs = self.bootstrappable_neighbors()
    return f"""storing {len(self.storage.data)} keys and has {len(neighbs)} neighbors"""
@ -85,7 +81,7 @@ class Server:
self.save_state_loop.cancel()
def _create_protocol(self):
    """Factory used by the datagram endpoint: build the protocol instance,
    passing our logger through.

    Diff residue removed: the old revision's ``return`` (without
    ``self.log``) sat unreachable above the new one.
    """
    return self.protocol_class(self.node, self.storage, self.ksize, self.log)
async def listen(self, port, interface='0.0.0.0'):
    """Start listening on the given port/interface and store the resulting
    transport and protocol on self, then schedule the routing-table refresh.

    NOTE(review): the original docstring body falls in a diff hunk gap —
    restore it from the full file. Diff residue removed: the old two-line
    lazy-%-args log call duplicated the new single-line call.
    """
    loop = asyncio.get_event_loop()
    listen = loop.create_datagram_endpoint(self._create_protocol,
                                           local_addr=(interface, port))
    self.log("Node %i listening on %s:%i" % (self.node.long_id, interface, port))
    self.transport, self.protocol = await listen
    # finally, schedule refreshing table
    self.refresh_table()
@ -119,6 +114,7 @@ class Server:
nearest = self.protocol.router.find_neighbors(node, self.alpha)
spider = NodeSpiderCrawl(self.protocol, node, nearest,
self.ksize, self.alpha)
spider.log=self.log
results.append(spider.find())
# do our crawling
@ -158,6 +154,7 @@ class Server:
nodes = [node for node in gathered if node is not None]
spider = NodeSpiderCrawl(self.protocol, self.node, nodes,
self.ksize, self.alpha)
spider.log=self.log
return await spider.find()
async def bootstrap_node(self, addr):
@ -188,7 +185,7 @@ class Server:
found = None
#while found is None:
spider = ValueSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
spider = ValueSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha, log=self.log)
self.log(f'spider crawling... {spider}')
found = await spider.find()
self.log('spider found <-',found,'for key',key,'(',dkey,')')
@ -231,12 +228,12 @@ class Server:
nearest = self.protocol.router.find_neighbors(node)
self.log('set_digest() nearest -->',nearest)
if not nearest:
self.log("There are no known neighbors to set key %s",
dkey.hex())
self.log("There are no known neighbors to set key %s" % dkey.hex())
return False
spider = NodeSpiderCrawl(self.protocol, node, nearest,
self.ksize, self.alpha)
self.ksize, self.alpha, log=self.log)
nodes = await spider.find()
self.log(f"setting '%s' on %s" % (dkey.hex(), list(map(str, nodes))))
@ -263,7 +260,7 @@ class Server:
Save the state of this node (the alpha/ksize/id/immediate neighbors)
to a cache file with the given fname.
"""
self.log("Saving state to %s", fname)
self.log("Saving state to %s" % fname)
data = {
'ksize': self.ksize,
'alpha': self.alpha,
@ -283,7 +280,7 @@ class Server:
from a cache file with the given fname and then bootstrap the node
(using the given port/interface to start listening/bootstrapping).
"""
self.log("Loading state from %s", fname)
self.log("Loading state from %s" % fname)
with open(fname, 'rb') as file:
data = pickle.load(file)
svr = Server(data['ksize'], data['alpha'], data['id'])

@ -62,11 +62,12 @@ log = logging.getLogger(__name__) # pylint: disable=invalid-name
class KademliaProtocol(RPCProtocol):
def __init__(self, source_node, storage, ksize, log=print):
    """Set up the protocol: routing table, storage, our own node identity,
    and the diagnostics callable.

    Args:
        source_node: the Node representing this server.
        storage: key/value storage backend.
        ksize: the value for k based on the paper.
        log: callable used for diagnostics (defaults to ``print``).
    """
    RPCProtocol.__init__(self)
    self.router = RoutingTable(self, ksize, source_node)
    self.storage = storage
    self.source_node = source_node
    # BUG FIX: was `self.log = print`, which silently discarded the
    # injected `log` parameter added to the signature. Store the parameter.
    self.log = log
def get_refresh_ids(self):
"""
@ -89,13 +90,13 @@ class KademliaProtocol(RPCProtocol):
def rpc_store(self, sender, nodeid, key, value):
    """Handle an incoming STORE RPC: register the sender in the routing
    table if new, log the request, store the value, and acknowledge.

    Diff residue removed: the old two-line ``log.debug`` call duplicated
    the new ``self.log`` call with the same message.
    """
    source = Node(nodeid, sender[0], sender[1])
    self.welcome_if_new(source)
    self.log("got a store request from %s, storing '%s' -> %s (binary keys)'" %
             (sender, key.hex(), len(value)))
    self.storage[key] = value
    return True
def rpc_find_node(self, sender, nodeid, key):
log.info("finding neighbors of %i in local table",
self.log("finding neighbors of %i in local table" %
int(nodeid.hex(), 16))
source = Node(nodeid, sender[0], sender[1])
self.welcome_if_new(source)
@ -150,7 +151,7 @@ class KademliaProtocol(RPCProtocol):
if not self.router.is_new_node(node):
return
log.info("never seen %s before, adding to router", node)
self.log("never seen %s before, adding to router" % node)
#for key, value in self.storage:
for key in self.storage.keys():
value = self.storage[key]
@ -175,6 +176,6 @@ class KademliaProtocol(RPCProtocol):
self.router.remove_contact(node)
return result
log.info("got successful response from %s", node)
self.log("got successful response from %s" % node)
self.welcome_if_new(node)
return result

@ -192,11 +192,8 @@ class HalfForgetfulStorage(IStorage):
def __repr__(self, lim_eg=5):
    """Return a compact summary of the store (key count only).

    ``lim_eg`` is retained for interface compatibility: it limited the
    example-keys listing that was dropped upstream. The dead commented-out
    code (old multi-line message, cull() call, example-key slice) is
    removed.
    """
    return f"""HFS() # keys = {len(self.data)}"""
def iter_older_than(self, seconds_old):

@ -7,6 +7,15 @@ import pickle,os
NODES_PRIME = [("128.232.229.63",8467), ("68.66.241.111",8467)]
#68.66.224.46
def logger():
    """Return a DEBUG-level logger for this module with a timestamped
    stream handler attached.

    BUG FIX: the previous version attached a fresh StreamHandler on every
    call; since logging.getLogger returns the same singleton per name, each
    call added another handler and every message was emitted N times. Only
    attach a handler on the first call. Also avoids shadowing the function
    name with the local variable.
    """
    import logging
    log = logging.getLogger(__file__)
    if not log.handlers:
        handler = logging.StreamHandler()
        formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
        handler.setFormatter(formatter)
        log.addHandler(handler)
    log.setLevel(logging.DEBUG)
    return log
def boot_lonely_selfless_node(port=8467):
async def go():

Loading…
Cancel
Save