# Kademlia patches

import asyncio
import logging
import os
import time

from rpcudp.protocol import RPCProtocol

from kademlia.network import *
from kademlia.routing import RoutingTable
from kademlia.storage import *
# Route kademlia log records to the console with timestamps, at DEBUG level.
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler = logging.StreamHandler()
handler.setFormatter(formatter)
log = logging.getLogger('kademlia')
log.setLevel(logging.DEBUG)
log.addHandler(handler)
# Default upstream address the UDP proxy forwards datagrams to
# (all interfaces, port 8368).
PROXY_ADDR = ('0.0.0.0', 8368)
class HalfForgetfulStorage(ForgetfulStorage):
    """Disk-backed kademlia storage that never expires entries.

    Values are persisted through ``pickledb``; each key maps to a list of
    ``(monotonic_timestamp, value)`` tuples, so repeated sets append to the
    history rather than overwrite it.
    """

    def __init__(self, fn='cache.h5', ttl=604800, log=print):
        """
        fn:  path of the pickledb database file.
        ttl: nominal max age in seconds (a week by default); currently
             unused because cull() is a deliberate no-op.
        log: callable used for diagnostics.
        """
        import pickledb
        self.fn = fn
        self.log = log
        # Second arg True => auto-dump on every write, so the cache
        # survives restarts without an explicit save step.
        self.data = pickledb.load(self.fn, True)
        self.ttl = ttl

    def cull(self):
        # Deliberate no-op: this storage is "half forgetful" -- entries
        # are never aged out.
        pass

    def keys(self):
        # pickledb's getall() returns all stored keys.
        return self.data.getall()

    def __len__(self):
        return len(self.keys())

    def __setitem__(self, key, value):
        # Append a (timestamp, value) entry to whatever list is already
        # stored under this key (pickledb .get returns a falsy value for
        # a missing key).
        sofar = self.data.get(key)
        if not sofar:
            sofar = []
        newval = sofar + [(time.monotonic(), value)]
        self.data.set(key, newval)

    def set(self, key, value):
        # BUG FIX: the original signature omitted ``self``.
        self[key] = value

    def write(self):
        # Persistence is handled by pickledb's auto-dump; nothing to do.
        pass

    def get(self, key, default=None):
        """Return the stored value list, or ``default`` when absent/empty.

        BUG FIX: the original ignored ``default`` and always returned the
        result of ``self[key]`` (an empty list on a miss), so callers
        could not distinguish "absent" from "present".
        """
        val = self[key]
        return val if val else default

    def __getitem__(self, key):
        """Return the list of stored values with the timestamps stripped.

        Returns [] when the key is missing or holds no entries.
        BUG FIX: the original referenced an undefined name ``data_list``
        (its definition was commented out), raising NameError on every
        lookup.
        """
        val = self.data.get(key)
        if not val:
            return []
        return [dat[1] for dat in val]
"""UDP proxy server."""
import asyncio
class ProxyDatagramProtocol(asyncio.DatagramProtocol):
    """Front half of the UDP proxy: accepts client datagrams and gives
    each client its own upstream connection."""

    def __init__(self, remote_address=PROXY_ADDR):
        super().__init__()
        # Upstream (host, port) every client is proxied to.
        self.remote_address = remote_address
        # Maps client addr -> RemoteDatagramProtocol handling that client.
        self.remotes_d = {}

    def connection_made(self, transport):
        self.transport = transport

    def datagram_received(self, data, addr):
        known = self.remotes_d.get(addr)
        if known is not None:
            # Existing client: forward straight to its upstream transport.
            known.transport.sendto(data)
            return
        # New client: open a dedicated upstream datagram endpoint for it;
        # the first datagram is handed over and flushed on connection.
        remote = RemoteDatagramProtocol(self, addr, data)
        self.remotes_d[addr] = remote
        endpoint = asyncio.get_event_loop().create_datagram_endpoint(
            lambda: remote, remote_addr=self.remote_address)
        asyncio.ensure_future(endpoint)
class RemoteDatagramProtocol(asyncio.DatagramProtocol):
    """Back half of the UDP proxy: one upstream connection per client."""

    def __init__(self, proxy, addr, data):
        """
        proxy: the ProxyDatagramProtocol that created this instance.
        addr:  the originating client's (host, port) address.
        data:  the client's first datagram; sent once connected.
        """
        print('RemoteDP got:', proxy, addr, data)
        self.proxy = proxy
        self.addr = addr
        self.data = data
        super().__init__()

    def connection_made(self, transport):
        self.transport = transport
        # Flush the datagram that triggered this connection.
        self.transport.sendto(self.data)

    def datagram_received(self, data, _):
        # Upstream replied: relay the payload back to the client.
        self.proxy.transport.sendto(data, self.addr)

    def connection_lost(self, exc):
        # BUG FIX: the original referenced nonexistent attributes
        # ``self.proxy.remotes`` and ``self.attr`` (AttributeError on
        # every disconnect). Drop our mapping so the client can be
        # re-registered on its next datagram.
        self.proxy.remotes_d.pop(self.addr, None)
async def start_datagram_proxy(protocol_class, bind, port, remote_host, remote_port):
    """Bind a datagram endpoint on (bind, port) whose protocol forwards
    traffic to (remote_host, remote_port).

    Returns the (transport, protocol) pair from
    ``loop.create_datagram_endpoint``.
    """
    proto = protocol_class((remote_host, remote_port))
    return await asyncio.get_event_loop().create_datagram_endpoint(
        lambda: proto, local_addr=(bind, port))
def main(bind='0.0.0.0', port=8888,
         remote_host='0.0.0.0', remote_port=9999):
    """Run the UDP proxy until interrupted.

    Listens on (bind, port) and forwards datagrams to
    (remote_host, remote_port).
    """
    loop = asyncio.get_event_loop()
    print("Starting datagram proxy...")
    # BUG FIX: start_datagram_proxy takes the protocol class as its first
    # argument; the original call omitted it (TypeError at runtime).
    coro = start_datagram_proxy(
        ProxyDatagramProtocol, bind, port, remote_host, remote_port)
    transport, _ = loop.run_until_complete(coro)
    print("Datagram proxy is running...")
    try:
        loop.run_forever()
    except KeyboardInterrupt:
        pass
    finally:
        # Close the transport even if run_forever() raised.
        print("Closing transport...")
        transport.close()
        loop.close()
# Module-level logger; same 'kademlia' logger configured with a stream
# handler near the top of this file.
log = logging.getLogger('kademlia')  # pylint: disable=invalid-name
class KadProtocol(KademliaProtocol):
    """KademliaProtocol variant with a longer RPC wait timeout and a
    router that never evicts unresponsive contacts."""

    def __init__(self, source_node, storage, ksize):
        # Intentionally bypass KademliaProtocol.__init__ so that a
        # 5-second wait_timeout can be passed to the RPC layer.
        RPCProtocol.__init__(self, wait_timeout=5)
        self.source_node = source_node
        self.storage = storage
        self.router = RoutingTable(self, ksize, source_node)

    def handle_call_response(self, result, node):
        """
        If we get a response, add the node to the routing table. If
        we get no response, make sure it's removed from the routing table.
        """
        responded = result[0]
        if responded:
            log.info("got successful response from %s", node)
            self.welcome_if_new(node)
        else:
            log.warning("no response from %s, ?removing from router", node)
            # self.router.remove_contact(node)  # kept disabled on purpose
        return result
class KadServer(Server):
    """kademlia Server patched to use KadProtocol and to keep local
    copies of values it looks up or stores."""

    protocol_class = KadProtocol

    def __init__(self, *x, **y):
        super().__init__(*x, **y)
        # NOTE(review): assumes self.storage.data supports len() --
        # confirm against the configured storage backend.
        log.info(f'Storage has {len(self.storage.data)} keys')

    def __repr__(self):
        # Avoid shadowing the builtin ``repr`` (original used it as a
        # local variable name).
        desc = f"""
        KadServer()
        ksize = {self.ksize}
        alpha = {self.alpha}
        storage = {self.storage}
        node = {self.node}
        transport = {self.transport}
        protocol = {self.protocol}
        refresh_loop = {self.refresh_loop}
        save_state_loop = {self.save_state_loop}
        bootstrappable_neighbors = {self.bootstrappable_neighbors()}
        """
        return desc

    async def get(self, key):
        """
        Get a key if the network has it.

        Returns:
            :class:`None` if not found, the value otherwise.
        """
        log.info("Looking up key %s", key)
        dkey = digest(key)
        # If this node already has it, return it (single storage lookup;
        # the original called storage.get twice).
        local = self.storage.get(dkey)
        if local is not None:
            log.info('I already have this')
            return local
        node = Node(dkey)
        nearest = self.protocol.router.find_neighbors(node)
        log.info('My nearest nodes are: %s', nearest)
        if not nearest:
            log.warning("There are no known neighbors to get key %s", key)
            return None
        spider = ValueSpiderCrawl(self.protocol, node, nearest,
                                  self.ksize, self.alpha)
        found = await spider.find()

        log.info('spider done crawling: %s', spider)
        log.info('spider found value: %s', found)

        # BUG FIX: only cache successful lookups; the original stored
        # ``found`` even when it was None, poisoning the local cache.
        if found is not None:
            self.storage[dkey] = found
        return found

    async def set(self, key, value):
        """
        Set the given string key to the given value in the network.
        """
        if not check_dht_value_type(value):
            raise TypeError(
                "Value must be of type int, float, bool, str, or bytes"
            )
        log.info("setting '%s' = '%s' on network", key, value)
        dkey = digest(key)
        # Store locally first; the storage appends to a history list, so
        # propagate the full accumulated value to the network.
        self.storage[dkey] = value
        newvalue = self.storage[dkey]
        return await self.set_digest(dkey, newvalue)

    async def set_digest(self, dkey, value):
        """
        Set the given SHA1 digest key (bytes) to the given value in the
        network.
        """
        node = Node(dkey)

        nearest = self.protocol.router.find_neighbors(node)
        if not nearest:
            log.warning("There are no known neighbors to set key %s",
                        dkey.hex())
            # NOTE: deliberately no early return -- the original patch
            # disabled it so the crawl/local-store below still runs.

        spider = NodeSpiderCrawl(self.protocol, node, nearest,
                                 self.ksize, self.alpha)
        nodes = await spider.find()
        log.info("setting '%s' on %s", dkey.hex(), list(map(str, nodes)))

        # If this node is close too, then store here as well.
        neighbs = [n.distance_to(node) for n in nodes]
        log.info('setting on %s neighbors', neighbs)
        biggest = max(neighbs) if neighbs else 0
        log.info('my distance to node is %s, biggest distance is %s',
                 self.node.distance_to(node), biggest)
        if self.node.distance_to(node) < biggest:
            self.storage[dkey] = value

        log.info('here are the nodes %s' % nodes)
        results = [self.protocol.call_store(n, dkey, value) for n in nodes]
        log.info('here are the results')

        # return true only if at least one store call succeeded
        return any(await asyncio.gather(*results))
#### NEVERMIND
# KadServer = Server
import time

if __name__ == '__main__':
    # Smoke test: append a value to the persistent storage and read it
    # back twice.
    hfs = HalfForgetfulStorage(fn='test.db')
    hfs['a'] = 1000
    print(hfs['a'])
    print(hfs['a'])