import os,time,sys,logging
from pathlib import Path
import asyncio,time

# handler = logging.StreamHandler()
# formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
# handler.setFormatter(formatter)
# logger = logging.getLogger(__file__)
# logger.addHandler(handler)
# logger.setLevel(logging.DEBUG)

sys.path.append('../p2p')
# logger.info(os.getcwd(), sys.path)

BSEP=b'||||||||||'
BSEP2=b'@@@@@@@@@@'
BSEP3=b'##########'
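# Binary field separators used when packing records for the DHT: BSEP splits the
# encrypted payload from its decryption tools, and BSEP2 splits the fields inside
# each half (see Api.encode_data / Api.decode_data below). BSEP3 is defined but
# not used in this module.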

NODE_SLEEP_FOR=1
PATH_WORLD_KEY='.world.key'

try:
    from .crypto import *
    from .p2p import *
    from .kad import *
except (ModuleNotFoundError,ImportError):
    from crypto import *
    from p2p import *
    from kad import *

from pathlib import Path
from functools import partial

# works better with tor?
import json
jsonify = json.dumps

# Start server
DEBUG = True
UPLOAD_DIR = 'uploads/'
ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg', 'gif'}

# PORT_SPEAK = 8468
PORT_LISTEN = 5639

# Api Functions
from threading import Thread

NODES_PRIME = [("128.232.229.63",8467), ("68.66.241.111",8467)]
#68.66.224.46
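# Hard-coded bootstrap peers (host, port) for the Kademlia DHT; _getdb() below
# hands this list to node.bootstrap().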

from pathlib import Path
home = str(Path.home())

KEYDIR = os.path.join(home,'.komrade','.keys')
if not os.path.exists(KEYDIR): os.makedirs(KEYDIR)

KEYDIR_BUILTIN = '.'


class NetworkStillConnectingError(OSError): pass


async def _getdb(self=None,port=PORT_LISTEN):
    from kademlia.network import Server

    if self:
        self.log('starting server..')

    import os
    if self: self.log(os.getcwd())
    node = Server(log=self.log if self else None) #fn='../p2p/data.db',log=(self.log if self else print)))

    try:
        if self: self.log('listening..')
        await node.listen(port)
    except OSError:
        raise NetworkStillConnectingError('Still connecting...')
    #await asyncio.sleep(3)

    if self: self.log('bootstrapping server..')
    await node.bootstrap(NODES_PRIME)

    if self: node.log = self.log
    if self: self.log('NODE:',node)

    # if self and self.app:
    #     self.app.close_dialog()

    return node


def logg(*x):
    print(*x)


class Api(object):
    def __init__(self,user=None,log=None,app=None):
        self.log = log if log is not None else logg
        self.username = user
        self.app=app

    def private_key(self):
        if self.username:
            pass

    async def connect_forever(self,port=PORT_LISTEN,save_every=60):
        try:
            i = 0
            self._node = await self.connect(port=port)
            while True:
                if not i%90: self.log(f'Node status (tick {i}): {self._node}')
                if i and not i%save_every: await self.flush()
                i += 1
                await asyncio.sleep(NODE_SLEEP_FOR)
                # asyncio.sleep(0)
        except (asyncio.CancelledError,KeyboardInterrupt) as e:
            self.log('P2P node cancelled', e)
            await self.flush()
        finally:
            # when cancelled, print that it finished
            self.log('P2P node shutting down')


    @property
    async def node(self):
        # while not hasattr(self,'_node'):
        #     self.log('[API] waiting for connection...')
        #     await asyncio.sleep(1)
        # return self._node

        if not hasattr(self,'_node'):
            await self.connect()
            self._node.log=self.log
        return self._node


    async def connect(self,port=PORT_LISTEN):
        if self.app: self.app.open_dialog('hello?')
        self.log('connecting...')
        node = await _getdb(self,port)
        self.log(f'connect() has node {node}')
        self._node = node
        return node


    async def get(self,key_or_keys,decode_data=True):
        self.log(f'api.get({key_or_keys},decode_data={decode_data}) --> ...')
        async def _get():
            self.log(f'api._get({key_or_keys},decode_data={decode_data}) --> ...')
            node=await self.node
            res=None

            if type(key_or_keys) in {list,tuple,dict}:
                keys = key_or_keys
                self.log('keys is plural',keys)
                res =[]
                for key in keys:
                    val = None

                    # while not val:
                    self.log('trying again...')
                    val = await node.get(key)
                    self.log('got',val)
                    #asyncio.sleep(1)

                    self.log(f'val for {key} = {val} {type(val)}')
                    if decode_data:
                        self.log(f'api._get() decoding data {keys} -> {val} {type(val)}')
                        val = await self.decode_data(val)
                        self.log(f'api._get() got back decoded data {keys} -> {val} {type(val)}')
                    res+=[val]
                    #res = await asyncio.gather(*tasks)
            else:
                key=key_or_keys
                # self.log('keys is singular',key)
                val = await node.get(key)
                if decode_data:
                    self.log(f'api._get() decoding data {key} -> {val} {type(val)}')
                    val = await self.decode_data(val)
                    self.log(f'api._get() got back decoded data {key} -> {val} {type(val)}')
                # self.log('wtf is val =',val)
                res=val

            # self.log('wtf is res =',res)

            self.log(f'_get({key_or_keys}) --> {res}')
            return res
        return await _get()


    def encode_data(self,val,sep=BSEP,sep2=BSEP2,do_encrypt=False,encrypt_for_pubkey=None,private_signature_key=None):
        """
        What do we want to store with each value:

        1) [Encrypted payload:]
            1) Timestamp
            2) Public key of sender
            3) Public key of recipient
            4) AES-encrypted value
            5) Signature of value by author
        2) [Decryption tools:]
            1) AES decryption key
            2) AES decryption IV value
        """
        assert type(val)==bytes
        import time
        timestamp=time.time()

        self.log(f"""api.encode_data(
            val={val},
            sep={sep},
            sep2={sep2},
            do_encrypt={do_encrypt},
            encrypt_for_pubkey={encrypt_for_pubkey},
            private_signature_key={private_signature_key})""")

        # check input
        if not encrypt_for_pubkey:
            raise Exception('we need a receiver !!')

        # convert val to bytes
        # if type(val)!=bytes: val = bytes(val,'utf-8')
        # value_bytes=base64.b64encode(val)
        value_bytes = val

        # sign
        private_signature_key = private_signature_key if private_signature_key is not None else self.private_key
        signature = sign(value_bytes, private_signature_key)
        public_sender_key = private_signature_key.public_key()
        sender_pubkey_b = serialize_pubkey(public_sender_key)

        # Verify!
        self.log(f'''encode_data().verify_signature(
            signature={signature}
            value={value_bytes}
            sender_pubkey={sender_pubkey_b}''')
        authentic = verify_signature(signature, value_bytes, public_sender_key)

        if not authentic:
            raise Exception('message is inauthentic for set?? '+str(authentic))

        # encrypt?
        encrypt_for_pubkey_b = serialize_pubkey(encrypt_for_pubkey)
        #time_b=base64.b64encode(str(timestamp).encode('utf-8')) #.encode()
        time_b=str(timestamp).encode('utf-8')
        msg=value_bytes

        # whole binary package
        WDV = [
            time_b,
            sender_pubkey_b,
            encrypt_for_pubkey_b,
            msg,
            signature
        ]
        payload = sep2.join(WDV)

        res = aes_rsa_encrypt(payload,encrypt_for_pubkey)
        if res is None:
            raise Exception('encryption result does not exist')
        payload_encr_aes, payload_encr_aes_key, payload_encr_aes_iv = res

        decryption_tools = sep2.join([
            payload_encr_aes_key,
            payload_encr_aes_iv
        ])

        final_packet = sep.join([
            payload_encr_aes,
            decryption_tools
        ])

        self.log('FINAL PACKET:',final_packet,type(final_packet))
        return final_packet


    async def decode_data(self,entire_packet_orig,sep=BSEP,private_key=None,sep2=BSEP2):
        if entire_packet_orig is None: return entire_packet_orig
        self.log(f'decode_data({entire_packet_orig})...')
        import binascii
        entire_packet = entire_packet_orig

        #self.log('PACKED =',entire_packet,type(entire_packet))
        #self.log('????',type(entire_packet))
        #self.log(entire_packet)

        # get data
        try:
            encrypted_payload, decryption_tools = entire_packet.split(sep) #split_binary(entire_packet, sep=sep)
            decryption_tools=decryption_tools.split(sep2) #split_binary(decryption_tools,sep=sep2)
        except ValueError as e:
            self.log('!! decode_data() got incorrect format:',e)
            self.log('packet =',entire_packet)
            return entire_packet_orig


        ### NEW FIRST LINE: Try to decrypt!
        val=None
        key_used=None
        for keyname,privkey in self.keys.items():
            self.log(keyname,privkey,'??')
            try:
                # clicked!
                val = aes_rsa_decrypt(encrypted_payload,privkey,*decryption_tools)
                key_used=keyname
                # this must mean this was the recipient
                self.log(f'unlocked using key {keyname}!')
                break
            except ValueError as e:
                self.log(keyname,'did not work!') #,privkey,pubkey)
        if not val:
            raise Exception('Content not intended for us')

        #stop

        ### THIRD LINE: SIGNATURE VERIFICATION
        # can we decrypt signature?
        val_array = val.split(sep2)
        # self.log('val_array =',val_array)
        time_b,sender_pubkey_b,receiver_pubkey_b,msg,signature = val_array
        if not signature:
            raise Exception('no signature!')
        sender_pubkey=load_pubkey(sender_pubkey_b)
        authentic = verify_signature(signature,msg,sender_pubkey)
        if not authentic:
            raise Exception('inauthentic message')


        # ### FOURTH LINE: CONTENT ENCRYPTION
        # if private_key is None:
        #     private_key=self.private_key_global

        WDV={
            'time':float(time_b.decode('utf-8')),
            'val':msg.decode('utf-8'),
            'channel':key_used
            # 'to':receiver_pubkey_b,
            # 'from':sender_pubkey_b,
            # 'sign':signature
        }

        self.log('GOT WDV:',WDV)
        return WDV
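
        # Illustrative shape of the record returned above (field values invented):
        #   {'time': 1598198400.0, 'val': '{"author": "...", ...}', 'channel': 'world'}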


    # async def set_on_channel(self,key_or_keys,value_or_values):
    #     tasks=[]
    #     if type(channel_or_channels) not in {list,tuple}:
    #         channels=[channel_or_channels]
    #     else:
    #         channels=channel_or_channels
    #
    #     for channel in channels:
    #         uri =
    #         tasks.append(self.set)
    #
    #     if type(channel_or_channels) == str:
    #         return await self.set(self,key_or_keys,value_or_values,channel_or_channels)
    #     elif type(channel_or_channels) ==

    async def set(self,key_or_keys,value_or_values,private_signature_key=None,encode_data=True,encrypt_for_pubkey=None):
        self.log(f'api.set({key_or_keys}) --> {type(value_or_values)}')
        async def _set():
            # self.log('async _set()',self.node)
            # node=self.node
            #node=await _getdb(self)
            node=await self.node

            def proc(key,value):
                self.log(f'encoding data for {key} -> {type(value)} ...')
                if encode_data and encrypt_for_pubkey is not None and type(value)==bytes:
                    x = self.encode_data(
                        val = value,
                        do_encrypt=False,
                        encrypt_for_pubkey=encrypt_for_pubkey,
                        private_signature_key=private_signature_key
                    )
                    self.log(f'got back encoded data for {key} -> {x} ...')
                    return x
                else:
                    self.log(f'did not encode data for {key} -> {value} ...')
                    return value

            if type(key_or_keys) in {list,tuple,dict}:
                keys = key_or_keys
                values = value_or_values
                assert len(keys)==len(values)
                res=[]
                for key,value in zip(keys,values):
                    newval = proc(key,value)
                    self.log(f'kvv (plural) <- {key}:{value} -> {newval}')
                    await node.set(key,newval)
                    res+=[newval]
            else:
                key = key_or_keys
                value = value_or_values
                newval = proc(key,value)
                self.log(f'kvv (singular) <- {key}:{value} -> {newval}')
                res = newval
                await node.set(key,newval)

            self.log(f'api.set(key={key_or_keys}, res = {res})')
            #node.stop()
            # self.log('reconnecting ...',self._node)
            #await self._node.stop()
            #await self.connect()
            return res

        return await _set()

    async def get_json(self,key_or_keys,decode_data=False):

        def jsonize_dat(dat_dict):
            if type(dat_dict)==dict and 'val' in dat_dict:
                self.log('is this json???',dat_dict['val'],'???')
                dat_dict['val']=json.loads(dat_dict['val'])
                #dat_dict['val']=json.loads(base64.b64decode(dat_dict['val']).decode('utf-8'))
            return dat_dict

        def jsonize_res(res):
            if not res:
                return None
            if type(res)==list:
                return [jsonize_dat(d) for d in res]
            else:
                return jsonize_dat(res)

        res = await self.get(key_or_keys,decode_data=decode_data)
        self.log('get_json() got from get() a',type(res))
        return jsonize_res(res)


    async def set_json(self,key,value,private_signature_key=None,encode_data=True,encrypt_for_pubkey=None):

        def prep_json(val):
            if type(val)!=str:
                val=json.dumps(val)
            bval=val.encode('utf-8')
            return bval

        self.log(f'api.set_json({key}, {value} ...)')
        json_b = prep_json(value)
        self.log(f'bjson -> {json_b}')
        set_res = await self.set(
            key,
            json_b,
            private_signature_key=private_signature_key,
            encode_data=encode_data,
            encrypt_for_pubkey=encrypt_for_pubkey)
        self.log(f'api.set_json() <-- {set_res}')
        return set_res

    async def has(self,key):
        val=await self.get(key)
        return val is not None


    ## PERSONS
    async def get_person(self,username):
        return await self.get('/pubkey/'+username,decode_data=False)

    async def set_person(self,username,pem_public_key):
        # pem_public_key = save_public_key(public_key,return_instead=True)
        #obj = {'name':username, 'public_key':pem_public_key}
        # await self.set_json('/person/'+username,obj)
        # keystr=base64.b64encode(pem_public_key).decode()
        # self.log('keystr',type(keystr))
        await self.set('/pubkey/'+username,pem_public_key,encode_data=False)
        # keystr=pem_public_key
        # await self.set(b'/name/'+keystr,username,encode_data=False)

    ## Register
    async def register(self,name,passkey=None,just_return_keys=False):
        # if not (name and passkey): return {'error':'Name and password needed'}
        import kademlia
        try:
            person = await self.get_person(name)
        except kademlia.network.CannotReachNetworkError:
            return {'error':'Network disconnected'}
        except NetworkStillConnectingError:
            return {'error':'Network still connecting...'}

        keys = self.get_keys()
        if person is not None:
            self.log('register() person <-',person)
            # try to log in
            self.log('my keys',keys)
            if not name in keys:
                self.log('!! person already exists')
                return {'error':'Person already exists'}

            # test 3 conditions
            privkey=keys[name]
            pubkey=load_pubkey(person)

            if simple_lock_test(privkey,pubkey):
                self.username=name
                self.log('!! logging into',name)
                return {'success':'Logging back in...'}

        private_key = generate_rsa_key()
        public_key = private_key.public_key()
        pem_private_key = serialize_privkey(private_key, password=passkey)# save_private_key(private_key,password=passkey,return_instead=True)
        pem_public_key = serialize_pubkey(public_key)

        if just_return_keys:
            return (pem_private_key,pem_public_key)

        # save pub key in db
        await self.set_person(name,pem_public_key)
        # save priv key on hardware
        fn_privkey = os.path.join(KEYDIR,f'.{name}.key')

        self.log('priv key =',pem_private_key)
        write_key_b(pem_private_key, fn_privkey)

        # good
        return {'success':'Person created ...', 'username':name}


    def load_private_key(self,password):
        #if not self.app_storage.exists('_keys'): return {'error':'No login keys present on this device'}
        pem_private_key=self.app_storage.get('_keys').get('private')
        # self.log('my private key ====',pem_private_key)
        try:
            return {'success':load_privkey(pem_private_key,password)}
        except ValueError as e:
            self.log('!!',e)
            return {'error':'Incorrect password'}

    def add_world_key(self,fn=PATH_WORLD_KEY):
        import shutil
        thisdir=os.path.dirname(__file__)
        fnfn=os.path.join(thisdir,fn+'.priv')
        self.log('getting',fnfn)
        name='.'.join(os.path.basename(fn).split('.')[1:-1])

        priv_key=load_privkey_fn(fnfn)
        pub_key=priv_key.public_key()
        pub_key_b=serialize_pubkey(pub_key)

        ofn=os.path.join(KEYDIR,f'.{name}.key')
        shutil.copyfile(fnfn,ofn)

        asyncio.create_task(self.add_world_key_to_net(name,pub_key_b))

    async def add_world_key_to_net(self,name,pub_key_b):
        await self.set_person(name,pub_key_b)


    #@property
    def get_keys(self):
        res={}
        key_files = os.listdir(KEYDIR)
        world_key_fn = os.path.basename(PATH_WORLD_KEY)
        if not world_key_fn in key_files:
            self.log('[first time?] adding world key:',world_key_fn)
            self.add_world_key()

        for priv_key_fn in key_files:
            if (not priv_key_fn.startswith('.') or not priv_key_fn.endswith('.key')): continue
            fnfn = os.path.join(KEYDIR,priv_key_fn)
            print(fnfn)
            priv_key=load_privkey_fn(fnfn)
            #pub_key=priv_key.public_key()
            name_key= '.'.join(priv_key_fn.split('.')[1:-1])
            res[name_key] = priv_key
            self.log(f'[API] found key {name_key} and added to keychain')
        return res
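
    # Private key files follow the '.{name}.key' naming pattern (e.g. '.world.key'
    # for the shared world channel), which is why get_keys() above recovers the
    # name from the middle segments of the filename.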

    @property
    def keys(self):
        if not hasattr(self,'_keys'):
            self.load_keys()
        return self._keys

    def load_keys(self):
        self._keys = self.get_keys()


    async def append_data(self,uri,bdata):
        self.log(f'appending to uri {uri}, data {bdata}')
        if type(bdata)!=bytes and type(bdata)==str:
            bdata=bdata.encode('utf-8')
            self.log(f'--> encoded bdata to {bdata}')

        # get blob so far
        sofar = await self.get(uri,decode_data=False)
        self.log(f'sofar = {sofar}')

        newval = bdata if sofar is None else sofar+BSEP+bdata
        self.log(f'newval = {newval}')

        res = await self.set(uri,newval,encode_data=False)
        if res:
            length = newval.count(BSEP)+1
            return {'success':'Length increased to %s' % length}
        return {'error':'Could not append data'}
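
    # append_data() grows a BSEP-joined blob: after three appends the stored value
    # looks like b'<a>' + BSEP + b'<b>' + BSEP + b'<c>' (placeholders, not real ids),
    # which get_post_ids() below splits back apart on BSEP.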

    async def upload(self,filename,file_id=None, uri='/file/',uri_part='/part/'):
        import sys

        if not file_id: file_id = get_random_id()
        part_ids = []
        part_keys = []
        parts=[]
        PARTS=[]
        buffer_size=100
        for part in bytes_from_file(filename,chunksize=1024*2):
            part_id = get_random_id()
            part_ids.append(part_id)
            part_key='/part/'+part_id
            part_keys.append(part_key)
            parts.append(part)
            # PARTS.append(part)

            # self.log('part!:',sys.getsizeof(part))
            #self.set(part_key,part)

            if len(parts)>=buffer_size:
                # self.log('setting...')
                await self.set(part_keys,parts)
                part_keys=[]
                PARTS+=parts
                parts=[]

        # set all parts
        #self.set(part_keys,PARTS)
        # self.log('# parts:',len(PARTS))
        if parts and part_keys:
            await self.set(part_keys, parts)

        # how many parts?
        # self.log('# pieces!',len(part_ids))

        file_store = {'ext':os.path.splitext(filename)[-1][1:], 'parts':part_ids}
        # self.log('FILE STORE??',file_store)
        await self.set_json(uri+file_id,file_store)

        # file_store['data'].seek(0)
        file_store['id']=file_id
        return file_store

    async def download(self,file_id):
        self.log('file_id =',file_id)
        file_store = await self.get_json_val('/file/'+file_id)
        self.log('file_store =',file_store)
        if file_store is None: return

        self.log('file_store!?',file_store)
        keys = ['/part/'+x for x in file_store['parts']]

        #time,pieces,pub,sign = await self.get_json_val(keys)
        pieces = await self.get_json_val(keys)
        self.log('pieces = ',pieces)
        file_store['parts_data']=pieces
        return file_store

    async def flush(self):
        #self.log('saving back to db file...')
        node = await self.node
        node.storage.dump()
        # self.log('DONE saving back to db file...')


    async def post(self,data,channel,add_to_outbox=True):
        post_id=get_random_id()
        #tasks = []

        self.log(f'api.post({data},add_to_outbox={add_to_outbox}) --> ...')
        # stop

        # ## add to inbox
        self.load_keys()
        author_privkey = self.keys[data.get('author')]

        self.log('ADDING TO CHANNEL??',channel)
        pubkey_channel = self.keys[channel].public_key()

        ## 1) STORE ACTUAL CONTENT OF POST UNDER CENTRAL POST URI
        # HAS NO CHANNEL: just one post/msg in a sea of many
        # e.g. /post/5e4a355873194399a5b356def5f40ff9
        # does not reveal who can decrypt it
        uri = '/post/'+post_id
        json_res = await self.set_json(
            uri,
            data, #data_channel,
            encode_data=True,
            encrypt_for_pubkey=pubkey_channel,
            private_signature_key=author_privkey
        )
        self.log(f'json_res() <- {json_res}')


        ## 2) Store under the channels a reference to the post,
        # as a hint they may be able to decrypt it with one of their keys
        add_post_id_as_hint_to_channels = [f'/inbox/{channel}']
        if add_to_outbox:
            un=data.get('author')
            if un:
                add_post_id_as_hint_to_channels += [f'/outbox/{un}']
        tasks = [
            self.append_data(hint_uri,post_id) for hint_uri in add_post_id_as_hint_to_channels
        ]
        res = await asyncio.gather(*tasks)
        if res and all([(d and 'success' in d) for d in res]):
            return {'success':'Posted! %s' % post_id, 'post_id':post_id}

        return {'error':'Post unsuccessful'}

        # append_res=await self.append_json(f'/inbox/{channel}',post_id)
        # self.log(f'json_res.append_json({channel}) <- {append_res}')
        # #tasks.append(task)
        #
        # # add to outbox
        # if add_to_outbox:
        #     un=data.get('author')
        #     if un:
        #         append_res = await self.append_json(f'/outbox/{un}', post_id)
        #         self.log(f'json_res.append_json({un}) <- {append_res}')
        #         #tasks.append(task)
        #
        #asyncio.create_task(self.flush())
        # return {'success':'Posted! %s' % post_id, 'post_id':post_id}
        #return {'error':'Post failed'}

    async def get_json_val(self,uri,decode_data=True):
        res=await self.get_json(uri,decode_data=decode_data)
        self.log('get_json_val() got from get_json():',res,type(res))

        r=res
        if type(res) == dict:
            r=res.get('val',None) if res is not None else res
        elif type(res) == list:
            r=[(x.get('val') if type(x)==dict else x) for x in res if x is not None]
        elif type(res) == str:
            r=json.loads(res)
        self.log(f'get_json_val() --> {r} {type(r)}')
        return r

    async def get_post(self,post_id):
        self.log(f'api.get_post({post_id}) ?')
        post_json = await self.get_json('/post/'+post_id, decode_data=True)
        self.log(f'api.get_post({post_id}) --> {post_json}')
        return post_json

    async def get_post_ids(self,uri='/inbox/world'):
        ## GET POST IDS
        self.log(f'api.get_post_ids(uri={uri}) ?')
        index = await self.get(uri,decode_data=False)
        self.log(f'api.get_post_ids(uri={uri}) <-- api.get()',index)
        if not index: return []

        #index = json.loads(base64.b64decode(index).decode())
        index = [x.decode('utf-8') for x in index.split(BSEP)]

        if index is None: return []
        if type(index)!=list: index=[index]
        index = [x for x in index if x is not None]

        self.log(f'api.get_post_ids({uri}) --> {index}')
        return index

    async def get_posts(self,uri='/inbox/world'):
        # get IDs
        post_ids = await self.get_post_ids(uri)

        # get posts
        posts = [self.get_post(post_id) for post_id in post_ids]
        return await asyncio.gather(*posts)



## func
def bytes_from_file(filename,chunksize=8192):
    with open(filename, 'rb') as f:
        while True:
            piece = f.read(chunksize)
            if not piece:
                break
            yield piece

def get_random_id():
    import uuid
    return uuid.uuid4().hex
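

# Minimal end-to-end usage sketch (not called anywhere): it assumes a reachable
# bootstrap node in NODES_PRIME and that private keys for both the author and the
# channel already sit in KEYDIR; the names and message below are illustrative only.
async def _example_post_and_read(author='alice', channel='world'):
    api = Api()
    await api.register(author)                                   # create keys or log back in
    await api.post({'author': author, 'val': 'hello'}, channel)  # encrypted for the channel key
    return await api.get_posts(f'/inbox/{channel}')              # decrypt whatever our keys unlock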


def test_api():

    # api.set(['a','b','c'],[1,2,3])
    async def run():
        api = Api()
        # await api.connect()

        #await api.set_json('whattttt',{'aaaaa':12222})
        #await api.set_json('whattttt',[111])
        #await api.set_json('whattttt',[111])

        #val = await api.get_json('whattttt')

        server = await _getdb(api)
        await server.set('a',1)
        print(await server.get('a'))
        await asyncio.sleep(5)
        await server.set('a',2)

        print(await server.get('a'))
        await asyncio.sleep(5)

        await server.set('a',str([2,3,4,5]))
        print(await server.get('a'))
        await asyncio.sleep(5)

        val = await server.get('a')

        print(f'VAL = {val}')
        return val

    asyncio.run(run())


def test_basic():
    import asyncio
    from kademlia.network import Server

    #api = Api()

    # not working!
    #api.set_json('my key',{'a':'value'})

    async def run():
        # Create a node and start listening on port 5678
        node = Server()
        await node.listen(5678)

        # Bootstrap the node by connecting to other known nodes, in this case
        # replace 123.123.123.123 with the IP of another node and optionally
        # give as many ip/port combos as you can for other nodes.
        await node.bootstrap(NODES_PRIME)

        # set a value for the key "my-key" on the network
        await node.set("my-key", "my awesome value")
        await node.set("my-key", "my awesome value2")
        await node.set("my-key", "my awesome value3")

        # get the value associated with "my-key" from the network
        result = await node.get("my-key")

        print(result)
        return result

    res = asyncio.run(run())
    print('res = ',res)
    # res = asyncio.run(node.set(key,value))
    # print(res)


def test_provided_eg():
    import asyncio
    from kademlia.network import Server

    async def run():
        # Create a node and start listening on port 5678
        node = Server()
        await node.listen(5678)

        # Bootstrap the node by connecting to other known nodes, in this case
        # replace 123.123.123.123 with the IP of another node and optionally
        # give as many ip/port combos as you can for other nodes.
        await node.bootstrap(NODES_PRIME)

        # set a value for the key "my-key" on the network
        await node.set("my-key", "my awesome value")

        # get the value associated with "my-key" from the network
        result = await node.get("my-key")

        print(result)

    asyncio.run(run())


async def lonely_selfless_node():
    from api import Api,PORT_LISTEN
    API = Api()
    return await API.connect_forever(8467)


def boot_lonely_selfless_node(port=8467):
    API = Api()
    asyncio.run(API.connect_forever(port))


def init_entities(usernames = ['world']):
    ## make global entity called world

    #loop=asyncio.new_event_loop()

    async def register(username):
        API = Api()
        #await API.connect_forever()
        #privkey,pubkey = await API.register(username,just_return_keys=True)

        private_key = generate_rsa_key()
        public_key = private_key.public_key()
        pem_private_key = serialize_privkey(private_key)
        pem_public_key = serialize_pubkey(public_key)

        privkey_fn = os.path.join(KEYDIR_BUILTIN,f'.{username}.key.priv')
        pubkey_fn = os.path.join(KEYDIR_BUILTIN,f'.{username}.key.pub')
        with open(privkey_fn,'wb') as of: of.write(pem_private_key)
        with open(pubkey_fn,'wb') as of: of.write(pem_public_key)
        # print(API.keys)

        await API.set_person(username,pem_public_key)
        print('done')

    for un in usernames:
        asyncio.run(register(un))


def split_binary(data, sep=BSEP):
    seplen = len(sep)
    res=[]
    stack=None
    print('!!',data[:4],seplen,sep)

    cutoffs=[]
    for i in range(0, len(data)):
        seg=data[i:i+seplen]
        print(i,seg,sep,stack)
        if seg==sep:
            # split_piece = data[:i+seplen]
            print('!')
            cutoff_lasttime = cutoffs[-1][-1] if cutoffs else 0
            cutoff = (cutoff_lasttime-seplen, i)
            print(cutoff)
            cutoffs.append(cutoff)
            stack = data[cutoff[0] if cutoff[0]>0 else 0: cutoff[1]]
            print(stack)
            res += [stack]
            stack = None

    cutoff_lasttime = cutoffs[-1][-1] if cutoffs else 0
    print(cutoff_lasttime)
    stack = data[cutoff_lasttime+seplen :]
    res+=[stack]
    print('RES:',res)
    return res


if __name__=='__main__':
    init_entities()

    # res = split_binary(b'eeeehey||||whatsueep',b'||||')
    # print(res)