import os, time, sys, logging
from pathlib import Path
import asyncio

# handler = logging.StreamHandler()
# formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
# handler.setFormatter(formatter)
# logger = logging.getLogger(__file__)
# logger.addHandler(handler)
# logger.setLevel(logging.DEBUG)

sys.path.append('../p2p')
# logger.info(os.getcwd(), sys.path)

try:
    from .crypto import *
    from .p2p import *
    from .kad import *
except ModuleNotFoundError:
    from crypto import *
    from p2p import *
    from kad import KadServer

from functools import partial

# works better with tor?
import json
jsonify = json.dumps

# Start server

DEBUG = True
UPLOAD_DIR = 'uploads/'
ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg', 'gif'}

# PORT_SPEAK = 8468
PORT_LISTEN = 5637


# Api Functions
from threading import Thread

def start_selfless_thread():
    # NOTE: PORT_SPEAK is commented out above, so calling this will raise a
    # NameError unless it is defined; boot_selfless_node is presumably provided
    # by the p2p module imported with * above.
    async def _go():
        loop = asyncio.get_event_loop()
        return boot_selfless_node(port=PORT_SPEAK, loop=loop)
    return asyncio.run(_go())


async def _getdb(self=None, port=PORT_LISTEN):
    """Create a throwaway KadServer, listen on `port`, bootstrap it against
    NODES_PRIME, and return it. `self` is only used for logging."""
    if self: self.log('starting server..')
    node = KadServer()  # storage=HalfForgetfulStorage())

    if self: self.log('listening..')
    await node.listen(port)

    if self: self.log('bootstrapping server..')
    await node.bootstrap(NODES_PRIME)
    return node

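# A minimal sketch of using _getdb() directly, outside the Api wrapper.
# Hedged: it assumes a node from NODES_PRIME is reachable, and 'example-key'
# is only an illustrative key, not one the app actually uses.
#
#   async def _demo():
#       node = await _getdb()
#       await node.set('example-key', 'example-value')
#       value = await node.get('example-key')
#       node.stop()
#       return value
#
#   print(asyncio.run(_demo()))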

def logg(x):
    print(x)

class Api(object):
    def __init__(self, app=None):
        self.app = app
        self.app_storage = self.app.store if app else {}
        self.log = self.app.log if app else logg

        # self.log('starting selfless daemon...')
        # self.selfless = Thread(target=start_selfless_thread)
        # self.selfless.daemon = True
        # self.selfless.start()

        # connect?
        # self._node=self.connect()

    # @property
    # def node(self):
    #     if not hasattr(self,'_node'):
    #         self._node=self.connect()
    #     return self._node

    # def connect(self,port=PORT_LISTEN):
    #     self.log('connecting...')
    #     async def _connect():
    #         return await _getdb(self,port)
    #     return asyncio.run(_connect())

    def get(self, key_or_keys):
        async def _get():
            # self.log('async _get()',self.node)
            node = await _getdb(self)
            # node=self.node

            if type(key_or_keys) in {list, tuple, dict}:
                keys = key_or_keys
                res = await asyncio.gather(*[node.get(key) for key in keys])
                # log('RES?',res)
            else:
                key = key_or_keys
                res = await node.get(key)

            node.stop()
            return res

        return asyncio.run(_get())
        # return loop.create_task(_get())

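    # Usage sketch for get(). Hedged: assumes the DHT is reachable and that the
    # keys below were set earlier; '/person/alice' and '/part/a1' are illustrative.
    #
    #   api = Api()
    #   one  = api.get('/person/alice')            # single key  -> single value
    #   many = api.get(['/part/a1', '/part/a2'])   # list of keys -> list of values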
    def get_json(self, key_or_keys):
        res = self.get(key_or_keys)
        self.log('GET_JSON', res)
        if type(res) == list:
            # self.log('is a list!',json.loads(res[0]))
            return [None if x is None else json.loads(x) for x in res]
        else:
            # log('RES!!!',res)
            return None if res is None else json.loads(res)

    def set(self, key_or_keys, value_or_values):
        async def _go():
            # self.log('async _set()',self.node)
            # node=self.node
            node = await _getdb(self)

            if type(key_or_keys) in {list, tuple, dict}:
                keys = key_or_keys
                values = value_or_values
                assert len(keys) == len(values)
                res = await asyncio.gather(*[node.set(key, value) for key, value in zip(keys, values)])
                # self.log('RES?',res)
            else:
                key = key_or_keys
                value = value_or_values
                res = await node.set(key, value)  #'this is a test')

            node.stop()
            return res

        # loop=asyncio.get_event_loop()
        # loop.create_task(_set())

        # async def _set(key,value):
        #     import asyncio
        #     from kademlia.network import Server
        #
        #     # Create a node and start listening on port 5678
        #     node = Server()
        #     await node.listen(5678)
        #
        #     # Bootstrap the node by connecting to other known nodes, in this case
        #     # replace 123.123.123.123 with the IP of another node and optionally
        #     # give as many ip/port combos as you can for other nodes.
        #     await node.bootstrap(NODES_PRIME)
        #
        #     # set a value for the key "my-key" on the network
        #     await node.set(key, value)
        #
        #     # get the value associated with "my-key" from the network
        #     result = await node.get(key)
        #     print(result)
        #     return result

        res = asyncio.run(_go(), debug=True)
        # res = asyncio.run(_set(key_or_keys,value_or_values), debug=True)
        print('res = ', res)
        return res

    def set_json(self, key, value):
        value_json = jsonify(value)
        # self.log('OH NO!',sys.getsizeof(value_json))
        return self.set(key, value_json)

    def has(self, key):
        return self.get(key) is not None

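    # JSON round-trip sketch (illustrative key and value; assumes the DHT is
    # reachable):
    #
    #   api = Api()
    #   api.set_json('example-key', {'n': 1, 'tags': ['a', 'b']})
    #   api.get_json('example-key')   # -> {'n': 1, 'tags': ['a', 'b']}
    #   api.has('example-key')        # -> True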

    ## PERSONS

    def get_person(self, username):
        return self.get_json('/person/' + username)

    def set_person(self, username, public_key):
        pem_public_key = save_public_key(public_key, return_instead=True)
        obj = {'name': username, 'public_key': pem_public_key.decode()}
        self.set_json('/person/' + username, obj)

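    # Sketch of the record set_person() stores (the PEM below is a stand-in,
    # not real key material):
    #
    #   '/person/alice' -> '{"name": "alice", "public_key": "-----BEGIN PUBLIC KEY-----\\n..."}'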
    ## Register

    def register(self, name, passkey):
        if not (name and passkey):
            self.log('error! name and passkey not set')
            return {'error': 'Register failed'}

        person = self.get_person(name)
        if person is not None:
            self.log('error! person exists')
            return {'error': 'Register failed'}

        private_key, public_key = new_keys(password=passkey, save=False)
        pem_private_key = save_private_key(private_key, password=passkey, return_instead=True)
        pem_public_key = save_public_key(public_key, return_instead=True)

        self.app_storage.put('_keys',
            private=str(pem_private_key.decode()),
            public=str(pem_public_key.decode()))  # (private_key,password=passkey)
        self.set_person(name, public_key)

        self.log('success! Account created')
        return {'success': 'Account created', 'username': name}

    def load_private_key(self, password):
        if not self.app_storage.exists('_keys'): return None
        pem_private_key = self.app_storage.get('_keys').get('private')
        try:
            return load_private_key(pem_private_key.encode(), password)
        except ValueError as e:
            self.log('!!', e)
            return None

    ## LOGIN

    def login(self, name, passkey):
        # verify input
        if not (name and passkey):
            return {'error': 'Name and password required'}

        # try to load private key
        private_key = self.load_private_key(passkey)
        if private_key is None:
            return {'error': 'You have never registered on this device'}

        # see if user exists
        person = self.get_person(name)
        self.log(person)
        if person is None:
            return {'error': 'Login failed'}

        # verify keys
        person_public_key_pem = person['public_key']
        public_key = load_public_key(person_public_key_pem.encode())
        real_public_key = private_key.public_key()

        # log('PUBLIC',public_key.public_numbers())
        # log('REAL PUBLIC',real_public_key.public_numbers())

        if public_key.public_numbers() != real_public_key.public_numbers():
            return {'error': 'keys do not match!'}
        return {'success': 'Login successful', 'username': name}

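    # Register/login flow sketch. Hedged: illustrative credentials, and it
    # assumes the DHT is reachable and app_storage behaves like a JsonStore
    # with put/get/exists.
    #
    #   api = Api(app)
    #   api.register('alice', 'hunter2')   # stores '/person/alice' plus local PEM keys
    #   api.login('alice', 'hunter2')      # -> {'success': 'Login successful', 'username': 'alice'}
    #   api.login('alice', 'wrong')        # private key fails to decrypt -> error dict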
    def append_json(self, key, data):
        sofar = self.get_json(key)
        if sofar is None: sofar = []
        new = sofar + ([data] if type(data) != list else data)
        if self.set_json(key, new):
            return {'success': 'Length increased to %s' % len(new)}
        return {'error': 'Could not append json'}

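    # Append sketch: list-valued keys (e.g. channel indexes) grow by
    # read-modify-write, so concurrent writers could still clobber each other;
    # the ids below are illustrative.
    #
    #   api.append_json('/posts/channel/earth', 'abc123')
    #   api.append_json('/posts/channel/earth', ['def456', 'ghi789'])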
    def upload(self, filename, file_id=None, uri='/file/', uri_part='/part/'):
        if not file_id: file_id = get_random_id()
        part_ids = []
        part_keys = []
        parts = []
        PARTS = []
        buffer_size = 100
        for part in bytes_from_file(filename, chunksize=1024*7):
            part_id = get_random_id()
            part_ids.append(part_id)
            part_key = uri_part + part_id
            part_keys.append(part_key)
            parts.append(part)
            # PARTS.append(part)

            self.log('part!:', sys.getsizeof(part))
            # self.set(part_key,part)

            if len(parts) >= buffer_size:
                self.log('setting...')
                self.set(part_keys, parts)
                part_keys = []
                PARTS += parts
                parts = []

        # set any remaining parts
        # self.set(part_keys,PARTS)
        self.log('# parts:', len(PARTS))
        if parts and part_keys: self.set(part_keys, parts)

        # how many parts?
        self.log('# pieces!', len(part_ids))

        file_store = {'ext': os.path.splitext(filename)[-1][1:], 'parts': part_ids}
        self.log('FILE STORE??', file_store)
        self.set_json(uri + file_id, file_store)

        # file_store['data'].seek(0)
        file_store['id'] = file_id
        return file_store

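    # Chunked-upload sketch. Hedged: 'photo.png' is a placeholder path and the
    # DHT must be reachable. The file is split into ~7 KB chunks, each stored
    # under '/part/<random id>', with an index record stored under '/file/<id>'.
    #
    #   api = Api()
    #   meta = api.upload('photo.png')
    #   meta['id'], meta['ext'], len(meta['parts'])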
    def download(self, file_id):
        file_store = self.get_json('/file/' + file_id)
        if file_store is None: return

        self.log('file_store!?', file_store)
        keys = ['/part/' + x for x in file_store['parts']]
        pieces = self.get(keys)
        file_store['parts_data'] = pieces
        return file_store

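    # Reassembly sketch. Hedged: assumes every part is still retrievable from
    # the DHT and comes back as bytes.
    #
    #   store = api.download(meta['id'])
    #   with open('copy.' + store['ext'], 'wb') as f:
    #       for piece in store['parts_data']:
    #           f.write(piece)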
    def post(self, data):
        post_id = get_random_id()
        res = self.set_json('/post/' + post_id, data)
        self.log('Api.post() got data back from set_json():', res)

        # ## add to channels
        self.append_json('/posts/channel/earth', post_id)

        # ## add to user
        un = data.get('author')
        if un: self.append_json('/posts/author/' + un, post_id)

        if res:
            return {'success': 'Posted! %s' % post_id, 'post_id': post_id}
        return {'error': 'Post failed'}

    def get_post(self, post_id):
        return self.get_json('/post/' + post_id)

    def get_posts(self, uri='/channel/earth'):
        index = self.get_json('/posts' + uri)
        if index is None: return []
        data = self.get_json(['/post/' + x for x in index])
        return data

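    # Posting flow sketch (illustrative post body; assumes the DHT is reachable):
    #
    #   api.post({'author': 'alice', 'text': 'hello world'})
    #   api.get_posts('/channel/earth')   # posts in the default channel
    #   api.get_posts('/author/alice')    # posts indexed under an author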

## CREATE

def get_random_id():
    import uuid
    return uuid.uuid4().hex


def allowed_file(filename):
    return '.' in filename and \
        filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS


def get_random_filename(filename):
    import uuid
    fn = uuid.uuid4().hex
    return (fn[:3], fn[3:] + os.path.splitext(filename)[-1])


# NOTE: the view-style functions below (upload, download, get_followers,
# get_follows, get_posts, get_post) look left over from an earlier web-app /
# graph-database version: they reference names (request, status, Media, app,
# send_from_directory, Person, Post, G) that do not appear to be defined or
# imported in this module, so they will fail if called as-is.

def upload():
    files = request.files
    # check if the post request has the file part
    if 'file' not in request.files:
        return {'error': 'No file found'}, status.HTTP_204_NO_CONTENT

    file = request.files['file']

    # if user does not select file, browser also
    # submit an empty part without filename
    print('filename!', file.filename)
    if file.filename == '':
        return {'error': 'No filename'}, status.HTTP_206_PARTIAL_CONTENT

    if file and allowed_file(file.filename):
        print('uploading file...')
        # prefix,filename = get_random_filename(file.filename) #secure_filename(file.filename)
        # odir = os.path.join(app.config['UPLOAD_DIR'], os.path.dirname(filename))
        # if not os.path.exists(odir):
        ext = os.path.splitext(file.filename)[-1]
        media = Media(ext=ext).save()
        uid = media.uid
        filename = media.filename
        prefix, fn = filename.split('/')

        folder = os.path.join(app.config['UPLOAD_DIR'], prefix)
        if not os.path.exists(folder): os.makedirs(folder)
        file.save(os.path.join(folder, fn))

        # return redirect(url_for('uploaded_file', filename=filename))
        return {'media_uid': uid, 'filename': filename}, status.HTTP_200_OK

    return {'error': 'Upload failed'}, status.HTTP_406_NOT_ACCEPTABLE


def download(prefix, filename):
    filedir = os.path.join(app.config['UPLOAD_DIR'], prefix)
    print(filedir, filename)
    return send_from_directory(filedir, filename)


### READ

def get_followers(name=None):
    person = Person.match(G, name).first()
    data = [p.data for p in person.followers]
    return jsonify(data)


def get_follows(name=None):
    person = Person.match(G, name).first()
    data = [p.data for p in person.follows]
    return jsonify(data)


def get_posts(name=None):
    if name:
        person = Person.nodes.get_or_none(name=name)
        data = [p.data for p in person.wrote.all()] if person is not None else []
    else:
        data = [p.data for p in Post.nodes.order_by('-timestamp')]
    # print(data)

    return jsonify({'posts': data})


def get_post(id=None):
    post = Post.match(G, int(id)).first()
    data = post.data
    return jsonify(data)


# def bytes_from_file(filename, chunksize=8192//2):
#     with open(filename, "rb") as f:
#         while True:
#             chunk = f.read(chunksize)
#             if chunk:
#                 self.log(type(chunk), sys.getsizeof(chunk))
#                 yield chunk
#                 #yield from chunk
#             else:
#                 break

# def bytes_from_file(filename,chunksize=8192):
#     with open(filename,'rb') as f:
#         barray = bytearray(f.read())
#
#         for part in barray[0:-1:chunksize]:
#             self.log('!?',part)
#             yield bytes(part)


def bytes_from_file(filename, chunksize=8192):
    # Lazily read a file in binary chunks of up to `chunksize` bytes.
    with open(filename, 'rb') as f:
        while True:
            piece = f.read(chunksize)
            if not piece:
                break
            yield piece

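# Chunking sketch ('photo.png' is a placeholder path): re-joining the chunks
# reproduces the original file exactly.
#
#   chunks = list(bytes_from_file('photo.png', chunksize=1024*7))
#   assert b''.join(chunks) == open('photo.png', 'rb').read()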

# import sys
# def bytes_from_file(path,chunksize=8000):
#     ''' Given a path, return an iterator over the file
#     that lazily loads the file.
#     '''
#     path = Path(path)
#     bufsize = get_buffer_size(path)
#
#     with path.open('rb') as file:
#         reader = partial(file.read1, bufsize)
#         for chunk in iter(reader, bytes()):
#             _bytes = bytearray()
#             for byte in chunk:
#                 #if _bytes is None:
#                 #    _bytes=byte
#                 #else:
#                 _bytes.append(byte)
#
#                 if sys.getsizeof(_bytes) >= 8192:
#                     yield bytes(_bytes)  # .bytes()
#                     _bytes = bytearray()
#             if _bytes:
#                 yield bytes(_bytes)

# def get_buffer_size(path):
#     """ Determine optimal buffer size for reading files. """
#     st = os.stat(path)
#     try:
#         bufsize = st.st_blksize  # Available on some Unix systems (like Linux)
#     except AttributeError:
#         bufsize = io.DEFAULT_BUFFER_SIZE
#     return bufsize


def test_api():
    api = Api()
    # api.set(['a','b','c'],[1,2,3])
    api.set_json('whattttt', {'aaaaa': 12222})


def test_basic():
    import asyncio
    from kademlia.network import Server

    # api = Api()

    # not working!
    # api.set_json('my key',{'a':'value'})

    async def run():
        # Create a node and start listening on port 5678
        node = Server()
        await node.listen(5678)

        # Bootstrap the node by connecting to other known nodes, in this case
        # replace 123.123.123.123 with the IP of another node and optionally
        # give as many ip/port combos as you can for other nodes.
        await node.bootstrap(NODES_PRIME)

        # set a value for the key "my-key" on the network
        await node.set("my-key", "my awesome value")

        # get the value associated with "my-key" from the network
        result = await node.get("my-key")
        print(result)
        return result

    res = asyncio.run(run())
    print('res = ', res)
    # res = asyncio.run(node.set(key,value))
    # print(res)


def test_provided_eg():
    import asyncio
    from kademlia.network import Server

    async def run():
        # Create a node and start listening on port 5678
        node = Server()
        await node.listen(5678)

        # Bootstrap the node by connecting to other known nodes, in this case
        # replace 123.123.123.123 with the IP of another node and optionally
        # give as many ip/port combos as you can for other nodes.
        await node.bootstrap(NODES_PRIME)

        # set a value for the key "my-key" on the network
        await node.set("my-key", "my awesome value")

        # get the value associated with "my-key" from the network
        result = await node.get("my-key")
        print(result)

    asyncio.run(run())


if __name__ == '__main__':
    test_api()