diff --git a/p2p/api.py b/p2p/api.py deleted file mode 100644 index ecd441f..0000000 --- a/p2p/api.py +++ /dev/null @@ -1,1050 +0,0 @@ -import os,time,sys,logging -from pathlib import Path -import asyncio,time -# handler = logging.StreamHandler() -# formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') -# handler.setFormatter(formatter) -# logger = logging.getLogger(__file__) -# logger.addHandler(handler) -# logger.setLevel(logging.DEBUG) -sys.path.append('../p2p') -# logger.info(os.getcwd(), sys.path) -BSEP=b'||||||||||' -BSEP2=b'@@@@@@@@@@' -BSEP3=b'##########' -NODE_SLEEP_FOR=1 -PATH_WORLD_KEY='.world.key' - - -try: - from .crypto import * - from .p2p import * - from .kad import * -except (ModuleNotFoundError,ImportError): - from crypto import * - from p2p import * - from kad import * -from pathlib import Path -from functools import partial - -# works better with tor? -import json -jsonify = json.dumps - -# Start server - -DEBUG = True -UPLOAD_DIR = 'uploads/' -ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg', 'gif'} - -# PORT_SPEAK = 8468 -PORT_LISTEN = 5639 - -# Api Functions -from threading import Thread - - -NODES_PRIME = [("128.232.229.63",8467), ("68.66.241.111",8467)] -#68.66.224.46 - -from pathlib import Path -home = str(Path.home()) - -KEYDIR = os.path.join(home,'.komrade','.keys') -if not os.path.exists(KEYDIR): os.makedirs(KEYDIR) - -KEYDIR_BUILTIN = '.' - -class NetworkStillConnectingError(OSError): pass - -async def _getdb(self=None,port=PORT_LISTEN): - from kademlia.network import Server - - if self: - self.log('starting server..') - - import os - if self: self.log(os.getcwd()) - node = Server(log=self.log if self else None) #fn='../p2p/data.db',log=(self.log if self else print))) - - try: - if self: self.log('listening..') - await node.listen(port) - except OSError: - raise NetworkStillConnectingError('Still connecting...') - #await asyncio.sleep(3) - - if self: self.log('bootstrapping server..') - await node.bootstrap(NODES_PRIME) - - if self: node.log = self.log - self.log('NODE:',node) - - # if self and self.app: - # self.app.close_dialog() - - return node - -def logg(*x): - print(*x) - -class Api(object): - def __init__(self,user=None,log=None,app=None): - self.log = log if log is not None else logg - self.username = user - self.app=app - - def private_key(self): - if self.username: - pass - - async def connect_forever(self,port=PORT_LISTEN,save_every=60): - try: - i = 0 - self._node = await self.connect(port=port) - while True: - if not i%90: self.log(f'Node status (tick {i}): {self._node}') - if i and not i%save_every: await self.flush() - i += 1 - await asyncio.sleep(NODE_SLEEP_FOR) - # asyncio.sleep(0) - except (asyncio.CancelledError,KeyboardInterrupt) as e: - self.log('P2P node cancelled', e) - await self.flush() - finally: - # when canceled, print that it finished - self.log('P2P node shutting down') - pass - - - @property - async def node(self): - # while not hasattr(self,'_node'): - # self.log('[API] waiting forr connection...') - # await asyncio.sleep(1) - # return self._node - - if not hasattr(self,'_node'): - await self.connect() - self._node.log=self.log - return self._node - - async def connect(self,port=PORT_LISTEN): - if self.app: self.app.open_dialog('hello?') - self.log('connecting...') - node = await _getdb(self,port) - self.log(f'connect() has node {node}') - self._node = node - return node - - - - async def get(self,key_or_keys,decode_data=True): - self.log(f'api.get({key_or_keys},decode_data={decode_data}) --> ...') - async def 
_get(): - self.log(f'api._get({key_or_keys},decode_data={decode_data}) --> ...') - node=await self.node - res=None - - if type(key_or_keys) in {list,tuple,dict}: - keys = key_or_keys - self.log('keys is plural',keys) - res =[] - for key in keys: - val = None - - # while not val: - self.log('trying again...') - val = await node.get(key) - self.log('got',val) - #asyncio.sleep(1) - - self.log(f'val for {key} = {val} {type(val)}') - if decode_data: - self.log(f'api._get() decoding data {keys} -> {val} {type(val)}') - val = await self.decode_data(val) - self.log(f'api._get() got back decodied data {keys} -> {val} {type(val)}') - res+=[val] - #res = await asyncio.gather(*tasks) - else: - key=key_or_keys - # self.log('keys is singular',key) - val = await node.get(key) - if decode_data: - self.log(f'api._get() decoding data {key} -> {val} {type(val)}') - val = await self.decode_data(val) - self.log(f'api._get() got back decodied data {key} -> {val} {type(val)}') - # self.log('wtf is val =',val) - res=val - - # self.log('wtf is res =',res) - - self.log(f'_get({key_or_keys}) --> {res}') - return res - return await _get() - - # async def get(self,key_or_keys,decode_data=True): - # self.log(f'api.get({key_or_keys},decode_data={decode_data}) --> ...') - # async def _get(): - # self.log(f'api._get({key_or_keys},decode_data={decode_data}) --> ...') - # node=await self.node - # res=None - - # if type(key_or_keys) in {list,tuple,dict}: - # keys = key_or_keys - # self.log('keys is plural',keys) - # res =[] - # for key in keys: - # val = None - - # # while not val: - # self.log('trying again...') - # val = await node.get(key) - # self.log('got',val) - # asyncio.sleep(1) - - # self.log(f'val for {key} = {val} {type(val)}') - # if decode_data: - # self.log(f'api._get() decoding data {keys} -> {val} {type(val)}') - # val = await self.decode_data(val) - # self.log(f'api._get() got back decodied data {keys} -> {val} {type(val)}') - # res+=[val] - # #res = await asyncio.gather(*tasks) - # else: - # key=key_or_keys - # self.log('keys is singular',key) - # val = await node.get(key) - # if decode_data: - # self.log(f'api._get() decoding data {key} -> {val} {type(val)}') - # val = await self.decode_data(val) - # self.log(f'api._get() got back decodied data {key} -> {val} {type(val)}') - # self.log('wtf is val =',val) - # res=val - - # self.log('wtf is res =',res) - - # self.log(f'_get({key_or_keys}) --> {res}') - # return res - # return await _get() - - def encode_data(self,val,sep=BSEP,sep2=BSEP2,do_encrypt=False,encrypt_for_pubkey=None,private_signature_key=None): - assert type(val)==bytes - """ - What do we want to store with - - 1) [Encrypted payload:] - 1) Timestamp - 2) Public key of sender - 3) Public key of recipient - 4) AES-encrypted Value - 2) [Decryption tools] - 1) AES-decryption key - 2) AES decryption IV value - 5) Signature of value by author - """ - import time - timestamp=time.time() - - self.log(f"""api.encode_data( - val={val}, - sep={sep}, - sep2={sep2}, - do_encrypt={do_encrypt}, - encrypt_for_pubkey={encrypt_for_pubkey}, - private_signature_key={private_signature_key})""") - - # check input - if not encrypt_for_pubkey: - raise Exception('we need a receiver !!') - # return None - - # convert val to bytes - # if type(val)!=bytes: val = bytes(val,'utf-8') - # value_bytes=base64.b64encode(val) - value_bytes = val - - # sign - private_signature_key = private_signature_key if private_signature_key is not None else self.private_key - signature = sign(value_bytes, private_signature_key) - 
public_sender_key = private_signature_key.public_key() - sender_pubkey_b = serialize_pubkey(public_sender_key) - - # Verify! - self.log(f'''encode_data().verify_signature( - signature={signature} - value={value_bytes} - sender_pubkey={sender_pubkey_b}''') - authentic = verify_signature(signature, value_bytes, public_sender_key) - - if not authentic: - raise Exception('message is inauthentic for set??' +str(authentic)) - return None - - # encrypt? - encrypt_for_pubkey_b = serialize_pubkey(encrypt_for_pubkey) - #time_b=base64.b64encode(str(timestamp).encode('utf-8')) #.encode() - time_b=str(timestamp).encode('utf-8') - msg=value_bytes - - # whole binary package - WDV = [ - time_b, - sender_pubkey_b, - encrypt_for_pubkey_b, - msg, - signature - ] - payload = sep2.join(WDV) - - res = aes_rsa_encrypt(payload,encrypt_for_pubkey) - if res is None: - raise Exception('encryption result does not exist') - return None - payload_encr_aes, payload_encr_aes_key, payload_encr_aes_iv = res - - decryption_tools = sep2.join([ - payload_encr_aes_key, - payload_encr_aes_iv - ]) - - final_packet = sep.join([ - payload_encr_aes, - decryption_tools - ]) - - self.log('FINAL PACKET:',final_packet,type(final_packet)) - return final_packet - - - - async def decode_data(self,entire_packet_orig,sep=BSEP,private_key=None,sep2=BSEP2): - if entire_packet_orig is None: return entire_packet_orig - self.log(f'decode_data({entire_packet_orig})...') - import binascii - entire_packet = entire_packet_orig - - #self.log('PACKED =',entire_packet,type(entire_packet)) - - #self.log('????',type(entire_packet)) - #self.log(entire_packet) - - # get data - try: - encrypted_payload, decryption_tools = entire_packet.split(sep) #split_binary(entire_packet, sep=sep) #entire_packet.split(sep) - decryption_tools=decryption_tools.split(sep2) #split_binary(decryption_tools,sep=sep2) - except ValueError as e: - - self.log('!! decode_data() got incorrect format:',e) - self.log('packet =',entire_packet) - return entire_packet_orig - - - ### NEW FIRST LINE: Try to decrypt! - val=None - key_used=None - for keyname,privkey in self.keys.items(): - self.log(keyname,privkey,'??') - try: - # clicked! - val = aes_rsa_decrypt(encrypted_payload,privkey,*decryption_tools) - key_used=keyname - # this must mean this was the recipient - self.log(f'unlocked using key {keyname}!') - break - except ValueError as e: - self.log(keyname,'did not work!') #,privkey,pubkey) - pass - if not val: - raise Exception('Content not intended for us') - return None - - #stop - - ### THIRD LINE: SIGNATURE VERIFICATION - # can we decrypt signature? 
- val_array = val.split(sep2) - # self.log('val_array =',val_array) - time_b,sender_pubkey_b,receiver_pubkey_b,msg,signature = val_array - if not signature: - raise Exception('no signature!') - return None - sender_pubkey=load_pubkey(sender_pubkey_b) - authentic = verify_signature(signature,msg,sender_pubkey) - if not authentic: - raise Exception('inauthentic message') - return None - - - # ### FOURTH LINE: CONTENT ENCRYPTION - # if private_key is None: - # private_key=self.private_key_global - - WDV={ - 'time':float(time_b.decode('utf-8')), - 'val':msg.decode('utf-8'), - 'channel':key_used - # 'to':receiver_pubkey_b, - # 'from':sender_pubkey_b, - # 'sign':signature - } - - self.log('GOT WDV:',WDV) - return WDV - - - #,signature - - # async def set_on_channel(self,key_or_keys,value_or_values): - # tasks=[] - # if type(channel_or_channels) not in {list,tuple}: - # channels=[channel_or_channels] - # else: - # channels=channel_or_channels - - # for channel in channels: - # uri = - # tasks.append(self.set) - - # if type(channel_or_channels) == str: - # return await self.set(self,key_or_keys,value_or_values,channel_or_channels) - # elif type(channel_or_channels) == - - async def set(self,key_or_keys,value_or_values,private_signature_key=None,encode_data=True,encrypt_for_pubkey=None): - self.log(f'api.set({key_or_keys}) --> {type(value_or_values)}') - async def _set(): - # self.log('async _set()',self.node) - # node=self.node - #node=await _getdb(self) - node=await self.node - - def proc(key,value): - self.log(f'encodeing data for {key} -> {type(value)} ...') - if encode_data and encrypt_for_pubkey is not None and type(value)==bytes: - x = self.encode_data( - val = value, - do_encrypt=False, - encrypt_for_pubkey=encrypt_for_pubkey, - private_signature_key=private_signature_key - ) - self.log(f'got back encoded data for {key} -> {x} ...') - return x - else: - self.log(f'did not encode data for {key} -> {value} ...') - return value - - if type(key_or_keys) in {list,tuple,dict}: - keys = key_or_keys - values = value_or_values - assert len(keys)==len(values) - res=[] - for key,value in zip(keys,values): - newval = proc(key,value) - self.log(f'kvv (plural) <- {keys}:{value} -> {newval}') - await node.set(key,newval) - res+=[newval] - else: - key = key_or_keys - value = value_or_values - newval = proc(key,value) - self.log(f'kvv (singular) <- {key}:{value} -> {newval}') - res = newval - await node.set(key,newval) - - self.log(f'api.set(key={key_or_keys}, \ - res = {res})') - #node.stop() - # self.log('reconnecting ...',self._node) - #await self._node.stop() - #await self.connect() - return res - - return await _set() - - async def get_json(self,key_or_keys,decode_data=False): - - def jsonize_dat(dat_dict): - if type(dat_dict)==dict and 'val' in dat_dict: - self.log('is this json???',dat_dict['val'],'???') - dat_dict['val']=json.loads(dat_dict['val']) - #dat_dict['val']=json.loads(base64.b64decode(dat_dict['val']).decode('utf-8')) - return dat_dict - - def jsonize_res(res): - if not res: - return None - if type(res)==list: - return [jsonize_dat(d) for d in res] - else: - return jsonize_dat(res) - - res = await self.get(key_or_keys,decode_data=decode_data) - self.log('get_json() got from get() a',type(res)) - return jsonize_res(res) - - - - - - - - - async def set_json(self,key,value,private_signature_key=None,encode_data=True,encrypt_for_pubkey=None): - #def jsonize_dat(dat_dict): - #if type(dat_dict)==dict and 'val' in dat_dict: - # self.log('is this json???',dat_dict['val'],'???') - # 
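# --- editor's sketch (not part of the diff) --------------------------------
# A minimal, standalone illustration of the wire format that encode_data()
# and decode_data() agree on: five BSEP2-joined fields form the payload, and
# the payload plus the BSEP2-joined "decryption tools" (AES key + IV) are
# joined with BSEP.  Signing and AES/RSA encryption are stubbed out here; the
# real code routes the payload through sign()/verify_signature() and
# aes_rsa_encrypt()/aes_rsa_decrypt() from syfr.  pack()/unpack() are
# hypothetical helper names, not functions in this codebase.
import time

BSEP_X  = b'||||||||||'   # same values as BSEP / BSEP2 above
BSEP2_X = b'@@@@@@@@@@'

def pack(msg: bytes, sender_pub: bytes, receiver_pub: bytes,
         signature: bytes, aes_key: bytes, aes_iv: bytes) -> bytes:
    time_b = str(time.time()).encode('utf-8')
    payload = BSEP2_X.join([time_b, sender_pub, receiver_pub, msg, signature])
    tools = BSEP2_X.join([aes_key, aes_iv])
    # in the real code the payload is AES-encrypted before being framed
    return BSEP_X.join([payload, tools])

def unpack(packet: bytes) -> dict:
    payload, tools = packet.split(BSEP_X)
    aes_key, aes_iv = tools.split(BSEP2_X)
    time_b, sender_pub, receiver_pub, msg, signature = payload.split(BSEP2_X)
    return {'time': float(time_b.decode('utf-8')), 'val': msg,
            'from': sender_pub, 'to': receiver_pub, 'sign': signature,
            'aes': (aes_key, aes_iv)}
# NOTE: the framing assumes no field contains the separator byte runs, which
# holds for the sketch but is a real constraint on the stored values.
# ----------------------------------------------------------------------------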
dat_dict['val']=json.loads(dat_dict['val'].decode('utf-8')) - # #dat_dict['val']=json.loads(base64.b64decode(dat_dict['val']).decode('utf-8')) - # return dat_dict - - def prep_json(val): - if type(val)!=str: - val=json.dumps(value) - bval=val.encode('utf-8') - return bval - - - self.log(f'api.set_json({key}, {value} ...)') - json_b = prep_json(value) - self.log(f'bjson -> {json_b}') - set_res = await self.set( - key, - json_b, - private_signature_key=private_signature_key, - encode_data=encode_data, - encrypt_for_pubkey=encrypt_for_pubkey) - self.log(f'api.set_json() <-- {set_res}') - return set_res - - async def has(self,key): - val=await self.get(key) - return val is not None - - - ## PERSONS - async def get_person(self,username): - return await self.get('/pubkey/'+username,decode_data=False) - - async def set_person(self,username,pem_public_key): - # pem_public_key = save_public_key(public_key,return_instead=True) - #obj = {'name':username, 'public_key':pem_public_key} - # await self.set_json('/person/'+username,obj) - # keystr=base64.b64encode(pem_public_key).decode() - # self.log('keystr',type(keystr)) - await self.set('/pubkey/'+username,pem_public_key,encode_data=False) - # keystr=pem_public_key - # await self.set(b'/name/'+keystr,username,encode_data=False) - - - - - - - ## Register - async def register(self,name,passkey=None,just_return_keys=False): - # if not (name and passkey): return {'error':'Name and password needed'} - import kademlia - try: - person = await self.get_person(name) - except kademlia.network.CannotReachNetworkError: - return {'error':'Network disconnected'} - except NetworkStillConnectingError: - return {'error':'Network still connecting...'} - - keys = self.get_keys() - if person is not None: - self.log('register() person <-',person) - # try to log in - - self.log('my keys',keys) - if not name in keys: - self.log('!! person already exists') - return {'error':'Person already exists'} - - # test 3 conditions - privkey=keys[name] - pubkey=load_pubkey(person) - - if simple_lock_test(privkey,pubkey): - self.username=name - self.log('!! 
logging into',name) - return {'success':'Logging back in...'} - - private_key = generate_rsa_key() - public_key = private_key.public_key() - pem_private_key = serialize_privkey(private_key, password=passkey)# save_private_key(private_key,password=passkey,return_instead=True) - pem_public_key = serialize_pubkey(public_key) - - if just_return_keys: - return (pem_private_key,pem_public_key) - - # save pub key in db - await self.set_person(name,pem_public_key) - # save priv key on hardware - fn_privkey = os.path.join(KEYDIR,f'.{name}.key') - - self.log('priv key =',pem_private_key) - write_key_b(pem_private_key, fn_privkey) - - # good - return {'success':'Person created ...', 'username':name} - - - def load_private_key(self,password): - #if not self.app_storage.exists('_keys'): return {'error':'No login keys present on this device'} - pem_private_key=self.app_storage.get('_keys').get('private') - # self.log('my private key ====',pem_private_key) - try: - return {'success':load_privkey(pem_private_key,password)} - except ValueError as e: - self.log('!!',e) - return {'error':'Incorrect password'} - - def add_world_key(self,fn=PATH_WORLD_KEY): - import shutil - thisdir=os.path.dirname(__file__) - fnfn=os.path.join(thisdir,fn+'.priv') - self.log('getting',fnfn) - name='.'.join(os.path.basename(fn).split('.')[1:-1]) - - priv_key=load_privkey_fn(fnfn) - pub_key=priv_key.public_key() - pub_key_b=serialize_pubkey(pub_key) - - ofn=os.path.join(KEYDIR,f'.{name}.key') - shutil.copyfile(fnfn,ofn) - - asyncio.create_task(self.add_world_key_to_net(name,pub_key_b)) - - async def add_world_key_to_net(self,name,pub_key_b): - await self.set_person(name,pub_key_b) - - #@property - def get_keys(self): - res={} - key_files = os.listdir(KEYDIR) - world_key_fn = os.path.basename(PATH_WORLD_KEY) - if not world_key_fn in key_files: - self.log('[first time?] 
adding world key:',world_key_fn) - self.add_world_key() - - - for priv_key_fn in key_files: - if (not priv_key_fn.startswith('.') or not priv_key_fn.endswith('.key')): continue - fnfn = os.path.join(KEYDIR,priv_key_fn) - print(fnfn) - priv_key=load_privkey_fn(fnfn) - #pub_key=priv_key.public_key() - name_key= '.'.join(priv_key_fn.split('.')[1:-1]) - res[name_key] = priv_key - self.log(f'[API] found key {name_key} and added to keychain') - return res - - - @property - def keys(self): - if not hasattr(self,'_keys'): - self.load_keys() - return self._keys - - def load_keys(self): - self._keys = self.get_keys() - - - - async def append_data(self,uri,bdata): - self.log(f'appending to uri {uri}, data {bdata}') - if type(bdata)!=bytes and type(bdata)==str: - bdata=bdata.encode('utf-8') - self.log(f'--> encoded bdata to {bdata}') - - # get blob so far - sofar = await self.get(uri,decode_data=False) - - # get sofar - self.log(f'sofar = {sofar}') - - newval = bdata if sofar is None else sofar+BSEP+bdata - self.log(f'newval = {newval}') - - res = await self.set(uri,newval,encode_data=False) - if res: - length = newval.count(BSEP)+1 - return {'success':'Length increased to %s' % length} - return {'error':'Could not append data'} - - async def upload(self,filename,file_id=None, uri='/file/',uri_part='/part/'): - import sys - - if not file_id: file_id = get_random_id() - part_ids = [] - part_keys = [] - parts=[] - PARTS=[] - buffer_size=100 - for part in bytes_from_file(filename,chunksize=1024*2): - part_id = get_random_id() - part_ids.append(part_id) - part_key='/part/'+part_id - part_keys.append(part_key) - parts.append(part) - # PARTS.append(part) - - # self.log('part!:',sys.getsizeof(part)) - #self.set(part_key,part) - - if len(parts)>=buffer_size: - # self.log('setting...') - await self.set(part_keys,parts) - part_keys=[] - PARTS+=parts - parts=[] - - # set all parts - #self.set(part_keys,PARTS) - # self.log('# parts:',len(PARTS)) - if parts and part_keys: - await self.set(part_keys, parts) - - # how many parts? - # self.log('# pieces!',len(part_ids)) - - file_store = {'ext':os.path.splitext(filename)[-1][1:], 'parts':part_ids} - # self.log('FILE STORE??',file_store) - await self.set_json(uri+file_id,file_store) - - # file_store['data'].seek(0) - file_store['id']=file_id - return file_store - - async def download(self,file_id): - self.log('file_id =',file_id) - file_store = await self.get_json_val('/file/'+file_id) - self.log('file_store =',file_store) - if file_store is None: return - - self.log('file_store!?',file_store) - keys = ['/part/'+x for x in file_store['parts']] - - #time,pieces,pub,sign = await self.get_json_val(keys) - pieces = await self.get_json_val(keys) - self.log('pieces = ',pieces) - file_store['parts_data']=pieces - return file_store - - async def flush(self): - #self.log('saving back to db file...') - node = await self.node - node.storage.dump() - # self.log('DONE saving back to db file...') - - - - async def post(self,data,channel,add_to_outbox=True): - post_id=get_random_id() - #tasks = [] - - self.log(f'api.post({data},add_to_outbox={add_to_outbox}) --> ...') - # stop - - # ## add to inbox - post_id = get_random_id() - self.load_keys() - author_privkey = self.keys[data.get('author')] - - self.log('ADDING TO CHANNEL??',channel) - pubkey_channel = self.keys[channel].public_key() - - ## 1) STORE ACTUAL CONTENT OF POST UNDER CENTRAL POST URI - # HAS NO CHANNEL: just one post/msg in a sea of many - # e.g. 
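# --- editor's sketch (not part of the diff) --------------------------------
# How upload()/download() shard a file over the DHT: each ~2 KB chunk is
# stored under '/part/<random id>' and a small JSON index of part ids is
# stored under '/file/<file id>'.  A plain dict stands in for the kademlia
# node so the sketch runs locally; the real code awaits Api.set()/get()
# instead.  upload_local()/download_local() are illustrative names only.
import json, os, uuid

def upload_local(filename: str, store: dict, chunksize: int = 2048) -> str:
    part_ids = []
    with open(filename, 'rb') as f:
        while True:
            chunk = f.read(chunksize)
            if not chunk:
                break
            part_id = uuid.uuid4().hex
            store['/part/' + part_id] = chunk       # one DHT key per chunk
            part_ids.append(part_id)
    file_id = uuid.uuid4().hex
    index = {'ext': os.path.splitext(filename)[-1][1:], 'parts': part_ids}
    store['/file/' + file_id] = json.dumps(index).encode('utf-8')
    return file_id

def download_local(file_id: str, store: dict) -> bytes:
    index = json.loads(store['/file/' + file_id])
    return b''.join(store['/part/' + pid] for pid in index['parts'])
# ----------------------------------------------------------------------------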
/post/5e4a355873194399a5b356def5f40ff9 - # does not reveal who cand decrypt it - uri = '/post/'+post_id - json_res = await self.set_json( - uri, - data, #data_channel, - encode_data=True, - encrypt_for_pubkey=pubkey_channel, - private_signature_key=author_privkey - ) - self.log(f'json_res() <- {json_res}') - - - ## 2) Store under the channels a reference to the post, - # as a hint they may be able to decrypt it with one of their keys - add_post_id_as_hint_to_channels = [f'/inbox/{channel}'] - if add_to_outbox: - un=data.get('author') - if un: - add_post_id_as_hint_to_channels += [f'/outbox/{un}'] - tasks = [ - self.append_data(uri,post_id) for uri in add_post_id_as_hint_to_channels - ] - res = await asyncio.gather(*tasks) - if res and all([(d and 'success' in d) for d in res]): - return {'success':'Posted! %s' % post_id, 'post_id':post_id} - - return {'error':'Post unsuccessful'} - - # append_res=await self.append_json(f'/inbox/{channel}',post_id) - # self.log(f'json_res.append_json({channel}) <- {append_res}') - # #tasks.append(task) - - # # add to outbox - # if add_to_outbox: - # un=data.get('author') - # if un: - # append_res = await self.append_json(f'/outbox/{un}', post_id) - # self.log(f'json_res.append_json({un}) <- {append_res}') - # #tasks.append(task) - - #asyncio.create_task(self.flush()) - # return {'success':'Posted! %s' % post_id, 'post_id':post_id} - #return {'error':'Post failed'} - - async def get_json_val(self,uri,decode_data=True): - res=await self.get_json(uri,decode_data=decode_data) - self.log('get_json_val() got from get_json():',res,type(res)) - - r=res - if type(res) == dict: - r=res.get('val',None) if res is not None else res - elif type(res) == list: - r=[(x.get('val') if type(x)==dict else x) for x in res if x is not None] - elif type(res) == str: - r=json.loads(res) - self.log(f'get_json_val() --> {r} {type(r)}') - return r - - async def get_post(self,post_id): - self.log(f'api.get_post({post_id}) ?') - post_json = await self.get_json('/post/'+post_id, decode_data=True) - self.log(f'api.get_post({post_id}) --> {post_json}') - return post_json - - async def get_post_ids(self,uri='/inbox/world'): - ## GET POST IDS - self.log(f'api.get_post_ids(uri={uri}) ?') - index = await self.get(uri,decode_data=False) - self.log(f'api.get_post_ids(uri={uri}) <-- api.get()',index) - if not index: return [] - - #index = json.loads(base64.b64decode(index).decode()) - index = [x.decode('utf-8') for x in index.split(BSEP)] - - if index is None: return [] - if type(index)!=list: index=[index] - index = [x for x in index if x is not None] - - self.log(f'api.get_post_ids({uri}) --> {index}') - return index - - async def get_posts(self,uri='/inbox/world'): - - # get IDs - post_ids = await self.get_post_ids(uri) - - # get posts - posts = [self.get_post(post_id) for post_id in post_ids] - return await asyncio.gather(*posts) - - - - - -## func - -def bytes_from_file(filename,chunksize=8192): - with open(filename, 'rb') as f: - while True: - piece = f.read(chunksize) - if not piece: - break - yield piece - -def get_random_id(): - import uuid - return uuid.uuid4().hex - - - - - - - -def test_api(): - - # api.set(['a','b','c'],[1,2,3]) - async def run(): - api = Api() - # await api.connect() - - #await api.set_json('whattttt',{'aaaaa':12222}) - #await api.set_json('whattttt',[111]) - #await api.set_json('whattttt',[111]) - - #val = await api.get_json('whattttt') - - server = await _getdb(api) - await server.set('a',1) - print(await server.get('a')) - await asyncio.sleep(5) - await 
server.set('a',2) - - print(await server.get('a')) - await asyncio.sleep(5) - - await server.set('a',str([2,3,4,5])) - print(await server.get('a')) - await asyncio.sleep(5) - - val = await server.get('a') - - - - print(f'VAL = {val}') - return val - - asyncio.run(run()) - - -def test_basic(): - import asyncio - from kademlia.network import Server - - #api = Api() - - # not working! - #api.set_json('my key',{'a':'value'}) - - async def run(): - # Create a node and start listening on port 5678 - node = Server() - await node.listen(5678) - - # Bootstrap the node by connecting to other known nodes, in this case - # replace 123.123.123.123 with the IP of another node and optionally - # give as many ip/port combos as you can for other nodes. - await node.bootstrap(NODES_PRIME) - - # set a value for the key "my-key" on the network - await node.set("my-key", "my awesome value") - await node.set("my-key", "my awesome value2") - await node.set("my-key", "my awesome value3") - - - # get the value associated with "my-key" from the network - result = await node.get("my-key") - - print(result) - return result - - res = asyncio.run(run()) - print('res = ',res) - # res = asyncio.run(node.set(key,value)) - # print(res) - -def test_provided_eg(): - import asyncio - from kademlia.network import Server - - async def run(): - # Create a node and start listening on port 5678 - node = Server() - await node.listen(5678) - - # Bootstrap the node by connecting to other known nodes, in this case - # replace 123.123.123.123 with the IP of another node and optionally - # give as many ip/port combos as you can for other nodes. - await node.bootstrap(NODES_PRIME) - - # set a value for the key "my-key" on the network - await node.set("my-key", "my awesome value") - - # get the value associated with "my-key" from the network - result = await node.get("my-key") - - print(result) - - asyncio.run(run()) - - - -async def lonely_selfless_node(): - from api import Api,PORT_LISTEN - API = Api() - return await API.connect_forever(8467) - - -def boot_lonely_selfless_node(port=8467): - API = Api() - asyncio.run(API.connect_forever()) - - -def init_entities(usernames = ['world']): - ## make global entity called world - - #loop=asyncio.new_event_loop() - - async def register(username): - API = Api() - #await API.connect_forever() - #privkey,pubkey = await API.register(username,just_return_keys=True) - - private_key = generate_rsa_key() - public_key = private_key.public_key() - pem_private_key = serialize_privkey(private_key) - pem_public_key = serialize_pubkey(public_key) - - - privkey_fn = os.path.join(KEYDIR_BUILTIN,f'.{username}.key.priv') - pubkey_fn = os.path.join(KEYDIR_BUILTIN,f'.{username}.key.pub') - with open(privkey_fn,'wb') as of: of.write(pem_private_key) - with open(pubkey_fn,'wb') as of: of.write(pem_public_key) - # print(API.keys) - - await API.set_person(username,pem_public_key) - print('done') - - - for un in usernames: - asyncio.run(register(un)) - - -def split_binary(data, sep=BSEP): - seplen = len(BSEP) - res=[] - stack=None - print('!!',data[:4],seplen,sep) - - cutoffs=[] - for i in range(0, len(data)): - seg=data[i:i+seplen] - print(i,seg,sep,stack) - if seg==sep: - # split_piece = data[:i+seplen] - print('!') - cutoff_lasttime = cutoffs[-1][-1] if cutoffs and cutoffs else 0 - cutoff = (cutoff_lasttime-seplen, i) - print(cutoff) - cutoffs.append(cutoff) - stack = data[cutoff[0] if cutoff[0]>0 else 0: cutoff[1]] - print(stack) - res += [stack] - stack = None - - cutoff_lasttime = cutoffs[-1][-1] if cutoffs and 
cutoffs else 0 - print(cutoff_lasttime) - stack = data[cutoff_lasttime+seplen :] - res+=[stack] - print('RES:',res) - return res - - - - - -if __name__=='__main__': - init_entities() - - # res = split_binary(b'eeeehey||||whatsueep',b'||||') - # print(res) \ No newline at end of file diff --git a/p2p/crypto.py b/p2p/crypto.py deleted file mode 100644 index c46c688..0000000 --- a/p2p/crypto.py +++ /dev/null @@ -1,295 +0,0 @@ -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives.asymmetric import rsa -from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric import padding -from cryptography.exceptions import InvalidSignature -import os -try: - from .syfr import * #import syfr -except ImportError: - from syfr import * - -key_dir = os.path.join(os.path.expanduser('~'),'.keys','komrade') -if not os.path.exists(key_dir): os.makedirs(key_dir) -PATH_PRIVATE_KEY=os.path.join(key_dir,'private_key.pem') -PATH_PUBLIC_KEY=os.path.join(key_dir,'public_key.pem') - - -### CREATING KEYS - -def new_keys(save=True,password=None): - private_key = rsa.generate_private_key( - public_exponent=65537, - key_size=2048, - backend=default_backend() - ) - public_key = private_key.public_key() - - if save: - save_private_key(private_key,password=password) - save_public_key(public_key) - - return private_key,public_key - - - -def save_private_key(private_key,fn=PATH_PRIVATE_KEY,password=None, return_instead=False): - pem = private_key.private_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PrivateFormat.PKCS8, - encryption_algorithm=serialization.NoEncryption() if not password else serialization.BestAvailableEncryption(password.encode()) - ) - if return_instead: return pem - with open(fn,'wb') as f: f.write(pem) - -def save_public_key(public_key,fn=PATH_PUBLIC_KEY,return_instead=False): - pem = public_key.public_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PublicFormat.SubjectPublicKeyInfo - ) - if return_instead: return pem - with open(fn,'wb') as f: f.write(pem) - - - -### LOADING KEYS -def load_keys(): - return (load_private_key_from_file(), load_public_key_from_file()) - -def load_private_key(pem,password=None): - return serialization.load_pem_private_key( - pem, - password=password.encode() if password else None, - backend=default_backend() - ) - -def load_private_key_from_file(fn=PATH_PRIVATE_KEY,password=None): - with open(fn, "rb") as key_file: - return load_private_key(key_file.read(), password) - -def load_public_key(pem): - return serialization.load_pem_public_key( - pem, - backend=default_backend() - ) - -def load_public_key_from_file(fn=PATH_PUBLIC_KEY): - with open(fn, "rb") as key_file: - return load_public_key(key_file.read()) - -### DE/ENCRYPTING -def encrypt_msg(message, public_key): - return public_key.encrypt( - message, - padding.OAEP( - mgf=padding.MGF1(algorithm=hashes.SHA256()), - algorithm=hashes.SHA256(), - label=None - ) - ) - -def encrypt_msg_symmetric(message): - import os - from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes - from cryptography.hazmat.backends import default_backend - backend = default_backend() - key = os.urandom(32) - iv = os.urandom(16) - cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=backend) - encryptor = cipher.encryptor() - - ct = encryptor.update(message) + encryptor.finalize() - return ct - -def decrypt_msg_symmetric(): - import os - from 
cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes - from cryptography.hazmat.backends import default_backend - backend = default_backend() - key = os.urandom(32) - iv = os.urandom(16) - cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=backend) - - return ct - decryptor = cipher.decryptor() - decryptor.update(ct) + decryptor.finalize() - b'a secret message' - -def decrypt_msg(encrypted, private_key): - return private_key.decrypt( - encrypted, - padding.OAEP( - mgf=padding.MGF1(algorithm=hashes.SHA256()), - algorithm=hashes.SHA256(), - label=None - ) - ) - -### SIGNING/VERIFYING -def sign_msg(message, private_key): - return private_key.sign( - message, - padding.PSS( - mgf=padding.MGF1(hashes.SHA256()), - salt_length=padding.PSS.MAX_LENGTH - ), - hashes.SHA256() - ) - -def verify_msg(message, signature, public_key): - try: - verified = public_key.verify( - signature, - message, - padding.PSS( - mgf=padding.MGF1(hashes.SHA256()), - salt_length=padding.PSS.MAX_LENGTH - ), - hashes.SHA256() - ) - return True - except InvalidSignature: - return False - return None - - - - -# #private_key,public_key = new_keys() -# private_key,public_key = load_keys() - -# #print(private_key) -# #print(public_key) - - -# #enc = encrypt_msg('Drive your plow over the bones of the dead', public_key) -# #print(enc) - -# dec = decrypt_msg(enc,private_key) -# #print(dec) - -# msg = b'hello' -# signature = sign_msg(msg,private_key) - -# #print(encrypt_msg(b'hello',public_key)) - -# print(verify_msg(msg+b'!!',signature,public_key)) - - - -## ONLY NEEDS RUN ONCE! -def gen_global_keys1(fn='.keys.global.json'): - from kivy.storage.jsonstore import JsonStore - - private_key,public_key=new_keys(save=False,password=None) - pem_private_key = save_private_key(private_key,password=None,return_instead=True) - pem_public_key = save_public_key(public_key,return_instead=True) - - store = JsonStore('./.keys.global.json') - - store.put('_keys',private=str(pem_private_key.decode()),public=str(pem_public_key.decode())) #(private_key,password=passkey) - -def gen_global_keys(fn='.keys.global.json'): - from kivy.storage.jsonstore import JsonStore - - store = JsonStore('./.keys.global.json') - - #store.put('_keys',private=str(pem_private_key.decode()),public=str(pem_public_key.decode())) #(private_key,password=passkey) - - private_key = generate_rsa_key() - pem_private_key = serialize_privkey(private_key, password=None)# save_private_key(private_key,password=passkey,return_instead=True) - pem_public_key = serialize_pubkey(private_key.public_key()) - store.put('_keys',private=pem_private_key.decode(),public=pem_public_key.decode()) #(private_key,password=passkey) - -""" -import os -from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes -from cryptography.hazmat.backends import default_backend -backend = default_backend() -key = os.urandom(32) -iv = os.urandom(16) -cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=backend) -encryptor = cipher.encryptor() -ct = encryptor.update(b"a secret message") + encryptor.finalize() -decryptor = cipher.decryptor() -decryptor.update(ct) + decryptor.finalize() -b'a secret message'""" - - - - - - -def aes_rsa_encrypt(message, recipient_rsa_pub): - aes_key = create_aes_key() - aes_ciphertext, iv = aes_encrypt(message, aes_key) - encry_aes_key = rsa_encrypt(aes_key, recipient_rsa_pub) - return aes_ciphertext, encry_aes_key, iv #, sign - - -def aes_rsa_decrypt(aes_ciphertext, rsa_priv, encry_aes_key, iv): #, hmac, hmac_signature, rsa_priv, iv, 
metadata): - aes_key = rsa_decrypt(encry_aes_key, rsa_priv) - plaintext = aes_decrypt(aes_ciphertext, aes_key, iv) - return plaintext - - - - -# def _decrypt_rsa(x,privkey=CORRECT_PRIV_KEY): -# x_decr = rsa_decrypt(x,privkey) -# return x_decr - - - - - - # encrypt sender - #def _enc_sender(x): - # recv=receiver_pubkey - - # encrypt recipient too? - - - - # sender_pubkey_b_encr = sep2.join( - # aes_rsa_encrypt( - # serialize_pubkey(self.public_key), receiver_pubkey - # ) - # ) - - # receiver_pubkey_b_encr = sep2.join( - # aes_rsa_encrypt( - # serialize_pubkey(receiver_pubkey_b, receiver_pubkey - # ) - # ) - - - - # msg_encr = sep2.join([val_encr,val_encr_key,iv]) - # sender_encr = sep2.join( - # aes_rsa_encrypt( - # serialize_pubkey(self.public_key), receiver_pubkey - # ) - # ) - # signature_encr = sep2.join( - # rsa_encrypt( - # signature, - # receiver_pubkey - # ) - # ) - - - - # sender = sep2.join(sender_pubkey_b, signature) - - # WDV = sep.join([ - # time_b, - # receiver_encr, - # msg_encr, - # sender_encr, - # signature_encr - # ]) - -def simple_lock_test(privkey,pubkey): - return privkey.public_key().public_numbers() == pubkey.public_numbers() \ No newline at end of file diff --git a/p2p/kad.py b/p2p/kad.py deleted file mode 100644 index b39a0f2..0000000 --- a/p2p/kad.py +++ /dev/null @@ -1,255 +0,0 @@ -# ### -# # Kademlia patches -# ### - -# from kademlia.storage import * -# from kademlia.network import * -# from kademlia.routing import RoutingTable -# from rpcudp.protocol import RPCProtocol -# import os - -# handler = logging.StreamHandler() -# formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') -# handler.setFormatter(formatter) -# log = logging.getLogger('kademlia') -# log.addHandler(handler) -# log.setLevel(logging.DEBUG) - - -# PROXY_ADDR = ('0.0.0.0',8368) - - -# """UDP proxy server.""" - -# import asyncio - - -# class ProxyDatagramProtocol(asyncio.DatagramProtocol): - -# def __init__(self, remote_address=PROXY_ADDR): -# self.remote_address = remote_address -# self.remotes_d = {} -# super().__init__() - -# def connection_made(self, transport): -# self.transport = transport - -# def datagram_received(self, data, addr): -# if addr in self.remotes_d: -# self.remotes_d[addr].transport.sendto(data) -# return -# loop = asyncio.get_event_loop() -# self.remotes_d[addr] = RemoteDatagramProtocol(self, addr, data) -# coro = loop.create_datagram_endpoint( -# lambda: self.remotes_d[addr], remote_addr=self.remote_address) -# asyncio.ensure_future(coro) - - -# class RemoteDatagramProtocol(asyncio.DatagramProtocol): - -# def __init__(self, proxy, addr, data): -# print('RemoteDP got:',proxy,addr,data) -# self.proxy = proxy -# self.addr = addr -# self.data = data -# super().__init__() - -# def connection_made(self, transport): -# self.transport = transport -# self.transport.sendto(self.data) - -# def datagram_received(self, data, _): -# self.proxy.transport.sendto(data, self.addr) - -# def connection_lost(self, exc): -# self.proxy.remotes.pop(self.attr) - - -# async def start_datagram_proxy(protocol_class, bind, port, remote_host, remote_port): -# loop = asyncio.get_event_loop() -# protocol = protocol_class((remote_host, remote_port)) -# return await loop.create_datagram_endpoint( -# lambda: protocol, local_addr=(bind, port)) - - -# def main(bind='0.0.0.0', port=8888, -# remote_host='0.0.0.0', remote_port=9999): -# loop = asyncio.get_event_loop() -# print("Starting datagram proxy...") -# coro = start_datagram_proxy(bind, port, remote_host, remote_port) -# 
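# --- editor's sketch (not part of the diff) --------------------------------
# The hybrid scheme behind aes_rsa_encrypt()/aes_rsa_decrypt() in crypto.py,
# written self-contained against the `cryptography` package already imported
# there: a fresh AES key encrypts the message, and only that key travels
# RSA-encrypted to the recipient.  Mode and padding choices here (AES-CFB,
# OAEP/SHA-256) are illustrative assumptions; syfr's actual primitives may
# use different parameters.  hybrid_encrypt()/hybrid_decrypt() are
# hypothetical names, not part of this codebase.
import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa, padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

def hybrid_encrypt(message: bytes, recipient_pub):
    aes_key, iv = os.urandom(32), os.urandom(16)
    enc = Cipher(algorithms.AES(aes_key), modes.CFB(iv),
                 backend=default_backend()).encryptor()
    ciphertext = enc.update(message) + enc.finalize()
    wrapped_key = recipient_pub.encrypt(
        aes_key,
        padding.OAEP(mgf=padding.MGF1(hashes.SHA256()),
                     algorithm=hashes.SHA256(), label=None))
    return ciphertext, wrapped_key, iv

def hybrid_decrypt(ciphertext: bytes, recipient_priv, wrapped_key: bytes, iv: bytes) -> bytes:
    aes_key = recipient_priv.decrypt(
        wrapped_key,
        padding.OAEP(mgf=padding.MGF1(hashes.SHA256()),
                     algorithm=hashes.SHA256(), label=None))
    dec = Cipher(algorithms.AES(aes_key), modes.CFB(iv),
                 backend=default_backend()).decryptor()
    return dec.update(ciphertext) + dec.finalize()

# usage:
#   priv = rsa.generate_private_key(public_exponent=65537, key_size=2048,
#                                   backend=default_backend())
#   ct, wk, iv = hybrid_encrypt(b'hello', priv.public_key())
#   assert hybrid_decrypt(ct, priv, wk, iv) == b'hello'
# ----------------------------------------------------------------------------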
transport, _ = loop.run_until_complete(coro) -# print("Datagram proxy is running...") -# try: -# loop.run_forever() -# except KeyboardInterrupt: -# pass -# print("Closing transport...") -# transport.close() -# loop.close() - - - -# log = logging.getLogger('kademlia') # pylint: disable=invalid-name - - - -# class KadProtocol(KademliaProtocol): -# # remote_address = PROXY_ADDR -# # REMOTES_D={} - -# # def __init__(self, source_node, storage, ksize): -# # RPCProtocol.__init__(self,wait_timeout=5) -# # self.router = RoutingTable(self, ksize, source_node) -# # self.storage = storage -# # self.source_node = source_node - -# # def datagram_received(self, data, addr): -# # #if not hasattr(self,'remotes_d'): self.remotes_d={} -# # # print('\n\n!?!?!?',self.REMOTES_D, type(self.REMOTES_D)) -# # # if addr in self.REMOTES_D: -# # # self.REMOTES_D[addr].transport.sendto(data) -# # # return -# # loop = asyncio.get_event_loop() -# # # self.REMOTES_D[addr] = RemoteDatagramProtocol(self, addr, data) -# # RDP = RemoteDatagramProtocol(self, addr, data) -# # coro = loop.create_datagram_endpoint(lambda: RDP, remote_addr=self.remote_address) -# # asyncio.ensure_future(coro) - -# def handle_call_response(self, result, node): -# """ -# If we get a response, add the node to the routing table. If -# we get no response, make sure it's removed from the routing table. -# """ -# if not result[0]: -# log.warning("no response from %s, ?removing from router", node) -# self.router.remove_contact(node) -# return result - -# log.info("got successful response from %s", node) -# self.welcome_if_new(node) -# return result - - - -# class KadServer(Server): -# protocol_class = KademliaProtocol # KadProtocol #KademliaProtocol - -# # def __init__(self, *x, **y): -# # self.storage = y['storage'] -# # # raise Exception(str(self.storage)) -# # super().__init__(*x,**y) -# # log.info(f'Storage has {len(self.storage.data)} keys') - -# def __repr__(self): -# repr = f""" -# KadServer() -# ksize = {self.ksize} -# alpha = {self.alpha} -# storage = {len(self.storage.data)} keys -# node = {self.node} -# transport = {self.transport} -# protocol = {self.protocol} -# refresh_loop = {self.refresh_loop} -# save_state_loop = {self.save_state_loop} -# bootstrappable_neighbors = {self.bootstrappable_neighbors()} -# """ -# return repr - - - -# # async def get(self, key): -# # """ -# # Get a key if the network has it. - -# # Returns: -# # :class:`None` if not found, the value otherwise. -# # """ -# # log.info("Looking up key %s", key) -# # dkey = digest(key) -# # # if this node has it, return it -# # if self.storage.get(dkey) is not None: -# # log.info('I already have this') -# # return self.storage.get(dkey) -# # node = Node(dkey) -# # nearest = self.protocol.router.find_neighbors(node) -# # log.info(f'My nearest nodes are: {nearest}') -# # if not nearest: -# # log.warning("There are no known neighbors to get key %s", key) -# # return None -# # spider = ValueSpiderCrawl(self.protocol, node, nearest, -# # self.ksize, self.alpha) -# # found = await spider.find() - -# # log.info(f'spider done crawling: {spider}') -# # log.info(f'spider found value: {found}') - -# # self.storage[dkey]=found -# # return found - -# # async def set(self, key, value): -# # """ -# # Set the given string key to the given value in the network. 
-# # """ -# # if not check_dht_value_type(value): -# # raise TypeError( -# # "Value must be of type int, float, bool, str, or bytes" -# # ) -# # log.info("setting '%s' = '%s' on network", key, value) -# # dkey = digest(key) - -# # print('STORE??',type(self.storage),self.storage) -# # self.storage.set(dkey,value) -# # return await self.set_digest(dkey, value) - -# # async def set_digest(self, dkey, value): -# # """ -# # Set the given SHA1 digest key (bytes) to the given value in the -# # network. -# # """ -# # node = Node(dkey) - -# # nearest = self.protocol.router.find_neighbors(node) -# # if not nearest: -# # log.warning("There are no known neighbors to set key %s", -# # dkey.hex()) -# # #return False - -# # spider = NodeSpiderCrawl(self.protocol, node, nearest, -# # self.ksize, self.alpha) -# # nodes = await spider.find() -# # log.info("setting '%s' on %s", dkey.hex(), list(map(str, nodes))) - -# # # if this node is close too, then store here as well -# # neighbs=[n.distance_to(node) for n in nodes] -# # log.info('setting on %s neighbors', neighbs) -# # biggest = max(neighbs) if neighbs else 0 -# # log.info('my distance to node is %s, biggest distance is %s', -# # self.node.distance_to(node),biggest) -# # if self.node.distance_to(node) < biggest: -# # self.storage.set(dkey,value) - -# # log.info('here are the nodes %s' % nodes) -# # results = [self.protocol.call_store(n, dkey, value) for n in nodes] -# # log.info('here are the results') - -# # # return true only if at least one store call succeeded -# # return any(await asyncio.gather(*results)) - - - -# #### NEVERMIND -# # KadServer = Server - -# import time -# if __name__=='__main__': -# # test -# hfs = HalfForgetfulStorage(fn='test.db') - -# #hfs['a']=1 -# # time.sleep(2) -# hfs['a']=1000 - -# print(hfs['a']) - - -# print(hfs['a']) \ No newline at end of file diff --git a/p2p/p2p.py b/p2p/p2p.py index 5a78ce1..1293042 100644 --- a/p2p/p2p.py +++ b/p2p/p2p.py @@ -1,3 +1,20 @@ +import os,time,sys,logging +from pathlib import Path +import asyncio,time +# handler = logging.StreamHandler() +# formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') +# handler.setFormatter(formatter) +# logger = logging.getLogger(__file__) +# logger.addHandler(handler) +# logger.setLevel(logging.DEBUG) +sys.path.append('../p2p') +# logger.info(os.getcwd(), sys.path) +BSEP=b'||||||||||' +BSEP2=b'@@@@@@@@@@' +BSEP3=b'##########' +NODE_SLEEP_FOR=1 +PATH_WORLD_KEY='.world.key' + import logging import asyncio import shelve @@ -33,3 +50,948 @@ def boot_lonely_selfless_node(port=8467): await API.connect_forever(8467) asyncio.run(go()) + + +# works better with tor? +import json +jsonify = json.dumps + +# Start server + +DEBUG = True +UPLOAD_DIR = 'uploads/' +ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg', 'gif'} + +# PORT_SPEAK = 8468 +PORT_LISTEN = 5639 + +# Api Functions +from threading import Thread + + +NODES_PRIME = [("128.232.229.63",8467), ("68.66.241.111",8467)] +#68.66.224.46 + +from pathlib import Path +home = str(Path.home()) + +KEYDIR = os.path.join(home,'.komrade','.keys') +if not os.path.exists(KEYDIR): os.makedirs(KEYDIR) + +KEYDIR_BUILTIN = '.' 
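# --- editor's sketch (not part of the diff) --------------------------------
# What the BSEP constants above are for: Api.append_data() grows a key by
# re-writing it as the old blob + BSEP + the new entry, and readers such as
# get_post_ids() split on BSEP to recover the list.  Illustrated here against
# a plain dict instead of the kademlia node; append_entry()/read_entries()
# are illustrative names, not functions in this codebase.
BSEP_DEMO = b'||||||||||'   # same value as BSEP above

def append_entry(store: dict, key: str, entry: bytes) -> int:
    sofar = store.get(key)
    store[key] = entry if sofar is None else sofar + BSEP_DEMO + entry
    # new length of the list, as append_data() reports on success
    return store[key].count(BSEP_DEMO) + 1

def read_entries(store: dict, key: str) -> list:
    blob = store.get(key)
    return [] if blob is None else [x.decode('utf-8') for x in blob.split(BSEP_DEMO)]

# usage:
#   s = {}
#   append_entry(s, '/inbox/world', b'post-id-1')
#   append_entry(s, '/inbox/world', b'post-id-2')
#   read_entries(s, '/inbox/world')   # -> ['post-id-1', 'post-id-2']
# ----------------------------------------------------------------------------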
+ +class NetworkStillConnectingError(OSError): pass + +async def _getdb(self=None,port=PORT_LISTEN): + from kademlia.network import Server + + if self: + self.log('starting server..') + + import os + if self: self.log(os.getcwd()) + node = Server(log=self.log if self else None) #fn='../p2p/data.db',log=(self.log if self else print))) + + try: + if self: self.log('listening..') + await node.listen(port) + except OSError: + raise NetworkStillConnectingError('Still connecting...') + #await asyncio.sleep(3) + + if self: self.log('bootstrapping server..') + await node.bootstrap(NODES_PRIME) + + if self: node.log = self.log + self.log('NODE:',node) + + # if self and self.app: + # self.app.close_dialog() + + return node + +def logg(*x): + print(*x) + +class Api(object): + def __init__(self,user=None,log=None,app=None): + self.log = log if log is not None else logg + self.username = user + self.app=app + + def private_key(self): + if self.username: + pass + + async def connect_forever(self,port=PORT_LISTEN,save_every=60): + try: + i = 0 + self._node = await self.connect(port=port) + while True: + if not i%90: self.log(f'Node status (tick {i}): {self._node}') + if i and not i%save_every: await self.flush() + i += 1 + await asyncio.sleep(NODE_SLEEP_FOR) + # asyncio.sleep(0) + except (asyncio.CancelledError,KeyboardInterrupt) as e: + self.log('P2P node cancelled', e) + await self.flush() + finally: + # when canceled, print that it finished + self.log('P2P node shutting down') + pass + + + @property + async def node(self): + # while not hasattr(self,'_node'): + # self.log('[API] waiting forr connection...') + # await asyncio.sleep(1) + # return self._node + + if not hasattr(self,'_node'): + await self.connect() + self._node.log=self.log + return self._node + + async def connect(self,port=PORT_LISTEN): + if self.app: self.app.open_dialog('hello?') + self.log('connecting...') + node = await _getdb(self,port) + self.log(f'connect() has node {node}') + self._node = node + return node + + + + async def get(self,key_or_keys,decode_data=True): + self.log(f'api.get({key_or_keys},decode_data={decode_data}) --> ...') + async def _get(): + self.log(f'api._get({key_or_keys},decode_data={decode_data}) --> ...') + node=await self.node + res=None + + if type(key_or_keys) in {list,tuple,dict}: + keys = key_or_keys + self.log('keys is plural',keys) + res =[] + for key in keys: + val = None + + # while not val: + self.log('trying again...') + val = await node.get(key) + self.log('got',val) + #asyncio.sleep(1) + + self.log(f'val for {key} = {val} {type(val)}') + if decode_data: + self.log(f'api._get() decoding data {keys} -> {val} {type(val)}') + val = await self.decode_data(val) + self.log(f'api._get() got back decodied data {keys} -> {val} {type(val)}') + res+=[val] + #res = await asyncio.gather(*tasks) + else: + key=key_or_keys + # self.log('keys is singular',key) + val = await node.get(key) + if decode_data: + self.log(f'api._get() decoding data {key} -> {val} {type(val)}') + val = await self.decode_data(val) + self.log(f'api._get() got back decodied data {key} -> {val} {type(val)}') + # self.log('wtf is val =',val) + res=val + + # self.log('wtf is res =',res) + + self.log(f'_get({key_or_keys}) --> {res}') + return res + return await _get() + + # async def get(self,key_or_keys,decode_data=True): + # self.log(f'api.get({key_or_keys},decode_data={decode_data}) --> ...') + # async def _get(): + # self.log(f'api._get({key_or_keys},decode_data={decode_data}) --> ...') + # node=await self.node + # res=None + + # if 
type(key_or_keys) in {list,tuple,dict}: + # keys = key_or_keys + # self.log('keys is plural',keys) + # res =[] + # for key in keys: + # val = None + + # # while not val: + # self.log('trying again...') + # val = await node.get(key) + # self.log('got',val) + # asyncio.sleep(1) + + # self.log(f'val for {key} = {val} {type(val)}') + # if decode_data: + # self.log(f'api._get() decoding data {keys} -> {val} {type(val)}') + # val = await self.decode_data(val) + # self.log(f'api._get() got back decodied data {keys} -> {val} {type(val)}') + # res+=[val] + # #res = await asyncio.gather(*tasks) + # else: + # key=key_or_keys + # self.log('keys is singular',key) + # val = await node.get(key) + # if decode_data: + # self.log(f'api._get() decoding data {key} -> {val} {type(val)}') + # val = await self.decode_data(val) + # self.log(f'api._get() got back decodied data {key} -> {val} {type(val)}') + # self.log('wtf is val =',val) + # res=val + + # self.log('wtf is res =',res) + + # self.log(f'_get({key_or_keys}) --> {res}') + # return res + # return await _get() + + def encode_data(self,val,sep=BSEP,sep2=BSEP2,do_encrypt=False,encrypt_for_pubkey=None,private_signature_key=None): + assert type(val)==bytes + """ + What do we want to store with + + 1) [Encrypted payload:] + 1) Timestamp + 2) Public key of sender + 3) Public key of recipient + 4) AES-encrypted Value + 2) [Decryption tools] + 1) AES-decryption key + 2) AES decryption IV value + 5) Signature of value by author + """ + import time + timestamp=time.time() + + self.log(f"""api.encode_data( + val={val}, + sep={sep}, + sep2={sep2}, + do_encrypt={do_encrypt}, + encrypt_for_pubkey={encrypt_for_pubkey}, + private_signature_key={private_signature_key})""") + + # check input + if not encrypt_for_pubkey: + raise Exception('we need a receiver !!') + # return None + + # convert val to bytes + # if type(val)!=bytes: val = bytes(val,'utf-8') + # value_bytes=base64.b64encode(val) + value_bytes = val + + # sign + private_signature_key = private_signature_key if private_signature_key is not None else self.private_key + signature = sign(value_bytes, private_signature_key) + public_sender_key = private_signature_key.public_key() + sender_pubkey_b = serialize_pubkey(public_sender_key) + + # Verify! + self.log(f'''encode_data().verify_signature( + signature={signature} + value={value_bytes} + sender_pubkey={sender_pubkey_b}''') + authentic = verify_signature(signature, value_bytes, public_sender_key) + + if not authentic: + raise Exception('message is inauthentic for set??' +str(authentic)) + return None + + # encrypt? 
+ encrypt_for_pubkey_b = serialize_pubkey(encrypt_for_pubkey) + #time_b=base64.b64encode(str(timestamp).encode('utf-8')) #.encode() + time_b=str(timestamp).encode('utf-8') + msg=value_bytes + + # whole binary package + WDV = [ + time_b, + sender_pubkey_b, + encrypt_for_pubkey_b, + msg, + signature + ] + payload = sep2.join(WDV) + + res = aes_rsa_encrypt(payload,encrypt_for_pubkey) + if res is None: + raise Exception('encryption result does not exist') + return None + payload_encr_aes, payload_encr_aes_key, payload_encr_aes_iv = res + + decryption_tools = sep2.join([ + payload_encr_aes_key, + payload_encr_aes_iv + ]) + + final_packet = sep.join([ + payload_encr_aes, + decryption_tools + ]) + + self.log('FINAL PACKET:',final_packet,type(final_packet)) + return final_packet + + + + async def decode_data(self,entire_packet_orig,sep=BSEP,private_key=None,sep2=BSEP2): + if entire_packet_orig is None: return entire_packet_orig + self.log(f'decode_data({entire_packet_orig})...') + import binascii + entire_packet = entire_packet_orig + + #self.log('PACKED =',entire_packet,type(entire_packet)) + + #self.log('????',type(entire_packet)) + #self.log(entire_packet) + + # get data + try: + encrypted_payload, decryption_tools = entire_packet.split(sep) #split_binary(entire_packet, sep=sep) #entire_packet.split(sep) + decryption_tools=decryption_tools.split(sep2) #split_binary(decryption_tools,sep=sep2) + except ValueError as e: + + self.log('!! decode_data() got incorrect format:',e) + self.log('packet =',entire_packet) + return entire_packet_orig + + + ### NEW FIRST LINE: Try to decrypt! + val=None + key_used=None + for keyname,privkey in self.keys.items(): + self.log(keyname,privkey,'??') + try: + # clicked! + val = aes_rsa_decrypt(encrypted_payload,privkey,*decryption_tools) + key_used=keyname + # this must mean this was the recipient + self.log(f'unlocked using key {keyname}!') + break + except ValueError as e: + self.log(keyname,'did not work!') #,privkey,pubkey) + pass + if not val: + raise Exception('Content not intended for us') + return None + + #stop + + ### THIRD LINE: SIGNATURE VERIFICATION + # can we decrypt signature? 
+ val_array = val.split(sep2) + # self.log('val_array =',val_array) + time_b,sender_pubkey_b,receiver_pubkey_b,msg,signature = val_array + if not signature: + raise Exception('no signature!') + return None + sender_pubkey=load_pubkey(sender_pubkey_b) + authentic = verify_signature(signature,msg,sender_pubkey) + if not authentic: + raise Exception('inauthentic message') + return None + + + # ### FOURTH LINE: CONTENT ENCRYPTION + # if private_key is None: + # private_key=self.private_key_global + + WDV={ + 'time':float(time_b.decode('utf-8')), + 'val':msg.decode('utf-8'), + 'channel':key_used + # 'to':receiver_pubkey_b, + # 'from':sender_pubkey_b, + # 'sign':signature + } + + self.log('GOT WDV:',WDV) + return WDV + + + #,signature + + # async def set_on_channel(self,key_or_keys,value_or_values): + # tasks=[] + # if type(channel_or_channels) not in {list,tuple}: + # channels=[channel_or_channels] + # else: + # channels=channel_or_channels + + # for channel in channels: + # uri = + # tasks.append(self.set) + + # if type(channel_or_channels) == str: + # return await self.set(self,key_or_keys,value_or_values,channel_or_channels) + # elif type(channel_or_channels) == + + async def set(self,key_or_keys,value_or_values,private_signature_key=None,encode_data=True,encrypt_for_pubkey=None): + self.log(f'api.set({key_or_keys}) --> {type(value_or_values)}') + async def _set(): + # self.log('async _set()',self.node) + # node=self.node + #node=await _getdb(self) + node=await self.node + + def proc(key,value): + self.log(f'encodeing data for {key} -> {type(value)} ...') + if encode_data and encrypt_for_pubkey is not None and type(value)==bytes: + x = self.encode_data( + val = value, + do_encrypt=False, + encrypt_for_pubkey=encrypt_for_pubkey, + private_signature_key=private_signature_key + ) + self.log(f'got back encoded data for {key} -> {x} ...') + return x + else: + self.log(f'did not encode data for {key} -> {value} ...') + return value + + if type(key_or_keys) in {list,tuple,dict}: + keys = key_or_keys + values = value_or_values + assert len(keys)==len(values) + res=[] + for key,value in zip(keys,values): + newval = proc(key,value) + self.log(f'kvv (plural) <- {keys}:{value} -> {newval}') + await node.set(key,newval) + res+=[newval] + else: + key = key_or_keys + value = value_or_values + newval = proc(key,value) + self.log(f'kvv (singular) <- {key}:{value} -> {newval}') + res = newval + await node.set(key,newval) + + self.log(f'api.set(key={key_or_keys}, \ + res = {res})') + #node.stop() + # self.log('reconnecting ...',self._node) + #await self._node.stop() + #await self.connect() + return res + + return await _set() + + async def get_json(self,key_or_keys,decode_data=False): + + def jsonize_dat(dat_dict): + if type(dat_dict)==dict and 'val' in dat_dict: + self.log('is this json???',dat_dict['val'],'???') + dat_dict['val']=json.loads(dat_dict['val']) + #dat_dict['val']=json.loads(base64.b64decode(dat_dict['val']).decode('utf-8')) + return dat_dict + + def jsonize_res(res): + if not res: + return None + if type(res)==list: + return [jsonize_dat(d) for d in res] + else: + return jsonize_dat(res) + + res = await self.get(key_or_keys,decode_data=decode_data) + self.log('get_json() got from get() a',type(res)) + return jsonize_res(res) + + + + + + + + + async def set_json(self,key,value,private_signature_key=None,encode_data=True,encrypt_for_pubkey=None): + #def jsonize_dat(dat_dict): + #if type(dat_dict)==dict and 'val' in dat_dict: + # self.log('is this json???',dat_dict['val'],'???') + # 
dat_dict['val']=json.loads(dat_dict['val'].decode('utf-8')) + # #dat_dict['val']=json.loads(base64.b64decode(dat_dict['val']).decode('utf-8')) + # return dat_dict + + def prep_json(val): + if type(val)!=str: + val=json.dumps(value) + bval=val.encode('utf-8') + return bval + + + self.log(f'api.set_json({key}, {value} ...)') + json_b = prep_json(value) + self.log(f'bjson -> {json_b}') + set_res = await self.set( + key, + json_b, + private_signature_key=private_signature_key, + encode_data=encode_data, + encrypt_for_pubkey=encrypt_for_pubkey) + self.log(f'api.set_json() <-- {set_res}') + return set_res + + async def has(self,key): + val=await self.get(key) + return val is not None + + + + def add_world_key(self,fn=PATH_WORLD_KEY): + import shutil + thisdir=os.path.dirname(__file__) + fnfn=os.path.join(thisdir,fn+'.priv') + self.log('getting',fnfn) + name='.'.join(os.path.basename(fn).split('.')[1:-1]) + + priv_key=load_privkey_fn(fnfn) + pub_key=priv_key.public_key() + pub_key_b=serialize_pubkey(pub_key) + + ofn=os.path.join(KEYDIR,f'.{name}.key') + shutil.copyfile(fnfn,ofn) + + asyncio.create_task(self.add_world_key_to_net(name,pub_key_b)) + + async def add_world_key_to_net(self,name,pub_key_b): + await self.set_person(name,pub_key_b) + + #@property + def get_keys(self): + res={} + key_files = os.listdir(KEYDIR) + world_key_fn = os.path.basename(PATH_WORLD_KEY) + if not world_key_fn in key_files: + self.log('[first time?] adding world key:',world_key_fn) + self.add_world_key() + + + for priv_key_fn in key_files: + if (not priv_key_fn.startswith('.') or not priv_key_fn.endswith('.key')): continue + fnfn = os.path.join(KEYDIR,priv_key_fn) + print(fnfn) + priv_key=load_privkey_fn(fnfn) + #pub_key=priv_key.public_key() + name_key= '.'.join(priv_key_fn.split('.')[1:-1]) + res[name_key] = priv_key + self.log(f'[API] found key {name_key} and added to keychain') + return res + + + @property + def keys(self): + if not hasattr(self,'_keys'): + self.load_keys() + return self._keys + + def load_keys(self): + self._keys = self.get_keys() + + + + async def append_data(self,uri,bdata): + self.log(f'appending to uri {uri}, data {bdata}') + if type(bdata)!=bytes and type(bdata)==str: + bdata=bdata.encode('utf-8') + self.log(f'--> encoded bdata to {bdata}') + + # get blob so far + sofar = await self.get(uri,decode_data=False) + + # get sofar + self.log(f'sofar = {sofar}') + + newval = bdata if sofar is None else sofar+BSEP+bdata + self.log(f'newval = {newval}') + + res = await self.set(uri,newval,encode_data=False) + if res: + length = newval.count(BSEP)+1 + return {'success':'Length increased to %s' % length} + return {'error':'Could not append data'} + + async def upload(self,filename,file_id=None, uri='/file/',uri_part='/part/'): + import sys + + if not file_id: file_id = get_random_id() + part_ids = [] + part_keys = [] + parts=[] + PARTS=[] + buffer_size=100 + for part in bytes_from_file(filename,chunksize=1024*2): + part_id = get_random_id() + part_ids.append(part_id) + part_key='/part/'+part_id + part_keys.append(part_key) + parts.append(part) + # PARTS.append(part) + + # self.log('part!:',sys.getsizeof(part)) + #self.set(part_key,part) + + if len(parts)>=buffer_size: + # self.log('setting...') + await self.set(part_keys,parts) + part_keys=[] + PARTS+=parts + parts=[] + + # set all parts + #self.set(part_keys,PARTS) + # self.log('# parts:',len(PARTS)) + if parts and part_keys: + await self.set(part_keys, parts) + + # how many parts? 
+ # self.log('# pieces!',len(part_ids)) + + file_store = {'ext':os.path.splitext(filename)[-1][1:], 'parts':part_ids} + # self.log('FILE STORE??',file_store) + await self.set_json(uri+file_id,file_store) + + # file_store['data'].seek(0) + file_store['id']=file_id + return file_store + + async def download(self,file_id): + self.log('file_id =',file_id) + file_store = await self.get_json_val('/file/'+file_id) + self.log('file_store =',file_store) + if file_store is None: return + + self.log('file_store!?',file_store) + keys = ['/part/'+x for x in file_store['parts']] + + #time,pieces,pub,sign = await self.get_json_val(keys) + pieces = await self.get_json_val(keys) + self.log('pieces = ',pieces) + file_store['parts_data']=pieces + return file_store + + async def flush(self): + #self.log('saving back to db file...') + node = await self.node + node.storage.dump() + # self.log('DONE saving back to db file...') + + + + async def post(self,data,channel,add_to_outbox=True): + post_id=get_random_id() + #tasks = [] + + self.log(f'api.post({data},add_to_outbox={add_to_outbox}) --> ...') + # stop + + # ## add to inbox + post_id = get_random_id() + self.load_keys() + author_privkey = self.keys[data.get('author')] + + self.log('ADDING TO CHANNEL??',channel) + pubkey_channel = self.keys[channel].public_key() + + ## 1) STORE ACTUAL CONTENT OF POST UNDER CENTRAL POST URI + # HAS NO CHANNEL: just one post/msg in a sea of many + # e.g. /post/5e4a355873194399a5b356def5f40ff9 + # does not reveal who cand decrypt it + uri = '/post/'+post_id + json_res = await self.set_json( + uri, + data, #data_channel, + encode_data=True, + encrypt_for_pubkey=pubkey_channel, + private_signature_key=author_privkey + ) + self.log(f'json_res() <- {json_res}') + + + ## 2) Store under the channels a reference to the post, + # as a hint they may be able to decrypt it with one of their keys + add_post_id_as_hint_to_channels = [f'/inbox/{channel}'] + if add_to_outbox: + un=data.get('author') + if un: + add_post_id_as_hint_to_channels += [f'/outbox/{un}'] + tasks = [ + self.append_data(uri,post_id) for uri in add_post_id_as_hint_to_channels + ] + res = await asyncio.gather(*tasks) + if res and all([(d and 'success' in d) for d in res]): + return {'success':'Posted! %s' % post_id, 'post_id':post_id} + + return {'error':'Post unsuccessful'} + + # append_res=await self.append_json(f'/inbox/{channel}',post_id) + # self.log(f'json_res.append_json({channel}) <- {append_res}') + # #tasks.append(task) + + # # add to outbox + # if add_to_outbox: + # un=data.get('author') + # if un: + # append_res = await self.append_json(f'/outbox/{un}', post_id) + # self.log(f'json_res.append_json({un}) <- {append_res}') + # #tasks.append(task) + + #asyncio.create_task(self.flush()) + # return {'success':'Posted! 
%s' % post_id, 'post_id':post_id} + #return {'error':'Post failed'} + + async def get_json_val(self,uri,decode_data=True): + res=await self.get_json(uri,decode_data=decode_data) + self.log('get_json_val() got from get_json():',res,type(res)) + + r=res + if type(res) == dict: + r=res.get('val',None) if res is not None else res + elif type(res) == list: + r=[(x.get('val') if type(x)==dict else x) for x in res if x is not None] + elif type(res) == str: + r=json.loads(res) + self.log(f'get_json_val() --> {r} {type(r)}') + return r + + async def get_post(self,post_id): + self.log(f'api.get_post({post_id}) ?') + post_json = await self.get_json('/post/'+post_id, decode_data=True) + self.log(f'api.get_post({post_id}) --> {post_json}') + return post_json + + async def get_post_ids(self,uri='/inbox/world'): + ## GET POST IDS + self.log(f'api.get_post_ids(uri={uri}) ?') + index = await self.get(uri,decode_data=False) + self.log(f'api.get_post_ids(uri={uri}) <-- api.get()',index) + if not index: return [] + + #index = json.loads(base64.b64decode(index).decode()) + index = [x.decode('utf-8') for x in index.split(BSEP)] + + if index is None: return [] + if type(index)!=list: index=[index] + index = [x for x in index if x is not None] + + self.log(f'api.get_post_ids({uri}) --> {index}') + return index + + async def get_posts(self,uri='/inbox/world'): + + # get IDs + post_ids = await self.get_post_ids(uri) + + # get posts + posts = [self.get_post(post_id) for post_id in post_ids] + return await asyncio.gather(*posts) + + + + + +## func + +def bytes_from_file(filename,chunksize=8192): + with open(filename, 'rb') as f: + while True: + piece = f.read(chunksize) + if not piece: + break + yield piece + +def get_random_id(): + import uuid + return uuid.uuid4().hex + + + + + + + +def test_api(): + + # api.set(['a','b','c'],[1,2,3]) + async def run(): + api = Api() + # await api.connect() + + #await api.set_json('whattttt',{'aaaaa':12222}) + #await api.set_json('whattttt',[111]) + #await api.set_json('whattttt',[111]) + + #val = await api.get_json('whattttt') + + server = await _getdb(api) + await server.set('a',1) + print(await server.get('a')) + await asyncio.sleep(5) + await server.set('a',2) + + print(await server.get('a')) + await asyncio.sleep(5) + + await server.set('a',str([2,3,4,5])) + print(await server.get('a')) + await asyncio.sleep(5) + + val = await server.get('a') + + + + print(f'VAL = {val}') + return val + + asyncio.run(run()) + + +def test_basic(): + import asyncio + from kademlia.network import Server + + #api = Api() + + # not working! + #api.set_json('my key',{'a':'value'}) + + async def run(): + # Create a node and start listening on port 5678 + node = Server() + await node.listen(5678) + + # Bootstrap the node by connecting to other known nodes, in this case + # replace 123.123.123.123 with the IP of another node and optionally + # give as many ip/port combos as you can for other nodes. 
+ await node.bootstrap(NODES_PRIME) + + # set a value for the key "my-key" on the network + await node.set("my-key", "my awesome value") + await node.set("my-key", "my awesome value2") + await node.set("my-key", "my awesome value3") + + + # get the value associated with "my-key" from the network + result = await node.get("my-key") + + print(result) + return result + + res = asyncio.run(run()) + print('res = ',res) + # res = asyncio.run(node.set(key,value)) + # print(res) + +def test_provided_eg(): + import asyncio + from kademlia.network import Server + + async def run(): + # Create a node and start listening on port 5678 + node = Server() + await node.listen(5678) + + # Bootstrap the node by connecting to other known nodes, in this case + # replace 123.123.123.123 with the IP of another node and optionally + # give as many ip/port combos as you can for other nodes. + await node.bootstrap(NODES_PRIME) + + # set a value for the key "my-key" on the network + await node.set("my-key", "my awesome value") + + # get the value associated with "my-key" from the network + result = await node.get("my-key") + + print(result) + + asyncio.run(run()) + + + +async def lonely_selfless_node(): + from api import Api,PORT_LISTEN + API = Api() + return await API.connect_forever(8467) + + +def boot_lonely_selfless_node(port=8467): + API = Api() + asyncio.run(API.connect_forever()) + + +def init_entities(usernames = ['world']): + ## make global entity called world + + #loop=asyncio.new_event_loop() + + async def register(username): + API = Api() + #await API.connect_forever() + #privkey,pubkey = await API.register(username,just_return_keys=True) + + private_key = generate_rsa_key() + public_key = private_key.public_key() + pem_private_key = serialize_privkey(private_key) + pem_public_key = serialize_pubkey(public_key) + + + privkey_fn = os.path.join(KEYDIR_BUILTIN,f'.{username}.key.priv') + pubkey_fn = os.path.join(KEYDIR_BUILTIN,f'.{username}.key.pub') + with open(privkey_fn,'wb') as of: of.write(pem_private_key) + with open(pubkey_fn,'wb') as of: of.write(pem_public_key) + # print(API.keys) + + await API.set_person(username,pem_public_key) + print('done') + + + for un in usernames: + asyncio.run(register(un)) + + +def split_binary(data, sep=BSEP): + seplen = len(BSEP) + res=[] + stack=None + print('!!',data[:4],seplen,sep) + + cutoffs=[] + for i in range(0, len(data)): + seg=data[i:i+seplen] + print(i,seg,sep,stack) + if seg==sep: + # split_piece = data[:i+seplen] + print('!') + cutoff_lasttime = cutoffs[-1][-1] if cutoffs and cutoffs else 0 + cutoff = (cutoff_lasttime-seplen, i) + print(cutoff) + cutoffs.append(cutoff) + stack = data[cutoff[0] if cutoff[0]>0 else 0: cutoff[1]] + print(stack) + res += [stack] + stack = None + + cutoff_lasttime = cutoffs[-1][-1] if cutoffs and cutoffs else 0 + print(cutoff_lasttime) + stack = data[cutoff_lasttime+seplen :] + res+=[stack] + print('RES:',res) + return res + + + + + +if __name__=='__main__': + init_entities() + + # res = split_binary(b'eeeehey||||whatsueep',b'||||') + # print(res) \ No newline at end of file diff --git a/p2p/person.py b/p2p/person.py new file mode 100644 index 0000000..b3b9de1 --- /dev/null +++ b/p2p/person.py @@ -0,0 +1,187 @@ +import os +from pythemis.skeygen import KEY_PAIR_TYPE, GenerateKeyPair +from pythemis.smessage import SMessage, ssign, sverify +from pythemis.exception import ThemisError +from base64 import b64decode,b64encode + +KEY_PATH = os.path.join(os.path.expanduser('~'),'.komrade') + +class Person(object): + + def 
__init__(self,name,api): + self.name=name + self.api=api + + self.privkey=None + self.pubkey=None + + # self.load_or_gen() + + @property + def key_path_pub(self): + return os.path.join(KEY_PATH,'.komrade.'+self.name+'.addr') + + @property + def key_path_priv(self): + return os.path.join(KEY_PATH,'.komrade.'+self.name+'.key') + + @property + def privkey_b64(self): + return b64encode(self.privkey) + + @property + def pubkey_b64(self): + return b64encode(self.pubkey) + ## genearating keys + + def gen_key(self): + keypair = GenerateKeyPair(KEY_PAIR_TYPE.EC) + self.privkey = keypair.export_private_key() + self.pubkey = keypair.export_public_key() + + with open(self.key_path_priv, "wb") as private_key_file: + private_key_file.write(self.privkey_b64) + + with open(self.key_path_pub, "wb") as public_key_file: + public_key_file.write(self.pubkey_b64) + + + ## loading keys from disk + + def load_key(self): + if os.path.exists(self.key_path_pub): + with open(self.key_path_pub) as pub_f: + self.pubkey=b64decode(pub_f.read()) + + if os.path.exists(self.key_path_priv): + with open(self.key_path_priv) as priv_f: + self.privkey=b64decode(priv_f.read()) + + def load_or_gen(self): + self.load_key() + if not self.pubkey: + self.gen_key(passphrase) + + def encrypt(self,msg,for_pubkey_b64): + # handle verification failure + for_pubkey = b64decode(for_pubkey_b64) + encrypted_msg = SMessage(self.privkey, for_pubkey).wrap(msg) + return encrypted_msg + + def decrypt(self,encrypted_msg,from_pubkey_b64): + # handle verification failure + from_pubkey = b64decode(from_pubkey_b64) + decrypted_msg = SMessage(self.privkey, from_pubkey).unwrap(encrypted_msg) + + def sign(self,msg): + signed_msg = b64encode(ssign(self.privkey, msg)) + return signed_msg + + def verify(self,signed_msg_b64,pubkey_b64): + signed_msg = b64decode(signed_msg_b64) + public_key = b64decode(pubkey_b64) + try: + verified_msg = sverify(public_key, signed_msg) + return verified_msg + except ThemisError as e: + print('!!',e) + return None + + + + + + ## PERSONS + async def find_pubkey(self,username): + return await self.api.get('/pubkey/'+username,decode_data=False) + + async def set_pubkey(self): #,username),pem_public_key): + + await self.set('/pubkey/'+self.name,self.public_key,encode_data=False) + # keystr=pem_public_key + # await self.set(b'/name/'+keystr,username,encode_data=False) + + + + + + + ## Register + async def register(self,name,passkey=None,just_return_keys=False): + # if not (name and passkey): return {'error':'Name and password needed'} + import kademlia + try: + person = await self.get_person(name) + except kademlia.network.CannotReachNetworkError: + return {'error':'Network disconnected'} + except NetworkStillConnectingError: + return {'error':'Network still connecting...'} + + keys = self.get_keys() + if person is not None: + self.log('register() person <-',person) + # try to log in + + self.log('my keys',keys) + if not name in keys: + self.log('!! person already exists') + return {'error':'Person already exists'} + + # test 3 conditions + privkey=keys[name] + pubkey=load_pubkey(person) + + if simple_lock_test(privkey,pubkey): + self.username=name + self.log('!! 
logging into',name) + return {'success':'Logging back in...'} + + private_key = generate_rsa_key() + public_key = private_key.public_key() + pem_private_key = serialize_privkey(private_key, password=passkey)# save_private_key(private_key,password=passkey,return_instead=True) + pem_public_key = serialize_pubkey(public_key) + + if just_return_keys: + return (pem_private_key,pem_public_key) + + # save pub key in db + await self.set_person(name,pem_public_key) + # save priv key on hardware + fn_privkey = os.path.join(KEYDIR,f'.{name}.key') + + self.log('priv key =',pem_private_key) + write_key_b(pem_private_key, fn_privkey) + + # good + return {'success':'Person created ...', 'username':name} + + + def load_private_key(self,password): + #if not self.app_storage.exists('_keys'): return {'error':'No login keys present on this device'} + pem_private_key=self.app_storage.get('_keys').get('private') + # self.log('my private key ====',pem_private_key) + try: + return {'success':load_privkey(pem_private_key,password)} + except ValueError as e: + self.log('!!',e) + return {'error':'Incorrect password'} + +if __name__=='__main__': + from p2p import Api + api = Api() + marx = Person('marx',api=api) #,'marx@marxzuckerburg.com',passphrase='twig') + + marx.gen_key() + + elon = Person('elon',api=api) #,'elon@marxzuckerburg.com',passphrase='twig') + elon.gen_key() + + + msg = b'My public key is '+marx.pubkey_b64 + signed_msg = marx.sign(msg) + + print(msg) + print(signed_msg) + + verified_msg = elon.verify(signed_msg, marx.pubkey_b64) + print('verified?',verified_msg) \ No newline at end of file diff --git a/p2p/syfr/__init__.py b/p2p/syfr/__init__.py deleted file mode 100644 index 4874c11..0000000 --- a/p2p/syfr/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .syfr import * -from .loader import * diff --git a/p2p/syfr/loader.py b/p2p/syfr/loader.py deleted file mode 100644 index 52a1e15..0000000 --- a/p2p/syfr/loader.py +++ /dev/null @@ -1,222 +0,0 @@ -import copy -import hashlib -import os -import requests - -from .syfr import * - -DATA_BLOCK_SIZE = 65536 - - -def encrypt_file(file_path, rsa_priv, receiver_pubkey): - contents = open(file_path).read() - - return blocks - - -def masters_from_children(children, rsa_priv, receiver_pubkey): - contents = [] - masters_content = [] - last_master_content = "MASTER:" - - for child in children: - if len(last_master_content) + len(child['id']) > DATA_BLOCK_SIZE - bitsize_marker_length - 32: - masters_content.append(last_master_content) - last_master_content = "MASTER:" - - last_master_content += "\n{0}".format(child['id']) - - if len(last_master_content) > 8: - masters_content.append(last_master_content) - - masters = [encrypt_block(long_pad(master_c), rsa_priv, receiver_pubkey) for master_c in masters_content] - return masters - - -def fetch_block(id): - url = "https://syfr.io/data/v0/{0}".format(id) - print("Fetching block {0} from {1}.".format(id, url)) - return requests.get(url).content - - -def tree_decrypt_block(block, priv): - """ - In parsing a tree, determine if the block is a master block. - If so, return TRUE and a list of ids. - Else it is a leaf-content block, return FALSE and whole contents. - Whole block dict assumed to be passed in. If ID passed in, try to fetch. 
- """ - - if isinstance(block, str): - block = fetch_block(id) - - contents = long_unpad(full_decrypt_block(block, priv)) - if contents[0:7] == "MASTER:": - return True, contents[7:].split('\n')[1:] - else: - return False, contents - - -def tree_decrypt(block, priv, cached_blocks=None): - """ - Decrypts and assembles an entire block tree. - If blocks are provided, checks here for cached blocks, otherwise it fetches - on Internet. - """ - content = "" - cont = True - blocks = [block] - - if isinstance(cached_blocks, list): - cb = dict([(b['id'], b) for b in cached_blocks]) - cached_blocks = cb - level = 0 - - while cont: - new_contents = [] - print("Decrypting {0} blocks at level {1}.".format(len(blocks), level)) - level += 1 - for b in blocks: - if cached_blocks and isinstance(b, str) and b in cached_blocks: - new_contents.append(tree_decrypt_block(cached_blocks[b], priv)) - else: - new_contents.append(tree_decrypt_block(b, priv)) - - blocks = [] - for is_master, con in new_contents: - if not is_master: - content += con - else: - blocks += con - cont = len(blocks) > 0 - - return content - - -def assemble_block_tree(contents, rsa_priv, receiver_pubkey): - content_pieces = divide_contents(contents) - print("Encrypting {0} Leaf Blocks.".format(len(content_pieces))) - leaf_blocks = [encrypt_block(c, rsa_priv, receiver_pubkey) for c in content_pieces] - blocks = copy.copy(leaf_blocks) - n = len(blocks) - new_blocks = blocks - - while n > 1: - new_blocks = masters_from_children(new_blocks, rsa_priv, receiver_pubkey) - print("APPENDING {0} Masters".format(len(new_blocks))) - blocks += new_blocks - n = len(new_blocks) - - return blocks - - -def divide_contents(contents): - subcontents = [] - n = 0 - while n < len(contents): - m = min(len(contents), n + DATA_BLOCK_SIZE - bitsize_marker_length) - subcontent = contents[n:m] - subcontent = long_pad(subcontent, DATA_BLOCK_SIZE) - subcontents.append(subcontent) - n += DATA_BLOCK_SIZE - bitsize_marker_length - return subcontents - - -def unite_contents(content_blocks): - content = "" - for n, x in enumerate(content_blocks): - content += long_unpad(x) - - return content - - -def compute_block_hash(block_dict): - s = "" - for k in sorted(block_dict.keys()): - if k in ['id']: - continue - s += "&{0}:{1}".format(k, block_dict[k]) - return hashlib.sha256(s).hexdigest() - - -def decompose_metadata(metadata): - sender, receiver = [x.split(':')[-1] for x in metadata.split(';')] - return sender, receiver - - -def recompose_metadata(sender, receiver): - # TODO remove this - return "sender_pubkey:{0};receiver_pubkey:{1}".format(sender, receiver) - - -def encrypt_block(content, rsa_priv, receiver_pubkey): - assert len(content) == DATA_BLOCK_SIZE - aes_ciphertext, encry_aes_key, hmac, hmac_signature, iv, metadata = \ - encrypt(content, rsa_priv, receiver_pubkey) - sender, receiver = decompose_metadata(metadata) - response = { - 'aes_ciphertext': aes_ciphertext, - 'encry_aes_key': encry_aes_key, - 'hmac': hmac, - 'hmac_signature': hmac_signature, - 'iv': iv, - 'sender_public_key': sender, - 'receiver_public_key': receiver - } - response['id'] = compute_block_hash(response) - return response - - -def full_decrypt_block(response, receiver_privkey): - assert compute_block_hash(response) == response['id'] - - - return decrypt( - response['aes_ciphertext'], - response['encry_aes_key'], - response['hmac'], - response['hmac_signature'], - receiver_privkey, - response['iv'], - recompose_metadata( - response['sender_public_key'], response['receiver_public_key']) - ) - - -def 
aes_decrypt_block(response, aes_key): - return - -# def aes_rsa_encrypt(message, recipient_rsa_pub): -# aes_key = create_aes_key() -# aes_ciphertext, iv = aes_encrypt(message, aes_key) -# # hmac_key = hashlib.sha256(aes_key).hexdigest() - -# #sender_pubkey = serialize_pubkey(rsa_priv.public_key()) -# # recipient_rsa_pub = load_pubkey(receiver_pubkey) -# #recipient_rsa_pub = receiver_pubkey -# # metadata = loader.recompose_metadata(sender_pubkey, receiver_pubkey) - -# encry_aes_key = rsa_encrypt(aes_key, recipient_rsa_pub) - -# # hmac_list = [metadata, iv, aes_ciphertext, encry_aes_key] -# # hmac = create_hmac(hmac_key, hmac_list) -# # hmac_signature = sign(hmac, rsa_priv) - -# # return aes_ciphertext, encry_aes_key, hmac, hmac_signature, iv, metadata -# return aes_ciphertext, encry_aes_key, iv #, sign - - -# def aes_rsa_decrypt(aes_ciphertext, encry_aes_key, iv, rsa_priv): #, hmac, hmac_signature, rsa_priv, iv, metadata): -# aes_key = rsa_decrypt(encry_aes_key, rsa_priv) - -# # hmac_key = hashlib.sha256(aes_key).hexdigest() -# # hmac_list = [metadata, iv, aes_ciphertext, encry_aes_key] -# # independent_hmac = create_hmac(hmac_key, hmac_list) -# # assert hmac == independent_hmac - -# # sender_pub, receiver_pub = [x.split(':')[-1] for x in metadata.split(';')] -# # sender_pub = load_pubkey(sender_pub) -# #assert verify_signature(hmac_signature, hmac, sender_pub) - -# plaintext = aes_decrypt(aes_ciphertext, aes_key, iv) -# return plaintext diff --git a/p2p/syfr/syfr.py b/p2p/syfr/syfr.py deleted file mode 100644 index 2aacb22..0000000 --- a/p2p/syfr/syfr.py +++ /dev/null @@ -1,233 +0,0 @@ -import base64 -import hashlib -import os - -import cryptography -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives.asymmetric import rsa -from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes -from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives import hashes, hmac -from cryptography.hazmat.primitives.asymmetric import padding -from cryptography.hazmat.primitives.padding import PKCS7 - -from . 
import loader - -bitsize_marker_length = 10 - - -def generate_rsa_key(complexity=4096): - private_key = rsa.generate_private_key( - public_exponent=65537, - key_size=complexity, - backend=default_backend() - ) - return private_key - - -def serialize_privkey(key, password=False): - return base64.b64encode(key.private_bytes( - encoding=serialization.Encoding.DER, - format=serialization.PrivateFormat.PKCS8, - encryption_algorithm=serialization.NoEncryption() if not password else serialization.BestAvailableEncryption(password.encode()) - )) - - -def serialize_pubkey(pubkey): - return base64.b64encode(pubkey.public_bytes( - encoding=serialization.Encoding.DER, - format=serialization.PublicFormat.SubjectPublicKeyInfo - )) - - -def load_pubkey(pubkey_text): - return serialization.load_der_public_key( - base64.b64decode(pubkey_text), - backend=default_backend()) - - -def load_privkey(privkey_text,password=None): - return serialization.load_der_private_key( - base64.b64decode(privkey_text), - password=password.encode() if password else None, - backend=default_backend() - ) - -def load_privkey_fn(fn_privkey,password=None): - with open(fn_privkey,'rb') as f: - privkey=load_privkey(f.read(),password=password) - return privkey - -def load_pubkey_fn(fn_pubkey): - with open(fn_pubkey,'rb') as f: - privkey=load_pubkey(f.read()) - -def write_key(key, file_path='mykey.pem'): - with open(file_path, 'w+') as fh: - fh.write(key) - -def write_key_b(key, file_path='mykey.pem'): - with open(file_path, 'wb') as fh: - fh.write(key) - - -def sign(message, private_key): - signature = private_key.sign( - message, - padding.PSS( - mgf=padding.MGF1(hashes.SHA256()), - salt_length=padding.PSS.MAX_LENGTH), - hashes.SHA256() - ) - # signer.update(message) - return base64.b64encode(signature) #signer.finalize()) - - -def verify_signature(signature, message, pubkey): - try: - verified = pubkey.verify( - base64.b64decode(signature), - message, - padding.PSS( - mgf=padding.MGF1(hashes.SHA256()), - salt_length=padding.PSS.MAX_LENGTH - ), - hashes.SHA256() - ) - return True - except cryptography.exceptions.InvalidSignature as e: - print('!?',e) - return False - return None - # verifier.update(message) - # try: - # verifier.verify() - # return True - # except cryptography.exceptions.InvalidSignature: - # print("Invalid Signature") - # return False - - -def rsa_encrypt(message, pubkey): - ciphertext = pubkey.encrypt( - message, - padding.OAEP( - mgf=padding.MGF1(algorithm=hashes.SHA1()), # SHA1 is suspect - algorithm=hashes.SHA1(), - label=None - ) - ) - return base64.b64encode(ciphertext) - - -def rsa_decrypt(ciphertext, key): - plaintext = key.decrypt( - base64.b64decode(ciphertext), - padding.OAEP( - mgf=padding.MGF1(algorithm=hashes.SHA1()), - algorithm=hashes.SHA1(), - label=None - ) - ) - return plaintext - - -def create_aes_key(complexity=32): - return base64.b64encode(os.urandom(complexity)) - - -def create_hmac(key, message_list): - h = hmac.HMAC(key, hashes.SHA256(), backend=default_backend()) - message = "?".join([str(x) for x in message_list]) - h.update(message) - return base64.b64encode(h.finalize()) - - -def pad(message, blocksize=128): - padder = PKCS7(blocksize).padder() - padded_data = padder.update(message) - padded_data += padder.finalize() - return padded_data - - -def long_pad(message, goal_length=loader.DATA_BLOCK_SIZE): - assert len(message) + bitsize_marker_length <= goal_length - c = 0 - for _ in range(goal_length - len(message) - bitsize_marker_length): - message += "0" - c += 1 - d = 
str(c).zfill(bitsize_marker_length) - message += d - return message - - -def unpad(padded_data, blocksize=128): - unpadder = PKCS7(blocksize).unpadder() - data = unpadder.update(padded_data) - return data + unpadder.finalize() - - -def long_unpad(message): - assert len(message) <= 10**bitsize_marker_length - padding_size = int(message[-bitsize_marker_length:]) - return message[0:-bitsize_marker_length-padding_size] - - -def aes_encrypt(message, key): - iv = os.urandom(16) - cipher = Cipher( - algorithms.AES(base64.b64decode(key)), - modes.CBC(iv), - backend=default_backend() - ) - message = pad(message) - encryptor = cipher.encryptor() - ciphertext = encryptor.update(message) + encryptor.finalize() - return base64.b64encode(ciphertext), base64.b64encode(iv) - - -def aes_decrypt(ciphertext, key, iv): - cipher = Cipher( - algorithms.AES(base64.b64decode(key)), - modes.CBC(base64.b64decode(iv)), - backend=default_backend() - ) - decryptor = cipher.decryptor() - padded = decryptor.update(base64.b64decode(ciphertext)) + decryptor.finalize() - return unpad(padded) - - -def aes_rsa_encrypt(message, recipient_rsa_pub): - aes_key = create_aes_key() - aes_ciphertext, iv = aes_encrypt(message, aes_key) - # hmac_key = hashlib.sha256(aes_key).hexdigest() - - #sender_pubkey = serialize_pubkey(rsa_priv.public_key()) - # recipient_rsa_pub = load_pubkey(receiver_pubkey) - #recipient_rsa_pub = receiver_pubkey - # metadata = loader.recompose_metadata(sender_pubkey, receiver_pubkey) - - encry_aes_key = rsa_encrypt(aes_key, recipient_rsa_pub) - - # hmac_list = [metadata, iv, aes_ciphertext, encry_aes_key] - # hmac = create_hmac(hmac_key, hmac_list) - # hmac_signature = sign(hmac, rsa_priv) - - # return aes_ciphertext, encry_aes_key, hmac, hmac_signature, iv, metadata - return aes_ciphertext, encry_aes_key, iv #, sign - - -def aes_rsa_decrypt(aes_ciphertext, encry_aes_key, iv, rsa_priv): #, hmac, hmac_signature, rsa_priv, iv, metadata): - aes_key = rsa_decrypt(encry_aes_key, rsa_priv) - - # hmac_key = hashlib.sha256(aes_key).hexdigest() - # hmac_list = [metadata, iv, aes_ciphertext, encry_aes_key] - # independent_hmac = create_hmac(hmac_key, hmac_list) - # assert hmac == independent_hmac - - # sender_pub, receiver_pub = [x.split(':')[-1] for x in metadata.split(';')] - # sender_pub = load_pubkey(sender_pub) - #assert verify_signature(hmac_signature, hmac, sender_pub) - - plaintext = aes_decrypt(aes_ciphertext, aes_key, iv) - return plaintext diff --git a/p2p/syfr/test_crypto.py b/p2p/syfr/test_crypto.py deleted file mode 100644 index f4cd83b..0000000 --- a/p2p/syfr/test_crypto.py +++ /dev/null @@ -1,52 +0,0 @@ -import base64 -import os - -from . import syfr as crypto - - -def test_rsa_encrypt_and_decrypt(): - priv = crypto.generate_rsa_key(complexity=512) - message = "Attack at Calais" - ciphertext = crypto.rsa_encrypt(message, priv.public_key()) - assert crypto.rsa_decrypt(ciphertext, priv) == message - - -def test_aes_encrypt_decrypt(): - priv = crypto.create_aes_key() - message = "Sell Watermelons" - ciphertext, iv = crypto.aes_encrypt(message, priv) - assert crypto.aes_decrypt(ciphertext, priv, iv) == message - - -def test_full_encrypt(): - priv1 = crypto.generate_rsa_key() - priv2 = crypto.generate_rsa_key() - target_pubkey = crypto.serialize_pubkey(priv2.public_key()) - message = "Santa is not real." 
- - aes_ciphertext, encry_aes_key, hmac, hmac_signature, iv, metadata = \ - crypto.encrypt(message, priv1, target_pubkey) - - aes_key = crypto.rsa_decrypt(encry_aes_key, priv2) - - assert crypto.aes_decrypt(aes_ciphertext, aes_key, iv) == message - - assert message == \ - crypto.decrypt( - aes_ciphertext, encry_aes_key, hmac, hmac_signature, - priv2, iv, metadata - ) - - -def test_sign(): - priv = crypto.generate_rsa_key(complexity=512) - message = "Secret wish list" - sig = crypto.sign(message, priv) - assert crypto.verify_signature(sig, message, priv.public_key()) - - -def test_long_pad(): - complexity = 10**3 # won't create exactly this length - contents = base64.b64encode(os.urandom(complexity)) - padded = crypto.long_pad(contents, 3*complexity) - assert crypto.long_unpad(padded) == contents diff --git a/p2p/syfr/test_loader.py b/p2p/syfr/test_loader.py deleted file mode 100644 index 0b0d91f..0000000 --- a/p2p/syfr/test_loader.py +++ /dev/null @@ -1,44 +0,0 @@ -import base64 -import hashlib -import math -import os - -from . import syfr as crypto -from . import loader - - -def random_content(pseudolength): - return base64.b64encode(os.urandom(pseudolength)) - - -def test_divide_unite_contents(): - # random contents - complexity = loader.DATA_BLOCK_SIZE * 100 # won't create exactly this length - contents = random_content(complexity) - size = len(contents) - subcontents = loader.divide_contents(contents) - assert len(subcontents) == math.ceil(float(size) / float(loader.DATA_BLOCK_SIZE)) - assert all([len(x) == loader.DATA_BLOCK_SIZE for x in subcontents]) - - united = loader.unite_contents(subcontents) - assert hashlib.sha256(united).hexdigest() == hashlib.sha256(contents).hexdigest() - - -def test_encrypt_block(): - content = crypto.long_pad(random_content(1000)) - rsa_priv = crypto.generate_rsa_key() - priv2 = crypto.generate_rsa_key() - receiver_pubkey = crypto.serialize_pubkey(priv2.public_key()) - - response = loader.encrypt_block(content, rsa_priv, receiver_pubkey) - assert loader.full_decrypt_block(response, priv2) == content - - -def test_assemble_block_tree(): - contents = random_content(10**6) - rsa_priv = crypto.generate_rsa_key() - priv2 = crypto.generate_rsa_key() - receiver_pubkey = crypto.serialize_pubkey(priv2.public_key()) - blocks = loader.assemble_block_tree(contents, rsa_priv, receiver_pubkey) - derived_contents = loader.tree_decrypt(blocks[-1], priv2, cached_blocks=blocks) - assert derived_contents == contents diff --git a/p2p/udp2tcp.py b/p2p/udp2tcp.py deleted file mode 100644 index a15344c..0000000 --- a/p2p/udp2tcp.py +++ /dev/null @@ -1,80 +0,0 @@ -""" -1-way UDP to TCP relay. 
- -Test with netcat -1) Run TCP server: - nc -l 999 -2) Run UDP proxy: - python udpproxy.py -3) Run UDP client: - nc -u 127.0.0.1 8888 -4) Type some strings, type enter, they should show on the TCP server -""" - -import asyncio - - -class ProxyDatagramProtocol(asyncio.DatagramProtocol): - - def __init__(self, remote_address): - self.remote_address = remote_address - self.remotes = {} - self.transport = None - super().__init__() - - def connection_made(self, transport): - self.transport = transport - - def datagram_received(self, data, addr): - if addr in self.remotes: - self.remotes[addr].transport.write(data) - return - loop = asyncio.get_event_loop() - self.remotes[addr] = RemoteStreamProtocol(self, data) - coro = loop.create_connection( - lambda: self.remotes[addr], host=self.remote_address[0], port=int(self.remote_address[1])) - asyncio.ensure_future(coro) - - -class RemoteStreamProtocol(asyncio.Protocol): - - def __init__(self, proxy, data): - self.proxy = proxy - self.data = data - self.transport = None - super().__init__() - - def connection_made(self, transport): - self.transport = transport - self.transport.write(self.data) - - def data_received(self, data, _): - pass - - def eof_received(self): - pass - - -def start_datagram_proxy(bind, port, remote_host, remote_port): - loop = asyncio.get_event_loop() - protocol = ProxyDatagramProtocol((remote_host, remote_port)) - return (yield from loop.create_datagram_endpoint(lambda: protocol, local_addr=(bind, port))) - - -def main(bind='0.0.0.0', port=8888, remote_host='127.0.0.1', remote_port=9999): - loop = asyncio.get_event_loop() - print("Starting datagram proxy...") - coro = start_datagram_proxy(bind, port, remote_host, remote_port) - transport, _ = loop.run_until_complete(coro) - print("Datagram proxy is running on " + str(port)) - try: - loop.run_forever() - except KeyboardInterrupt: - pass - print("Closing transport...") - transport.close() - loop.close() - - -if __name__ == '__main__': - main() \ No newline at end of file diff --git a/script/bootstrap b/script/bootstrap index a27de63..c1a169e 100644 --- a/script/bootstrap +++ b/script/bootstrap @@ -4,7 +4,7 @@ pyenv install --skip-existing VENV="${PWD##*/}.venv" VENV=${VENV#-} -python -m venv $VENV +python3 -m venv $VENV . $VENV/bin/activate -python -m pip install -U pip wheel -python -m pip install -r requirements.txt +python3 -m pip install -U pip wheel +python3 -m pip install -r requirements.txt
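
A minimal sketch of the append-style index used by append_data() and get_post_ids() in the api module above: each inbox/outbox key holds a single bytes blob, new post ids are concatenated onto it with the BSEP separator, and readers split the blob back into ids. The in-memory dict below is only a stand-in for the kademlia node; the real code awaits node.get()/node.set() instead.

BSEP = b'||||||||||'
store = {}  # stand-in for the DHT node

def append_data(uri, bdata):
    # accumulate values under one key, separated by BSEP
    sofar = store.get(uri)
    store[uri] = bdata if sofar is None else sofar + BSEP + bdata

def get_post_ids(uri):
    # split the blob back into individual ids
    index = store.get(uri)
    return [] if not index else [x.decode('utf-8') for x in index.split(BSEP)]

append_data('/inbox/world', b'post-id-1')
append_data('/inbox/world', b'post-id-2')
print(get_post_ids('/inbox/world'))  # ['post-id-1', 'post-id-2']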
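
The upload()/download() pair spreads a file across many DHT keys: each chunk goes under '/part/<random id>' and a small JSON index with the extension and the ordered part ids goes under '/file/<file id>'. A rough sketch of that layout, again with a plain dict standing in for the network; the 2048-byte chunk size and the 'bin' extension are illustrative only.

import json, uuid

store = {}   # stand-in for the DHT node
CHUNK = 2048

def upload(data: bytes):
    part_ids = []
    for i in range(0, len(data), CHUNK):
        pid = uuid.uuid4().hex
        store['/part/' + pid] = data[i:i + CHUNK]   # one DHT entry per chunk
        part_ids.append(pid)
    file_id = uuid.uuid4().hex
    store['/file/' + file_id] = json.dumps({'ext': 'bin', 'parts': part_ids})
    return file_id

def download(file_id):
    index = json.loads(store['/file/' + file_id])
    return b''.join(store['/part/' + pid] for pid in index['parts'])

fid = upload(b'x' * 5000)
assert download(fid) == b'x' * 5000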
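
p2p/person.py leans on pythemis for both signing (ssign/sverify) and sealed messages (SMessage.wrap/unwrap). A self-contained round trip with freshly generated EC keypairs, assuming pythemis is installed; the key storage and base64 wrapping done by the Person class are omitted here.

from pythemis.skeygen import KEY_PAIR_TYPE, GenerateKeyPair
from pythemis.smessage import SMessage, ssign, sverify

alice = GenerateKeyPair(KEY_PAIR_TYPE.EC)
bob = GenerateKeyPair(KEY_PAIR_TYPE.EC)

# sign/verify: bob checks a message really came from alice
signed = ssign(alice.export_private_key(), b'hello')
assert sverify(alice.export_public_key(), signed) == b'hello'

# seal/open: alice encrypts for bob, bob decrypts with alice's public key
sealed = SMessage(alice.export_private_key(), bob.export_public_key()).wrap(b'secret')
plain = SMessage(bob.export_private_key(), alice.export_public_key()).unwrap(sealed)
assert plain == b'secret'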
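
The syfr module removed by this patch pads a block by appending '0' characters and then a 10-character, zero-filled count of how many were added; unpadding reads the trailing count and strips it off. The same logic as a standalone string-based pair, mirroring the deleted long_pad/long_unpad helpers.

BITSIZE = 10  # bitsize_marker_length in the removed module

def long_pad(message, goal_length):
    # append pad characters, then record how many were added in a fixed-width suffix
    assert len(message) + BITSIZE <= goal_length
    pad_len = goal_length - len(message) - BITSIZE
    return message + '0' * pad_len + str(pad_len).zfill(BITSIZE)

def long_unpad(message):
    # read the suffix, then drop both the suffix and the padding it counts
    pad_len = int(message[-BITSIZE:])
    return message[:-BITSIZE - pad_len]

padded = long_pad('hello', 32)
assert len(padded) == 32 and long_unpad(padded) == 'hello'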
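
The removed loader splits a payload into fixed-size leaf blocks, then builds "master" blocks whose contents are "MASTER:" plus a newline-separated list of child block ids, and tree_decrypt() walks back down from the root. Stripped of the encryption, the tree shape looks roughly like this; block ids, the blocks dict, and the single-master shortcut (the real loader also caps master block size) are all illustrative assumptions.

import uuid

blocks = {}  # stand-in for fetched/stored blocks

def store(content):
    bid = uuid.uuid4().hex
    blocks[bid] = content
    return bid

def build_tree(payload, leaf_size=4):
    # leaf blocks hold raw content; master blocks hold child ids
    ids = [store(payload[i:i + leaf_size]) for i in range(0, len(payload), leaf_size)]
    while len(ids) > 1:
        ids = [store('MASTER:' + '\n' + '\n'.join(ids))]
    return ids[0]

def read_tree(root_id):
    content, todo = '', [root_id]
    while todo:
        block = blocks[todo.pop(0)]
        if block.startswith('MASTER:'):
            todo += block[len('MASTER:'):].split('\n')[1:]
        else:
            content += block
    return content

root = build_tree('hello block tree world')
assert read_tree(root) == 'hello block tree world'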