diff --git a/GPoHC.py b/GPoHC.py
index 19d73e5..40aef3b 100644
--- a/GPoHC.py
+++ b/GPoHC.py
@@ -1,32 +1,44 @@
-from gan import Gan
+from imports import *
+import gan
class GPoHC():
- def __init__(self):
+ def __init__(self, node, name):
self.strenght = 50
- self.model = Gan()
+ self.node = node
+ self.model = object()
+ self.generator = gan.Generator()
+ self.discriminator = gan.Descriminator()
+ self.new = True
+
+ def initialize(self):
+ if self.generator.new == True:
+ self.generator.train(self.node.chain)
+
+ if self.discriminator.new == True:
+ self.discriminator.train(self.node.chain)
def create_consensus(self, block, chain):
hash = block.calculate_hash()
SOURCE_BLOCKS = []
- for validator_block in range(self.strenght):
- if self.validator_online(chain[-self.strenght[validator_block]]):
+ for validator_block in range(min([self.strenght, len(chain)])):
+ if self.validator_online(chain[validator_block]):
SOURCE_BLOCKS.append(validator_block)
SOURCE_SEED = ""
for block in SOURCE_BLOCKS:
SOURCE_SEED.append(block.seed)
- SEED_ROOT = ""
- for root in self.collect_roots(SOURCE_BLOCKS):
- SEED_ROOT = self.add_by_each_byte(SOURCE_SEED, root)
+ SEED_ROOT = self.collect_seed_root(SOURCE_SEED)
+
+ SUPER_SEED = self.generator.gen(SEED_ROOT)
+
+ score, scores = self.score_super_seed(SUPER_SEED)
+ block.validators = scores
- SEED_ROOT_PROCESSED = []
- for char in SEED_ROOT:
- SEED_ROOT_PROCESSED.append([int(y) for y in list("".join(format(ord(x), 'b') for x in str(char)))])
+ SEED = rsa.encrypt(bytes(SUPER_SEED[:117]), self.node.private_key)
- SUPER_SEED = self.model.generator_forward(SEED_ROOT_PROCESSED)
- self.broadcast_super_seed(SUPER_SEED)
+ return True, SEED
def add_by_each_byte(self, a, b):
result = ""
@@ -35,6 +47,46 @@ def add_by_each_byte(self, a, b):
return result
-
def validator_online(self, block):
- pass
\ No newline at end of file
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.connect(block.creator)
+ sock.send(b"conn:ack")
+ if sock.recv(8) == b"conn:ack":
+ return True
+ return False
+
+ def roots_broadcast_handler(self, sock):
+ return list(sock.recv(128))
+
+ def collect_seed_root(self, source_seed):
+ roots = self.node.p2pInterface.broadcast([
+ b"seed:rot",
+ f'{len(source_seed):05d}'.encode(),
+ source_seed.encode()
+ ], handler=self.roots_broadcast_handler)
+
+ out = [0]*128
+ for root in roots:
+ for byte in range(128):
+ out[byte] += root[byte]
+ out[byte] = out[byte] % 255
+
+ return out
+
+ def seed_score_broadcast_handler(self, sock):
+ score_len = int(sock.recv(2).decode())
+ score = (sock.getpeername(), float(sock.recv(score_len).decode()))
+ return score
+
+ def score_super_seed(self, super_seed):
+ scores = self.node.p2pInterface.broadcast([
+ b"seed:scr",
+ f'{len(super_seed):05d}'.encode(),
+ bytes(super_seed)
+ ], handler=self.seed_score_broadcast_handler)
+
+ total_score = 0
+ for score in scores:
+ total_score += score[1]
+
+ return total_score, scores
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..a9a0409
--- /dev/null
+++ b/README.md
@@ -0,0 +1,124 @@
+# Funzochain
+
+Funzochain is a blockchain implementation and the proof of concept for our GPoHC Consensus Mechanism, the Generative Proof of History and Co-operation. We have developed GPoHC with a vision to make block creation and validation faster without compromising decentralisation or security, and to make sure all nodes are equal participants regardless of processing power and other barriers.
+
+---
+
+## Creating a node
+
+To create a node, clone this repository and create a file named `run.py`:
+
+```python
+from node import node
+import socket
+import rsa
+
+#create an identity (Run Only Once)
+with open("identity", "wb") as f:
+    f.write(rsa.newkeys(1024)[1].save_pkcs1())
+
+#load an identity
+with open('identity', mode='rb') as f:
+ identity = rsa.PrivateKey.load_pkcs1(f.read())
+
+
+node = node(identity, (socket.gethostname(),80), "node-name")
+
+if __name__ == "__main__":
+ node.run()
+```
+
+---
+
+## Generative Proof Of History And Co-operation
+
+### Block Structure
+
+```python
+{
+ “creator”: “0x00000.....”,
+ “data”: [...],
+ “timestamp”: 1652949456,
+ “seed”: “abcdef”,
+ “validators”: [...],
+ “previous_hash”: “...”
+}
+```
+
+### Creation Of A New Block
+
+Here is a hypothetical situation where node 0x007 wants to propose a new block to the blockchain.
+
+##### Current Blockchain
+
+```mermaid
+graph TD;
+ Node1([Node 0x001])--Created-->B;
+ Node2([Node 0x002])--Created-->C;
+ Node3([Node 0x003])--Created-->D;
+ Node4([Node 0x004])--Created-->E;
+ Node5([Node 0x005])--Created-->F;
+ Node6([Node 0x006])--Created-->G;
+ Node7([Node 0x007])-.Wants To Create.->H[Block 7];
+
+ A--Seed-->Seed1(AAAA...);
+ B--Seed-->Seed2(BBBB...);
+ C--Seed-->Seed3(CCCC...);
+ D--Seed-->Seed4(DDDD...);
+ E--Seed-->Seed5(EEEE...);
+ F--Seed-->Seed6(FFFF...);
+ G--Seed-->Seed7(GGGG...);
+
+ A[Genesis]---B;
+ B[BlocK 1]---C;
+ C[Block 2]---D;
+ D[Block 3]---E;
+ E[Block 4]---F;
+ F[Block 5]---G;
+ G[Block 6]-.-H;
+```
+
+##### Network Situation
+
+All Nodes Except 0x005 Are Online.
+
+
+
+##### How The Node Proposes A New Block
+
+1. The Node Looks Up The Last `x` Blocks In The Blockchain From The End.
+
+2. If The Creator Of A Block Is Online, It is Added To A List Called `Source Blocks`.
+
+ Here:
+
+```mermaid
+graph TD;
+ A[Block 1]-->B[Block 2]-->C[Block 3]-->D[Block 4]-->F[Block 6]
+```
+
+3. Seeds Of All The Blocks In ‘Source Blocks’ Are Joined In Order As A String Called `Source Seed`.
+
+ Here: 'AAAABBBBCCCCDDDDFFFFGGGG'.
+
+4. Node 0x007 Now Broadcasts The `Source Seed` To All The Creators Of The Blocks In `Source Blocks`. All The Nodes Will Likely Already Have The Same `Source Seed` And Will Have Encrypted It With Their Private Key As `Seed Root Segment`. If Both The Versions Of `Source Seed` Are The Same, The Node Will Respond With The `Seed Root Segment`.
+
+5. Node 0x007 Now Takes All Values Of `Seed Root Segment` And Saves As `Seed Root` After Adding Each Character Individually Using:
+
+ ```python
+ seed_root = []
+ for segment in seed_root_segments:
+ seed_root.extend([int(y) for y in list("".join(format(ord(x), 'b') for x in str(segment)))])
+ ```
+
+6. Node 0x007 Then Passes The Seed Root Array (Length 128 Bytes) Through Its GAN's Generator And Stores The Output As `Super Seed`.
+
+7. Node 0x007 Then Encrypts The First 117 Bytes Using Its Private Key And Uses It As The `seed` Attribute For The Block.
+
+8. Node 0x007 Now Broadcasts The `Super Seed` To All The Validators, Which Then Return A Score After Passing It Through Their GAN's Discriminator. Node 0x007 Then Sums Up All The Scores.
+
+9. However, Node 0x003 Has Already Created And Added A Block. Node 0x007 Then Polls All The Active Nodes For 0x003's Last Score, And Finds Out That It Had Gotten A Higher Accumulated Score Than 0x003.
+
+10. After This Node 0x007 Requests An Override On The Last Block (Timeframe Of 5 Seconds For Overriding). More Than 50% Nodes Agree And Update Their Blockchains.
+
+Since Node 0x007 Has Now Become A Creator, It Will Become A Validator For The Next `x` Blocks (Only If Active), And Will Receive Validator Rewards Without Actually Creating A Block.
diff --git a/Templates/index.html b/Templates/index.html
new file mode 100644
index 0000000..5db52db
--- /dev/null
+++ b/Templates/index.html
@@ -0,0 +1,12 @@
+
+
+
+
+
+
+ Host Node WebUI
+
+
+
+
+
\ No newline at end of file
diff --git a/block.py b/block.py
index ae6b979..176898e 100644
--- a/block.py
+++ b/block.py
@@ -26,11 +26,11 @@ def calculate_hash(self):
@property
def serialised(self):
- return pickle.dumps(self.__dict__)
+ return pickle.dumps(self.__dict__).hex()
def deserialised(self, data):
block = self
- block.__dict__ = pickle.loads(data)
+ block.__dict__ = pickle.loads(bytes.fromhex(data))
return block
class LogBlock(Block):
diff --git a/blockchain.py b/blockchain.py
index 3c37356..d0de6e8 100644
--- a/blockchain.py
+++ b/blockchain.py
@@ -3,9 +3,10 @@
import json
class Blockchain(list):
- def __init__(self, mainfile, p2pInterface):
+ def __init__(self, node, mainfile, p2pInterface):
self.chain = list()
self.index = dict()
+ self.node = node
self.mainfile = mainfile
self.p2pInterface = p2pInterface
self.load()
@@ -15,58 +16,69 @@ def __getitem__(self, key):
def append(self, item):
if isinstance(item, Block):
+ item.seed, winner = self.node.consensus.create_consensus(item, self)
+ if not winner:
+ raise Exception("Consensus Lost. Failed To Add Block.")
if item.hash in self.index:
raise ValueError("Block already exists")
self.index[item.calculate_hash()] = len(self.chain)
self.chain.append(item)
+ print(f"Added block {item.hash[:6]} to chain")
self.save()
block_data = item.serialised
self.p2pInterface.broadcast([
b"blck:new",
f'{len(block_data):05d}'.encode(),
- block_data
+ block_data.encode()
])
else:
raise TypeError("Blockchain can only append blocks")
def save(self):
+ old_file = self.currfile
block_index = 0
- for iter_ in range(len(self.chain)):
- with open(self.currfile, "wb") as f:
- print(self.currfile)
- for item in self.chain[block_index:50]:
+ with open(self.currfile, "r") as f:
+ file_size = len(f.readlines())
+ for iter_ in range(int(len(self.chain)/50)+1):
+ with open(self.currfile, "w") as f:
+ for item in self.chain[block_index:(50-file_size)]:
f.write(item.serialised)
- f.write("\n".encode())
+ f.write("\n")
if len(self.chain) > 50*(iter_+1):
- f.write("\n\n".encode())
- print(50*(iter_+1))
- print((iter_+1)*50)
+ f.write("\n\n")
self.currfile = self.chain[(iter_+1)*50].calculate_hash()
- f.write(self.currfile.encode())
- self.load()
-
- self.currfile = self.mainfile
+ f.write(self.currfile)
+ file_size = 0
+ if old_file != self.currfile:
+ self.load()
def load(self):
self.currfile = self.mainfile
self.index = dict()
- while True:
- try:
+ if os.path.exists(self.currfile):
+ print("Loading blockchain")
+ while True:
chain_lenght = 0
- with open(self.currfile, "rb") as f:
+ with open(self.currfile, "r") as f:
for line in f.readlines():
- if line != b"\n":
- block = Block(self, "").deserialised(line)
+ if line != "\n":
+ print(line)
+ block = Block(self.node, "").deserialised(line)
+ print(block.__dict__)
self.index[block.hash] = chain_lenght
chain_lenght += 1
self.chain.append(block)
- if f.readline()[-2] == f.readline()[-3] == b"\n":
- print(f.readline[-1])
- self.currfile = f.readline()[-1].decode()
- else:
+ else:
+ break
+ print(f.readlines())
+ if len(f.readlines()) > 50:
+ self.currfile = f.readline()[-1]
break
- except:
- break
+ else:
+ return
+ else:
+ print("Started A New Chain")
+ open(self.currfile, "w")
def __setitem__(self, key, value):
diff --git a/gan.py b/gan.py
index 0b2af83..f21c55b 100644
--- a/gan.py
+++ b/gan.py
@@ -1,2 +1,167 @@
-class Gan():
- pass
\ No newline at end of file
+import torch
+import torch.nn as nn
+import pickle
+import rsa
+import pandas as pd
+import numpy as np
+from sklearn.svm import SVC
+import os
+
+class Generator(nn.Module):
+ def __init__(self):
+ super(Generator, self).__init__()
+ self.encoder = nn.Sequential(
+ nn.Linear(128, 80),
+ nn.ReLU(True),
+ nn.Linear(80, 64),
+ nn.ReLU(True),
+ nn.Linear(64, 32),
+ nn.ReLU(True),
+ nn.Linear(32, 8)
+ )
+
+ self.decoder = nn.Sequential(
+ nn.Linear(8, 32),
+ nn.ReLU(True),
+ nn.Linear(32, 64),
+ nn.ReLU(True),
+ nn.Linear(64, 80),
+ nn.ReLU(True),
+ nn.Linear(80, 128)
+ )
+
+ self.new = False
+
+ try:
+ self.load_state_dict(torch.load('gan-gen'))
+ except:
+ self.new = True
+
+ def train(self, chain):
+ self.learning_rate = 1e-3
+ self.criterion = nn.MSELoss()
+ self.optimizer = torch.optim.Adam(self.parameters(), lr=self.learning_rate, weight_decay=1e-5)
+
+ input_ = []
+ output_ = []
+ for i in range(len(chain)-1):
+ if len(input_) == 512:
+ break
+ input_.append(list(chain[i].seed))
+ print(i)
+ output_.append(list(chain[i+1].seed))
+
+ self.inp=torch.tensor(input_,dtype=torch.float32)
+ self.out=torch.tensor(output_,dtype=torch.float32)
+ self.test_inp=self.inp[384:]
+ self.test_out=self.out[384:]
+ self.inp=self.inp[:384]
+ self.out=self.out[:384]
+
+ for epoch in range(5000):
+ output = self(self.inp)
+ loss = self.criterion(output , self.out)
+ self.optimizer.zero_grad()
+ loss.backward()
+ self.optimizer.step()
+
+ print('epoch [{}/{}], loss:{:.4f}'.format(epoch + 1, 5000, loss.item()))
+
+ torch.save(self.state_dict(),'gan-gen')
+
+ def forward(self,x):
+ x = self.encoder(x)
+ x = self.decoder(x)
+ return x
+
+ def gen(self, seed_root):
+ tensor_=torch.tensor(list(seed_root), dtype=torch.float32)
+ score = self(tensor_)
+ score = torch.round(score)
+ score = score.detach().numpy()
+ return score
+
+class Descriminator():
+ def __init__(self):
+ self.new = False
+
+ try:
+ self.classifier = pickle.load(open('gan-desc', 'rb'))
+ except:
+ self.new = True
+
+ def train(self, chain):
+ input_ = []
+ output_ = []
+ for i in range(len(chain)-1):
+ if len(input_) == 512:
+ break
+ input_.append(list(chain[i].seed))
+ print(i)
+ output_.append(list(chain[i+1].seed))
+
+ self.inp=torch.tensor(input_,dtype=torch.float32)
+ self.out=torch.tensor(output_,dtype=torch.float32)
+ self.test_inp=self.inp[384:]
+ self.test_out=self.out[384:]
+ self.inp=self.inp[:384]
+ self.out=self.out[:384]
+
+ generator = Generator()
+
+ output_ = generator(self.inp)
+ output_ = torch.round(output_)
+ output_=output_.detach().numpy()
+
+ temp=output_
+ df1=pd.DataFrame(temp)
+ df1['label']=0
+
+ temp=self.out.detach().numpy()
+ df2=pd.DataFrame(temp)
+ df2['label']=1
+
+ df=df1.append(df2, ignore_index = True)
+ df=df.sample(frac=1)
+
+ x_train=df.drop(['label'], axis = 1)
+ y_train=df['label']
+
+ self.classifier = SVC(probability=True).fit(x_train, y_train)
+ pred_svm = self.classifier.predict_proba(x_train)
+ print("Classification accuracy : ",self.classifier.score(x_train,y_train))
+ pickle.dump(self.classifier, open('classifier', 'wb'))
+
+ def score(self,data_):
+ temp=np.array(data_)
+ temp=temp.reshape(1, -1)
+ result = self.classifier.predict_proba(temp)
+ return result[0][1]
+
+class MockGAN(object):
+ def __init__(self, name):
+ if not os.path.exists('mockgan'):
+ open("mockgan", "w").close()
+
+ def initialize(self):
+ pass
+
+ def feedData(self, chain):
+ pass
+
+ def train(self):
+ pass
+
+ def trainClassifier(self):
+ pass
+
+ def clf_score(self,data_):
+ return str(list(data_)[0]/1000).encode()
+
+ def generator_forward(self,data_):
+ import random, string
+
+ pub, priv = rsa.newkeys(1024)
+ msg = ''.join(random.choices(string.ascii_uppercase + string.digits, k=117))
+
+ return [x for x in rsa.encrypt(msg.encode(), priv)]
\ No newline at end of file
diff --git a/imports.py b/imports.py
index b22ea97..91174d8 100644
--- a/imports.py
+++ b/imports.py
@@ -4,4 +4,5 @@
import socket
import select
import rsa
-import copy
\ No newline at end of file
+import copy
+import os
\ No newline at end of file
diff --git a/network_handler.py b/network_handler.py
index 058a509..a21d4fd 100644
--- a/network_handler.py
+++ b/network_handler.py
@@ -1,3 +1,5 @@
+from imports import *
+
comm_types = {
"conn": [
"req",
@@ -19,7 +21,8 @@
"int",
],
"seed": [
- "scr"
+ "scr",
+ "rot",
]
}
@@ -34,49 +37,56 @@ def parse_data(data):
return None, None
class connection_handler():
- def req(self, peer):
- remote_host = peer.recv(64).decode().split(":")
+ def req(self, peer, node):
+ addr_lenght = peer.recv(2)
+ remote_host = peer.recv(int(addr_lenght)).decode().split(":")
remote_host = (remote_host[0],int(remote_host[1]))
self.addPeer(remote_host,False)
- def ack(self, peer):
+ def ack(self, peer, node):
peer.send(b"conn:ack")
- def drp(self, peer):
+ def drp(self, peer, node):
self.removePeer(peer.getpeername())
class block_handler():
- def new(self, peer):
+ def new(self, peer, node):
data_lenght = peer.recv(5)
data = peer.recv(int(data_lenght))
return "blck", data
- def req(self, peer):
+ def req(self, peer, node):
pass
- def ack(self, peer):
+ def ack(self, peer, node):
pass
class transaction_handler():
- def new(self, peer):
+ def new(self, peer, node):
pass
- def req(self, peer):
+ def req(self, peer, node):
pass
- def ack(self, peer):
+ def ack(self, peer, node):
pass
class seed_handler():
- def scr(self, peer):
+ def scr(self, peer, node):
seed = peer.recv(512)
- peer.send(self.score_seed(seed))
+ score = node.consensus.model.descriminator.score(seed)
+ peer.send(f'{len(score):02d}'.encode())
+ peer.send(score)
+ node.seed_store[seed] = score
- def score_seed(self, seed):
- return "0"
+ def rot(self, peer, node):
+ seed_lenght = int(peer.recv(5))
+ seed_root_segment = rsa.encrypt(peer.recv(seed_lenght), node.private_key)
+ peer.send(seed_root_segment)
handlers = {
"conn": connection_handler,
"blck": block_handler,
- "trnx": transaction_handler
+ "trnx": transaction_handler,
+ "seed": seed_handler,
}
\ No newline at end of file
diff --git a/node.py b/node.py
index 628beb1..2fdfaae 100644
--- a/node.py
+++ b/node.py
@@ -1,50 +1,40 @@
from queue import Queue
-from gan import Gan
+from gan import MockGAN as GAN
from block import Block, LogBlock
from p2p import p2pInterface
from blockchain import Blockchain
+from GPoHC import GPoHC
import threading
import sys
-class node():
+class Node():
def __init__(self, private_key, host, name="chain"):
self.host = host
self.private_key = private_key
self.address = private_key
self.p2pInterface = p2pInterface(self)
- self.chain = Blockchain(name,self.p2pInterface)
-
- def initialize_gan(self):
- self.gan = Gan(self.host, self.private_key)
- if not self.gan.is_initialized():
- self.p2pInterface.sync_chain(self)
- block = LogBlock(self, "Initializing Gan")
- self.p2pInterface.broadcast("blck:new".encode())
- self.p2pInterface.broadcast(block.serialised)
- self.gan.train(self.chain)
-
- block = LogBlock(self, "Gan initialized")
- self.p2pInterface.broadcast("blck:new".encode())
- self.p2pInterface.broadcast(block.serialised)
+ self.consensus = GPoHC(self, name)
+ self.chain = Blockchain(self, name,self.p2pInterface)
+ self.seed_store = {}
@staticmethod
- def runtime(first_run=True):
+ def runtime(self, first_run=True):
pass
def run(self):
try:
data_queue = Queue()
- thread = threading.Thread(target=self.p2pInterface.listen, args=(data_queue,), daemon=True)
- thread.start()
+ self.main_thread = threading.Thread(target=self.p2pInterface.listen, args=(data_queue,), daemon=True)
+ self.main_thread.start()
first_run = True
- while True:
- self.runtime(first_run)
+ while self.p2pInterface.listening:
+ self.runtime(self, first_run)
first_run = False
if data_queue.qsize() > 0:
data_type, data = data_queue.get(timeout=1)
if data_type == "blck":
print("Received Block Data")
- block = Block(self,"").deserialised(data)
+ block = Block(self,"").deserialised(data.decode())
if not block.valid():
try:
self.chain.append(block)
@@ -55,4 +45,12 @@ def run(self):
print(f"Block {block.hash[:6]} invalid")
except KeyboardInterrupt as e:
print("Shutting down")
- sys.exit(0)
\ No newline at end of file
+ sys.exit(0)
+
+ def shutdown(self):
+ self.p2pInterface.listening = False
+ self.main_thread.terminate()
+
+ def restart(self):
+ self.shutdown()
+ self.run()
\ No newline at end of file
diff --git a/p2p.py b/p2p.py
index f39a34f..ff6ce4d 100644
--- a/p2p.py
+++ b/p2p.py
@@ -11,26 +11,49 @@ def __init__(self, node):
def addPeer(self, peer, ping=True):
if peer not in self.peerList.keys():
- self.peerList[peer] = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- self.peerList[peer].connect(peer)
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.connect(peer)
+ self.peerList[sock.getpeername()] = sock
if ping:
- self.peerList[peer].send(b"conn:req")
- self.peerList[peer].send(f"{self.node.host[0]}:{self.node.host[1]}".encode())
+ self.peerList[sock.getpeername()].send(b"conn:req")
+ message = f"{self.node.host[0]}:{self.node.host[1]}"
+ self.peerList[sock.getpeername()].send(f"{len(message):02d}".encode())
+ self.peerList[sock.getpeername()].send(message.encode())
return True
+ else:
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.connect(peer)
+ self.peerList[sock.getpeername()] = sock
+
+
def removePeer(self, peer):
self.peerList[peer].close()
del self.peerList[peer]
- def broadcast(self, message):
- for sock in self.peerList.values():
- print(sock.getpeername())
- if type(message) == bytes:
- sock.send(message)
- elif type(message) == list:
- for m in message:
- print(m, flush=True)
- sock.send(m)
+ def broadcast(self, message, handler=None, handler_args=[]):
+ print(self.peerList.items())
+ returns = []
+ for addr, sock in self.peerList.items():
+ try:
+ print(sock.getpeername())
+ if type(message) == bytes:
+ sock.send(message)
+ elif type(message) == list:
+ for m in message:
+ sock.send(m)
+ if handler:
+ returns.append(handler(sock, *handler_args))
+ sock.close()
+ sock = -1
+ self.addPeer(addr, False)
+ except ConnectionResetError:
+ print(f"Peer {sock.getpeername()} Disconnected", flush=True)
+ self.removePeer(sock.getpeername())
+ except OSError:
+ print(f"Peer {sock.getpeername()} Disconnected", flush=True)
+ self.removePeer(sock.getpeername())
+ return returns if handler else None
def sync_chain(self, node):
shuffled_nodes = list(self.peerList.keys())
@@ -75,11 +98,20 @@ def listen(self, queue):
for peer in self.peerList.values():
read_sockets.append(peer)
for sock in select.select(read_sockets, [], [])[0]:
- if sock == self.open_port:
- sock, addr = self.open_port.accept()
- data = sock.recv(8)
- if len(data) == 8:
- class_, type_ = network_handler.parse_data(data)
- data = getattr(network_handler.handlers[class_],type_)(self, sock)
- if data:
- queue.put(data)
\ No newline at end of file
+ try:
+ if sock == self.open_port:
+ sock, addr = self.open_port.accept()
+ print("Connected to", addr, flush=True)
+ data = sock.recv(8)
+ if len(data) == 8:
+ class_, type_ = network_handler.parse_data(data)
+ data = getattr(network_handler.handlers[class_],type_)(self, sock, self.node)
+ if data:
+ queue.put(data)
+ except ConnectionResetError:
+ print(f"Peer {sock.getpeername()} Disconnected", flush=True)
+ self.removePeer(sock.getpeername())
+
+ def stop(self):
+ self.listening = False
+ self.open_port.close()
\ No newline at end of file
diff --git a/site.py b/site.py
new file mode 100644
index 0000000..6b7cf7f
--- /dev/null
+++ b/site.py
@@ -0,0 +1,27 @@
+from node import Node
+import socket
+import Retica
+import Retica.Render
+import Retica.Sockets
+
+node = Node("0x0", (socket.gethostname(),880), "test-one")
+
+def runtime(first_run):
+ if first_run:
+ retica = Retica.Server(__name__)
+
+ templator = Retica.Render.TemplateRender(retica,template_dir="Templates")
+
+ @retica.create_endpoint("/")
+ def index(request: Retica.Request.request, response: Retica.Response.response, **data):
+ response.body = templator.render("index.html", data)
+
+ http_socket = Retica.Sockets.HTTP_Socket(Retica.Sockets.gethostname(), 80)
+
+ if __name__ == "__main__":
+ retica.run([http_socket])
+
+
+if __name__ == "__main__":
+ node.runtime = runtime
+ node.run()
\ No newline at end of file
diff --git a/transaction.py b/transaction.py
index d86b7c3..be0bf18 100644
--- a/transaction.py
+++ b/transaction.py
@@ -24,9 +24,9 @@ def calculate_hash(self):
def serialised(self):
dict_ = self.__dict__
del dict_["node"]
- return pickle.dumps(self.__dict__)
+ return pickle.dumps(self.__dict__).hex()
def deserialised(self, data):
- block = self.__class__()
- block.__dict__ = pickle.loads(data)
- return block
\ No newline at end of file
+ transaction = self.__class__()
+ transaction.__dict__ = pickle.loads(bytes.fromhex(data))
+ return transaction
\ No newline at end of file