From af885edfe84d912ef8a907773e1f621f2ca50fbb Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 12 Jul 2022 09:19:56 +0530 Subject: [PATCH 01/88] Encodings Are Now Done In Hex --- block.py | 4 ++-- transaction.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/block.py b/block.py index ae6b979..176898e 100644 --- a/block.py +++ b/block.py @@ -26,11 +26,11 @@ def calculate_hash(self): @property def serialised(self): - return pickle.dumps(self.__dict__) + return pickle.dumps(self.__dict__).hex() def deserialised(self, data): block = self - block.__dict__ = pickle.loads(data) + block.__dict__ = pickle.loads(bytes.fromhex(data)) return block class LogBlock(Block): diff --git a/transaction.py b/transaction.py index d86b7c3..c9c389e 100644 --- a/transaction.py +++ b/transaction.py @@ -24,9 +24,9 @@ def calculate_hash(self): def serialised(self): dict_ = self.__dict__ del dict_["node"] - return pickle.dumps(self.__dict__) + return pickle.dumps(self.__dict__).hex() def deserialised(self, data): block = self.__class__() - block.__dict__ = pickle.loads(data) + block.__dict__ = pickle.loads(bytes.fromhex(data)) return block \ No newline at end of file From 9710bfffb01e397e49eb28ba0499b7fb7b5f96dd Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 12 Jul 2022 09:20:39 +0530 Subject: [PATCH 02/88] Fixed Issue Where Transaction Is Called Block --- transaction.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/transaction.py b/transaction.py index c9c389e..be0bf18 100644 --- a/transaction.py +++ b/transaction.py @@ -27,6 +27,6 @@ def serialised(self): return pickle.dumps(self.__dict__).hex() def deserialised(self, data): - block = self.__class__() - block.__dict__ = pickle.loads(bytes.fromhex(data)) - return block \ No newline at end of file + transaction = self.__class__() + transaction.__dict__ = pickle.loads(bytes.fromhex(data)) + return transaction \ No newline at end of file From d3a7dc6fe3c2370b307b34d33517c57946819243 
Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Wed, 13 Jul 2022 22:30:27 +0530 Subject: [PATCH 03/88] Peers Are Now Added Using raddr Instead Of haddr --- p2p.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/p2p.py b/p2p.py index f39a34f..2d77705 100644 --- a/p2p.py +++ b/p2p.py @@ -11,8 +11,9 @@ def __init__(self, node): def addPeer(self, peer, ping=True): if peer not in self.peerList.keys(): - self.peerList[peer] = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self.peerList[peer].connect(peer) + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.connect(peer) + self.peerList[sock.getpeername()] = sock if ping: self.peerList[peer].send(b"conn:req") self.peerList[peer].send(f"{self.node.host[0]}:{self.node.host[1]}".encode()) From 8725063c48c5b499412744886c4b0b608862a3cd Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Wed, 13 Jul 2022 22:31:29 +0530 Subject: [PATCH 04/88] node Object Is Now Stored Within Blockchain --- blockchain.py | 59 +++++++++++++++++++++++++++++---------------------- 1 file changed, 34 insertions(+), 25 deletions(-) diff --git a/blockchain.py b/blockchain.py index 3c37356..69c2650 100644 --- a/blockchain.py +++ b/blockchain.py @@ -3,9 +3,10 @@ import json class Blockchain(list): - def __init__(self, mainfile, p2pInterface): + def __init__(self, node, mainfile, p2pInterface): self.chain = list() self.index = dict() + self.node = node self.mainfile = mainfile self.p2pInterface = p2pInterface self.load() @@ -19,54 +20,62 @@ def append(self, item): raise ValueError("Block already exists") self.index[item.calculate_hash()] = len(self.chain) self.chain.append(item) + print(f"Added block {item.hash[:6]} to chain") self.save() block_data = item.serialised self.p2pInterface.broadcast([ b"blck:new", f'{len(block_data):05d}'.encode(), - block_data + block_data.encode() ]) else: raise TypeError("Blockchain can only append blocks") def save(self): + old_file = self.currfile block_index = 0 - for iter_ in 
range(len(self.chain)): - with open(self.currfile, "wb") as f: - print(self.currfile) - for item in self.chain[block_index:50]: + with open(self.currfile, "r") as f: + file_size = len(f.readlines()) + for iter_ in range(int(len(self.chain)/50)+1): + with open(self.currfile, "w") as f: + for item in self.chain[block_index:(50-file_size)]: f.write(item.serialised) - f.write("\n".encode()) + f.write("\n") if len(self.chain) > 50*(iter_+1): - f.write("\n\n".encode()) - print(50*(iter_+1)) - print((iter_+1)*50) + f.write("\n\n") self.currfile = self.chain[(iter_+1)*50].calculate_hash() - f.write(self.currfile.encode()) - self.load() - - self.currfile = self.mainfile + f.write(self.currfile) + file_size = 0 + if old_file != self.currfile: + self.load() def load(self): self.currfile = self.mainfile self.index = dict() - while True: - try: + if os.path.exists(self.currfile): + print("Hi") + while True: chain_lenght = 0 - with open(self.currfile, "rb") as f: + with open(self.currfile, "r") as f: for line in f.readlines(): - if line != b"\n": - block = Block(self, "").deserialised(line) + if line != "\n": + print(line) + block = Block(self.node, "").deserialised(line) + print(block.__dict__) self.index[block.hash] = chain_lenght chain_lenght += 1 self.chain.append(block) - if f.readline()[-2] == f.readline()[-3] == b"\n": - print(f.readline[-1]) - self.currfile = f.readline()[-1].decode() - else: + else: + break + print(f.readlines()) + if len(f.readlines()) > 50: + self.currfile = f.readline()[-1] break - except: - break + else: + return + else: + print("Bye") + open(self.currfile, "w") def __setitem__(self, key, value): From 4612fadef32248fb6a9da8d90403e8b4759e4709 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Thu, 14 Jul 2022 16:49:34 +0530 Subject: [PATCH 05/88] Added os Module To Imports --- imports.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/imports.py b/imports.py index b22ea97..91174d8 100644 --- a/imports.py +++ b/imports.py @@ -4,4 
+4,5 @@ import socket import select import rsa -import copy \ No newline at end of file +import copy +import os \ No newline at end of file From 61cb79b76700121a7167bfbdfc950e8b0e1aa49e Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Thu, 14 Jul 2022 16:50:13 +0530 Subject: [PATCH 06/88] Modified Messages --- blockchain.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/blockchain.py b/blockchain.py index 69c2650..f6d2aba 100644 --- a/blockchain.py +++ b/blockchain.py @@ -53,7 +53,7 @@ def load(self): self.currfile = self.mainfile self.index = dict() if os.path.exists(self.currfile): - print("Hi") + print("Loading blockchain") while True: chain_lenght = 0 with open(self.currfile, "r") as f: @@ -74,7 +74,7 @@ def load(self): else: return else: - print("Bye") + print("Started A New Chain") open(self.currfile, "w") From 1c483f41881caef7728ff8bd570bcab7e9f36e3e Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Thu, 14 Jul 2022 17:08:03 +0530 Subject: [PATCH 07/88] Node With Web UI --- site.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 site.py diff --git a/site.py b/site.py new file mode 100644 index 0000000..0a28219 --- /dev/null +++ b/site.py @@ -0,0 +1,27 @@ +from node import node +import socket +import Retica +import Retica.Render +import Retica.Sockets + +node = node("0x0", (socket.gethostname(),880), "test-one") + +def runtime(first_run): + if first_run: + retica = Retica.Server(__name__) + + templator = Retica.Render.TemplateRender(retica,template_dir="Templates") + + @retica.create_endpoint("/") + def index(request: Retica.Request.request, response: Retica.Response.response, **data): + response.body = templator.render("index.html", data) + + http_socket = Retica.Sockets.HTTP_Socket(Retica.Sockets.gethostname(), 80) + + if __name__ == "__main__": + retica.run([http_socket]) + + +if __name__ == "__main__": + node.runtime = runtime + node.run() \ No newline at end of file From 
91d4ad5674a024f4ff35410b0715f8f3237ffd68 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Thu, 14 Jul 2022 17:08:51 +0530 Subject: [PATCH 08/88] Node Now Passes Itself To The Blockchain Object --- node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node.py b/node.py index 628beb1..4e41e93 100644 --- a/node.py +++ b/node.py @@ -12,7 +12,7 @@ def __init__(self, private_key, host, name="chain"): self.private_key = private_key self.address = private_key self.p2pInterface = p2pInterface(self) - self.chain = Blockchain(name,self.p2pInterface) + self.chain = Blockchain(self, name,self.p2pInterface) def initialize_gan(self): self.gan = Gan(self.host, self.private_key) From d17003229bd63211a86264f8892881158e6220e3 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Thu, 14 Jul 2022 17:09:44 +0530 Subject: [PATCH 09/88] Network Block Now Decoded Before Deserialization --- node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node.py b/node.py index 4e41e93..2bbf482 100644 --- a/node.py +++ b/node.py @@ -44,7 +44,7 @@ def run(self): data_type, data = data_queue.get(timeout=1) if data_type == "blck": print("Received Block Data") - block = Block(self,"").deserialised(data) + block = Block(self,"").deserialised(data.decode()) if not block.valid(): try: self.chain.append(block) From 2a91ec71eff3ff53730f7ca96b16e0bf58cbca55 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Thu, 14 Jul 2022 17:10:29 +0530 Subject: [PATCH 10/88] Add Index File For WebUI --- Templates/index.html | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 Templates/index.html diff --git a/Templates/index.html b/Templates/index.html new file mode 100644 index 0000000..5db52db --- /dev/null +++ b/Templates/index.html @@ -0,0 +1,12 @@ + + + + + + + Host Node WebUI + + + + + \ No newline at end of file From 6a2420b43775fac26d9a1fdab98e65c6224d9a90 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 17 Jul 2022 13:12:56 +0530 Subject: [PATCH 11/88] 
Lenght Of Address Is Now Sent Before Sending The Address --- network_handler.py | 3 ++- p2p.py | 6 ++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/network_handler.py b/network_handler.py index 058a509..1abd82e 100644 --- a/network_handler.py +++ b/network_handler.py @@ -35,7 +35,8 @@ def parse_data(data): class connection_handler(): def req(self, peer): - remote_host = peer.recv(64).decode().split(":") + addr_lenght = peer.recv(2) + remote_host = peer.recv(int(addr_lenght)).decode().split(":") remote_host = (remote_host[0],int(remote_host[1])) self.addPeer(remote_host,False) diff --git a/p2p.py b/p2p.py index 2d77705..8d6b771 100644 --- a/p2p.py +++ b/p2p.py @@ -15,8 +15,10 @@ def addPeer(self, peer, ping=True): sock.connect(peer) self.peerList[sock.getpeername()] = sock if ping: - self.peerList[peer].send(b"conn:req") - self.peerList[peer].send(f"{self.node.host[0]}:{self.node.host[1]}".encode()) + self.peerList[sock.getpeername()].send(b"conn:req") + message = f"{self.node.host[0]}:{self.node.host[1]}" + self.peerList[sock.getpeername()].send(f"{len(message):02d}".encode()) + self.peerList[sock.getpeername()].send(message.encode()) return True def removePeer(self, peer): From 23c4a0143859865a7069bdcf81f0b5131d62255a Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 17 Jul 2022 13:22:37 +0530 Subject: [PATCH 12/88] Added Handling For Connection Errors --- p2p.py | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/p2p.py b/p2p.py index 8d6b771..0ae9da1 100644 --- a/p2p.py +++ b/p2p.py @@ -27,13 +27,20 @@ def removePeer(self, peer): def broadcast(self, message): for sock in self.peerList.values(): - print(sock.getpeername()) - if type(message) == bytes: - sock.send(message) - elif type(message) == list: - for m in message: - print(m, flush=True) - sock.send(m) + try: + print(sock.getpeername()) + if type(message) == bytes: + sock.send(message) + elif type(message) == list: + for m in message: + print(m, 
flush=True) + sock.send(m) + except ConnectionResetError: + print(f"Peer {sock.getpeername()} Disconnected", flush=True) + self.removePeer(sock.getpeername()) + except OSError: + print(f"Peer {sock.getpeername()} Disconnected", flush=True) + self.removePeer(sock.getpeername()) def sync_chain(self, node): shuffled_nodes = list(self.peerList.keys()) From a61a5d5b47da404e1597cbb85b21e20d9e413257 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 17 Jul 2022 13:24:55 +0530 Subject: [PATCH 13/88] Improved P2P Listening --- p2p.py | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/p2p.py b/p2p.py index 0ae9da1..1f50e98 100644 --- a/p2p.py +++ b/p2p.py @@ -85,11 +85,16 @@ def listen(self, queue): for peer in self.peerList.values(): read_sockets.append(peer) for sock in select.select(read_sockets, [], [])[0]: - if sock == self.open_port: - sock, addr = self.open_port.accept() - data = sock.recv(8) - if len(data) == 8: - class_, type_ = network_handler.parse_data(data) - data = getattr(network_handler.handlers[class_],type_)(self, sock) - if data: - queue.put(data) \ No newline at end of file + try: + if sock == self.open_port: + sock, addr = self.open_port.accept() + print("Connected to", addr, flush=True) + data = sock.recv(8) + if len(data) == 8: + class_, type_ = network_handler.parse_data(data) + data = getattr(network_handler.handlers[class_],type_)(self, sock) + if data: + queue.put(data) + except ConnectionResetError: + print(f"Peer {sock.getpeername()} Disconnected", flush=True) + self.removePeer(sock.getpeername()) \ No newline at end of file From 19014cd4cb89c551a41da2b71cf9f0b7938274d1 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Mon, 18 Jul 2022 12:03:14 +0530 Subject: [PATCH 14/88] Connections Now Re-Established After Every Broadcast --- p2p.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/p2p.py b/p2p.py index 1f50e98..dd5ae55 100644 --- a/p2p.py +++ b/p2p.py @@ -20,21 +20,31 
@@ def addPeer(self, peer, ping=True): self.peerList[sock.getpeername()].send(f"{len(message):02d}".encode()) self.peerList[sock.getpeername()].send(message.encode()) return True + else: + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.connect(peer) + self.peerList[sock.getpeername()] = sock + + def removePeer(self, peer): self.peerList[peer].close() del self.peerList[peer] def broadcast(self, message): - for sock in self.peerList.values(): + print(self.peerList.items()) + for addr, sock in self.peerList.items(): try: print(sock.getpeername()) if type(message) == bytes: sock.send(message) elif type(message) == list: for m in message: - print(m, flush=True) sock.send(m) + + sock.close() + sock = -1 + self.addPeer(addr, False) except ConnectionResetError: print(f"Peer {sock.getpeername()} Disconnected", flush=True) self.removePeer(sock.getpeername()) From 84262c016269e03be4eaee34f2febb4c3dcb3fc9 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Mon, 25 Jul 2022 00:06:01 +0530 Subject: [PATCH 15/88] Added GAN By Neeraj Joshi After Slight Modifications And Cleaning. 
--- gan.py | 164 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 162 insertions(+), 2 deletions(-) diff --git a/gan.py b/gan.py index 0b2af83..7f680f1 100644 --- a/gan.py +++ b/gan.py @@ -1,2 +1,162 @@ -class Gan(): - pass \ No newline at end of file +import torch +import torch.nn as nn +import random +import pandas as pd +import string +import rsa +import textwrap +from base64 import b64encode +from sklearn.svm import SVC + +class GAN(nn.Module): + def __init__(self): + super(GAN, self).__init__() + inp=0 + out=0 + test_inp=0 + test_out=0 + clf=0 + learning_rate = 0 + model = 0 + criterion = 0 + optimizer = 0 + self.encoder = nn.Sequential( + nn.Linear(230, 128), + nn.ReLU(True), + nn.Linear(128, 64), + nn.ReLU(True), + nn.Linear(64, 32), + nn.ReLU(True), + nn.Linear(32, 8)) + + self.decoder = nn.Sequential( + nn.Linear(8, 32), + nn.ReLU(True), + nn.Linear(32, 64), + nn.ReLU(True), + nn.Linear(64, 128), + nn.ReLU(True), + nn.Linear(128, 230)) + + def initialize(self): + self.inp=0 + self.out=0 + self.test_inp=0 + self.test_out=0 + self.clf=0 + self.learning_rate = 1e-3 + self.model = GAN() + self.criterion = nn.MSELoss() + self.optimizer = torch.optim.Adam(self.model.parameters(), lr=self.learning_rate, weight_decay=1e-5) + + def feedData(self, chain): + input_ = [] + output_ = [] + for i in range(len(self.inp)): + if len(input_) == 512: + break + input_.append = self.encode(chain[i]) + output_.append = self.encode(chain[i+1]) + + self.inp=torch.tensor(input_,dtype=torch.float32) + self.out=torch.tensor(output_,dtype=torch.float32) + total_inp=self.inp + total_out=self.out + self.inp=total_inp[:384] + self.out=total_out[:384] + self.test_inp=total_inp[384:] + self.test_out=total_inp[384:] + + def encode(self,lst): + temp=[] + binaryString="" + for elem in lst: + temp.append(format(elem,'08b')) + for item in temp: + binaryString+=item + rev_str= binaryString[::-1] + chunks=textwrap.wrap(rev_str, 6) + chunks.reverse() + for i in 
range(len(chunks)) : + chunks[i]=chunks[i][::-1] + encodedList=[] + for item in chunks: + encodedList.append(int(item,2)) + + return encodedList + + def forward(self, x): + x = self.encoder(x) + x = self.decoder(x) + return x + + def train(self): + epochs = 2000 + for epoch in range(epochs): + output = self.model(self.inp) + loss = self.criterion(output , self.out) + self.optimizer.zero_grad() + loss.backward() + self.optimizer.step() + + def trainClassifier(self): + output_ = self.model(self.test_inp) + output_ = torch.round(output_) + + output_=output_.detach().numpy() + + + for i in output_: + if i[0]!=1. : + i[0]=1. + + for i in output_: + if i[229]!=61. : + i[229]=61. + + for i in output_: + for j in range(len(i)): + if i[j]<0 or i[j]>61 : + i[j]=40. + + temp=output_ + df1=pd.DataFrame(temp) + df1['label']=0 + + temp=self.test_out.detach().numpy() + df2=pd.DataFrame(temp) + df2['label']=1 + + df=df1.append(df2, ignore_index = True) + df=df.sample(frac=1) + + x_train=df.drop(['label'], axis = 1) + y_train=df['label'] + + self.clf = SVC(probability=True).fit(x_train, y_train) + pred_svm = self.clf.predict_proba(x_train) + + def clf_score(self,data_): + pred = self.clf.predict_proba(data_) + return pred + + + def getOutput(self,data_) : + out_ = self.model(data_) + out_ = torch.round(out_) + out_ = out_.detach().numpy() + + for i in out_: + if i[0]!=1. : + i[0]=1. + + for i in out_: + if i[229]!=61. : + i[229]=61. + + for i in out_: + for j in range(len(i)): + if i[j]<0 or i[j]>61 : + i[j]=40. + + return out_ \ No newline at end of file From 37351d2327a20b1ce78c13b1966602dd8dda2618 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Mon, 25 Jul 2022 00:07:00 +0530 Subject: [PATCH 16/88] Added GAN Initialisation Flow And Fixed Log Block Broadcasts. 
--- node.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/node.py b/node.py index 2bbf482..f72cc9f 100644 --- a/node.py +++ b/node.py @@ -3,6 +3,7 @@ from block import Block, LogBlock from p2p import p2pInterface from blockchain import Blockchain +from gan import GAN import threading import sys @@ -19,13 +20,15 @@ def initialize_gan(self): if not self.gan.is_initialized(): self.p2pInterface.sync_chain(self) block = LogBlock(self, "Initializing Gan") - self.p2pInterface.broadcast("blck:new".encode()) - self.p2pInterface.broadcast(block.serialised) - self.gan.train(self.chain) + self.chain.append(block) + gan = GAN() + gan.initialize() + gan.feedData() + gan.train(self.chain) + gan.trainClassifier() block = LogBlock(self, "Gan initialized") - self.p2pInterface.broadcast("blck:new".encode()) - self.p2pInterface.broadcast(block.serialised) + self.chain.append(block) @staticmethod def runtime(first_run=True): From f5cd6728acb004426352f401e62e1423f788a082 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 26 Jul 2022 18:39:10 +0530 Subject: [PATCH 17/88] Removed Double Import Of Gan. 
--- node.py | 1 - 1 file changed, 1 deletion(-) diff --git a/node.py b/node.py index f72cc9f..d50ad90 100644 --- a/node.py +++ b/node.py @@ -3,7 +3,6 @@ from block import Block, LogBlock from p2p import p2pInterface from blockchain import Blockchain -from gan import GAN import threading import sys From 5cfb875fb31069acf54f7504231d81dbab1811fc Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 26 Jul 2022 18:42:18 +0530 Subject: [PATCH 18/88] mend --- node.py | 2 +- site.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/node.py b/node.py index d50ad90..81bdde1 100644 --- a/node.py +++ b/node.py @@ -6,7 +6,7 @@ import threading import sys -class node(): +class Node(): def __init__(self, private_key, host, name="chain"): self.host = host self.private_key = private_key diff --git a/site.py b/site.py index 0a28219..6b7cf7f 100644 --- a/site.py +++ b/site.py @@ -1,10 +1,10 @@ -from node import node +from node import Node import socket import Retica import Retica.Render import Retica.Sockets -node = node("0x0", (socket.gethostname(),880), "test-one") +node = Node("0x0", (socket.gethostname(),880), "test-one") def runtime(first_run): if first_run: From 9f8fad6a5a74adc495dd1aee046c7849fc15f5d8 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 26 Jul 2022 18:49:06 +0530 Subject: [PATCH 19/88] Node Now Passes Itself To Runtime --- node.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/node.py b/node.py index 81bdde1..d2a1306 100644 --- a/node.py +++ b/node.py @@ -30,7 +30,7 @@ def initialize_gan(self): self.chain.append(block) @staticmethod - def runtime(first_run=True): + def runtime(self, first_run=True): pass def run(self): @@ -40,7 +40,7 @@ def run(self): thread.start() first_run = True while True: - self.runtime(first_run) + self.runtime(self, first_run) first_run = False if data_queue.qsize() > 0: data_type, data = data_queue.get(timeout=1) From 09f3d5c2bbc5ac8b40d5840d2673fa6437c7783c Mon Sep 17 00:00:00 2001 
From: Sushant Shah Date: Tue, 26 Jul 2022 18:53:05 +0530 Subject: [PATCH 20/88] Added Option To Close p2p Interface --- p2p.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/p2p.py b/p2p.py index dd5ae55..cc08fb6 100644 --- a/p2p.py +++ b/p2p.py @@ -107,4 +107,8 @@ def listen(self, queue): queue.put(data) except ConnectionResetError: print(f"Peer {sock.getpeername()} Disconnected", flush=True) - self.removePeer(sock.getpeername()) \ No newline at end of file + self.removePeer(sock.getpeername()) + + def stop(self): + self.listening = False + self.open_port.close() \ No newline at end of file From f2c1595d9ea0fe732aa679c62f20214ef91ddb15 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 26 Jul 2022 18:56:34 +0530 Subject: [PATCH 21/88] Forever Loop Now Runs Only While p2pInterface Is Listening. Thread Variable Now Stored As An Attribute. --- node.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/node.py b/node.py index d2a1306..2e9e126 100644 --- a/node.py +++ b/node.py @@ -36,10 +36,10 @@ def runtime(self, first_run=True): def run(self): try: data_queue = Queue() - thread = threading.Thread(target=self.p2pInterface.listen, args=(data_queue,), daemon=True) - thread.start() + self.main_thread = threading.Thread(target=self.p2pInterface.listen, args=(data_queue,), daemon=True) + self.main_thread.start() first_run = True - while True: + while self.p2pInterface.listening: self.runtime(self, first_run) first_run = False if data_queue.qsize() > 0: From 6b3431d6451a0508af95568294053f62d6eefcc9 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 26 Jul 2022 18:57:20 +0530 Subject: [PATCH 22/88] Added Restart And Shutdown Functions To Node --- node.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/node.py b/node.py index 2e9e126..431af26 100644 --- a/node.py +++ b/node.py @@ -57,4 +57,12 @@ def run(self): print(f"Block {block.hash[:6]} invalid") except KeyboardInterrupt as e: 
print("Shutting down") - sys.exit(0) \ No newline at end of file + sys.exit(0) + + def shutdown(self): + self.p2pInterface.listening = False + self.main_thread.terminate() + + def restart(self): + self.shutdown() + self.run() \ No newline at end of file From 4ab6a3529b7bb5502f1a800e5eb145e12dc540e3 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Wed, 27 Jul 2022 17:45:26 +0530 Subject: [PATCH 23/88] Fixed Input Collection For GAN --- gan.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/gan.py b/gan.py index 7f680f1..7f497c5 100644 --- a/gan.py +++ b/gan.py @@ -50,13 +50,14 @@ def initialize(self): self.optimizer = torch.optim.Adam(self.model.parameters(), lr=self.learning_rate, weight_decay=1e-5) def feedData(self, chain): - input_ = [] - output_ = [] - for i in range(len(self.inp)): + print(chain) + input_ = [[0]*230 for element in range(512)] + output_ = [[0]*230 for element in range(512)] + for i in range(len(chain)): if len(input_) == 512: break - input_.append = self.encode(chain[i]) - output_.append = self.encode(chain[i+1]) + input_[i] = self.encode(chain[i].seed if chain[i].seed else [0]*230) + output_[i] = self.encode(chain[i+1].seed if chain[i+1].seed else [0]*230) self.inp=torch.tensor(input_,dtype=torch.float32) self.out=torch.tensor(output_,dtype=torch.float32) From 3131eaad8a56a61b48db7030aa3b3ad174248bd6 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Wed, 27 Jul 2022 17:45:41 +0530 Subject: [PATCH 24/88] Fixed Import --- node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node.py b/node.py index 431af26..442e835 100644 --- a/node.py +++ b/node.py @@ -1,5 +1,5 @@ from queue import Queue -from gan import Gan +from gan import GAN from block import Block, LogBlock from p2p import p2pInterface from blockchain import Blockchain From 6e2cc965d57ab59f279b86b24ce5a913ad5e5689 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Wed, 27 Jul 2022 17:46:50 +0530 Subject: [PATCH 25/88] Gan Now Initialized On 
Every Run. TODO: Initialize GAN Only On First Run --- node.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/node.py b/node.py index 442e835..f861841 100644 --- a/node.py +++ b/node.py @@ -13,17 +13,17 @@ def __init__(self, private_key, host, name="chain"): self.address = private_key self.p2pInterface = p2pInterface(self) self.chain = Blockchain(self, name,self.p2pInterface) + self.initialize_gan() def initialize_gan(self): - self.gan = Gan(self.host, self.private_key) - if not self.gan.is_initialized(): - self.p2pInterface.sync_chain(self) + self.gan = GAN() + if not False: block = LogBlock(self, "Initializing Gan") self.chain.append(block) gan = GAN() gan.initialize() - gan.feedData() - gan.train(self.chain) + gan.feedData(self.chain) + gan.train() gan.trainClassifier() block = LogBlock(self, "Gan initialized") From 1e3972b1a858240eabf4710f134e9cdb4b72a0b2 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sat, 30 Jul 2022 12:22:14 +0530 Subject: [PATCH 26/88] Node Is Now Passed To All The Network Handlers --- network_handler.py | 20 ++++++++++---------- p2p.py | 2 +- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/network_handler.py b/network_handler.py index 1abd82e..a7be878 100644 --- a/network_handler.py +++ b/network_handler.py @@ -34,42 +34,42 @@ def parse_data(data): return None, None class connection_handler(): - def req(self, peer): + def req(self, peer, node): addr_lenght = peer.recv(2) remote_host = peer.recv(int(addr_lenght)).decode().split(":") remote_host = (remote_host[0],int(remote_host[1])) self.addPeer(remote_host,False) - def ack(self, peer): + def ack(self, peer, node): peer.send(b"conn:ack") - def drp(self, peer): + def drp(self, peer, node): self.removePeer(peer.getpeername()) class block_handler(): - def new(self, peer): + def new(self, peer, node): data_lenght = peer.recv(5) data = peer.recv(int(data_lenght)) return "blck", data - def req(self, peer): + def req(self, peer, node): pass - def 
ack(self, peer): + def ack(self, peer, node): pass class transaction_handler(): - def new(self, peer): + def new(self, peer, node): pass - def req(self, peer): + def req(self, peer, node): pass - def ack(self, peer): + def ack(self, peer, node): pass class seed_handler(): - def scr(self, peer): + def scr(self, peer, node): seed = peer.recv(512) peer.send(self.score_seed(seed)) diff --git a/p2p.py b/p2p.py index cc08fb6..09bb5ff 100644 --- a/p2p.py +++ b/p2p.py @@ -102,7 +102,7 @@ def listen(self, queue): data = sock.recv(8) if len(data) == 8: class_, type_ = network_handler.parse_data(data) - data = getattr(network_handler.handlers[class_],type_)(self, sock) + data = getattr(network_handler.handlers[class_],type_)(self, sock, node) if data: queue.put(data) except ConnectionResetError: From 15db42617616e8138ac10d42f677a9cc962c409d Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sat, 30 Jul 2022 12:24:38 +0530 Subject: [PATCH 27/88] Sending Score Over Network Now Functional --- network_handler.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/network_handler.py b/network_handler.py index a7be878..7f97811 100644 --- a/network_handler.py +++ b/network_handler.py @@ -71,10 +71,7 @@ def ack(self, peer, node): class seed_handler(): def scr(self, peer, node): seed = peer.recv(512) - peer.send(self.score_seed(seed)) - - def score_seed(self, seed): - return "0" + peer.send(node.gan.clf_score(seed)) handlers = { "conn": connection_handler, From e82e2201cd92bce6320ab8b65e1b954951fa713a Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sat, 30 Jul 2022 12:25:29 +0530 Subject: [PATCH 28/88] Removed Unnecessary Imports --- gan.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/gan.py b/gan.py index 7f497c5..0859e43 100644 --- a/gan.py +++ b/gan.py @@ -1,11 +1,7 @@ import torch import torch.nn as nn -import random import pandas as pd -import string -import rsa import textwrap -from base64 import b64encode from sklearn.svm import SVC class 
GAN(nn.Module): From 7bdc1a3d898903df07a75d8af27a5c58e1831191 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sat, 30 Jul 2022 12:25:57 +0530 Subject: [PATCH 29/88] Epoch Now Printed During Train --- gan.py | 1 + 1 file changed, 1 insertion(+) diff --git a/gan.py b/gan.py index 0859e43..f6f4fc2 100644 --- a/gan.py +++ b/gan.py @@ -90,6 +90,7 @@ def forward(self, x): def train(self): epochs = 2000 for epoch in range(epochs): + print("Epoch: ", epoch) output = self.model(self.inp) loss = self.criterion(output , self.out) self.optimizer.zero_grad() From 5c97f1f6e61f2cb1575b29d4d4b3bf5b598c8fcb Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sat, 30 Jul 2022 12:48:24 +0530 Subject: [PATCH 30/88] Seed Root Is Now Flattened After Processing --- GPoHC.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/GPoHC.py b/GPoHC.py index 19d73e5..4115ead 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -23,7 +23,7 @@ def create_consensus(self, block, chain): SEED_ROOT_PROCESSED = [] for char in SEED_ROOT: - SEED_ROOT_PROCESSED.append([int(y) for y in list("".join(format(ord(x), 'b') for x in str(char)))]) + SEED_ROOT_PROCESSED.extend([int(y) for y in list("".join(format(ord(x), 'b') for x in str(char)))]) SUPER_SEED = self.model.generator_forward(SEED_ROOT_PROCESSED) self.broadcast_super_seed(SUPER_SEED) From 384fe51be282d66b8f840a550714fe6f8a2aa7dc Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sat, 30 Jul 2022 12:49:44 +0530 Subject: [PATCH 31/88] GPoHC Object Now Also Requires Reference To The Node Object --- GPoHC.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/GPoHC.py b/GPoHC.py index 4115ead..ddce7ef 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -1,8 +1,9 @@ from gan import Gan class GPoHC(): - def __init__(self): + def __init__(self, node): self.strenght = 50 + seld.node = node self.model = Gan() def create_consensus(self, block, chain): From 64983dd3a3ed056c7850f33d0d1ee37e287356b0 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 
Jul 2022 13:38:01 +0530 Subject: [PATCH 32/88] Fixed Imports. Imported Block Classes --- GPoHC.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/GPoHC.py b/GPoHC.py index ddce7ef..a21852b 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -1,4 +1,5 @@ -from gan import Gan +from gan import GAN +from block import Block, LogBlock class GPoHC(): def __init__(self, node): From b08e9f879ed90ac9cabe17ec007dd13faf57599e Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 13:38:38 +0530 Subject: [PATCH 33/88] GPoHC Class Now Requires A Name --- GPoHC.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/GPoHC.py b/GPoHC.py index a21852b..df1d24e 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -2,7 +2,7 @@ from block import Block, LogBlock class GPoHC(): - def __init__(self, node): + def __init__(self, node, name): self.strenght = 50 seld.node = node self.model = Gan() From 627be713dbed8cfda1ed662857b52f355c790d19 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 13:38:59 +0530 Subject: [PATCH 34/88] Fixed Typo --- GPoHC.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/GPoHC.py b/GPoHC.py index df1d24e..9f42795 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -4,8 +4,8 @@ class GPoHC(): def __init__(self, node, name): self.strenght = 50 - seld.node = node self.model = Gan() + self.node = node def create_consensus(self, block, chain): hash = block.calculate_hash() From 8e9a4b49c6b0c63d52d8ba12d2adcfbaf0d694f9 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 13:39:53 +0530 Subject: [PATCH 35/88] GAN Class Now Requires A Name --- GPoHC.py | 2 +- gan.py | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/GPoHC.py b/GPoHC.py index 9f42795..6adbab3 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -4,8 +4,8 @@ class GPoHC(): def __init__(self, node, name): self.strenght = 50 - self.model = Gan() self.node = node + self.model = GAN(name) def create_consensus(self, block, chain): hash = 
block.calculate_hash() diff --git a/gan.py b/gan.py index f6f4fc2..2b9f3f1 100644 --- a/gan.py +++ b/gan.py @@ -5,7 +5,7 @@ from sklearn.svm import SVC class GAN(nn.Module): - def __init__(self): + def __init__(self, name): super(GAN, self).__init__() inp=0 out=0 @@ -33,6 +33,8 @@ def __init__(self): nn.Linear(64, 128), nn.ReLU(True), nn.Linear(128, 230)) + + self.name = name def initialize(self): self.inp=0 From 4afc23e98d8fb03a9ea414ef9175a59a8c60666e Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 13:40:37 +0530 Subject: [PATCH 36/88] GAN Is Now Handled Directly By GPoHC --- GPoHC.py | 8 ++++++++ node.py | 16 ++-------------- 2 files changed, 10 insertions(+), 14 deletions(-) diff --git a/GPoHC.py b/GPoHC.py index 6adbab3..b65f7d2 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -6,6 +6,14 @@ def __init__(self, node, name): self.strenght = 50 self.node = node self.model = GAN(name) + self.new = True + if self.new: + self.node.chain.append(LogBlock(self.node, "Initializing Gan")) + self.model.initialize() + self.model.feedData(self.chain) + self.model.train() + self.model.trainClassifier() + self.node.chain.append(LogBlock(self.node, "Gan initialized")) def create_consensus(self, block, chain): hash = block.calculate_hash() diff --git a/node.py b/node.py index f861841..753f7be 100644 --- a/node.py +++ b/node.py @@ -13,21 +13,9 @@ def __init__(self, private_key, host, name="chain"): self.address = private_key self.p2pInterface = p2pInterface(self) self.chain = Blockchain(self, name,self.p2pInterface) - self.initialize_gan() - def initialize_gan(self): - self.gan = GAN() - if not False: - block = LogBlock(self, "Initializing Gan") - self.chain.append(block) - gan = GAN() - gan.initialize() - gan.feedData(self.chain) - gan.train() - gan.trainClassifier() - - block = LogBlock(self, "Gan initialized") - self.chain.append(block) + def initialize_consensus(self, name): + self.consensus = GPoHC(self, name) @staticmethod def runtime(self, first_run=True): From 
e927a4c152a17de2fbdfdef944dcc6c15f04bb6b Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 13:41:44 +0530 Subject: [PATCH 37/88] Seed Root Preprocessing Is Now An Independent Function For Reusability --- GPoHC.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/GPoHC.py b/GPoHC.py index b65f7d2..6af8a2a 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -31,13 +31,18 @@ def create_consensus(self, block, chain): for root in self.collect_roots(SOURCE_BLOCKS): SEED_ROOT = self.add_by_each_byte(SOURCE_SEED, root) - SEED_ROOT_PROCESSED = [] - for char in SEED_ROOT: - SEED_ROOT_PROCESSED.extend([int(y) for y in list("".join(format(ord(x), 'b') for x in str(char)))]) + SEED_ROOT_PROCESSED = self.preprocess_seed_root(SEED_ROOT) SUPER_SEED = self.model.generator_forward(SEED_ROOT_PROCESSED) self.broadcast_super_seed(SUPER_SEED) + def preprocess_seed_root(self, seed_root): + seed_root_processed = [] + for char in seed_root: + seed_root_processed.extend([int(y) for y in list("".join(format(ord(x), 'b') for x in str(char)))]) + + return seed_root_processed + def add_by_each_byte(self, a, b): result = "" for i in range(len(a)): From 2b318e976c4314236808f11bfb62e30442950680 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 13:43:11 +0530 Subject: [PATCH 38/88] broadcast_super_seed Is Temporarily Replaced With score_super_seed (Non p2p) --- GPoHC.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/GPoHC.py b/GPoHC.py index 6af8a2a..b8c2fb0 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -34,7 +34,7 @@ def create_consensus(self, block, chain): SEED_ROOT_PROCESSED = self.preprocess_seed_root(SEED_ROOT) SUPER_SEED = self.model.generator_forward(SEED_ROOT_PROCESSED) - self.broadcast_super_seed(SUPER_SEED) + self.score_super_seed(SUPER_SEED) def preprocess_seed_root(self, seed_root): seed_root_processed = [] @@ -52,4 +52,7 @@ def add_by_each_byte(self, a, b): def validator_online(self, block): - pass \ No newline at 
end of file + pass + + def score_super_seed(self, super_seed): + self.model.clf_score(super_seed) \ No newline at end of file From 4905fc3901b432290765f2d06f3d201ddc4d36f5 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 13:44:14 +0530 Subject: [PATCH 39/88] Imported GPoHC And Initialized --- node.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/node.py b/node.py index 753f7be..a4f93f0 100644 --- a/node.py +++ b/node.py @@ -3,6 +3,7 @@ from block import Block, LogBlock from p2p import p2pInterface from blockchain import Blockchain +from GPoHC import GPoHC import threading import sys @@ -13,6 +14,7 @@ def __init__(self, private_key, host, name="chain"): self.address = private_key self.p2pInterface = p2pInterface(self) self.chain = Blockchain(self, name,self.p2pInterface) + self.initialize_consensus(name) def initialize_consensus(self, name): self.consensus = GPoHC(self, name) From 6cdd06d03b39caea07d89c5e06453bee25c0d6aa Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 13:49:44 +0530 Subject: [PATCH 40/88] Fixed Reference To Node. --- p2p.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/p2p.py b/p2p.py index 09bb5ff..184a8c5 100644 --- a/p2p.py +++ b/p2p.py @@ -102,7 +102,7 @@ def listen(self, queue): data = sock.recv(8) if len(data) == 8: class_, type_ = network_handler.parse_data(data) - data = getattr(network_handler.handlers[class_],type_)(self, sock, node) + data = getattr(network_handler.handlers[class_],type_)(self, sock, self.node) if data: queue.put(data) except ConnectionResetError: From 9a6808a0a9d0ef376456bc707d4b3b9bd445008e Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 14:42:08 +0530 Subject: [PATCH 41/88] Blockchain Now Calculates Seed Before Append. If Consensus Is Won Block Is Added To The Chain And Broadcasted. 
--- blockchain.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/blockchain.py b/blockchain.py index f6d2aba..d0de6e8 100644 --- a/blockchain.py +++ b/blockchain.py @@ -16,6 +16,9 @@ def __getitem__(self, key): def append(self, item): if isinstance(item, Block): + item.seed, winner = self.node.consensus.create_consensus(item, self) + if not winner: + raise Exception("Consensus Lost. Failed To Add Block.") if item.hash in self.index: raise ValueError("Block already exists") self.index[item.calculate_hash()] = len(self.chain) From 5e9172ec6f25c8c6746e672fa9a0d5776e3a006d Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 14:47:33 +0530 Subject: [PATCH 42/88] Node Now Maintains A Seed Store. --- network_handler.py | 4 +++- node.py | 1 + 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/network_handler.py b/network_handler.py index 7f97811..5dffac5 100644 --- a/network_handler.py +++ b/network_handler.py @@ -71,7 +71,9 @@ def ack(self, peer, node): class seed_handler(): def scr(self, peer, node): seed = peer.recv(512) - peer.send(node.gan.clf_score(seed)) + score = node.gan.clf_score(seed) + peer.send(score) + self.seed_store[seed] = score handlers = { "conn": connection_handler, diff --git a/node.py b/node.py index a4f93f0..f413c92 100644 --- a/node.py +++ b/node.py @@ -15,6 +15,7 @@ def __init__(self, private_key, host, name="chain"): self.p2pInterface = p2pInterface(self) self.chain = Blockchain(self, name,self.p2pInterface) self.initialize_consensus(name) + self.seed_store = {} def initialize_consensus(self, name): self.consensus = GPoHC(self, name) From d7def7444edd2c675bf46b3620c75473de51e6bf Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 14:50:56 +0530 Subject: [PATCH 43/88] Broadcast Function Now Allows A Handler To Be Passed To Handle Broadcast Replies. 
--- p2p.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/p2p.py b/p2p.py index 184a8c5..f0657a5 100644 --- a/p2p.py +++ b/p2p.py @@ -31,7 +31,7 @@ def removePeer(self, peer): self.peerList[peer].close() del self.peerList[peer] - def broadcast(self, message): + def broadcast(self, message, handler=None): print(self.peerList.items()) for addr, sock in self.peerList.items(): try: @@ -41,7 +41,8 @@ def broadcast(self, message): elif type(message) == list: for m in message: sock.send(m) - + if handler: + handler(sock) sock.close() sock = -1 self.addPeer(addr, False) From 301ae4355f6c078748df0569d3e1f785ab0d85d4 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 14:58:45 +0530 Subject: [PATCH 44/88] Broadcast Now Supports Arguments To Broadcast Handlers. --- p2p.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/p2p.py b/p2p.py index f0657a5..cb92cfc 100644 --- a/p2p.py +++ b/p2p.py @@ -31,7 +31,7 @@ def removePeer(self, peer): self.peerList[peer].close() del self.peerList[peer] - def broadcast(self, message, handler=None): + def broadcast(self, message, handler=None, handler_args=None): print(self.peerList.items()) for addr, sock in self.peerList.items(): try: @@ -42,7 +42,7 @@ def broadcast(self, message, handler=None): for m in message: sock.send(m) if handler: - handler(sock) + handler(sock, *handler_args) sock.close() sock = -1 self.addPeer(addr, False) From 195b292b6adce48ca0ef59bb3eceb008fbbc7cec Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 15:01:33 +0530 Subject: [PATCH 45/88] Broadcast Handler Results Now Returned As An Array --- p2p.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/p2p.py b/p2p.py index cb92cfc..177e065 100644 --- a/p2p.py +++ b/p2p.py @@ -33,6 +33,7 @@ def removePeer(self, peer): def broadcast(self, message, handler=None, handler_args=None): print(self.peerList.items()) + returns = [] for addr, sock in self.peerList.items(): try: 
print(sock.getpeername()) @@ -42,7 +43,7 @@ def broadcast(self, message, handler=None, handler_args=None): for m in message: sock.send(m) if handler: - handler(sock, *handler_args) + returns.append(handler(sock, *handler_args)) sock.close() sock = -1 self.addPeer(addr, False) @@ -52,6 +53,7 @@ def broadcast(self, message, handler=None, handler_args=None): except OSError: print(f"Peer {sock.getpeername()} Disconnected", flush=True) self.removePeer(sock.getpeername()) + return returns if handler else None def sync_chain(self, node): shuffled_nodes = list(self.peerList.keys()) From ef82e08c00a46e975e37fa87953534c52c39376c Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 15:02:50 +0530 Subject: [PATCH 46/88] seed:scr Now Sends Length Of Score Data Before The Score. --- network_handler.py | 1 + 1 file changed, 1 insertion(+) diff --git a/network_handler.py b/network_handler.py index 5dffac5..3d5db0b 100644 --- a/network_handler.py +++ b/network_handler.py @@ -72,6 +72,7 @@ class seed_handler(): def scr(self, peer, node): seed = peer.recv(512) score = node.gan.clf_score(seed) + peer.send(f'{len(score):02d}'.encode()) peer.send(score) self.seed_store[seed] = score From 2f1f928ab48c83903f039f12f4326fe4754c7996 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 15:06:08 +0530 Subject: [PATCH 47/88] Written Seed Score Broadcast Handler. Returns An Array Of Tuples Of Scores From All Nodes. 
--- GPoHC.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/GPoHC.py b/GPoHC.py index b8c2fb0..6674afa 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -54,5 +54,10 @@ def add_by_each_byte(self, a, b): def validator_online(self, block): pass + def seed_score_broadcast_handler(self, sock, seed): + score_len = int(sock.recv(2).decode()) + score = (sock.getpeername(), int(sock.recv(score_len).decode())) + return score + def score_super_seed(self, super_seed): self.model.clf_score(super_seed) \ No newline at end of file From 94dc496e5e20e44b55538a1db35592b06e8fd178 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 15:06:17 +0530 Subject: [PATCH 48/88] Update GPoHC.py --- GPoHC.py | 1 - 1 file changed, 1 deletion(-) diff --git a/GPoHC.py b/GPoHC.py index 6674afa..ff96108 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -50,7 +50,6 @@ def add_by_each_byte(self, a, b): return result - def validator_online(self, block): pass From ebcbd0ec03191fbe7ef2adb550537139638ee3c3 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 15:08:18 +0530 Subject: [PATCH 49/88] Completed Method To Collect And Calculate The Total Seed Score. 
--- GPoHC.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/GPoHC.py b/GPoHC.py index ff96108..c946995 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -59,4 +59,14 @@ def seed_score_broadcast_handler(self, sock, seed): return score def score_super_seed(self, super_seed): - self.model.clf_score(super_seed) \ No newline at end of file + scores = self.node.p2pInterface.broadcast([ + b"seed:scr", + f'{len(super_seed):05d}'.encode(), + super_seed.encode() + ]) + + total_score = 0 + for score in scores: + total_score += score[1] + + return total_score, scores \ No newline at end of file From 9b492db949dd4d2835c556d754f5dfafae64224d Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 15:12:07 +0530 Subject: [PATCH 50/88] Added Imports --- GPoHC.py | 1 + 1 file changed, 1 insertion(+) diff --git a/GPoHC.py b/GPoHC.py index c946995..337afe3 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -1,3 +1,4 @@ +from imports import * from gan import GAN from block import Block, LogBlock From cc9536273a737ee58b343e82d9c13807f8a7680e Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 15:13:38 +0530 Subject: [PATCH 51/88] Seed Calculation And Scoring Done. TODO: Find The Winner And Add The Block. 
--- GPoHC.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/GPoHC.py b/GPoHC.py index 337afe3..88dd6fc 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -35,7 +35,14 @@ def create_consensus(self, block, chain): SEED_ROOT_PROCESSED = self.preprocess_seed_root(SEED_ROOT) SUPER_SEED = self.model.generator_forward(SEED_ROOT_PROCESSED) - self.score_super_seed(SUPER_SEED) + + score, scores = self.score_super_seed(SUPER_SEED) + block.validators = scores + + # Encrypt SUPER_SEED with private_key of node + SEED = rsa.encrypt(SUPER_SEED, self.node.private_key) + + return SEED def preprocess_seed_root(self, seed_root): seed_root_processed = [] From ebb7848ba132f6af2fe89c9d09772b66cb19b783 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 15:22:33 +0530 Subject: [PATCH 52/88] Fixed Check For Availability Of Remote Nodes. --- GPoHC.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/GPoHC.py b/GPoHC.py index 88dd6fc..6c0a74c 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -35,7 +35,7 @@ def create_consensus(self, block, chain): SEED_ROOT_PROCESSED = self.preprocess_seed_root(SEED_ROOT) SUPER_SEED = self.model.generator_forward(SEED_ROOT_PROCESSED) - + score, scores = self.score_super_seed(SUPER_SEED) block.validators = scores @@ -59,7 +59,12 @@ def add_by_each_byte(self, a, b): return result def validator_online(self, block): - pass + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.connect(block.address) + sock.send(b"conn:ack") + if sock.recv(8) == b"conn:ack": + return True + return False def seed_score_broadcast_handler(self, sock, seed): score_len = int(sock.recv(2).decode()) From 35f1cf7fa842e013470ce2df126109bf46ba10be Mon Sep 17 00:00:00 2001 From: kunalgoyal9 Date: Sun, 31 Jul 2022 15:47:51 +0530 Subject: [PATCH 53/88] change intendation from 2 to 4 --- gan.py | 308 ++++++++++++++++++++++++++++----------------------------- 1 file changed, 154 insertions(+), 154 deletions(-) diff --git a/gan.py b/gan.py 
index 2b9f3f1..02a7a57 100644 --- a/gan.py +++ b/gan.py @@ -5,158 +5,158 @@ from sklearn.svm import SVC class GAN(nn.Module): - def __init__(self, name): - super(GAN, self).__init__() - inp=0 - out=0 - test_inp=0 - test_out=0 - clf=0 - learning_rate = 0 - model = 0 - criterion = 0 - optimizer = 0 - self.encoder = nn.Sequential( - nn.Linear(230, 128), - nn.ReLU(True), - nn.Linear(128, 64), - nn.ReLU(True), - nn.Linear(64, 32), - nn.ReLU(True), - nn.Linear(32, 8)) + def __init__(self, name): + super(GAN, self).__init__() + inp=0 + out=0 + test_inp=0 + test_out=0 + clf=0 + learning_rate = 0 + model = 0 + criterion = 0 + optimizer = 0 + self.encoder = nn.Sequential( + nn.Linear(230, 128), + nn.ReLU(True), + nn.Linear(128, 64), + nn.ReLU(True), + nn.Linear(64, 32), + nn.ReLU(True), + nn.Linear(32, 8)) + + self.decoder = nn.Sequential( + nn.Linear(8, 32), + nn.ReLU(True), + nn.Linear(32, 64), + nn.ReLU(True), + nn.Linear(64, 128), + nn.ReLU(True), + nn.Linear(128, 230)) + + self.name = name - self.decoder = nn.Sequential( - nn.Linear(8, 32), - nn.ReLU(True), - nn.Linear(32, 64), - nn.ReLU(True), - nn.Linear(64, 128), - nn.ReLU(True), - nn.Linear(128, 230)) - - self.name = name - - def initialize(self): - self.inp=0 - self.out=0 - self.test_inp=0 - self.test_out=0 - self.clf=0 - self.learning_rate = 1e-3 - self.model = GAN() - self.criterion = nn.MSELoss() - self.optimizer = torch.optim.Adam(self.model.parameters(), lr=self.learning_rate, weight_decay=1e-5) - - def feedData(self, chain): - print(chain) - input_ = [[0]*230 for element in range(512)] - output_ = [[0]*230 for element in range(512)] - for i in range(len(chain)): - if len(input_) == 512: - break - input_[i] = self.encode(chain[i].seed if chain[i].seed else [0]*230) - output_[i] = self.encode(chain[i+1].seed if chain[i+1].seed else [0]*230) - - self.inp=torch.tensor(input_,dtype=torch.float32) - self.out=torch.tensor(output_,dtype=torch.float32) - total_inp=self.inp - total_out=self.out - 
self.inp=total_inp[:384] - self.out=total_out[:384] - self.test_inp=total_inp[384:] - self.test_out=total_inp[384:] - - def encode(self,lst): - temp=[] - binaryString="" - for elem in lst: - temp.append(format(elem,'08b')) - for item in temp: - binaryString+=item - rev_str= binaryString[::-1] - chunks=textwrap.wrap(rev_str, 6) - chunks.reverse() - for i in range(len(chunks)) : - chunks[i]=chunks[i][::-1] - encodedList=[] - for item in chunks: - encodedList.append(int(item,2)) - - return encodedList - - def forward(self, x): - x = self.encoder(x) - x = self.decoder(x) - return x - - def train(self): - epochs = 2000 - for epoch in range(epochs): - print("Epoch: ", epoch) - output = self.model(self.inp) - loss = self.criterion(output , self.out) - self.optimizer.zero_grad() - loss.backward() - self.optimizer.step() - - def trainClassifier(self): - output_ = self.model(self.test_inp) - output_ = torch.round(output_) - - output_=output_.detach().numpy() - - - for i in output_: - if i[0]!=1. : - i[0]=1. - - for i in output_: - if i[229]!=61. : - i[229]=61. - - for i in output_: - for j in range(len(i)): - if i[j]<0 or i[j]>61 : - i[j]=40. - - temp=output_ - df1=pd.DataFrame(temp) - df1['label']=0 - - temp=self.test_out.detach().numpy() - df2=pd.DataFrame(temp) - df2['label']=1 - - df=df1.append(df2, ignore_index = True) - df=df.sample(frac=1) - - x_train=df.drop(['label'], axis = 1) - y_train=df['label'] - - self.clf = SVC(probability=True).fit(x_train, y_train) - pred_svm = self.clf.predict_proba(x_train) - - def clf_score(self,data_): - pred = self.clf.predict_proba(data_) - return pred - - - def getOutput(self,data_) : - out_ = self.model(data_) - out_ = torch.round(out_) - out_ = out_.detach().numpy() - - for i in out_: - if i[0]!=1. : - i[0]=1. - - for i in out_: - if i[229]!=61. : - i[229]=61. - - for i in out_: - for j in range(len(i)): - if i[j]<0 or i[j]>61 : - i[j]=40. 
- - return out_ \ No newline at end of file + def initialize(self): + self.inp=0 + self.out=0 + self.test_inp=0 + self.test_out=0 + self.clf=0 + self.learning_rate = 1e-3 + self.model = GAN() + self.criterion = nn.MSELoss() + self.optimizer = torch.optim.Adam(self.model.parameters(), lr=self.learning_rate, weight_decay=1e-5) + + def feedData(self, chain): + print(chain) + input_ = [[0]*230 for element in range(512)] + output_ = [[0]*230 for element in range(512)] + for i in range(len(chain)): + if len(input_) == 512: + break + input_[i] = self.encode(chain[i].seed if chain[i].seed else [0]*230) + output_[i] = self.encode(chain[i+1].seed if chain[i+1].seed else [0]*230) + + self.inp=torch.tensor(input_,dtype=torch.float32) + self.out=torch.tensor(output_,dtype=torch.float32) + total_inp=self.inp + total_out=self.out + self.inp=total_inp[:384] + self.out=total_out[:384] + self.test_inp=total_inp[384:] + self.test_out=total_inp[384:] + + def encode(self,lst): + temp=[] + binaryString="" + for elem in lst: + temp.append(format(elem,'08b')) + for item in temp: + binaryString+=item + rev_str= binaryString[::-1] + chunks=textwrap.wrap(rev_str, 6) + chunks.reverse() + for i in range(len(chunks)) : + chunks[i]=chunks[i][::-1] + encodedList=[] + for item in chunks: + encodedList.append(int(item,2)) + + return encodedList + + def forward(self, x): + x = self.encoder(x) + x = self.decoder(x) + return x + + def train(self): + epochs = 2000 + for epoch in range(epochs): + print("Epoch: ", epoch) + output = self.model(self.inp) + loss = self.criterion(output , self.out) + self.optimizer.zero_grad() + loss.backward() + self.optimizer.step() + + def trainClassifier(self): + output_ = self.model(self.test_inp) + output_ = torch.round(output_) + + output_=output_.detach().numpy() + + + for i in output_: + if i[0]!=1. : + i[0]=1. + + for i in output_: + if i[229]!=61. : + i[229]=61. + + for i in output_: + for j in range(len(i)): + if i[j]<0 or i[j]>61 : + i[j]=40. 
+ + temp=output_ + df1=pd.DataFrame(temp) + df1['label']=0 + + temp=self.test_out.detach().numpy() + df2=pd.DataFrame(temp) + df2['label']=1 + + df=df1.append(df2, ignore_index = True) + df=df.sample(frac=1) + + x_train=df.drop(['label'], axis = 1) + y_train=df['label'] + + self.clf = SVC(probability=True).fit(x_train, y_train) + pred_svm = self.clf.predict_proba(x_train) + + def clf_score(self,data_): + pred = self.clf.predict_proba(data_) + return pred + + + def getOutput(self,data_) : + out_ = self.model(data_) + out_ = torch.round(out_) + out_ = out_.detach().numpy() + + for i in out_: + if i[0]!=1. : + i[0]=1. + + for i in out_: + if i[229]!=61. : + i[229]=61. + + for i in out_: + for j in range(len(i)): + if i[j]<0 or i[j]>61 : + i[j]=40. + + return out_ \ No newline at end of file From 8695d321ab4d9e8bf35b4ab20b753740ae5c0f54 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 21:51:49 +0530 Subject: [PATCH 54/88] Created MockGAN --- gan.py | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/gan.py b/gan.py index 02a7a57..b8de3e3 100644 --- a/gan.py +++ b/gan.py @@ -4,6 +4,8 @@ import textwrap from sklearn.svm import SVC +from imports import * + class GAN(nn.Module): def __init__(self, name): super(GAN, self).__init__() @@ -159,4 +161,21 @@ def getOutput(self,data_) : if i[j]<0 or i[j]>61 : i[j]=40. 
- return out_ \ No newline at end of file + return out_ + + +class MockGAN(object): + def __init__(self): + if not os.path.exists('mockgan'): + open("mockgan", "w").close() + + def clf_score(self,data_): + return ord(data_[0])/1000 + + def generator_forward(self,data_): + import random, string + + pub, priv = rsa.newkeys(1024) + msg = ''.join(random.choices(string.ascii_uppercase + string.digits, k=117)) + + return [x for x in rsa.encrypt(msg.encode(), priv)] \ No newline at end of file From 3615ba4ae5ebb133cb2b6d1c8aaf948682db8532 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 21:57:31 +0530 Subject: [PATCH 55/88] MockGAN Temporarily In Use Rather Than GAN. --- GPoHC.py | 3 +-- node.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/GPoHC.py b/GPoHC.py index 6c0a74c..c8ff086 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -1,5 +1,5 @@ from imports import * -from gan import GAN +from gan import MockGAN as GAN from block import Block, LogBlock class GPoHC(): @@ -39,7 +39,6 @@ def create_consensus(self, block, chain): score, scores = self.score_super_seed(SUPER_SEED) block.validators = scores - # Encrypt SUPER_SEED with private_key of node SEED = rsa.encrypt(SUPER_SEED, self.node.private_key) return SEED diff --git a/node.py b/node.py index f413c92..37313b5 100644 --- a/node.py +++ b/node.py @@ -1,5 +1,5 @@ from queue import Queue -from gan import GAN +from gan import MockGAN as GAN from block import Block, LogBlock from p2p import p2pInterface from blockchain import Blockchain From 8c4dcfafca1b37d4f1f4fec91d07faa366ffb19f Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Sun, 31 Jul 2022 22:45:48 +0530 Subject: [PATCH 56/88] Seed Root Collection Now Functional. 
--- GPoHC.py | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/GPoHC.py b/GPoHC.py index c8ff086..5fcda33 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -28,9 +28,7 @@ def create_consensus(self, block, chain): for block in SOURCE_BLOCKS: SOURCE_SEED.append(block.seed) - SEED_ROOT = "" - for root in self.collect_roots(SOURCE_BLOCKS): - SEED_ROOT = self.add_by_each_byte(SOURCE_SEED, root) + SEED_ROOT = self.collect_seed_root(SOURCE_SEED) SEED_ROOT_PROCESSED = self.preprocess_seed_root(SEED_ROOT) @@ -65,6 +63,22 @@ def validator_online(self, block): return True return False + def roots_broadcast_handler(self, sock, seed): + return list(sock.recv(128)) + + def collect_seed_root(self, source_seed): + roots = self.node.p2pInterface.broadcast([ + b"seed:rot", + f'{len(source_seed):05d}'.encode(), + source_seed.encode() + ]) + + out = [0]*128 + for root in roots: + for byte in range(128): + out[byte] += root[byte] + out[byte] = out[byte] % 255 + def seed_score_broadcast_handler(self, sock, seed): score_len = int(sock.recv(2).decode()) score = (sock.getpeername(), int(sock.recv(score_len).decode())) From ee92e65121da41af75c7b68e4c7470e6e7280ee4 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 02:37:41 +0530 Subject: [PATCH 57/88] MockGAN Now Accepts Name Attribute To Replicate GAN. --- gan.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gan.py b/gan.py index b8de3e3..dd3ded7 100644 --- a/gan.py +++ b/gan.py @@ -165,7 +165,7 @@ def getOutput(self,data_) : class MockGAN(object): - def __init__(self): + def __init__(self, name): if not os.path.exists('mockgan'): open("mockgan", "w").close() From dafb37d9a055bd44504a4b676928418b5e3659a5 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 02:41:03 +0530 Subject: [PATCH 58/88] consensus Object Is Now Created Directly Before Initiation Of Blockchain. 
--- node.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/node.py b/node.py index 37313b5..2fdfaae 100644 --- a/node.py +++ b/node.py @@ -13,13 +13,10 @@ def __init__(self, private_key, host, name="chain"): self.private_key = private_key self.address = private_key self.p2pInterface = p2pInterface(self) + self.consensus = GPoHC(self, name) self.chain = Blockchain(self, name,self.p2pInterface) - self.initialize_consensus(name) self.seed_store = {} - def initialize_consensus(self, name): - self.consensus = GPoHC(self, name) - @staticmethod def runtime(self, first_run=True): pass From dc65668bc80ac76d3e80076997e3cc12710bd218 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 02:53:03 +0530 Subject: [PATCH 59/88] Added Functions To MockGAN To Replicate GAN --- gan.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/gan.py b/gan.py index dd3ded7..f9cd99d 100644 --- a/gan.py +++ b/gan.py @@ -169,6 +169,18 @@ def __init__(self, name): if not os.path.exists('mockgan'): open("mockgan", "w").close() + def initialize(self): + pass + + def feedData(self, chain): + pass + + def train(self): + pass + + def trainClassifier(self): + pass + def clf_score(self,data_): return ord(data_[0])/1000 From 3f76bf5b652eb78a37a305c974d4fa7706a9d899 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 02:56:25 +0530 Subject: [PATCH 60/88] Consensus GAN Initialization Is Npw A Separate Function. 
--- GPoHC.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/GPoHC.py b/GPoHC.py index 5fcda33..ebe5ffe 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -8,13 +8,13 @@ def __init__(self, node, name): self.node = node self.model = GAN(name) self.new = True + + def initialize(self): if self.new: - self.node.chain.append(LogBlock(self.node, "Initializing Gan")) self.model.initialize() - self.model.feedData(self.chain) + self.model.feedData(self.node.chain) self.model.train() self.model.trainClassifier() - self.node.chain.append(LogBlock(self.node, "Gan initialized")) def create_consensus(self, block, chain): hash = block.calculate_hash() From a8e22ca3b355f48e0e73c562474f486ba28e8a02 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 03:14:41 +0530 Subject: [PATCH 61/88] Added Imports To network_handler --- network_handler.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/network_handler.py b/network_handler.py index 3d5db0b..51ade6b 100644 --- a/network_handler.py +++ b/network_handler.py @@ -1,3 +1,5 @@ +from imports import * + comm_types = { "conn": [ "req", From 640700974c66fde9c827f47ab9e1c23d83ebd701 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 03:15:14 +0530 Subject: [PATCH 62/88] Added Seed Root Request --- network_handler.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/network_handler.py b/network_handler.py index 51ade6b..5632e12 100644 --- a/network_handler.py +++ b/network_handler.py @@ -21,7 +21,8 @@ "int", ], "seed": [ - "scr" + "scr", + "rot", ] } @@ -78,6 +79,9 @@ def scr(self, peer, node): peer.send(score) self.seed_store[seed] = score + def rot(self, peer, node): + pass + handlers = { "conn": connection_handler, "blck": block_handler, From b5ccf23fc41aeb9ca1d54b94b90bc42247bb2e17 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 03:15:40 +0530 Subject: [PATCH 63/88] Fixed Typo --- GPoHC.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/GPoHC.py b/GPoHC.py index ebe5ffe..eaab871 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -21,7 +21,7 @@ def create_consensus(self, block, chain): SOURCE_BLOCKS = [] for validator_block in range(self.strenght): - if self.validator_online(chain[-self.strenght[validator_block]]): + if self.validator_online(chain[validator_block]): SOURCE_BLOCKS.append(validator_block) SOURCE_SEED = "" From 6caf6dfd7cd702c8b038b6a5fd905146603ea2fa Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 03:16:28 +0530 Subject: [PATCH 64/88] Strength Of Consensus Now Reduced For A Blockchain With Length Lower Than Strength --- GPoHC.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/GPoHC.py b/GPoHC.py index eaab871..67d92ad 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -20,7 +20,7 @@ def create_consensus(self, block, chain): hash = block.calculate_hash() SOURCE_BLOCKS = [] - for validator_block in range(self.strenght): + for validator_block in range(min([self.strenght, len(chain)])): if self.validator_online(chain[validator_block]): SOURCE_BLOCKS.append(validator_block) From 3cdc28f7f2776959e4be92fe56543e5f515df571 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 03:16:56 +0530 Subject: [PATCH 65/88] Assigned Broadcast Handlers --- GPoHC.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/GPoHC.py b/GPoHC.py index 67d92ad..738ddff 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -71,7 +71,7 @@ def collect_seed_root(self, source_seed): b"seed:rot", f'{len(source_seed):05d}'.encode(), source_seed.encode() - ]) + ], handler=self.roots_broadcast_handler) out = [0]*128 for root in roots: @@ -79,7 +79,7 @@ def collect_seed_root(self, source_seed): out[byte] += root[byte] out[byte] = out[byte] % 255 - def seed_score_broadcast_handler(self, sock, seed): + def seed_score_broadcast_handler(self, sock): score_len = int(sock.recv(2).decode()) score = (sock.getpeername(), int(sock.recv(score_len).decode())) return score @@ -89,7 +89,7 @@ def 
score_super_seed(self, super_seed): b"seed:scr", f'{len(super_seed):05d}'.encode(), super_seed.encode() - ]) + ], handler=self.seed_score_broadcast_handler) total_score = 0 for score in scores: From c6d4ff79e325f294b7adb046cc8432f44a1e9c07 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 03:23:50 +0530 Subject: [PATCH 66/88] Written Seed Root Request. --- network_handler.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/network_handler.py b/network_handler.py index 5632e12..b03ca58 100644 --- a/network_handler.py +++ b/network_handler.py @@ -80,7 +80,8 @@ def scr(self, peer, node): self.seed_store[seed] = score def rot(self, peer, node): - pass + seed_lenght = peer.recv(5) + peer.send(rsa.encrypt(peer.recv(seed_lenght), node.private_key)) handlers = { "conn": connection_handler, From 8d2dab4bb161612fde16f2e7282c03b5cb1da0e0 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 03:24:06 +0530 Subject: [PATCH 67/88] Exported Seed Handlers --- network_handler.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/network_handler.py b/network_handler.py index b03ca58..e6df927 100644 --- a/network_handler.py +++ b/network_handler.py @@ -86,5 +86,6 @@ def rot(self, peer, node): handlers = { "conn": connection_handler, "blck": block_handler, - "trnx": transaction_handler + "trnx": transaction_handler, + "seed": seed_handler, } \ No newline at end of file From afd6c89a437ff4b647a9e46581c55c42c9a6b949 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 03:55:55 +0530 Subject: [PATCH 68/88] Fixed Score Simulation In MockGAN --- gan.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gan.py b/gan.py index f9cd99d..e691ddd 100644 --- a/gan.py +++ b/gan.py @@ -182,7 +182,7 @@ def trainClassifier(self): pass def clf_score(self,data_): - return ord(data_[0])/1000 + return str(list(data_)[0]/1000).encode() def generator_forward(self,data_): import random, string From 
6faa8d2bf8dfc15df852ce79b90cf019f579869c Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 03:56:23 +0530 Subject: [PATCH 69/88] All Indentation Now Tabs --- gan.py | 312 ++++++++++++++++++++++++++++----------------------------- 1 file changed, 156 insertions(+), 156 deletions(-) diff --git a/gan.py b/gan.py index e691ddd..290770f 100644 --- a/gan.py +++ b/gan.py @@ -7,161 +7,161 @@ from imports import * class GAN(nn.Module): - def __init__(self, name): - super(GAN, self).__init__() - inp=0 - out=0 - test_inp=0 - test_out=0 - clf=0 - learning_rate = 0 - model = 0 - criterion = 0 - optimizer = 0 - self.encoder = nn.Sequential( - nn.Linear(230, 128), - nn.ReLU(True), - nn.Linear(128, 64), - nn.ReLU(True), - nn.Linear(64, 32), - nn.ReLU(True), - nn.Linear(32, 8)) - - self.decoder = nn.Sequential( - nn.Linear(8, 32), - nn.ReLU(True), - nn.Linear(32, 64), - nn.ReLU(True), - nn.Linear(64, 128), - nn.ReLU(True), - nn.Linear(128, 230)) - - self.name = name - - def initialize(self): - self.inp=0 - self.out=0 - self.test_inp=0 - self.test_out=0 - self.clf=0 - self.learning_rate = 1e-3 - self.model = GAN() - self.criterion = nn.MSELoss() - self.optimizer = torch.optim.Adam(self.model.parameters(), lr=self.learning_rate, weight_decay=1e-5) - - def feedData(self, chain): - print(chain) - input_ = [[0]*230 for element in range(512)] - output_ = [[0]*230 for element in range(512)] - for i in range(len(chain)): - if len(input_) == 512: - break - input_[i] = self.encode(chain[i].seed if chain[i].seed else [0]*230) - output_[i] = self.encode(chain[i+1].seed if chain[i+1].seed else [0]*230) - - self.inp=torch.tensor(input_,dtype=torch.float32) - self.out=torch.tensor(output_,dtype=torch.float32) - total_inp=self.inp - total_out=self.out - self.inp=total_inp[:384] - self.out=total_out[:384] - self.test_inp=total_inp[384:] - self.test_out=total_inp[384:] - - def encode(self,lst): - temp=[] - binaryString="" - for elem in lst: - temp.append(format(elem,'08b')) - for item 
in temp: - binaryString+=item - rev_str= binaryString[::-1] - chunks=textwrap.wrap(rev_str, 6) - chunks.reverse() - for i in range(len(chunks)) : - chunks[i]=chunks[i][::-1] - encodedList=[] - for item in chunks: - encodedList.append(int(item,2)) - - return encodedList - - def forward(self, x): - x = self.encoder(x) - x = self.decoder(x) - return x - - def train(self): - epochs = 2000 - for epoch in range(epochs): - print("Epoch: ", epoch) - output = self.model(self.inp) - loss = self.criterion(output , self.out) - self.optimizer.zero_grad() - loss.backward() - self.optimizer.step() - - def trainClassifier(self): - output_ = self.model(self.test_inp) - output_ = torch.round(output_) - - output_=output_.detach().numpy() - - - for i in output_: - if i[0]!=1. : - i[0]=1. - - for i in output_: - if i[229]!=61. : - i[229]=61. - - for i in output_: - for j in range(len(i)): - if i[j]<0 or i[j]>61 : - i[j]=40. - - temp=output_ - df1=pd.DataFrame(temp) - df1['label']=0 - - temp=self.test_out.detach().numpy() - df2=pd.DataFrame(temp) - df2['label']=1 - - df=df1.append(df2, ignore_index = True) - df=df.sample(frac=1) - - x_train=df.drop(['label'], axis = 1) - y_train=df['label'] - - self.clf = SVC(probability=True).fit(x_train, y_train) - pred_svm = self.clf.predict_proba(x_train) - - def clf_score(self,data_): - pred = self.clf.predict_proba(data_) - return pred - - - def getOutput(self,data_) : - out_ = self.model(data_) - out_ = torch.round(out_) - out_ = out_.detach().numpy() - - for i in out_: - if i[0]!=1. : - i[0]=1. - - for i in out_: - if i[229]!=61. : - i[229]=61. - - for i in out_: - for j in range(len(i)): - if i[j]<0 or i[j]>61 : - i[j]=40. 
- - return out_ + def __init__(self, name): + super(GAN, self).__init__() + inp=0 + out=0 + test_inp=0 + test_out=0 + clf=0 + learning_rate = 0 + model = 0 + criterion = 0 + optimizer = 0 + self.encoder = nn.Sequential( + nn.Linear(230, 128), + nn.ReLU(True), + nn.Linear(128, 64), + nn.ReLU(True), + nn.Linear(64, 32), + nn.ReLU(True), + nn.Linear(32, 8)) + + self.decoder = nn.Sequential( + nn.Linear(8, 32), + nn.ReLU(True), + nn.Linear(32, 64), + nn.ReLU(True), + nn.Linear(64, 128), + nn.ReLU(True), + nn.Linear(128, 230)) + + self.name = name + + def initialize(self): + self.inp=0 + self.out=0 + self.test_inp=0 + self.test_out=0 + self.clf=0 + self.learning_rate = 1e-3 + self.model = GAN() + self.criterion = nn.MSELoss() + self.optimizer = torch.optim.Adam(self.model.parameters(), lr=self.learning_rate, weight_decay=1e-5) + + def feedData(self, chain): + print(chain) + input_ = [[0]*230 for element in range(512)] + output_ = [[0]*230 for element in range(512)] + for i in range(len(chain)): + if len(input_) == 512: + break + input_[i] = self.encode(chain[i].seed if chain[i].seed else [0]*230) + output_[i] = self.encode(chain[i+1].seed if chain[i+1].seed else [0]*230) + + self.inp=torch.tensor(input_,dtype=torch.float32) + self.out=torch.tensor(output_,dtype=torch.float32) + total_inp=self.inp + total_out=self.out + self.inp=total_inp[:384] + self.out=total_out[:384] + self.test_inp=total_inp[384:] + self.test_out=total_inp[384:] + + def encode(self,lst): + temp=[] + binaryString="" + for elem in lst: + temp.append(format(elem,'08b')) + for item in temp: + binaryString+=item + rev_str= binaryString[::-1] + chunks=textwrap.wrap(rev_str, 6) + chunks.reverse() + for i in range(len(chunks)) : + chunks[i]=chunks[i][::-1] + encodedList=[] + for item in chunks: + encodedList.append(int(item,2)) + + return encodedList + + def forward(self, x): + x = self.encoder(x) + x = self.decoder(x) + return x + + def train(self): + epochs = 2000 + for epoch in range(epochs): + 
print("Epoch: ", epoch) + output = self.model(self.inp) + loss = self.criterion(output , self.out) + self.optimizer.zero_grad() + loss.backward() + self.optimizer.step() + + def trainClassifier(self): + output_ = self.model(self.test_inp) + output_ = torch.round(output_) + + output_=output_.detach().numpy() + + + for i in output_: + if i[0]!=1. : + i[0]=1. + + for i in output_: + if i[229]!=61. : + i[229]=61. + + for i in output_: + for j in range(len(i)): + if i[j]<0 or i[j]>61 : + i[j]=40. + + temp=output_ + df1=pd.DataFrame(temp) + df1['label']=0 + + temp=self.test_out.detach().numpy() + df2=pd.DataFrame(temp) + df2['label']=1 + + df=df1.append(df2, ignore_index = True) + df=df.sample(frac=1) + + x_train=df.drop(['label'], axis = 1) + y_train=df['label'] + + self.clf = SVC(probability=True).fit(x_train, y_train) + pred_svm = self.clf.predict_proba(x_train) + + def clf_score(self,data_): + pred = self.clf.predict_proba(data_) + return pred + + + def getOutput(self,data_) : + out_ = self.model(data_) + out_ = torch.round(out_) + out_ = out_.detach().numpy() + + for i in out_: + if i[0]!=1. : + i[0]=1. + + for i in out_: + if i[229]!=61. : + i[229]=61. + + for i in out_: + for j in range(len(i)): + if i[j]<0 or i[j]>61 : + i[j]=40. 
+ + return out_ class MockGAN(object): @@ -182,7 +182,7 @@ def trainClassifier(self): pass def clf_score(self,data_): - return str(list(data_)[0]/1000).encode() + return str(list(data_)[0]/1000).encode() def generator_forward(self,data_): import random, string From c0c50b6ea3edada42ee4bd8a0568823b1b616103 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 03:56:55 +0530 Subject: [PATCH 70/88] SUPER_SEED Now Limited To First 117 Characters --- GPoHC.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/GPoHC.py b/GPoHC.py index 738ddff..7bb2e0a 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -37,7 +37,7 @@ def create_consensus(self, block, chain): score, scores = self.score_super_seed(SUPER_SEED) block.validators = scores - SEED = rsa.encrypt(SUPER_SEED, self.node.private_key) + SEED = rsa.encrypt(bytes(SUPER_SEED[:117]), self.node.private_key) return SEED From b4bc08642c588c1d8ca62ebe26bde9901720da53 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 03:57:17 +0530 Subject: [PATCH 71/88] Temporarily Every Node Wins Consensus --- GPoHC.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/GPoHC.py b/GPoHC.py index 7bb2e0a..96667d7 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -39,7 +39,7 @@ def create_consensus(self, block, chain): SEED = rsa.encrypt(bytes(SUPER_SEED[:117]), self.node.private_key) - return SEED + return True, SEED def preprocess_seed_root(self, seed_root): seed_root_processed = [] From 0b9bb9498c76321353801a2f3ccb950ecbabe38d Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 03:58:11 +0530 Subject: [PATCH 72/88] Removed Unnecessary Argument --- GPoHC.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/GPoHC.py b/GPoHC.py index 96667d7..63eff50 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -63,7 +63,7 @@ def validator_online(self, block): return True return False - def roots_broadcast_handler(self, sock, seed): + def roots_broadcast_handler(self, sock): return 
list(sock.recv(128)) def collect_seed_root(self, source_seed): From 022ff0df0582502ec923bbd8ba144cb1465fa00a Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 03:58:38 +0530 Subject: [PATCH 73/88] Added Missing Return --- GPoHC.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/GPoHC.py b/GPoHC.py index 63eff50..334cf47 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -79,6 +79,8 @@ def collect_seed_root(self, source_seed): out[byte] += root[byte] out[byte] = out[byte] % 255 + return out + def seed_score_broadcast_handler(self, sock): score_len = int(sock.recv(2).decode()) score = (sock.getpeername(), int(sock.recv(score_len).decode())) From 793fc119003396bc73175e71e87b31cfa8e4d53b Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 03:59:29 +0530 Subject: [PATCH 74/88] Scores Received Over The Network Are Now Stored As Floats. --- GPoHC.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/GPoHC.py b/GPoHC.py index 334cf47..c4b3832 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -83,7 +83,7 @@ def collect_seed_root(self, source_seed): def seed_score_broadcast_handler(self, sock): score_len = int(sock.recv(2).decode()) - score = (sock.getpeername(), int(sock.recv(score_len).decode())) + score = (sock.getpeername(), float(sock.recv(score_len).decode())) return score def score_super_seed(self, super_seed): From f3e93c7be17dca6c2cf67a4aaa17e20242ad16f1 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 04:00:09 +0530 Subject: [PATCH 75/88] Using bytes Function Instead Of encode Method To Account For Ascii Arrays --- GPoHC.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/GPoHC.py b/GPoHC.py index c4b3832..6d4e5a5 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -90,7 +90,7 @@ def score_super_seed(self, super_seed): scores = self.node.p2pInterface.broadcast([ b"seed:scr", f'{len(super_seed):05d}'.encode(), - super_seed.encode() + bytes(super_seed) ], handler=self.seed_score_broadcast_handler) total_score = 0 From 
2d08040eecd7b5ed37e12bac2044e3b63e7ab052 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 04:02:11 +0530 Subject: [PATCH 76/88] Fixed Reference To GAN --- network_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/network_handler.py b/network_handler.py index e6df927..7146d94 100644 --- a/network_handler.py +++ b/network_handler.py @@ -74,7 +74,7 @@ def ack(self, peer, node): class seed_handler(): def scr(self, peer, node): seed = peer.recv(512) - score = node.gan.clf_score(seed) + score = node.consensus.model.clf_score(seed) peer.send(f'{len(score):02d}'.encode()) peer.send(score) self.seed_store[seed] = score From 9c877b272e054bcd8904df51872d031e0a31fe16 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 04:02:27 +0530 Subject: [PATCH 77/88] Fixed Reference To seed_store --- network_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/network_handler.py b/network_handler.py index 7146d94..a52e7e2 100644 --- a/network_handler.py +++ b/network_handler.py @@ -77,7 +77,7 @@ def scr(self, peer, node): score = node.consensus.model.clf_score(seed) peer.send(f'{len(score):02d}'.encode()) peer.send(score) - self.seed_store[seed] = score + node.seed_store[seed] = score def rot(self, peer, node): seed_lenght = peer.recv(5) From a3d29f782d9b8ae5e1a92925ce7fa45d605bd293 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 04:03:02 +0530 Subject: [PATCH 78/88] Spread Out seed:rot Handler --- network_handler.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/network_handler.py b/network_handler.py index a52e7e2..f9c7535 100644 --- a/network_handler.py +++ b/network_handler.py @@ -80,8 +80,9 @@ def scr(self, peer, node): node.seed_store[seed] = score def rot(self, peer, node): - seed_lenght = peer.recv(5) - peer.send(rsa.encrypt(peer.recv(seed_lenght), node.private_key)) + seed_lenght = int(peer.recv(5)) + seed_root_segment = rsa.encrypt(peer.recv(seed_lenght), 
node.private_key) + peer.send(seed_root_segment) handlers = { "conn": connection_handler, From bbd45d9ea36c151ec5df9933fbf16e154c3ab61c Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Tue, 2 Aug 2022 04:03:37 +0530 Subject: [PATCH 79/88] Fixed Default Value For handler_args --- p2p.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/p2p.py b/p2p.py index 177e065..ff6ce4d 100644 --- a/p2p.py +++ b/p2p.py @@ -31,7 +31,7 @@ def removePeer(self, peer): self.peerList[peer].close() del self.peerList[peer] - def broadcast(self, message, handler=None, handler_args=None): + def broadcast(self, message, handler=None, handler_args=[]): print(self.peerList.items()) returns = [] for addr, sock in self.peerList.items(): From 8a96de63f1aeca502c920c736b46b87504d069d2 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Wed, 3 Aug 2022 23:30:15 +0530 Subject: [PATCH 80/88] FINALLLL GAN COMMIT!!!! --- gan.py | 219 +++++++++++++++++++++++++-------------------------------- 1 file changed, 97 insertions(+), 122 deletions(-) diff --git a/gan.py b/gan.py index 290770f..ee7ccf3 100644 --- a/gan.py +++ b/gan.py @@ -1,169 +1,144 @@ import torch import torch.nn as nn +import pickle +import rsa import pandas as pd -import textwrap +import numpy as np from sklearn.svm import SVC +import os -from imports import * - -class GAN(nn.Module): - def __init__(self, name): - super(GAN, self).__init__() - inp=0 - out=0 - test_inp=0 - test_out=0 - clf=0 - learning_rate = 0 - model = 0 - criterion = 0 - optimizer = 0 +class Generator(nn.Module): + def __init__(self): + super(Generator, self).__init__() self.encoder = nn.Sequential( - nn.Linear(230, 128), + nn.Linear(128, 80), nn.ReLU(True), - nn.Linear(128, 64), + nn.Linear(80, 64), nn.ReLU(True), nn.Linear(64, 32), - nn.ReLU(True), - nn.Linear(32, 8)) - + nn.ReLU(True), + nn.Linear(32, 8) + ) + self.decoder = nn.Sequential( nn.Linear(8, 32), nn.ReLU(True), nn.Linear(32, 64), nn.ReLU(True), - nn.Linear(64, 128), + nn.Linear(64, 80), 
nn.ReLU(True), - nn.Linear(128, 230)) + nn.Linear(80, 128) + ) - self.name = name - - def initialize(self): - self.inp=0 - self.out=0 - self.test_inp=0 - self.test_out=0 - self.clf=0 + self.new = False + + try: + self.load_state_dict(torch.load('gan-gen')) + except: + self.new = True + + def train(self, chain): self.learning_rate = 1e-3 - self.model = GAN() self.criterion = nn.MSELoss() - self.optimizer = torch.optim.Adam(self.model.parameters(), lr=self.learning_rate, weight_decay=1e-5) - - def feedData(self, chain): - print(chain) - input_ = [[0]*230 for element in range(512)] - output_ = [[0]*230 for element in range(512)] - for i in range(len(chain)): + self.optimizer = torch.optim.Adam(self.parameters(), lr=self.learning_rate, weight_decay=1e-5) + + input_ = [] + output_ = [] + for i in range(len(chain)-1): if len(input_) == 512: break - input_[i] = self.encode(chain[i].seed if chain[i].seed else [0]*230) - output_[i] = self.encode(chain[i+1].seed if chain[i+1].seed else [0]*230) + input_.append(list(chain[i].seed)) + print(i) + output_.append(list(chain[i+1].seed)) self.inp=torch.tensor(input_,dtype=torch.float32) self.out=torch.tensor(output_,dtype=torch.float32) - total_inp=self.inp - total_out=self.out - self.inp=total_inp[:384] - self.out=total_out[:384] - self.test_inp=total_inp[384:] - self.test_out=total_inp[384:] - - def encode(self,lst): - temp=[] - binaryString="" - for elem in lst: - temp.append(format(elem,'08b')) - for item in temp: - binaryString+=item - rev_str= binaryString[::-1] - chunks=textwrap.wrap(rev_str, 6) - chunks.reverse() - for i in range(len(chunks)) : - chunks[i]=chunks[i][::-1] - encodedList=[] - for item in chunks: - encodedList.append(int(item,2)) - - return encodedList - - def forward(self, x): - x = self.encoder(x) - x = self.decoder(x) - return x + self.test_inp=self.inp[384:] + self.test_out=self.out[384:] + self.inp=self.inp[:384] + self.out=self.out[:384] - def train(self): - epochs = 2000 - for epoch in range(epochs): - 
print("Epoch: ", epoch) - output = self.model(self.inp) + for epoch in range(5000): + output = self(self.inp) loss = self.criterion(output , self.out) self.optimizer.zero_grad() loss.backward() self.optimizer.step() - def trainClassifier(self): - output_ = self.model(self.test_inp) - output_ = torch.round(output_) + print('epoch [{}/{}], loss:{:.4f}'.format(epoch + 1, 5000, loss.item())) - output_=output_.detach().numpy() + torch.save(self.state_dict(),'gan-gen') + + def forward(self,x): + x = self.encoder(x) + x = self.decoder(x) + return x + + def gen(self, seed_root): + tensor_=torch.tensor(list(seed_root), dtype=torch.float32) + score = self(tensor_) + score = torch.round(score) + score = score.detach().numpy() + return score + +class Descriminator(): + def __init__(self): + + self.new = False + try: + self.classifier = pickle.load(open('gan-desc', 'rb')) + except: + self.new = True - for i in output_: - if i[0]!=1. : - i[0]=1. + def train(self, chain): + input_ = [] + output_ = [] + for i in range(len(chain)-1): + if len(input_) == 512: + break + input_.append(list(chain[i].seed)) + print(i) + output_.append(list(chain[i+1].seed)) - for i in output_: - if i[229]!=61. : - i[229]=61. + self.inp=torch.tensor(input_,dtype=torch.float32) + self.out=torch.tensor(output_,dtype=torch.float32) + self.test_inp=self.inp[384:] + self.test_out=self.out[384:] + self.inp=self.inp[:384] + self.out=self.out[:384] - for i in output_: - for j in range(len(i)): - if i[j]<0 or i[j]>61 : - i[j]=40. 
+ generator = Generator() + output_ = generator(self.inp) + output_ = torch.round(output_) + output_=output_.detach().numpy() + temp=output_ df1=pd.DataFrame(temp) df1['label']=0 - - temp=self.test_out.detach().numpy() + + temp=self.out.detach().numpy() df2=pd.DataFrame(temp) df2['label']=1 - + df=df1.append(df2, ignore_index = True) df=df.sample(frac=1) - + x_train=df.drop(['label'], axis = 1) y_train=df['label'] - - self.clf = SVC(probability=True).fit(x_train, y_train) - pred_svm = self.clf.predict_proba(x_train) - - def clf_score(self,data_): - pred = self.clf.predict_proba(data_) - return pred - - - def getOutput(self,data_) : - out_ = self.model(data_) - out_ = torch.round(out_) - out_ = out_.detach().numpy() - - for i in out_: - if i[0]!=1. : - i[0]=1. - - for i in out_: - if i[229]!=61. : - i[229]=61. - - for i in out_: - for j in range(len(i)): - if i[j]<0 or i[j]>61 : - i[j]=40. - - return out_ - - + + self.classifier = SVC(probability=True).fit(x_train, y_train) + pred_svm = self.classifier.predict_proba(x_train) + print("Classification accuracy : ",self.classifier.score(x_train,y_train)) + pickle.dump(self.classifier, open('classifier', 'wb')) + + def score(self,data_): + temp=np.array(data_) + temp=temp.reshape(1, -1) + result = self.classifier.predict_proba(temp) + return result[0][1] + class MockGAN(object): def __init__(self, name): if not os.path.exists('mockgan'): From c9da4001a445a041d263cf32494dadb66dc280b5 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Wed, 3 Aug 2022 23:30:50 +0530 Subject: [PATCH 81/88] Fixed Invalid Attribute --- GPoHC.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/GPoHC.py b/GPoHC.py index 6d4e5a5..67b1471 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -57,7 +57,7 @@ def add_by_each_byte(self, a, b): def validator_online(self, block): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.connect(block.address) + sock.connect(block.creator) sock.send(b"conn:ack") if sock.recv(8) == b"conn:ack": 
return True From b081fd55a5cd29573ea10c3eb74e682f803c2061 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Wed, 3 Aug 2022 23:31:29 +0530 Subject: [PATCH 82/88] Removed Unnecessary Imports --- GPoHC.py | 1 - 1 file changed, 1 deletion(-) diff --git a/GPoHC.py b/GPoHC.py index 67b1471..ed6d584 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -1,6 +1,5 @@ from imports import * from gan import MockGAN as GAN -from block import Block, LogBlock class GPoHC(): def __init__(self, node, name): From 8fc848a10e898cd76307d06fd415795b473b0095 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Wed, 3 Aug 2022 23:38:56 +0530 Subject: [PATCH 83/88] GPoHC Now Uses Updated gan. --- GPoHC.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/GPoHC.py b/GPoHC.py index ed6d584..073ed1c 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -1,19 +1,21 @@ from imports import * -from gan import MockGAN as GAN +import gan class GPoHC(): def __init__(self, node, name): self.strenght = 50 self.node = node - self.model = GAN(name) + self.model = object() + setattr(self.model, 'generator', gan.Generator()) + setattr(self.model, 'discriminator', gan.Discriminator()) self.new = True def initialize(self): - if self.new: - self.model.initialize() - self.model.feedData(self.node.chain) - self.model.train() - self.model.trainClassifier() + if self.model.generator.new == True: + self.model.generator.train(self.node.chain) + + if self.model.discriminator.new == True: + self.model.discriminator.train(self.node.chain) def create_consensus(self, block, chain): hash = block.calculate_hash() @@ -31,7 +33,7 @@ def create_consensus(self, block, chain): SEED_ROOT_PROCESSED = self.preprocess_seed_root(SEED_ROOT) - SUPER_SEED = self.model.generator_forward(SEED_ROOT_PROCESSED) + SUPER_SEED = self.model.generator.gen(SEED_ROOT_PROCESSED) score, scores = self.score_super_seed(SUPER_SEED) block.validators = scores From e755608187a26abba43cec35fca0ed8a5a413ce1 Mon Sep 17 00:00:00 2001 From: Sushant 
Shah Date: Wed, 3 Aug 2022 23:40:25 +0530 Subject: [PATCH 84/88] network_handler Now Using Updated gan. --- network_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/network_handler.py b/network_handler.py index f9c7535..a21d4fd 100644 --- a/network_handler.py +++ b/network_handler.py @@ -74,7 +74,7 @@ def ack(self, peer, node): class seed_handler(): def scr(self, peer, node): seed = peer.recv(512) - score = node.consensus.model.clf_score(seed) + score = node.consensus.model.descriminator.score(seed) peer.send(f'{len(score):02d}'.encode()) peer.send(score) node.seed_store[seed] = score From 3c656309895015eab9f1232e3a4c42402813b49f Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Wed, 3 Aug 2022 23:40:37 +0530 Subject: [PATCH 85/88] -- --- gan.py | 1 - 1 file changed, 1 deletion(-) diff --git a/gan.py b/gan.py index ee7ccf3..f21c55b 100644 --- a/gan.py +++ b/gan.py @@ -83,7 +83,6 @@ def gen(self, seed_root): class Descriminator(): def __init__(self): - self.new = False try: From 5f6bb3aac29e4e03efb4820446d4df977daabd3d Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Fri, 12 Aug 2022 22:03:37 +0530 Subject: [PATCH 86/88] Shifted From Object Style To Attribute Style For GAN Interface To GPoHC. 
--- GPoHC.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/GPoHC.py b/GPoHC.py index 073ed1c..1fb7a36 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -6,16 +6,16 @@ def __init__(self, node, name): self.strenght = 50 self.node = node self.model = object() - setattr(self.model, 'generator', gan.Generator()) - setattr(self.model, 'discriminator', gan.Discriminator()) + self.generator = gan.Generator() + self.discriminator = gan.Descriminator() self.new = True def initialize(self): - if self.model.generator.new == True: - self.model.generator.train(self.node.chain) + if self.generator.new == True: + self.generator.train(self.node.chain) - if self.model.discriminator.new == True: - self.model.discriminator.train(self.node.chain) + if self.discriminator.new == True: + self.discriminator.train(self.node.chain) def create_consensus(self, block, chain): hash = block.calculate_hash() @@ -33,7 +33,7 @@ def create_consensus(self, block, chain): SEED_ROOT_PROCESSED = self.preprocess_seed_root(SEED_ROOT) - SUPER_SEED = self.model.generator.gen(SEED_ROOT_PROCESSED) + SUPER_SEED = self.generator.gen(SEED_ROOT) score, scores = self.score_super_seed(SUPER_SEED) block.validators = scores From 56d689323f8e44449591431e453f9f52b2a56b50 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Fri, 12 Aug 2022 22:03:56 +0530 Subject: [PATCH 87/88] Removed Faulty Seed Pre-Processing --- GPoHC.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/GPoHC.py b/GPoHC.py index 1fb7a36..40aef3b 100644 --- a/GPoHC.py +++ b/GPoHC.py @@ -31,8 +31,6 @@ def create_consensus(self, block, chain): SEED_ROOT = self.collect_seed_root(SOURCE_SEED) - SEED_ROOT_PROCESSED = self.preprocess_seed_root(SEED_ROOT) - SUPER_SEED = self.generator.gen(SEED_ROOT) score, scores = self.score_super_seed(SUPER_SEED) @@ -42,13 +40,6 @@ def create_consensus(self, block, chain): return True, SEED - def preprocess_seed_root(self, seed_root): - seed_root_processed = [] - for char in seed_root: - 
seed_root_processed.extend([int(y) for y in list("".join(format(ord(x), 'b') for x in str(char)))]) - - return seed_root_processed - def add_by_each_byte(self, a, b): result = "" for i in range(len(a)): From 8aeebbdc680ec5e9689c634d2e7f1fdf54fdab81 Mon Sep 17 00:00:00 2001 From: Sushant Shah Date: Thu, 18 Aug 2022 20:18:59 +0530 Subject: [PATCH 88/88] Create README.md --- README.md | 124 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 124 insertions(+) create mode 100644 README.md diff --git a/README.md b/README.md new file mode 100644 index 0000000..a9a0409 --- /dev/null +++ b/README.md @@ -0,0 +1,124 @@ +# Funzochain + +Funzochain is a blockchain implementation and the proof of concept for our GPoHC Consensus Mechanism or the Generative Proof of History and Co-operation. We have developed GPoHC with a vision to make block creation and validation faster without compromise to decentralisation and security, and to make sure all nodes are equal participants regardless of processing power and other barriers. + +--- + +## Creating a node + +To create a node clone this repository and create a file named `run.py` + +```python +from node import node +import socket +import rsa + +#create an identity (Run Only Once) +with open("identity", "wb") as f: + f.write(rsa.newkeys(1024)[1]) + +#load an identity +with open('identity', mode='rb') as f: + identity = rsa.PrivateKey.load_pkcs1(f.read()) + + +node = node(identity, (socket.gethostname(),80), "node-name") + +if __name__ == "__main__": + node.run() +``` + +--- + +## Generative Proof Of History And Co-operation + +### Block Structure + +```python +{ + “creator”: “0x00000.....”, + “data”: [...], + “timestamp”: 1652949456, + “seed”: “abcdef”, + “validators”: [...], + “previous_hash”: “...” +} +``` + +### Creation Of A New Block + +Here is a hypothetical situation where node 0x007 wants to propose a new block to the blockchain. 
+ +##### Current Blockchain + +```mermaid +graph TD; + Node1([Node 0x001])--Created-->B; + Node2([Node 0x002])--Created-->C; + Node3([Node 0x003])--Created-->D; + Node4([Node 0x004])--Created-->E; + Node5([Node 0x005])--Created-->F; + Node6([Node 0x006])--Created-->G; + Node7([Node 0x007])-.Wants To Create.->H[Block 7]; + + A--Seed-->Seed1(AAAA...); + B--Seed-->Seed2(BBBB...); + C--Seed-->Seed3(CCCC...); + D--Seed-->Seed4(DDDD...); + E--Seed-->Seed5(EEEE...); + F--Seed-->Seed6(FFFF...); + G--Seed-->Seed7(GGGG...); + + A[Genesis]---B; + B[BlocK 1]---C; + C[Block 2]---D; + D[Block 3]---E; + E[Block 4]---F; + F[Block 5]---G; + G[Block 6]-.-H; +``` + +##### Network Situation + +All Nodes Except 0x005 Are Online. + + + +##### How The Node Proposes A New Block + +1. The Node Looks Up The Last `x` Blocks In The Blockchain From The End. + +2. If The Creator Of A Block Is Online, It is Added To A List Called `Source Blocks`. + + Here: + +```mermaid +graph TD; + A[Block 1]-->B[Block 2]-->C[Block 3]-->D[Block 4]-->F[Block 6] +``` + +3. Seeds Of All The Blocks In ‘Source Blocks’ Are Joined In Order As A String Called `Source Seed`. + + Here: 'AAAABBBBCCCCDDDDFFFFGGGG'. + +4. Node 0x007 Now Broadcasts The `Source Seed` To All The Creators Of The Blocks In `Source Blocks`. All The Nodes Will Likely Already Have The Same `Source Seed` And Will Have Encrypted It With Their Private Key As `Seed Root Segment`. If Both The Versions Of `Source Seed` Are Same The Node Will Respond With The `Seed Root Segment`. + +5. Node 0x007 Now Takes All Values Of `Seed Root Segment` And Saves As `Seed Root` After Adding Each Character Individually Using: + + ```python + seed_root = [] + for segment in seed_root_segments: + seed_root.extend([int(y) for y in list("".join(format(ord(x), 'b') for x in str(segment)))]) + ``` + +6. Node 0x007 Then Passes The Seed Root Array (Lenght 128 Bytes) Through It's GAN's Generator And Stores The Output As `Super Seed`. + +7. 
Node 0x007 Then Encrypts The First 117 Bytes Using Its Private Key And Uses It As The `seed` Attribute For The Block + +8. Node 0x007 Now Broadcasts The `Super Seed` To All The Validators Which Then Return A Score After Passing It Through Their GAN's Discriminator. Node 0x007 Then Sums Up All The Scores. + +9. However, Node 0x003 Has Already Created And Added A Block. Node 0x007 Then Polls All The Active Nodes For 0x003's Last Score, And Finds Out That It Had Gotten A Higher Cumulative Score Than 0x003. + +10. After This Node 0x007 Requests An Override On The Last Block (Timeframe Of 5 Seconds For Overriding). More Than 50% Nodes Agree And Update Their Blockchains. + +Since Node 0x007 Has Now Become A Creator It Will Become A Validator For The Next `x` Blocks (Only If Active), And Will Receive Validator Rewards Without Actually Creating A Block.