From adcee874db9bfddee147194d1ac19b696c83798a Mon Sep 17 00:00:00 2001
From: Matthew Bell
Date: Wed, 17 Jun 2015 22:27:56 +0100
Subject: [PATCH 01/16] partial cleanup of main.py

---
 src/main.py | 462 ++++++++++++++++++++++++++--------------------------
 1 file changed, 233 insertions(+), 229 deletions(-)

diff --git a/src/main.py b/src/main.py
index 32897f8e..fffab240 100644
--- a/src/main.py
+++ b/src/main.py
@@ -1,5 +1,19 @@
-import os, sys
-update_after_shutdown = False # If set True then update and restart zeronet after main loop ended
+# Included modules
+
+import os
+import sys
+import time
+import urllib2
+
+
+# Third party modules
+import gevent
+from gevent import monkey
+
+# ZeroNet modules
+import logging
+
+update_after_shutdown = False  # If set True then update and restart zeronet after main loop ended
 
 # Load config
 from Config import config
@@ -7,41 +21,45 @@ from Config import config
 # Create necessary files and dirs
 if not os.path.isdir(config.log_dir): os.mkdir(config.log_dir)
 if not os.path.isdir(config.data_dir): os.mkdir(config.data_dir)
-if not os.path.isfile("%s/sites.json" % config.data_dir): open("%s/sites.json" % config.data_dir, "w").write("{}")
-if not os.path.isfile("%s/users.json" % config.data_dir): open("%s/users.json" % config.data_dir, "w").write("{}")
+if not os.path.isfile("%s/sites.json" % config.data_dir):
+    open("%s/sites.json" % config.data_dir, "w").write("{}")
+if not os.path.isfile("%s/users.json" % config.data_dir):
+    open("%s/users.json" % config.data_dir, "w").write("{}")
 
 # Setup logging
-import logging
+
 if config.action == "main":
-	if os.path.isfile("%s/debug.log" % config.log_dir): # Simple logrotate
-		if os.path.isfile("%s/debug-last.log" % config.log_dir): os.unlink("%s/debug-last.log" % config.log_dir)
-		os.rename("%s/debug.log" % config.log_dir, "%s/debug-last.log" % config.log_dir)
-	logging.basicConfig(format='[%(asctime)s] %(levelname)-8s %(name)s %(message)s', level=logging.DEBUG, filename="%s/debug.log" % config.log_dir)
+    if os.path.isfile("%s/debug.log" % config.log_dir):  # Simple logrotate
+        if os.path.isfile("%s/debug-last.log" % config.log_dir):
+            os.unlink("%s/debug-last.log" % config.log_dir)
+        os.rename("%s/debug.log" % config.log_dir, "%s/debug-last.log" % config.log_dir)
+    logging.basicConfig(format='[%(asctime)s] %(levelname)-8s %(name)s %(message)s',
+                        level=logging.DEBUG, filename="%s/debug.log" % config.log_dir)
 else:
-	logging.basicConfig(level=logging.DEBUG, stream=open(os.devnull,"w")) # No file logging if action is not main
+    logging.basicConfig(level=logging.DEBUG, stream=open(os.devnull, "w"))  # No file logging if action is not main
 
 # Console logger
 console_log = logging.StreamHandler()
-if config.action == "main": # Add time if main action
-	console_log.setFormatter(logging.Formatter('[%(asctime)s] %(name)s %(message)s', "%H:%M:%S"))
+if config.action == "main":  # Add time if main action
+    console_log.setFormatter(logging.Formatter('[%(asctime)s] %(name)s %(message)s', "%H:%M:%S"))
 else:
-	console_log.setFormatter(logging.Formatter('%(name)s %(message)s', "%H:%M:%S"))
+    console_log.setFormatter(logging.Formatter('%(name)s %(message)s', "%H:%M:%S"))
 
-logging.getLogger('').addHandler(console_log) # Add console logger
-logging.getLogger('').name = "-" # Remove root prefix
+logging.getLogger('').addHandler(console_log)  # Add console logger
+logging.getLogger('').name = "-"  # Remove root prefix
 
 # Debug dependent configuration
 from Debug import DebugHook
 if config.debug:
-	console_log.setLevel(logging.DEBUG) # Display everything to console
+    console_log.setLevel(logging.DEBUG)  # Display everything to console
 else:
-	console_log.setLevel(logging.INFO) # Display only important info to console
+    console_log.setLevel(logging.INFO)  # Display only important info to console
+
+monkey.patch_all(thread=False, ssl=False)  # Make time, socket gevent compatible. Not thread: pyfilesystem and system tray icon not compatible, Not ssl: broken in 2.7.9
+
 
-from gevent import monkey; monkey.patch_all(thread=False, ssl=False) # Make time, socket gevent compatible. Not thread: pyfilesystem and system tray icon not compatible, Not ssl: broken in 2.7.9
-import gevent
-import time
 
 # Log current config
 logging.debug("Config: %s" % config)
 
@@ -49,11 +67,11 @@ logging.debug("Config: %s" % config)
 
 # Socks Proxy monkey patch
 if config.proxy:
-	from util import SocksProxy
-	import urllib2
-	logging.info("Patching sockets to socks proxy: %s" % config.proxy)
-	config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost
-	SocksProxy.monkeyPath(*config.proxy.split(":"))
+    from util import SocksProxy
+
+    logging.info("Patching sockets to socks proxy: %s" % config.proxy)
+    config.fileserver_ip = '127.0.0.1'  # Do not accept connections anywhere but localhost
+    SocksProxy.monkeyPath(*config.proxy.split(":"))
 
 
 # Load plugins
@@ -64,230 +82,216 @@ PluginManager.plugin_manager.loadPlugins()
 # -- Actions --
 
 @PluginManager.acceptPlugins
-class Actions:
-	# Default action: Start serving UiServer and FileServer
-	def main(self):
-		logging.info("Version: %s r%s, Python %s, Gevent: %s" % (config.version, config.rev, sys.version, gevent.__version__))
-		global ui_server, file_server
-		from File import FileServer
-		from Ui import UiServer
-		logging.info("Creating UiServer....")
-		ui_server = UiServer()
+class Actions(object):
+    # Default action: Start serving UiServer and FileServer
+    def main(self):
+        logging.info("Version: %s r%s, Python %s, Gevent: %s" % (config.version, config.rev, sys.version, gevent.__version__))
+        global ui_server, file_server
+        from File import FileServer
+        from Ui import UiServer
+        logging.info("Creating UiServer....")
+        ui_server = UiServer()
 
-		logging.info("Removing old SSL certs...")
-		from Crypt import CryptConnection
-		CryptConnection.manager.removeCerts()
+        logging.info("Removing old SSL certs...")
+        from Crypt import CryptConnection
+        CryptConnection.manager.removeCerts()
 
-		logging.info("Creating FileServer....")
-		file_server = FileServer()
+        logging.info("Creating FileServer....")
+        file_server = FileServer()
 
-		logging.info("Starting servers....")
-		gevent.joinall([gevent.spawn(ui_server.start), gevent.spawn(file_server.start)])
+        logging.info("Starting servers....")
+        gevent.joinall([gevent.spawn(ui_server.start), gevent.spawn(file_server.start)])
+
+    # Site commands
+
+    def siteCreate(self):
+        logging.info("Generating new privatekey...")
+        from Crypt import CryptBitcoin
+        privatekey = CryptBitcoin.newPrivatekey()
+        logging.info("----------------------------------------------------------------------")
+        logging.info("Site private key: %s" % privatekey)
+        logging.info(" !!! ^ Save it now, required to modify the site ^ !!!")
+        address = CryptBitcoin.privatekeyToAddress(privatekey)
+        logging.info("Site address: %s" % address)
+        logging.info("----------------------------------------------------------------------")
+
+        while True:
+            if raw_input("? Have you secured your private key? (yes, no) > ").lower() == "yes": break
+            else: logging.info("Please, secure it now, you're going to need it to modify your site!")
+
+        logging.info("Creating directory structure...")
+        from Site import Site
+        os.mkdir("%s/%s" % (config.data_dir, address))
+        open("%s/%s/index.html" % (config.data_dir, address), "w").write("Hello %s!" % address)
+
+        logging.info("Creating content.json...")
+        site = Site(address)
+        site.content_manager.sign(privatekey=privatekey)
+        site.settings["own"] = True
+        site.saveSettings()
+
+        logging.info("Site created!")
+
+    def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False):
+        from Site import Site
+        logging.info("Signing site: %s..." % address)
+        site = Site(address, allow_create=False)
+
+        if not privatekey:  # If no privatekey in args then ask it now
+            import getpass
+            privatekey = getpass.getpass("Private key (input hidden):")
+        succ = site.content_manager.sign(inner_path=inner_path, privatekey=privatekey, update_changed_files=True)
+        if succ and publish:
+            self.sitePublish(address, inner_path=inner_path)
+
+    def siteVerify(self, address):
+        import time
+        from Site import Site
+        s = time.time()
+        logging.info("Verifying site: %s..." % address)
+        site = Site(address)
+        bad_files = []
+
+        for content_inner_path in site.content_manager.contents:
+            logging.info("Verifying %s signature..." % content_inner_path)
+            if site.content_manager.verifyFile(content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False) == True:
+                logging.info("[OK] %s signed by address %s!" % (content_inner_path, address))
+            else:
+                logging.error("[ERROR] %s: invalid file!" % content_inner_path)
+                bad_files += [content_inner_path]
+
+        logging.info("Verifying site files...")
+        bad_files += site.storage.verifyFiles()
+        if not bad_files:
+            logging.info("[OK] All file sha512sums match! (%.3fs)" % (time.time()-s))
+        else:
+            logging.error("[ERROR] Error during verifying site files!")
+
+    def dbRebuild(self, address):
+        from Site import Site
+        logging.info("Rebuilding site sql cache: %s..." % address)
+        site = Site(address)
+        s = time.time()
+        site.storage.rebuildDb()
+        logging.info("Done in %.3fs" % (time.time()-s))
+
+    def dbQuery(self, address, query):
+        from Site import Site
+        import json
+        site = Site(address)
+        result = []
+        for row in site.storage.query(query):
+            result.append(dict(row))
+        print json.dumps(result, indent=4)
+
+    def siteAnnounce(self, address):
+        from Site.Site import Site
+        logging.info("Announcing site %s to tracker..." % address)
+        site = Site(address)
+
+        s = time.time()
+        site.announce()
+        print "Response time: %.3fs" % (time.time()-s)
+        print site.peers
 
-	# Site commands
-
-	def siteCreate(self):
-		logging.info("Generating new privatekey...")
-		from Crypt import CryptBitcoin
-		privatekey = CryptBitcoin.newPrivatekey()
-		logging.info("----------------------------------------------------------------------")
-		logging.info("Site private key: %s" % privatekey)
-		logging.info(" !!! ^ Save it now, required to modify the site ^ !!!")
-		address = CryptBitcoin.privatekeyToAddress(privatekey)
-		logging.info("Site address: %s" % address)
-		logging.info("----------------------------------------------------------------------")
-
-		while True:
-			if raw_input("? Have you secured your private key? (yes, no) > ").lower() == "yes": break
-			else: logging.info("Please, secure it now, you going to need it to modify your site!")
-
-		logging.info("Creating directory structure...")
-		from Site import Site
-		os.mkdir("%s/%s" % (config.data_dir, address))
-		open("%s/%s/index.html" % (config.data_dir, address), "w").write("Hello %s!" % address)
-
-		logging.info("Creating content.json...")
-		site = Site(address)
-		site.content_manager.sign(privatekey=privatekey)
-		site.settings["own"] = True
-		site.saveSettings()
-
-		logging.info("Site created!")
+    def siteNeedFile(self, address, inner_path):
+        from Site import Site
+        site = Site(address)
+        site.announce()
+        print site.needFile(inner_path, update=True)
 
-	def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False):
-		from Site import Site
-		logging.info("Signing site: %s..." % address)
-		site = Site(address, allow_create = False)
+    def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"):
+        global file_server
+        from Site import Site
+        from File import FileServer  # We need fileserver to handle incoming file requests
 
-		if not privatekey: # If no privatekey in args then ask it now
-			import getpass
-			privatekey = getpass.getpass("Private key (input hidden):")
-		succ = site.content_manager.sign(inner_path=inner_path, privatekey=privatekey, update_changed_files=True)
-		if succ and publish:
-			self.sitePublish(address, inner_path=inner_path)
+        logging.info("Creating FileServer....")
+        file_server = FileServer()
+        file_server_thread = gevent.spawn(file_server.start, check_sites=False)  # Don't check every site's integrity
+        file_server.openport()
+        site = file_server.sites[address]
+        site.settings["serving"] = True  # Serving the site even if it's disabled
+        if peer_ip:  # Announce ip specified
+            site.addPeer(peer_ip, peer_port)
+        else:  # Just ask the tracker
+            logging.info("Gathering peers from tracker")
+            site.announce()  # Gather peers
+        published = site.publish(20, inner_path)  # Push to 20 peers
+        if published > 0:
+            time.sleep(3)
+            logging.info("Serving files (max 60s)...")
+            gevent.joinall([file_server_thread], timeout=60)
+            logging.info("Done.")
+        else:
+            logging.info("No peers found for this site, sitePublish command only works if you already have peers serving your site")
+
+    # Crypto commands
+
+    def cryptPrivatekeyToAddress(self, privatekey=None):
+        from Crypt import CryptBitcoin
+        if not privatekey:  # If no privatekey in args then ask it now
+            import getpass
+            privatekey = getpass.getpass("Private key (input hidden):")
 
-	def siteVerify(self, address):
-		import time
-		from Site import Site
-		s = time.time()
-		logging.info("Verifing site: %s..." % address)
-		site = Site(address)
-		bad_files = []
+        print CryptBitcoin.privatekeyToAddress(privatekey)
 
-		for content_inner_path in site.content_manager.contents:
-			logging.info("Verifing %s signature..." % content_inner_path)
-			if site.content_manager.verifyFile(content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False) == True:
-				logging.info("[OK] %s signed by address %s!" % (content_inner_path, address))
-			else:
-				logging.error("[ERROR] %s: invalid file!" % content_inner_path)
-				bad_files += content_inner_path
+    def cryptSign(self, message, privatekey):
+        from Crypt import CryptBitcoin
+        print CryptBitcoin.sign(message, privatekey)
 
-		logging.info("Verifying site files...")
-		bad_files += site.storage.verifyFiles()
-		if not bad_files:
-			logging.info("[OK] All file sha512sum matches! (%.3fs)" % (time.time()-s))
-		else:
-			logging.error("[ERROR] Error during verifying site files!")
+    # Peer
+
+    def peerPing(self, peer_ip, peer_port=None):
+        if not peer_port:
+            peer_port = config.fileserver_port
+        logging.info("Opening a simple connection server")
+        global file_server
+        from Connection import ConnectionServer
+        file_server = ConnectionServer("127.0.0.1", 1234)
+
+        from Peer import Peer
+        logging.info("Pinging peer %s:%s 5 times..." % (peer_ip, int(peer_port)))
+        peer = Peer(peer_ip, peer_port)
+        for i in range(5):
+            s = time.time()
+            print peer.ping(),
+            print "Response time: %.3fs (crypt: %s)" % (time.time()-s, peer.connection.crypt)
+            time.sleep(1)
 
-	def dbRebuild(self, address):
-		from Site import Site
-		logging.info("Rebuilding site sql cache: %s..." % address)
-		site = Site(address)
-		s = time.time()
-		site.storage.rebuildDb()
-		logging.info("Done in %.3fs" % (time.time()-s))
+    def peerGetFile(self, peer_ip, peer_port, site, filename):
+        logging.info("Opening a simple connection server")
+        global file_server
+        from Connection import ConnectionServer
+        file_server = ConnectionServer()
+
+        from Peer import Peer
+        logging.info("Getting %s/%s from peer: %s:%s..." % (site, filename, peer_ip, peer_port))
+        peer = Peer(peer_ip, peer_port)
+        s = time.time()
+        print peer.getFile(site, filename).read()
+        print "Response time: %.3fs" % (time.time()-s)
 
-	def dbQuery(self, address, query):
-		from Site import Site
-		import json
-		site = Site(address)
-		result = []
-		for row in site.storage.query(query):
-			result.append(dict(row))
-		print json.dumps(result, indent=4)
+    def peerCmd(self, peer_ip, peer_port, cmd, parameters):
+        logging.info("Opening a simple connection server")
+        global file_server
+        from Connection import ConnectionServer
+        file_server = ConnectionServer()
+        from Peer import Peer
+        peer = Peer(peer_ip, peer_port)
 
-
-	def siteAnnounce(self, address):
-		from Site.Site import Site
-		logging.info("Announcing site %s to tracker..." % address)
-		site = Site(address)
-
-		s = time.time()
-		site.announce()
-		print "Response time: %.3fs" % (time.time()-s)
-		print site.peers
-
-
-	def siteNeedFile(self, address, inner_path):
-		from Site import Site
-		site = Site(address)
-		site.announce()
-		print site.needFile(inner_path, update=True)
-
-
-	def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"):
-		global file_server
-		from Site import Site
-		from File import FileServer # We need fileserver to handle incoming file requests
-
-		logging.info("Creating FileServer....")
-		file_server = FileServer()
-		file_server_thread = gevent.spawn(file_server.start, check_sites=False) # Dont check every site integrity
-		file_server.openport()
-		site = file_server.sites[address]
-		site.settings["serving"] = True # Serving the site even if its disabled
-		if peer_ip: # Announce ip specificed
-			site.addPeer(peer_ip, peer_port)
-		else: # Just ask the tracker
-			logging.info("Gathering peers from tracker")
-			site.announce() # Gather peers
-		published = site.publish(20, inner_path) # Push to 20 peers
-		if published > 0:
-			time.sleep(3)
-			logging.info("Serving files (max 60s)...")
-			gevent.joinall([file_server_thread], timeout=60)
-			logging.info("Done.")
-		else:
-			logging.info("No peers found for this site, sitePublish command only works if you already have peers serving your site")
-
-
-
-	# Crypto commands
-
-	def cryptPrivatekeyToAddress(self, privatekey=None):
-		from Crypt import CryptBitcoin
-		if not privatekey: # If no privatekey in args then ask it now
-			import getpass
-			privatekey = getpass.getpass("Private key (input hidden):")
-
-		print CryptBitcoin.privatekeyToAddress(privatekey)
-
-
-	def cryptSign(self, message, privatekey):
-		from Crypt import CryptBitcoin
-		print CryptBitcoin.sign(message, privatekey)
-
-
-	# Peer
-
-	def peerPing(self, peer_ip, peer_port=None):
-		if not peer_port:
-			peer_port = config.fileserver_port
-		logging.info("Opening a simple connection server")
-		global file_server
-		from Connection import ConnectionServer
-		file_server = ConnectionServer("127.0.0.1", 1234)
-
-		from Peer import Peer
-		logging.info("Pinging 5 times peer: %s:%s..." % (peer_ip, int(peer_port)))
-		peer = Peer(peer_ip, peer_port)
-		for i in range(5):
-			s = time.time()
-			print peer.ping(),
-			print "Response time: %.3fs (crypt: %s)" % (time.time()-s, peer.connection.crypt)
-			time.sleep(1)
-
-
-	def peerGetFile(self, peer_ip, peer_port, site, filename):
-		logging.info("Opening a simple connection server")
-		global file_server
-		from Connection import ConnectionServer
-		file_server = ConnectionServer()
-
-		from Peer import Peer
-		logging.info("Getting %s/%s from peer: %s:%s..." % (site, filename, peer_ip, peer_port))
-		peer = Peer(peer_ip, peer_port)
-		s = time.time()
-		print peer.getFile(site, filename).read()
-		print "Response time: %.3fs" % (time.time()-s)
-
-
-	def peerCmd(self, peer_ip, peer_port, cmd, parameters):
-		logging.info("Opening a simple connection server")
-		global file_server
-		from Connection import ConnectionServer
-		file_server = ConnectionServer()
-		from Peer import Peer
-		peer = Peer(peer_ip, peer_port)
-
-		import json
-		if parameters:
-			parameters = json.loads(parameters.replace("'", '"'))
-		else:
-			parameters = {}
-		logging.info("Response: %s" % peer.request(cmd, parameters))
+        import json
+        if parameters:
+            parameters = json.loads(parameters.replace("'", '"'))
+        else:
+            parameters = {}
+        logging.info("Response: %s" % peer.request(cmd, parameters))
 
 actions = Actions()
 
 # Starts here when running zeronet.py
 def start():
-	# Call function
-	func = getattr(actions, config.action, None)
-	action_kwargs = config.getActionArguments()
-	func(**action_kwargs)
+    # Call function
+    func = getattr(actions, config.action, None)
+    action_kwargs = config.getActionArguments()
+    func(**action_kwargs)

From d45e162e7fca2505a8d510f8e380e302d981312c Mon Sep 17 00:00:00 2001
From: Matthew Bell
Date: Wed, 17 Jun 2015 22:31:38 +0100
Subject: [PATCH 02/16] partial cleanup of zeronet.py

---
 zeronet.py | 85 +++++++++++++++++++++++++++-------------------------
 1 file changed, 46 insertions(+), 39 deletions(-)

diff --git a/zeronet.py b/zeronet.py
index 8d1e46d9..3ed0efe2 100644
--- a/zeronet.py
+++ b/zeronet.py
@@ -1,49 +1,56 @@
 #!/usr/bin/env python
+# Included modules
+import os
+import gc
+import sys
+import traceback
+
+# ZeroNet Modules
+import update
+
+
 def main():
-	print "- Starting ZeroNet..."
-	import sys, os
-	main = None
-	try:
-		sys.path.insert(0, os.path.join(os.path.dirname(__file__), "src")) # Imports relative to src
-		import main
-		main.start()
-		if main.update_after_shutdown: # Updater
-			import update, sys, os, gc
-			# Try cleanup openssl
-			try:
-				if "lib.opensslVerify" in sys.modules:
-					sys.modules["lib.opensslVerify"].opensslVerify.closeLibrary()
-			except Exception, err:
-				print "Error closing openssl", err
+    print "- Starting ZeroNet..."
 
-			# Update
-			update.update()
+    main = None
+    try:
+        sys.path.insert(0, os.path.join(os.path.dirname(__file__), "src"))  # Imports relative to src
+        import main
+        main.start()
+        if main.update_after_shutdown:  # Updater
+            # Try cleanup openssl
+            try:
+                if "lib.opensslVerify" in sys.modules:
+                    sys.modules["lib.opensslVerify"].opensslVerify.closeLibrary()
+            except Exception, err:
+                print "Error closing openssl", err
 
-			# Close log files
-			logger = sys.modules["main"].logging.getLogger()
+            # Update
+            update.update()
 
-			for handler in logger.handlers[:]:
-				handler.flush()
-				handler.close()
-				logger.removeHandler(handler)
+            # Close log files
+            logger = sys.modules["main"].logging.getLogger()
 
-	except Exception, err: # Prevent closing
-		import traceback
-		traceback.print_exc()
-		traceback.print_exc(file=open("log/error.log", "a"))
+            for handler in logger.handlers[:]:
+                handler.flush()
+                handler.close()
+                logger.removeHandler(handler)
 
-	if main and main.update_after_shutdown: # Updater
-		# Restart
-		gc.collect() # Garbage collect
-		print "Restarting..."
-		args = sys.argv[:]
-		args.insert(0, sys.executable)
-		if sys.platform == 'win32':
-			args = ['"%s"' % arg for arg in args]
-		os.execv(sys.executable, args)
-		print "Bye."
+    except Exception:  # Prevent closing
+        traceback.print_exc()
+        traceback.print_exc(file=open("log/error.log", "a"))
+
+    if main and main.update_after_shutdown:  # Updater
+        # Restart
+        gc.collect()  # Garbage collect
+        print "Restarting..."
+        args = sys.argv[:]
+        args.insert(0, sys.executable)
+        if sys.platform == 'win32':
+            args = ['"%s"' % arg for arg in args]
+        os.execv(sys.executable, args)
+        print "Bye."
 
 if __name__ == '__main__':
-	main()
-
+    main()

From fee071647e1398494722c164ff66fb11857e256b Mon Sep 17 00:00:00 2001
From: Matthew Bell
Date: Wed, 17 Jun 2015 22:32:29 +0100
Subject: [PATCH 03/16] partial cleanup of start.py

---
 start.py | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)

diff --git a/start.py b/start.py
index 29069a74..5a612a8d 100644
--- a/start.py
+++ b/start.py
@@ -1,10 +1,16 @@
 #!/usr/bin/env python
+
+
+# Included modules
 import sys
+
+# ZeroNet Modules
 import zeronet
 
+
 def main():
-	sys.argv += ["--open_browser", "default_browser"]
-	zeronet.main()
+    sys.argv += ["--open_browser", "default_browser"]
+    zeronet.main()
 
 if __name__ == '__main__':
-	main()
+    main()

From 701b5e2449d5d21ff95a6ab634f75782587e7823 Mon Sep 17 00:00:00 2001
From: Matthew Bell
Date: Wed, 17 Jun 2015 22:34:54 +0100
Subject: [PATCH 04/16] partial cleanup of User.py

---
 src/User/User.py | 229 +++++++++++++++++++++++------------------------
 1 file changed, 110 insertions(+), 119 deletions(-)

diff --git a/src/User/User.py b/src/User/User.py
index b32d9155..ec2e6b50 100644
--- a/src/User/User.py
+++ b/src/User/User.py
@@ -6,133 +6,124 @@ from Config import config
 
 @PluginManager.acceptPlugins
 class User(object):
-	def __init__(self, master_address=None, master_seed=None, data={}):
-		if master_seed:
-			self.master_seed = master_seed
-			self.master_address = CryptBitcoin.privatekeyToAddress(self.master_seed)
-		elif master_address:
-			self.master_address = master_address
-			self.master_seed = data.get("master_seed")
-		else:
-			self.master_seed = CryptBitcoin.newSeed()
-			self.master_address = CryptBitcoin.privatekeyToAddress(self.master_seed)
-		self.sites = data.get("sites", {})
-		self.certs = data.get("certs", {})
+    def __init__(self, master_address=None, master_seed=None, data={}):
+        if master_seed:
+            self.master_seed = master_seed
+            self.master_address = CryptBitcoin.privatekeyToAddress(self.master_seed)
+        elif master_address:
+            self.master_address = master_address
+            self.master_seed = data.get("master_seed")
+        else:
+            self.master_seed = CryptBitcoin.newSeed()
+            self.master_address = CryptBitcoin.privatekeyToAddress(self.master_seed)
+        self.sites = data.get("sites", {})
+        self.certs = data.get("certs", {})
 
-		self.log = logging.getLogger("User:%s" % self.master_address)
+        self.log = logging.getLogger("User:%s" % self.master_address)
 
+    # Save to data/users.json
+    def save(self):
+        users = json.load(open("%s/users.json" % config.data_dir))
+        if self.master_address not in users:
+            users[self.master_address] = {}  # Create if not exist
+        user_data = users[self.master_address]
+        if self.master_seed: user_data["master_seed"] = self.master_seed
+        user_data["sites"] = self.sites
+        user_data["certs"] = self.certs
+        open("%s/users.json" % config.data_dir, "w").write(json.dumps(users, indent=2, sort_keys=True))
+        self.log.debug("Saved")
 
-	# Save to data/users.json
-	def save(self):
-		users = json.load(open("%s/users.json" % config.data_dir))
-		if not self.master_address in users: users[self.master_address] = {} # Create if not exist
-		user_data = users[self.master_address]
-		if self.master_seed: user_data["master_seed"] = self.master_seed
-		user_data["sites"] = self.sites
-		user_data["certs"] = self.certs
-		open("%s/users.json" % config.data_dir, "w").write(json.dumps(users, indent=2, sort_keys=True))
-		self.log.debug("Saved")
+    def getAddressAuthIndex(self, address):
+        return int(address.encode("hex"), 16)
 
+    # Get user site data
+    # Return: {"auth_address": "xxx", "auth_privatekey": "xxx"}
+    def getSiteData(self, address, create=True):
+        if address not in self.sites:  # Generate new BIP32 child key based on site address
+            if not create: return {"auth_address": None, "auth_privatekey": None}  # Don't create user yet
+            s = time.time()
+            address_id = self.getAddressAuthIndex(address)  # Convert site address to int
+            auth_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, address_id)
+            self.sites[address] = {
+                "auth_address": CryptBitcoin.privatekeyToAddress(auth_privatekey),
+                "auth_privatekey": auth_privatekey
+            }
+            self.save()
+            self.log.debug("Added new site: %s in %.3fs" % (address, time.time()-s))
+        return self.sites[address]
 
-	def getAddressAuthIndex(self, address):
-		return int(address.encode("hex"), 16)
+    # Get data for a new, unique site
+    # Return: [site_address, bip32_index, {"auth_address": "xxx", "auth_privatekey": "xxx", "privatekey": "xxx"}]
+    def getNewSiteData(self):
+        import random
+        bip32_index = random.randrange(2**256) % 100000000
+        site_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, bip32_index)
+        site_address = CryptBitcoin.privatekeyToAddress(site_privatekey)
+        if site_address in self.sites: raise Exception("Random error: site exists!")
+        # Save to sites
+        self.getSiteData(site_address)
+        self.sites[site_address]["privatekey"] = site_privatekey
+        self.save()
+        return site_address, bip32_index, self.sites[site_address]
 
+    # Get BIP32 address from site address
+    # Return: BIP32 auth address
+    def getAuthAddress(self, address, create=True):
+        cert = self.getCert(address)
+        if cert:
+            return cert["auth_address"]
+        else:
+            return self.getSiteData(address, create)["auth_address"]
 
-	# Get user site data
-	# Return: {"auth_address": "xxx", "auth_privatekey": "xxx"}
-	def getSiteData(self, address, create=True):
-		if not address in self.sites: # Genreate new BIP32 child key based on site address
-			if not create: return {"auth_address": None, "auth_privatekey": None} # Dont create user yet
-			s = time.time()
-			address_id = self.getAddressAuthIndex(address) # Convert site address to int
-			auth_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, address_id)
-			self.sites[address] = {
-				"auth_address": CryptBitcoin.privatekeyToAddress(auth_privatekey),
-				"auth_privatekey": auth_privatekey
-			}
-			self.save()
-			self.log.debug("Added new site: %s in %.3fs" % (address, time.time()-s))
-		return self.sites[address]
+    def getAuthPrivatekey(self, address, create=True):
+        cert = self.getCert(address)
+        if cert:
+            return cert["auth_privatekey"]
+        else:
+            return self.getSiteData(address, create)["auth_privatekey"]
 
-	# Get data for a new, unique site
-	# Return: [site_address, bip32_index, {"auth_address": "xxx", "auth_privatekey": "xxx", "privatekey": "xxx"}]
-	def getNewSiteData(self):
-		import random
-		bip32_index = random.randrange(2**256) % 100000000
-		site_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, bip32_index)
-		site_address = CryptBitcoin.privatekeyToAddress(site_privatekey)
-		if site_address in self.sites: raise Exception("Random error: site exist!")
-		# Save to sites
-		self.getSiteData(site_address)
-		self.sites[site_address]["privatekey"] = site_privatekey
-		self.save()
-		return site_address, bip32_index, self.sites[site_address]
+    # Add cert for the user
+    def addCert(self, auth_address, domain, auth_type, auth_user_name, cert_sign):
+        domain = domain.lower()
+        auth_privatekey = [site["auth_privatekey"] for site in self.sites.values() if site["auth_address"] == auth_address][0]  # Find privatekey by auth address
+        cert_node = {
+            "auth_address": auth_address,
+            "auth_privatekey": auth_privatekey,
+            "auth_type": auth_type,
+            "auth_user_name": auth_user_name,
+            "cert_sign": cert_sign
+        }
+        # Check if we already have a cert for that domain and it's not the same
+        if self.certs.get(domain) and self.certs[domain] != cert_node:
+            raise Exception("You already have a certificate for this domain: %s/%s@%s" % (self.certs[domain]["auth_type"], self.certs[domain]["auth_user_name"], domain))
+        elif self.certs.get(domain) == cert_node:  # Same, not updated
+            return None
+        else:  # Doesn't exist yet, add
+            self.certs[domain] = cert_node
+            self.save()
+            return True
 
-	# Get BIP32 address from site address
-	# Return: BIP32 auth address
-	def getAuthAddress(self, address, create=True):
-		cert = self.getCert(address)
-		if cert:
-			return cert["auth_address"]
-		else:
-			return self.getSiteData(address, create)["auth_address"]
-
-
-	def getAuthPrivatekey(self, address, create=True):
-		cert = self.getCert(address)
-		if cert:
-			return cert["auth_privatekey"]
-		else:
-			return self.getSiteData(address, create)["auth_privatekey"]
-
-
-	# Add cert for the user
-	def addCert(self, auth_address, domain, auth_type, auth_user_name, cert_sign):
-		domain = domain.lower()
-		auth_privatekey = [site["auth_privatekey"] for site in self.sites.values() if site["auth_address"] == auth_address][0] # Find privatekey by auth address
-		cert_node = {
-			"auth_address": auth_address,
-			"auth_privatekey": auth_privatekey,
-			"auth_type": auth_type,
-			"auth_user_name": auth_user_name,
-			"cert_sign": cert_sign
-		}
-		# Check if we have already cert for that domain and its not the same
-		if self.certs.get(domain) and self.certs[domain] != cert_node:
-			raise Exception("You already have certificate for this domain: %s/%s@%s" % (self.certs[domain]["auth_type"], self.certs[domain]["auth_user_name"], domain))
-		elif self.certs.get(domain) == cert_node: # Same, not updated
-			return None
-		else: # Not exist yet, add
-			self.certs[domain] = cert_node
-			self.save()
-			return True
+    def setCert(self, address, domain):
+        site_data = self.getSiteData(address)
+        if domain:
+            site_data["cert"] = domain
+        else:
+            del site_data["cert"]
+        self.save()
+        return site_data
 
-	def setCert(self, address, domain):
-		site_data = self.getSiteData(address)
-		if domain:
-			site_data["cert"] = domain
-		else:
-			del site_data["cert"]
-		self.save()
-		return site_data
+    # Get cert for the site address
+    # Return: { "auth_address": ..., "auth_privatekey":..., "auth_type": "web", "auth_user_name": "nofish", "cert_sign": ... } or None
+    def getCert(self, address):
+        site_data = self.getSiteData(address, create=False)
+        if not site_data or not "cert" in site_data: return None  # Site has no cert
+        return self.certs.get(site_data["cert"])
 
-	# Get cert for the site address
-	# Return: { "auth_address": ..., "auth_privatekey":..., "auth_type": "web", "auth_user_name": "nofish", "cert_sign": ... } or None
-	def getCert(self, address):
-		site_data = self.getSiteData(address, create=False)
-		if not site_data or not "cert" in site_data: return None # Site dont have cert
-		return self.certs.get(site_data["cert"])
-
-
-	# Get cert user name for the site address
-	# Return: user@certprovider.bit or None
-	def getCertUserId(self, address):
-		site_data = self.getSiteData(address, create=False)
-		if not site_data or not "cert" in site_data: return None # Site dont have cert
-		cert = self.certs.get(site_data["cert"])
-		if cert:
-			return cert["auth_user_name"]+"@"+site_data["cert"]
\ No newline at end of file
+    # Get cert user name for the site address
+    # Return: user@certprovider.bit or None
+    def getCertUserId(self, address):
+        site_data = self.getSiteData(address, create=False)
+        if not site_data or not "cert" in site_data: return None  # Site has no cert
+        cert = self.certs.get(site_data["cert"])
+        if cert:
+            return cert["auth_user_name"]+"@"+site_data["cert"]
\ No newline at end of file

From 39acf04b4b2654071742f3753a13cc20ebc79a64 Mon Sep 17 00:00:00 2001
From: Matthew Bell
Date: Wed, 17 Jun 2015 22:37:53 +0100
Subject: [PATCH 05/16] partial cleanup of UserManager.py

---
 src/User/UserManager.py | 136 +++++++++++++++++++++------------------
 1 file changed, 70 insertions(+), 66 deletions(-)

diff --git a/src/User/UserManager.py b/src/User/UserManager.py
index ed7b36e3..28c9c021 100644
--- a/src/User/UserManager.py
+++ b/src/User/UserManager.py
@@ -1,4 +1,9 @@
-import json, logging, os
+# Included modules
+import os
+import json
+import logging
+
+# ZeroNet Modules
 from User import User
 from Plugin import PluginManager
 from Config import config
@@ -6,74 +11,73 @@ from Config import config
 
 @PluginManager.acceptPlugins
 class UserManager(object):
-	def __init__(self):
-		self.users = {}
+    def __init__(self):
+        self.users = {}
+
+    # Load all users from data/users.json
+    def load(self):
+        if not self.users:
+            self.users = {}
+
+        user_found = []
+        added = 0
+        # Load new users
+        for master_address, data in json.load(open("%s/users.json" % config.data_dir)).items():
+            if master_address not in self.users:
+                user = User(master_address, data=data)
+                self.users[master_address] = user
+                added += 1
+            user_found.append(master_address)
+
+        # Remove deleted addresses
+        for master_address in self.users.keys():
+            if master_address not in user_found:
+                del(self.users[master_address])
+                logging.debug("Removed user: %s" % master_address)
+
+        if added:
+            logging.debug("UserManager added %s users" % added)
+
+    # Create new user
+    # Return: User
+    def create(self, master_address=None, master_seed=None):
+        user = User(master_address, master_seed)
+        logging.debug("Created user: %s" % user.master_address)
+        if user.master_address:  # If successfully created
+            self.users[user.master_address] = user
+            user.save()
+        return user
+
+    # List all users from data/users.json
+    # Return: {"usermasteraddr": User}
+    def list(self):
+        if self.users == {}:  # Not loaded yet
+            self.load()
+        return self.users
+
+    # Get user based on master_address
+    # Return: User or None
+    def get(self, master_address=None):
+        users = self.list()
+        if users:
+            return users.values()[0]  # Single user mode, always return the first
+        else:
+            return None
 
-	# Load all user from data/users.json
-	def load(self):
-		if not self.users: self.users = {}
+user_manager = UserManager()  # Singleton
 
-		user_found = []
-		added = 0
-		# Load new users
-		for master_address, data in json.load(open("%s/users.json" % config.data_dir)).items():
-			if master_address not in self.users:
-				user = User(master_address, data=data)
-				self.users[master_address] = user
-				added += 1
-			user_found.append(master_address)
-
-		# Remove deleted adresses
-		for master_address in self.users.keys():
-			if master_address not in user_found:
-				del(self.users[master_address])
-				logging.debug("Removed user: %s" % master_address)
-
-		if added: logging.debug("UserManager added %s users" % added)
-
-
-	# Create new user
-	# Return: User
-	def create(self, master_address=None, master_seed=None):
-		user = User(master_address, master_seed)
-		logging.debug("Created user: %s" % user.master_address)
-		if user.master_address: # If successfully created
-			self.users[user.master_address] = user
-			user.save()
-		return user
-
-
-	# List all users from data/users.json
-	# Return: {"usermasteraddr": User}
-	def list(self):
-		if self.users == {}: # Not loaded yet
-			self.load()
-		return self.users
-
-
-	# Get user based on master_address
-	# Return: User or None
-	def get(self, master_address=None):
-		users = self.list()
-		if users:
-			return users.values()[0] # Single user mode, always return the first
-		else:
-			return None
-
-user_manager = UserManager() # Singletone
 
 # Debug: Reload User.py
 def reloadModule():
-	return "Not used"
-
-	import imp
-	global User, UserManager, user_manager
-	User = imp.load_source("User", "src/User/User.py").User # Reload source
-	#module = imp.load_source("UserManager", "src/User/UserManager.py") # Reload module
-	#UserManager = module.UserManager
-	#user_manager = module.user_manager
-	# Reload users
-	user_manager = UserManager()
-	user_manager.load()
+    return "Not used"
+
+    import imp
+    global User, UserManager, user_manager
+    User = imp.load_source("User", "src/User/User.py").User  # Reload source
+    #module = imp.load_source("UserManager", "src/User/UserManager.py")  # Reload module
+    #UserManager = module.UserManager
+    #user_manager = module.user_manager
+    # Reload users
+    user_manager = UserManager()
+    user_manager.load()

From 31b6dc451670cbc9b16d63b502a8884d312cd92d Mon Sep 17 00:00:00 2001
From: Matthew Bell
Date: Wed, 17 Jun 2015 22:44:20 +0100
Subject: [PATCH 06/16] partial cleanup of FileRequest.py

---
 src/File/FileRequest.py | 313 ++++++++++++++++++++--------------------
 1 file changed, 158 insertions(+), 155 deletions(-)

diff --git a/src/File/FileRequest.py b/src/File/FileRequest.py
index 2da28db0..3c413999 100644
--- a/src/File/FileRequest.py
+++ b/src/File/FileRequest.py
@@ -1,5 +1,12 @@
-import os, msgpack, shutil, gevent, socket, struct, random
+# Included modules
+import socket
+import struct
+import os
 from cStringIO import StringIO
+
+# Third party modules
+import gevent
+
 from Debug import Debug
 from Config import config
 from util import RateLimit, StreamingMsgpack
 
 FILE_BUFF = 1024*512
 
 
 # Request from me
 class FileRequest(object):
-	__slots__ = ("server", "connection", "req_id", "sites", "log", "responded")
+    __slots__ = ("server", "connection", "req_id", "sites", "log", "responded")
 
-	def __init__(self, server, connection):
-		self.server = server
-		self.connection = connection
+    def __init__(self, server, connection):
+        self.server = server
+        self.connection = connection
 
-		self.req_id = None
-		self.sites = self.server.sites
-		self.log = server.log
-		self.responded = False # Responded to the request
+        self.req_id = None
+        self.sites = self.server.sites
+        self.log = server.log
+        self.responded = False  # Responded to the request
 
+    def unpackAddress(self, packed):
+        return socket.inet_ntoa(packed[0:4]), struct.unpack_from("H", packed, 4)[0]
+    def send(self, msg, streaming=False):
+        if not self.connection.closed:
+            self.connection.send(msg, streaming)
 
-	def unpackAddress(self, packed):
-		return (socket.inet_ntoa(packed[0:4]), struct.unpack_from("H", packed, 4)[0])
+    def response(self, msg, streaming=False):
+        if self.responded:
+            self.log.debug("Req id %s already responded" % self.req_id)
+            return
+        if not isinstance(msg, dict):  # If msg not a dict create a {"body": msg}
+            msg = {"body": msg}
+        msg["cmd"] = "response"
+        msg["to"] = self.req_id
+        self.responded = True
+        self.send(msg, streaming=streaming)
 
-	def send(self, msg, streaming=False):
-		if not self.connection.closed:
-			self.connection.send(msg, streaming)
+    # Route file requests
+    def route(self, cmd, req_id, params):
+        self.req_id = req_id
+
+        if cmd == "getFile":
+            self.actionGetFile(params)
+        elif cmd == "update":
+            event = "%s update %s %s" % (self.connection.id, params["site"], params["inner_path"])
+            if not RateLimit.isAllowed(event):  # There was already an update for this file in the last 10 seconds
+                self.response({"ok": "File update queued"})
+            # If called more than once within 10 sec only keep the last update
+            RateLimit.callAsync(event, 10, self.actionUpdate, params)
 
-	def response(self, msg, streaming=False):
-		if self.responded:
-			self.log.debug("Req id %s already responded" % self.req_id)
-			return
-		if not isinstance(msg, dict): # If msg not a dict create a {"body": msg}
-			msg = {"body": msg}
-		msg["cmd"] = "response"
-		msg["to"] = self.req_id
-		self.responded = True
-		self.send(msg, streaming=streaming)
+        elif cmd == "pex":
+            self.actionPex(params)
+        elif cmd == "listModified":
+            self.actionListModified(params)
+        elif cmd == "ping":
+            self.actionPing()
+        else:
+            self.actionUnknown(cmd, params)
 
-	# Route file requests
-	def route(self, cmd, req_id, params):
-		self.req_id = req_id
-
-		if cmd == "getFile":
-			self.actionGetFile(params)
-		elif cmd == "update":
-			event = "%s update %s %s" % (self.connection.id, params["site"], params["inner_path"])
-			if not RateLimit.isAllowed(event): # There was already an updat for this file in the last 10 second
-				self.response({"ok": "File update queued"})
-			RateLimit.callAsync(event, 10, self.actionUpdate, params) # If called more than once within 10 sec only keep the last update
+    # Update a site file request
+    def actionUpdate(self, params):
+        site = self.sites.get(params["site"])
+        if not site or not site.settings["serving"]:  # Site unknown or not serving
+            self.response({"error": "Unknown site"})
+            return False
+        if site.settings["own"] and params["inner_path"].endswith("content.json"):
+            self.log.debug("Someone trying to push a file to own site %s, reload local %s first" % (site.address, params["inner_path"]))
+            changed = site.content_manager.loadContent(params["inner_path"], add_bad_files=False)
+            if changed:  # Content.json changed locally
+                site.settings["size"] = site.content_manager.getTotalSize()  # Update site size
+        buff = StringIO(params["body"])
+        valid = site.content_manager.verifyFile(params["inner_path"], buff)
+        if valid == True:  # Valid and changed
+            self.log.info("Update for %s looks valid, saving..." % params["inner_path"])
+            buff.seek(0)
+            site.storage.write(params["inner_path"], buff)
 
-		elif cmd == "pex":
-			self.actionPex(params)
-		elif cmd == "listModified":
-			self.actionListModified(params)
-		elif cmd == "ping":
-			self.actionPing()
-		else:
-			self.actionUnknown(cmd, params)
+            site.onFileDone(params["inner_path"])  # Trigger filedone
 
+            if params["inner_path"].endswith("content.json"):  # Download every changed file from peer
+                peer = site.addPeer(self.connection.ip, self.connection.port, return_peer = True)  # Add or get peer
+                site.onComplete.once(lambda: site.publish(inner_path=params["inner_path"]), "publish_%s" % params["inner_path"])  # On complete publish to other peers
+                gevent.spawn(
+                    lambda: site.downloadContent(params["inner_path"], peer=peer)
+                )  # Load new content file and download changed files in new thread
 
+            self.response({"ok": "Thanks, file %s updated!" % params["inner_path"]})
 
+        elif valid == None:  # Not changed
+            peer = site.addPeer(*params["peer"], return_peer = True)  # Add or get peer
+            if peer:
+                self.log.debug("Same version, adding new peer for locked files: %s, tasks: %s" % (peer.key, len(site.worker_manager.tasks)) )
+                for task in site.worker_manager.tasks:  # New peer add to every ongoing task
+                    if task["peers"]: site.needFile(task["inner_path"], peer=peer, update=True, blocking=False)  # Download file from this peer too if it's peer locked
 
+            self.response({"ok": "File not changed"})
 
+        else:  # Invalid sign or sha1 hash
+            self.log.debug("Update for %s is invalid" % params["inner_path"])
+            self.response({"error": "File invalid"})
 
-	# Update a site file request
-	def actionUpdate(self, params):
-		site = self.sites.get(params["site"])
-		if not site or not site.settings["serving"]: # Site unknown or not serving
-			self.response({"error": "Unknown site"})
-			return False
-		if site.settings["own"] and params["inner_path"].endswith("content.json"):
-			self.log.debug("Someone trying to push a file to own site %s, reload local %s first" % (site.address, params["inner_path"]))
-			changed = site.content_manager.loadContent(params["inner_path"], add_bad_files=False)
-			if changed: # Content.json changed locally
-				site.settings["size"] = site.content_manager.getTotalSize() # Update site size
-		buff = StringIO(params["body"])
-		valid = site.content_manager.verifyFile(params["inner_path"], buff)
-		if valid == True: # Valid and changed
-			self.log.info("Update for %s looks valid, saving..." % params["inner_path"])
-			buff.seek(0)
-			site.storage.write(params["inner_path"], buff)
-
-			site.onFileDone(params["inner_path"]) # Trigger filedone
-
-			if params["inner_path"].endswith("content.json"): # Download every changed file from peer
-				peer = site.addPeer(self.connection.ip, self.connection.port, return_peer = True) # Add or get peer
-				site.onComplete.once(lambda: site.publish(inner_path=params["inner_path"]), "publish_%s" % params["inner_path"]) # On complete publish to other peers
-				gevent.spawn(
-					lambda: site.downloadContent(params["inner_path"], peer=peer)
-				) # Load new content file and download changed files in new thread
-
-			self.response({"ok": "Thanks, file %s updated!" % params["inner_path"]})
-
-		elif valid == None: # Not changed
-			peer = site.addPeer(*params["peer"], return_peer = True) # Add or get peer
-			if peer:
-				self.log.debug("Same version, adding new peer for locked files: %s, tasks: %s" % (peer.key, len(site.worker_manager.tasks)) )
-				for task in site.worker_manager.tasks: # New peer add to every ongoing task
-					if task["peers"]: site.needFile(task["inner_path"], peer=peer, update=True, blocking=False) # Download file from this peer too if its peer locked
-
-			self.response({"ok": "File not changed"})
-
-		else: # Invalid sign or sha1 hash
-			self.log.debug("Update for %s is invalid" % params["inner_path"])
-			self.response({"error": "File invalid"})
+    # Send file content request
+    def actionGetFile(self, params):
+        site = self.sites.get(params["site"])
+        if not site or not site.settings["serving"]:  # Site unknown or not serving
+            self.response({"error": "Unknown site"})
+            return False
+        try:
+            file_path = site.storage.getPath(params["inner_path"])
+            if config.debug_socket: self.log.debug("Opening file: %s" % file_path)
+            with StreamingMsgpack.FilePart(file_path, "rb") as file:
+                file.seek(params["location"])
+                file.read_bytes = FILE_BUFF
+                back = {"body": file,
+                        "size": os.fstat(file.fileno()).st_size,
+                        "location": min(file.tell()+FILE_BUFF, os.fstat(file.fileno()).st_size)
+                        }
+                if config.debug_socket:
+                    self.log.debug("Sending file %s from position %s to %s" % (file_path,
+                                                                               params["location"],
+                                                                               back["location"]))
+                self.response(back, streaming=True)
+                if config.debug_socket:
+                    self.log.debug("File %s sent" % file_path)
 
+            # Add peer to site if not added before
+            connected_peer = site.addPeer(self.connection.ip, self.connection.port)
+            if connected_peer:  # Just added
+                connected_peer.connect(self.connection)  # Assign current connection to peer
 
+        except Exception, err:
+            self.log.debug("GetFile read error: %s" % Debug.formatException(err))
+            self.response({"error": "File read error: %s" % Debug.formatException(err)})
+            return False
 
+    # Peer exchange request
+    def actionPex(self, params):
+        site = self.sites.get(params["site"])
+        if not site or not site.settings["serving"]:  # Site unknown or not serving
+            self.response({"error": "Unknown site"})
+            return False
 
+        got_peer_keys = []
+        added = 0
+        connected_peer = site.addPeer(self.connection.ip, self.connection.port)  # Add requester peer to site
+        if connected_peer:  # Just added
+            added += 1
+            connected_peer.connect(self.connection)  # Assign current connection to peer
 
+        for peer in params["peers"]:  # Add sent peers to site
+            address = self.unpackAddress(peer)
+            got_peer_keys.append("%s:%s" % address)
+            if site.addPeer(*address): added += 1
+        # Send back peers that are not in the sent list and connectable (not port 0)
+        packed_peers = [peer.packAddress() for peer in site.getConnectablePeers(params["need"], got_peer_keys)]
+        if added:
+            site.worker_manager.onPeers()
+            self.log.debug("Added %s peers to %s using pex, sending back %s" % (added, site, len(packed_peers)))
+        self.response({"peers": packed_peers})
 
-	# Send file content request
-	def actionGetFile(self, params):
-		site = self.sites.get(params["site"])
-		if not site or not site.settings["serving"]: # Site unknown or not serving
-			self.response({"error": "Unknown site"})
-			return False
-		try:
-			file_path = site.storage.getPath(params["inner_path"])
-			if config.debug_socket: self.log.debug("Opening file: %s" % file_path)
-			with StreamingMsgpack.FilePart(file_path, "rb") as file:
-				file.seek(params["location"])
-				file.read_bytes = FILE_BUFF
-				back = {}
-				back["body"] = file
-				back["size"] = os.fstat(file.fileno()).st_size
-				back["location"] = min(file.tell()+FILE_BUFF, back["size"])
-				if config.debug_socket: self.log.debug("Sending file %s from position %s to %s" % (file_path, params["location"], back["location"]))
-				self.response(back, streaming=True)
-				if config.debug_socket: self.log.debug("File %s sent" % file_path)
+    # Get modified content.json files since
+    def actionListModified(self, params):
+        site = self.sites.get(params["site"])
+        if not site or not site.settings["serving"]:  # Site unknown or not serving
+            self.response({"error": "Unknown site"})
+            return False
+        modified_files = {inner_path: content["modified"]
+                          for inner_path, content in site.content_manager.contents.iteritems()
+                          if content["modified"] > params["since"]}
 
-			# Add peer to site if not added before
-			connected_peer = site.addPeer(self.connection.ip, self.connection.port)
-			if connected_peer: # Just added
-				connected_peer.connect(self.connection) # Assign current connection to peer
+        # Add peer to site if not added before
+        connected_peer = site.addPeer(self.connection.ip, self.connection.port)
+        if connected_peer:  # Just added
+            connected_peer.connect(self.connection)  # Assign current connection to peer
 
-		except Exception, err:
-			self.log.debug("GetFile read error: %s" % Debug.formatException(err))
-			self.response({"error": "File read error: %s" % Debug.formatException(err)})
-			return False
+        self.response({"modified_files": modified_files})
 
-	# Peer exchange request
-	def actionPex(self, params):
-		site = self.sites.get(params["site"])
-		if not site or not site.settings["serving"]: # Site unknown or not serving
-			self.response({"error": "Unknown site"})
-			return False
-
-		got_peer_keys = []
-		added = 0
-		connected_peer = site.addPeer(self.connection.ip, self.connection.port) # Add requester peer to site
-		if connected_peer: # Just added
-			added +=1
-			connected_peer.connect(self.connection) # Assign current connection to peer
-
-		for peer in params["peers"]: # Add sent peers to site
-			address = self.unpackAddress(peer)
-			got_peer_keys.append("%s:%s" % address)
-			if site.addPeer(*address): added += 1
-		# Send back peers that is not in the sent list and connectable (not port 0)
-		packed_peers = [peer.packAddress() for peer in site.getConnectablePeers(params["need"], got_peer_keys)]
-		if added:
-			site.worker_manager.onPeers()
-			self.log.debug("Added %s peers to %s using pex, sending back %s" % (added, site, len(packed_peers)))
-		self.response({"peers": packed_peers})
+    # Send a simple Pong! answer
+    def actionPing(self):
+        self.response("Pong!")
 
-	# Get modified content.json files since
-	def actionListModified(self, params):
-		site = self.sites.get(params["site"])
-		if not site or not site.settings["serving"]: # Site unknown or not serving
-			self.response({"error": "Unknown site"})
-			return False
-		modified_files = {inner_path: content["modified"] for inner_path, content in site.content_manager.contents.iteritems() if content["modified"] > params["since"]}
-
-		# Add peer to site if not added before
-		connected_peer = site.addPeer(self.connection.ip, self.connection.port)
-		if connected_peer: # Just added
-			connected_peer.connect(self.connection) # Assign current connection to peer
-
-		self.response({"modified_files": modified_files})
-
-
-
-	# Send a simple Pong! answer
-	def actionPing(self):
-		self.response("Pong!")
-
-
-	# Unknown command
-	def actionUnknown(self, cmd, params):
-		self.response({"error": "Unknown command: %s" % cmd})
+    # Unknown command
+    def actionUnknown(self, cmd, params):
+        self.response({"error": "Unknown command: %s" % cmd})

From b6cb1062ced3e2b24f3570c784edcf1b5d03792a Mon Sep 17 00:00:00 2001
From: Matthew Bell
Date: Wed, 17 Jun 2015 22:49:47 +0100
Subject: [PATCH 07/16] partial cleanup of ContentManager.py

---
 src/Content/ContentManager.py | 974 +++++++++++++++++-----------------
 1 file changed, 482 insertions(+), 492 deletions(-)

diff --git a/src/Content/ContentManager.py b/src/Content/ContentManager.py
index 077884b3..8b4de163 100644
--- a/src/Content/ContentManager.py
+++ b/src/Content/ContentManager.py
@@ -4,513 +4,503 @@ from Crypt import CryptHash
 from Config import config
 
 
 class ContentManager:
-	def __init__(self, site):
-		self.site = site
-		self.log = self.site.log
-		self.contents = {} # Known content.json (without files and includes)
-		self.loadContent(add_bad_files = False)
-		self.site.settings["size"] = self.getTotalSize()
-
-
-	# Load content.json to self.content
-	# Return: Changed files ["index.html", "data/messages.json"]
-	def loadContent(self, content_inner_path = "content.json", add_bad_files = True, load_includes = True):
-		content_inner_path = content_inner_path.strip("/") # Remove / from begning
-		old_content = self.contents.get(content_inner_path)
-		content_path = self.site.storage.getPath(content_inner_path)
-		content_path_dir = self.toDir(self.site.storage.getPath(content_inner_path))
-		content_dir = self.toDir(content_inner_path)
-
-		if os.path.isfile(content_path):
-			try:
-				new_content = json.load(open(content_path))
-			except Exception, err:
-				self.log.error("%s load error: %s" % (content_path, Debug.formatException(err)))
-				return False
-		else:
-			self.log.error("Content.json not exist: %s" % content_path)
-			return False # Content.json not exist
-
-
-		try:
-			# Get the files where the sha512 changed
-			changed = []
-			for relative_path, info in new_content.get("files", {}).items():
-				if "sha512" in info:
-					hash_type = "sha512"
-				else: # Backward compatiblity
-					hash_type = "sha1"
-
-				new_hash = info[hash_type]
-				if old_content and old_content["files"].get(relative_path): # We have the file in the old content
-					old_hash = old_content["files"][relative_path].get(hash_type)
-				else: # The file is not in the old content
-					old_hash = None
-				if old_hash != new_hash: changed.append(content_dir+relative_path)
-
-			# Load includes
-			if load_includes and "includes" in new_content:
-				for relative_path, info in new_content["includes"].items():
-					include_inner_path = content_dir+relative_path
-					if self.site.storage.isFile(include_inner_path): # Content.json exists, load it
-						success = self.loadContent(include_inner_path, add_bad_files=add_bad_files)
-						if success: changed += success # Add changed files
-					else: # Content.json not exist, add to changed files
-						self.log.debug("Missing include: %s" % include_inner_path)
-						changed += [include_inner_path]
-
-			# Load blind user includes (all subdir)
-			if load_includes and "user_contents" in new_content:
-				for relative_dir in os.listdir(content_path_dir):
-					include_inner_path = content_dir+relative_dir+"/content.json"
-					if not self.site.storage.isFile(include_inner_path): continue # Content.json not exist
-					success = self.loadContent(include_inner_path, add_bad_files=add_bad_files, load_includes=False)
-					if success: changed += success # Add changed files
-
-			# Update the content
-			self.contents[content_inner_path] = new_content
-		except Exception, err:
-			self.log.error("Content.json parse error: %s" % Debug.formatException(err))
-			return False # Content.json parse error
-
-		# Add changed files to bad files
-		if add_bad_files:
-			for inner_path in changed:
-				self.site.bad_files[inner_path] = True
-
-		if new_content["modified"] > self.site.settings.get("modified", 0):
-			self.site.settings["modified"] = min(time.time()+60*10, new_content["modified"]) # Dont store modifications in the far future (more than 10 minute)
-
-		return changed
-
-
-	# Get total size of site
-	# Return: 32819 (size of files in kb)
-	def getTotalSize(self, ignore=None):
-		total_size = 0
-		for inner_path, content in self.contents.iteritems():
-			if inner_path == ignore: continue
-			total_size += self.site.storage.getSize(inner_path) # Size of content.json
-			for file, info in content.get("files", {}).iteritems():
-				total_size += info["size"]
-		return total_size
-
-
-	# Find the file info line from self.contents
-	# Return: { "sha512": "c29d73d30ee8c9c1b5600e8a84447a6de15a3c3db6869aca4a2a578c1721f518", "size": 41 , "content_inner_path": "content.json"}
-	def getFileInfo(self, inner_path):
-		dirs = inner_path.split("/") # Parent dirs of content.json
-		inner_path_parts = [dirs.pop()] # Filename relative to content.json
-		while True:
-			content_inner_path = "%s/content.json" % "/".join(dirs)
-			content = self.contents.get(content_inner_path.strip("/"))
-			if content and "files" in content: # Check if content.json exists
-				back = content["files"].get("/".join(inner_path_parts))
-				if back:
-					back["content_inner_path"] = content_inner_path
-					return back
-
-			if content and "user_contents" in content: # User dir
-				back = content["user_contents"]
-				back["content_inner_path"] = re.sub("(.*)/.*?$", "\\1/content.json", inner_path) # Content.json is in the users dir
-				return back
-
-			# No inner path in this dir, lets try the parent dir
-			if dirs:
-				inner_path_parts.insert(0, dirs.pop())
-			else: # No more parent dirs
-				break
-
-		return False # Not found
-
-
-	# Get rules for the file
-	# Return: The rules for the file or False if not allowed
-	def getRules(self, inner_path, content=None):
-		if not inner_path.endswith("content.json"): # Find the files content.json first
-			file_info = self.getFileInfo(inner_path)
-			if not file_info: return False # File not found
-			inner_path = file_info["content_inner_path"]
-		dirs = inner_path.split("/") # Parent dirs of content.json
-		inner_path_parts = [dirs.pop()] # Filename relative to content.json
-		inner_path_parts.insert(0, dirs.pop()) # Dont check in self dir
-		while True:
-			content_inner_path = "%s/content.json" % "/".join(dirs)
-			parent_content = self.contents.get(content_inner_path.strip("/"))
-			if parent_content and "includes" in parent_content:
-				return parent_content["includes"].get("/".join(inner_path_parts))
-			elif parent_content and "user_contents" in parent_content:
-				return self.getUserContentRules(parent_content, inner_path, content)
-			else: # No inner path in this dir, lets try the parent dir
-				if dirs:
-					inner_path_parts.insert(0, dirs.pop())
-				else: # No more parent dirs
-					break
-
-		return False
-
-
-	# Get rules for a user file
-	# Return: The rules of the file or False if not allowed
-	def getUserContentRules(self, parent_content, inner_path, content):
-		user_contents = parent_content["user_contents"]
-		user_address = re.match(".*/([A-Za-z0-9]*?)/.*?$", inner_path).group(1) # Delivered for directory
-
-		try:
-			if not content: content = self.site.storage.loadJson(inner_path) # Read the file if no content specificed
self.site.storage.loadJson(inner_path) # Read the file if no content specificed - except: # Content.json not exist - return { "signers": [user_address], "user_address": user_address } # Return information that we know for sure - - """if not "cert_user_name" in content: # New file, unknown user - content["cert_auth_type"] = "unknown" - content["cert_user_name"] = "unknown@unknown" - """ - user_urn = "%s/%s" % (content["cert_auth_type"], content["cert_user_id"]) # web/nofish@zeroid.bit - - rules = copy.copy(user_contents["permissions"].get(content["cert_user_id"], {})) # Default rules by username - if rules == False: return False # User banned - if "signers" in rules: rules["signers"] = rules["signers"][:] # Make copy of the signers - for permission_pattern, permission_rules in user_contents["permission_rules"].items(): # Regexp rules - if not re.match(permission_pattern, user_urn): continue # Rule is not valid for user - # Update rules if its better than current recorded ones - for key, val in permission_rules.iteritems(): - if key not in rules: - if type(val) is list: - rules[key] = val[:] # Make copy - else: - rules[key] = val - elif type(val) is int: # Int, update if larger - if val > rules[key]: rules[key] = val - elif hasattr(val, "startswith"): # String, update if longer - if len(val) > len(rules[key]): rules[key] = val - elif type(val) is list: # List, append - rules[key] += val - - rules["cert_signers"] = user_contents["cert_signers"] # Add valid cert signers - if "signers" not in rules: rules["signers"] = [] - rules["signers"].append(user_address) # Add user as valid signer - rules["user_address"] = user_address - - - return rules - - - - # Create and sign a content.json - # Return: The new content if filewrite = False - def sign(self, inner_path = "content.json", privatekey=None, filewrite=True, update_changed_files=False, extend=None): - content = self.contents.get(inner_path) - if not content: # Content not exist yet, load default one - self.log.info("File %s not exist yet, loading default values..." % inner_path) - content = {"files": {}, "signs": {}} # Default content.json - if inner_path == "content.json": # Its the root content.json, add some more fields - content["title"] = "%s - ZeroNet_" % self.site.address - content["description"] = "" - content["signs_required"] = 1 - content["ignore"] = "" - if extend: content.update(extend) # Add custom fields - - directory = self.toDir(self.site.storage.getPath(inner_path)) - self.log.info("Opening site data directory: %s..." % directory) - - hashed_files = {} - changed_files = [inner_path] - for root, dirs, files in os.walk(directory): - for file_name in files: - file_path = self.site.storage.getPath("%s/%s" % (root.strip("/"), file_name)) - file_inner_path = re.sub(re.escape(directory), "", file_path) - - if file_name == "content.json": ignored = True - elif content.get("ignore") and re.match(content["ignore"], file_inner_path): ignored = True - elif file_name.startswith("."): ignored = True - else: ignored = False - - if ignored: # Ignore content.json, definied regexp and files starting with . 
- self.log.info("- [SKIPPED] %s" % file_inner_path) - else: - sha512sum = CryptHash.sha512sum(file_path) # Calculate sha512 sum of file - self.log.info("- %s (SHA512: %s)" % (file_inner_path, sha512sum)) - hashed_files[file_inner_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)} - if file_inner_path in content["files"].keys() and hashed_files[file_inner_path]["sha512"] != content["files"][file_inner_path].get("sha512"): - changed_files.append(file_path) - - - self.log.debug("Changed files: %s" % changed_files) - if update_changed_files: - for file_path in changed_files: - self.site.storage.onUpdated(file_path) - - # Generate new content.json - self.log.info("Adding timestamp and sha512sums to new content.json...") - - new_content = content.copy() # Create a copy of current content.json - new_content["files"] = hashed_files # Add files sha512 hash - new_content["modified"] = time.time() # Add timestamp - if inner_path == "content.json": - new_content["address"] = self.site.address - new_content["zeronet_version"] = config.version - new_content["signs_required"] = content.get("signs_required", 1) - - from Crypt import CryptBitcoin - self.log.info("Verifying private key...") - privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey) - valid_signers = self.getValidSigners(inner_path, new_content) - if privatekey_address not in valid_signers: - return self.log.error("Private key invalid! Valid signers: %s, Private key address: %s" % (valid_signers, privatekey_address)) - self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers)) - - if inner_path == "content.json" and privatekey_address == self.site.address: # If signing using the root key sign the valid signers - new_content["signers_sign"] = CryptBitcoin.sign("%s:%s" % (new_content["signs_required"], ",".join(valid_signers)), privatekey) - if not new_content["signers_sign"]: self.log.info("Old style address, signers_sign is none") - - self.log.info("Signing %s..." % inner_path) - - if "signs" in new_content: del(new_content["signs"]) # Delete old signs - if "sign" in new_content: del(new_content["sign"]) # Delete old sign (backward compatibility) - - sign_content = json.dumps(new_content, sort_keys=True) - sign = CryptBitcoin.sign(sign_content, privatekey) - #new_content["signs"] = content.get("signs", {}) # TODO: Multisig - if sign: # If signing is successful (not an old address) - new_content["signs"] = {} - new_content["signs"][privatekey_address] = sign - - if inner_path == "content.json": # To root content.json add old format sign for backward compatibility - oldsign_content = json.dumps(new_content, sort_keys=True) - new_content["sign"] = CryptBitcoin.signOld(oldsign_content, privatekey) - - if not self.validContent(inner_path, new_content): - self.log.error("Sign failed: Invalid content") - return False - - if filewrite: - self.log.info("Saving to %s..." % inner_path) - self.site.storage.writeJson(inner_path, new_content) - - self.log.info("File %s signed!" 
% inner_path) - - if filewrite: # Written to file - return True - else: # Return the new content - return new_content - - - # The valid signers of content.json file - # Return: ["1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6", "13ReyhCsjhpuCVahn1DHdf6eMqqEVev162"] - def getValidSigners(self, inner_path, content=None): - valid_signers = [] - if inner_path == "content.json": # Root content.json - if "content.json" in self.contents and "signers" in self.contents["content.json"]: - valid_signers += self.contents["content.json"]["signers"].keys() - else: - rules = self.getRules(inner_path, content) - if rules and "signers" in rules: - valid_signers += rules["signers"] - - if self.site.address not in valid_signers: valid_signers.append(self.site.address) # Site address always valid - return valid_signers - - - # Return: The required number of valid signs for the content.json - def getSignsRequired(self, inner_path, content=None): - return 1 # Todo: Multisig - - - def verifyCert(self, inner_path, content): - from Crypt import CryptBitcoin - - rules = self.getRules(inner_path, content) - if not rules.get("cert_signers"): return True # Does not need cert - - name, domain = content["cert_user_id"].split("@") - cert_address = rules["cert_signers"].get(domain) - if not cert_address: # Cert signer not allowed - self.log.error("Invalid cert signer: %s" % domain) - return False - return CryptBitcoin.verify("%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name), cert_address, content["cert_sign"]) - - - # Checks if the content.json content is valid - # Return: True or False - def validContent(self, inner_path, content): - content_size = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()]) # Size of new content - site_size = self.getTotalSize(ignore=inner_path)+content_size # Site size without old content - if site_size > self.site.settings.get("size", 0): self.site.settings["size"] = site_size # Save to settings if larger - - site_size_limit = self.site.getSizeLimit()*1024*1024 - - # Check total site size limit - if site_size > site_size_limit: - self.log.error("%s: Site too large %s > %s, aborting task..." 
% (inner_path, site_size, site_size_limit)) - task = self.site.worker_manager.findTask(inner_path) - if task: # Dont try to download from other peers - self.site.worker_manager.failTask(task) - return False - - if inner_path == "content.json": return True # Root content.json is passed - - # Load include details - rules = self.getRules(inner_path, content) - if not rules: - self.log.error("%s: No rules" % inner_path) - return False - - # Check include size limit - if rules.get("max_size"): # Include size limit - if content_size > rules["max_size"]: - self.log.error("%s: Include too large %s > %s" % (inner_path, content_size, rules["max_size"])) - return False - - # Check if content includes allowed - if rules.get("includes_allowed") == False and content.get("includes"): - self.log.error("%s: Includes not allowed" % inner_path) - return False # Includes not allowed - - # Filename limit - if rules.get("files_allowed"): - for file_inner_path in content["files"].keys(): - if not re.match("^%s$" % rules["files_allowed"], file_inner_path): - self.log.error("%s: File not allowed" % file_inner_path) - return False - - return True # All good - - - - # Verify file validity - # Return: None = Same as before, False = Invalid, True = Valid - def verifyFile(self, inner_path, file, ignore_same = True): - if inner_path.endswith("content.json"): # content.json: Check using sign - from Crypt import CryptBitcoin - try: - new_content = json.load(file) - if inner_path in self.contents: - old_content = self.contents.get(inner_path) - # Checks if its newer the ours - if old_content["modified"] == new_content["modified"] and ignore_same: # Ignore, have the same content.json - return None - elif old_content["modified"] > new_content["modified"]: # We have newer - self.log.debug("We have newer %s (Our: %s, Sent: %s)" % (inner_path, old_content["modified"], new_content["modified"])) - gevent.spawn(self.site.publish, inner_path=inner_path) # Try to fix the broken peers - return False - if new_content["modified"] > time.time()+60*60*24: # Content modified in the far future (allow 1 day window) - self.log.error("%s modify is in the future!" % inner_path) - return False - # Check sign - sign = new_content.get("sign") - signs = new_content.get("signs", {}) - if "sign" in new_content: del(new_content["sign"]) # The file signed without the sign - if "signs" in new_content: del(new_content["signs"]) # The file signed without the signs - sign_content = json.dumps(new_content, sort_keys=True) # Dump the json to string to remove whitepsace - - if not self.validContent(inner_path, new_content): return False # Content not valid (files too large, invalid files) - - if signs: # New style signing - valid_signers = self.getValidSigners(inner_path, new_content) - signs_required = self.getSignsRequired(inner_path, new_content) - - if inner_path == "content.json" and len(valid_signers) > 1: # Check signers_sign on root content.json - if not CryptBitcoin.verify("%s:%s" % (signs_required, ",".join(valid_signers)), self.site.address, new_content["signers_sign"]): - self.log.error("%s invalid signers_sign!" % inner_path) - return False - - if inner_path != "content.json" and not self.verifyCert(inner_path, new_content): # Check if cert valid - self.log.error("%s invalid cert!" 
% inner_path)
-							return False
-
-					valid_signs = 0
-					for address in valid_signers:
-						if address in signs: valid_signs += CryptBitcoin.verify(sign_content, address, signs[address])
-						if valid_signs >= signs_required: break # Break if we has enough signs
-
-
-
-					return valid_signs >= signs_required
-				else: # Old style signing
-					return CryptBitcoin.verify(sign_content, self.site.address, sign)
-
-			except Exception, err:
-				self.log.error("Verify sign error: %s" % Debug.formatException(err))
-				return False
-
-		else: # Check using sha512 hash
-			file_info = self.getFileInfo(inner_path)
-			if file_info:
-				if "sha512" in file_info:
-					hash_valid = CryptHash.sha512sum(file) == file_info["sha512"]
-				elif "sha1" in file_info: # Backward compatibility
-					hash_valid = CryptHash.sha1sum(file) == file_info["sha1"]
-				else:
-					hash_valid = False
-				if file_info["size"] != file.tell():
-					self.log.error("%s file size does not match %s <> %s, Hash: %s" % (inner_path, file.tell(), file_info["size"], hash_valid))
-					return False
-				return hash_valid
-
-			else: # File not in content.json
-				self.log.error("File not in content.json: %s" % inner_path)
-				return False
-
-
-	# Get dir from file
-	# Return: data/site/content.json -> data/site
-	def toDir(self, inner_path):
-		file_dir = re.sub("[^/]*?$", "", inner_path).strip("/")
-		if file_dir: file_dir += "/" # Add / at end if its not the root
-		return file_dir
-
-
+    def __init__(self, site):
+        self.site = site
+        self.log = self.site.log
+        self.contents = {}  # Known content.json (without files and includes)
+        self.loadContent(add_bad_files=False)
+        self.site.settings["size"] = self.getTotalSize()
+
+    # Load content.json to self.content
+    # Return: Changed files ["index.html", "data/messages.json"]
+    def loadContent(self, content_inner_path="content.json", add_bad_files=True, load_includes=True):
+        content_inner_path = content_inner_path.strip("/")  # Remove / from beginning
+        old_content = self.contents.get(content_inner_path)
+        content_path = self.site.storage.getPath(content_inner_path)
+        content_path_dir = self.toDir(self.site.storage.getPath(content_inner_path))
+        content_dir = self.toDir(content_inner_path)
+
+        if os.path.isfile(content_path):
+            try:
+                new_content = json.load(open(content_path))
+            except Exception, err:
+                self.log.error("%s load error: %s" % (content_path, Debug.formatException(err)))
+                return False
+        else:
+            self.log.error("Content.json not exist: %s" % content_path)
+            return False  # Content.json does not exist
+
+        try:
+            # Get the files where the sha512 changed
+            changed = []
+            for relative_path, info in new_content.get("files", {}).items():
+                if "sha512" in info:
+                    hash_type = "sha512"
+                else:  # Backward compatibility
+                    hash_type = "sha1"
+
+                new_hash = info[hash_type]
+                if old_content and old_content["files"].get(relative_path):  # We have the file in the old content
+                    old_hash = old_content["files"][relative_path].get(hash_type)
+                else:  # The file is not in the old content
+                    old_hash = None
+                if old_hash != new_hash: changed.append(content_dir+relative_path)
+
+            # Load includes
+            if load_includes and "includes" in new_content:
+                for relative_path, info in new_content["includes"].items():
+                    include_inner_path = content_dir+relative_path
+                    if self.site.storage.isFile(include_inner_path):  # Content.json exists, load it
+                        success = self.loadContent(include_inner_path, add_bad_files=add_bad_files)
+                        if success: changed += success  # Add changed files
+                    else:  # Content.json does not exist, add to changed files
+                        self.log.debug("Missing include: %s" % include_inner_path)
+                        changed += [include_inner_path]
+
+            # Load blind user includes (all subdir)
+            if load_includes and "user_contents" in new_content:
+                for relative_dir in os.listdir(content_path_dir):
+                    include_inner_path = content_dir+relative_dir+"/content.json"
+                    if not self.site.storage.isFile(include_inner_path): continue  # Content.json does not exist
+                    success = self.loadContent(include_inner_path, add_bad_files=add_bad_files, load_includes=False)
+                    if success: changed += success  # Add changed files
+
+            # Update the content
+            self.contents[content_inner_path] = new_content
+        except Exception, err:
+            self.log.error("Content.json parse error: %s" % Debug.formatException(err))
+            return False  # Content.json parse error
+
+        # Add changed files to bad files
+        if add_bad_files:
+            for inner_path in changed:
+                self.site.bad_files[inner_path] = True
+
+        if new_content["modified"] > self.site.settings.get("modified", 0):
+            self.site.settings["modified"] = min(time.time()+60*10, new_content["modified"])  # Don't store modifications in the far future (more than 10 minutes)
+
+        return changed
+
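+    # Usage sketch (illustrative, mirrors the return comment above): a reload
+    # after files changed on disk returns the inner paths whose recorded hashes
+    # differ, or False on a load/parse error:
+    #   changed = content_manager.loadContent(add_bad_files=True)
+    #   # changed -> e.g. ["index.html", "data/messages.json"]
+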
+    # Get total size of site
+    # Return: 32819 (size of files in bytes)
+    def getTotalSize(self, ignore=None):
+        total_size = 0
+        for inner_path, content in self.contents.iteritems():
+            if inner_path == ignore: continue
+            total_size += self.site.storage.getSize(inner_path)  # Size of content.json
+            for file, info in content.get("files", {}).iteritems():
+                total_size += info["size"]
+        return total_size
+
+    # Find the file info line from self.contents
+    # Return: { "sha512": "c29d73d30ee8c9c1b5600e8a84447a6de15a3c3db6869aca4a2a578c1721f518", "size": 41, "content_inner_path": "content.json"}
+    def getFileInfo(self, inner_path):
+        dirs = inner_path.split("/")  # Parent dirs of content.json
+        inner_path_parts = [dirs.pop()]  # Filename relative to content.json
+        while True:
+            content_inner_path = "%s/content.json" % "/".join(dirs)
+            content = self.contents.get(content_inner_path.strip("/"))
+            if content and "files" in content:  # Check if content.json exists
+                back = content["files"].get("/".join(inner_path_parts))
+                if back:
+                    back["content_inner_path"] = content_inner_path
+                    return back
+
+            if content and "user_contents" in content:  # User dir
+                back = content["user_contents"]
+                # Content.json is in the users dir
+                back["content_inner_path"] = re.sub("(.*)/.*?$", "\\1/content.json", inner_path)
+                return back
+
+            # No inner path in this dir, let's try the parent dir
+            if dirs:
+                inner_path_parts.insert(0, dirs.pop())
+            else:  # No more parent dirs
+                break
+
+        # Not found
+        return False
+
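+    # Usage sketch: for a file inside a user directory the walk resolves to that
+    # directory's own content.json (path shortened for illustration):
+    #   info = content_manager.getFileInfo("data/users/1KRx.../messages.json")
+    #   # info["content_inner_path"] -> "data/users/1KRx.../content.json"
+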
+    # Get rules for the file
+    # Return: The rules for the file or False if not allowed
+    def getRules(self, inner_path, content=None):
+        if not inner_path.endswith("content.json"):  # Find the file's content.json first
+            file_info = self.getFileInfo(inner_path)
+            if not file_info: return False  # File not found
+            inner_path = file_info["content_inner_path"]
+        dirs = inner_path.split("/")  # Parent dirs of content.json
+        inner_path_parts = [dirs.pop()]  # Filename relative to content.json
+        inner_path_parts.insert(0, dirs.pop())  # Don't check in self dir
+        while True:
+            content_inner_path = "%s/content.json" % "/".join(dirs)
+            parent_content = self.contents.get(content_inner_path.strip("/"))
+            if parent_content and "includes" in parent_content:
+                return parent_content["includes"].get("/".join(inner_path_parts))
+            elif parent_content and "user_contents" in parent_content:
+                return self.getUserContentRules(parent_content, inner_path, content)
+            else:  # No inner path in this dir, let's try the parent dir
+                if dirs:
+                    inner_path_parts.insert(0, dirs.pop())
+                else:  # No more parent dirs
+                    break
+
+        return False
+
+    # Get rules for a user file
+    # Return: The rules of the file or False if not allowed
+    def getUserContentRules(self, parent_content, inner_path, content):
+        user_contents = parent_content["user_contents"]
+        user_address = re.match(".*/([A-Za-z0-9]*?)/.*?$", inner_path).group(1)  # User address from the directory name
+
+        try:
+            if not content: content = self.site.storage.loadJson(inner_path)  # Read the file if no content specified
+        except (Exception, ):  # Content.json does not exist
+            return {"signers": [user_address], "user_address": user_address}  # Return information that we know for sure
+
+        """if not "cert_user_name" in content: # New file, unknown user
+            content["cert_auth_type"] = "unknown"
+            content["cert_user_name"] = "unknown@unknown"
+        """
+        user_urn = "%s/%s" % (content["cert_auth_type"], content["cert_user_id"])  # web/nofish@zeroid.bit
+
+        rules = copy.copy(user_contents["permissions"].get(content["cert_user_id"], {}))  # Default rules by username
+        if rules is False:
+            return False  # User banned
+        if "signers" in rules:
+            rules["signers"] = rules["signers"][:]  # Make copy of the signers
+        for permission_pattern, permission_rules in user_contents["permission_rules"].items():  # Regexp rules
+            if not re.match(permission_pattern, user_urn): continue  # Rule is not valid for user
+            # Update rules if it's better than current recorded ones
+            for key, val in permission_rules.iteritems():
+                if key not in rules:
+                    if type(val) is list:
+                        rules[key] = val[:]  # Make copy
+                    else:
+                        rules[key] = val
+                elif type(val) is int:  # Int, update if larger
+                    if val > rules[key]:
+                        rules[key] = val
+                elif hasattr(val, "startswith"):  # String, update if longer
+                    if len(val) > len(rules[key]): rules[key] = val
+                elif type(val) is list:  # List, append
+                    rules[key] += val
+
+        rules["cert_signers"] = user_contents["cert_signers"]  # Add valid cert signers
+        if "signers" not in rules: rules["signers"] = []
+        rules["signers"].append(user_address)  # Add user as valid signer
+        rules["user_address"] = user_address
+
+        return rules
+
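+    # Data sketch (values illustrative, keys as read above): a parent
+    # content.json grants user directories via "user_contents":
+    #   "user_contents": {
+    #       "permissions": {"banned_user@zeroid.bit": false},
+    #       "permission_rules": {".*": {"max_size": 10000}},
+    #       "cert_signers": {"zeroid.bit": "1CertSignerAddressXXXXXXXXXXXXXXXX"}
+    #   }
+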
+    # Create and sign a content.json
+    # Return: The new content if filewrite = False
+    def sign(self, inner_path="content.json", privatekey=None, filewrite=True, update_changed_files=False, extend=None):
+        content = self.contents.get(inner_path)
+        if not content:  # Content does not exist yet, load default one
+            self.log.info("File %s not exist yet, loading default values..." % inner_path)
+            content = {"files": {}, "signs": {}}  # Default content.json
+            if inner_path == "content.json":  # It's the root content.json, add some more fields
+                content["title"] = "%s - ZeroNet_" % self.site.address
+                content["description"] = ""
+                content["signs_required"] = 1
+                content["ignore"] = ""
+        if extend: content.update(extend)  # Add custom fields
+
+        directory = self.toDir(self.site.storage.getPath(inner_path))
+        self.log.info("Opening site data directory: %s..." % directory)
+
+        hashed_files = {}
+        changed_files = [inner_path]
+        for root, dirs, files in os.walk(directory):
+            for file_name in files:
+                file_path = self.site.storage.getPath("%s/%s" % (root.strip("/"), file_name))
+                file_inner_path = re.sub(re.escape(directory), "", file_path)
+
+                if file_name == "content.json": ignored = True
+                elif content.get("ignore") and re.match(content["ignore"], file_inner_path): ignored = True
+                elif file_name.startswith("."): ignored = True
+                else: ignored = False
+
+                if ignored:  # Ignore content.json, defined regexp and files starting with .
+                    self.log.info("- [SKIPPED] %s" % file_inner_path)
+                else:
+                    sha512sum = CryptHash.sha512sum(file_path)  # Calculate sha512 sum of file
+                    self.log.info("- %s (SHA512: %s)" % (file_inner_path, sha512sum))
+                    hashed_files[file_inner_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)}
+                    if file_inner_path in content["files"].keys() and hashed_files[file_inner_path]["sha512"] != content["files"][file_inner_path].get("sha512"):
+                        changed_files.append(file_path)
+
+        self.log.debug("Changed files: %s" % changed_files)
+        if update_changed_files:
+            for file_path in changed_files:
+                self.site.storage.onUpdated(file_path)
+
+        # Generate new content.json
+        self.log.info("Adding timestamp and sha512sums to new content.json...")
+
+        new_content = content.copy()  # Create a copy of current content.json
+        new_content["files"] = hashed_files  # Add files sha512 hash
+        new_content["modified"] = time.time()  # Add timestamp
+        if inner_path == "content.json":
+            new_content["address"] = self.site.address
+            new_content["zeronet_version"] = config.version
+            new_content["signs_required"] = content.get("signs_required", 1)
+
+        from Crypt import CryptBitcoin
+        self.log.info("Verifying private key...")
+        privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey)
+        valid_signers = self.getValidSigners(inner_path, new_content)
+        if privatekey_address not in valid_signers:
+            return self.log.error("Private key invalid! Valid signers: %s, Private key address: %s" % (valid_signers, privatekey_address))
+        self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers))
+
+        if inner_path == "content.json" and privatekey_address == self.site.address:  # If signing using the root key sign the valid signers
+            new_content["signers_sign"] = CryptBitcoin.sign("%s:%s" % (new_content["signs_required"], ",".join(valid_signers)), privatekey)
+            if not new_content["signers_sign"]: self.log.info("Old style address, signers_sign is none")
+
+        self.log.info("Signing %s..." % inner_path)
+
+        if "signs" in new_content: del(new_content["signs"])  # Delete old signs
+        if "sign" in new_content: del(new_content["sign"])  # Delete old sign (backward compatibility)
+
+        sign_content = json.dumps(new_content, sort_keys=True)
+        sign = CryptBitcoin.sign(sign_content, privatekey)
+        #new_content["signs"] = content.get("signs", {})  # TODO: Multisig
+        if sign:  # If signing is successful (not an old address)
+            new_content["signs"] = {}
+            new_content["signs"][privatekey_address] = sign
+
+        if inner_path == "content.json":  # To root content.json add old format sign for backward compatibility
+            oldsign_content = json.dumps(new_content, sort_keys=True)
+            new_content["sign"] = CryptBitcoin.signOld(oldsign_content, privatekey)
+
+        if not self.validContent(inner_path, new_content):
+            self.log.error("Sign failed: Invalid content")
+            return False
+
+        if filewrite:
+            self.log.info("Saving to %s..." % inner_path)
+            self.site.storage.writeJson(inner_path, new_content)
+
+        self.log.info("File %s signed!" % inner_path)
+
+        if filewrite:  # Written to file
+            return True
+        else:  # Return the new content
+            return new_content
+
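+    # Usage sketch: signing from application code; with filewrite=False the
+    # new, signed content.json dict is returned instead of written to disk
+    # (see testSign() below for the file-writing variant):
+    #   new_content = content_manager.sign("content.json", privatekey, filewrite=False)
+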
+    # The valid signers of content.json file
+    # Return: ["1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6", "13ReyhCsjhpuCVahn1DHdf6eMqqEVev162"]
+    def getValidSigners(self, inner_path, content=None):
+        valid_signers = []
+        if inner_path == "content.json":  # Root content.json
+            if "content.json" in self.contents and "signers" in self.contents["content.json"]:
+                valid_signers += self.contents["content.json"]["signers"].keys()
+        else:
+            rules = self.getRules(inner_path, content)
+            if rules and "signers" in rules:
+                valid_signers += rules["signers"]
+
+        if self.site.address not in valid_signers:
+            valid_signers.append(self.site.address)  # Site address always valid
+        return valid_signers
+
+    # Return: The required number of valid signs for the content.json
+    def getSignsRequired(self, inner_path, content=None):
+        return 1  # TODO: Multisig
+
+    def verifyCert(self, inner_path, content):
+        from Crypt import CryptBitcoin
+
+        rules = self.getRules(inner_path, content)
+        if not rules.get("cert_signers"): return True  # Does not need cert
+
+        name, domain = content["cert_user_id"].split("@")
+        cert_address = rules["cert_signers"].get(domain)
+        if not cert_address:  # Cert signer not allowed
+            self.log.error("Invalid cert signer: %s" % domain)
+            return False
+        return CryptBitcoin.verify("%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name), cert_address, content["cert_sign"])
+
+    # Check if the content.json content is valid
+    # Return: True or False
+    def validContent(self, inner_path, content):
+        content_size = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()])  # Size of new content
+        site_size = self.getTotalSize(ignore=inner_path)+content_size  # Site size without old content
+        if site_size > self.site.settings.get("size", 0): self.site.settings["size"] = site_size  # Save to settings if larger
+
+        site_size_limit = self.site.getSizeLimit()*1024*1024
+
+        # Check total site size limit
+        if site_size > site_size_limit:
+            self.log.error("%s: Site too large %s > %s, aborting task..." % (inner_path, site_size, site_size_limit))
+            task = self.site.worker_manager.findTask(inner_path)
+            if task:  # Don't try to download from other peers
+                self.site.worker_manager.failTask(task)
+            return False
+
+        if inner_path == "content.json": return True  # Root content.json is passed
+
+        # Load include details
+        rules = self.getRules(inner_path, content)
+        if not rules:
+            self.log.error("%s: No rules" % inner_path)
+            return False
+
+        # Check include size limit
+        if rules.get("max_size"):  # Include size limit
+            if content_size > rules["max_size"]:
+                self.log.error("%s: Include too large %s > %s" % (inner_path, content_size, rules["max_size"]))
+                return False
+
+        # Check if content includes allowed
+        if rules.get("includes_allowed") == False and content.get("includes"):
+            self.log.error("%s: Includes not allowed" % inner_path)
+            return False  # Includes not allowed
+
+        # Filename limit
+        if rules.get("files_allowed"):
+            for file_inner_path in content["files"].keys():
+                if not re.match("^%s$" % rules["files_allowed"], file_inner_path):
+                    self.log.error("%s: File not allowed" % file_inner_path)
+                    return False
+
+        return True  # All good
+
+    # Verify file validity
+    # Return: None = Same as before, False = Invalid, True = Valid
+    def verifyFile(self, inner_path, file, ignore_same=True):
+        if inner_path.endswith("content.json"):  # content.json: Check using sign
+            from Crypt import CryptBitcoin
+            try:
+                new_content = json.load(file)
+                if inner_path in self.contents:
+                    old_content = self.contents.get(inner_path)
+                    # Check if it's newer than ours
+                    if old_content["modified"] == new_content["modified"] and ignore_same:  # Ignore, have the same content.json
+                        return None
+                    elif old_content["modified"] > new_content["modified"]:  # We have newer
+                        self.log.debug("We have newer %s (Our: %s, Sent: %s)" % (inner_path, old_content["modified"], new_content["modified"]))
+                        gevent.spawn(self.site.publish, inner_path=inner_path)  # Try to fix the broken peers
+                        return False
+                if new_content["modified"] > time.time()+60*60*24:  # Content modified in the far future (allow 1 day window)
+                    self.log.error("%s modify is in the future!" % inner_path)
+                    return False
+                # Check sign
+                sign = new_content.get("sign")
+                signs = new_content.get("signs", {})
+                if "sign" in new_content: del(new_content["sign"])  # The file signed without the sign
+                if "signs" in new_content: del(new_content["signs"])  # The file signed without the signs
+                sign_content = json.dumps(new_content, sort_keys=True)  # Dump the json to string to remove whitespace
+
+                if not self.validContent(inner_path, new_content): return False  # Content not valid (files too large, invalid files)
+
+                if signs:  # New style signing
+                    valid_signers = self.getValidSigners(inner_path, new_content)
+                    signs_required = self.getSignsRequired(inner_path, new_content)
+
+                    if inner_path == "content.json" and len(valid_signers) > 1:  # Check signers_sign on root content.json
+                        if not CryptBitcoin.verify("%s:%s" % (signs_required, ",".join(valid_signers)), self.site.address, new_content["signers_sign"]):
+                            self.log.error("%s invalid signers_sign!" % inner_path)
+                            return False
+
+                    if inner_path != "content.json" and not self.verifyCert(inner_path, new_content):  # Check if cert valid
+                        self.log.error("%s invalid cert!" % inner_path)
+                        return False
+
+                    valid_signs = 0
+                    for address in valid_signers:
+                        if address in signs: valid_signs += CryptBitcoin.verify(sign_content, address, signs[address])
+                        if valid_signs >= signs_required: break  # Break if we have enough signs
+
+                    return valid_signs >= signs_required
+                else:  # Old style signing
+                    return CryptBitcoin.verify(sign_content, self.site.address, sign)
+
+            except Exception, err:
+                self.log.error("Verify sign error: %s" % Debug.formatException(err))
+                return False
+
+        else:  # Check using sha512 hash
+            file_info = self.getFileInfo(inner_path)
+            if file_info:
+                if "sha512" in file_info:
+                    hash_valid = CryptHash.sha512sum(file) == file_info["sha512"]
+                elif "sha1" in file_info:  # Backward compatibility
+                    hash_valid = CryptHash.sha1sum(file) == file_info["sha1"]
+                else:
+                    hash_valid = False
+                if file_info["size"] != file.tell():
+                    self.log.error("%s file size does not match %s <> %s, Hash: %s" % (inner_path, file.tell(),
+                                                                                       file_info["size"], hash_valid))
+                    return False
+                return hash_valid
+
+            else:  # File not in content.json
+                self.log.error("File not in content.json: %s" % inner_path)
+                return False
+
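+    # Usage sketch: verifyFile() is three-valued for content.json files,
+    # e.g. when checking a downloaded copy against the currently loaded one
+    # (see testVerify() below):
+    #   result = content_manager.verifyFile("content.json", file, ignore_same=False)
+    #   # True = valid, False = invalid, None = same "modified" as ours
+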
+    # Get dir from file
+    # Return: data/site/content.json -> data/site
+    def toDir(self, inner_path):
+        file_dir = re.sub("[^/]*?$", "", inner_path).strip("/")
+        if file_dir: file_dir += "/"  # Add / at end if it's not the root
+        return file_dir


 def testSign():
-	global config
-	from Config import config
-	from Site import Site
-	site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH")
-	content_manager = ContentManager(site)
-	content_manager.sign("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", "5JCGE6UUruhfmAfcZ2GYjvrswkaiq7uLo6Gmtf2ep2Jh2jtNzWR")
+    global config
+    from Config import config
+    from Site import Site
+    site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH")
+    content_manager = ContentManager(site)
+    content_manager.sign("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", "5JCGE6UUruhfmAfcZ2GYjvrswkaiq7uLo6Gmtf2ep2Jh2jtNzWR")


 def testVerify():
-	from Config import config
-	from Site import Site
-	#site = Site("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
-	site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH")
+    from Config import config
+    from Site import Site
+    #site = Site("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
+    site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH")

-	content_manager = ContentManager(site)
-	print "Loaded contents:", content_manager.contents.keys()
+    content_manager = ContentManager(site)
+    print "Loaded contents:", content_manager.contents.keys()

-	file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json"))
-	print "content.json valid:", content_manager.verifyFile("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", file, ignore_same=False)
+    file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json"))
+    print "content.json valid:", content_manager.verifyFile("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", file, ignore_same=False)

-	file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json"))
-	print "messages.json valid:", content_manager.verifyFile("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json", file, ignore_same=False)
+    file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json"))
+    print "messages.json valid:", content_manager.verifyFile("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json", file, ignore_same=False)


 def testInfo():
-	from 
Config import config - from Site import Site - site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH") + from Config import config + from Site import Site + site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH") - content_manager = ContentManager(site) - print content_manager.contents.keys() + content_manager = ContentManager(site) + print content_manager.contents.keys() - print content_manager.getFileInfo("index.html") - print content_manager.getIncludeInfo("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json") - print content_manager.getValidSigners("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json") - print content_manager.getValidSigners("data/users/content.json") - print content_manager.getValidSigners("content.json") + print content_manager.getFileInfo("index.html") + print content_manager.getIncludeInfo("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json") + print content_manager.getValidSigners("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json") + print content_manager.getValidSigners("data/users/content.json") + print content_manager.getValidSigners("content.json") if __name__ == "__main__": - import os, sys, logging - os.chdir("../..") - sys.path.insert(0, os.path.abspath(".")) - sys.path.insert(0, os.path.abspath("src")) - logging.basicConfig(level=logging.DEBUG) - from Debug import Debug - from Crypt import CryptHash + import os, sys, logging + os.chdir("../..") + sys.path.insert(0, os.path.abspath(".")) + sys.path.insert(0, os.path.abspath("src")) + logging.basicConfig(level=logging.DEBUG) + from Debug import Debug + from Crypt import CryptHash - #testSign() - testVerify() - #testInfo() + #testSign() + testVerify() + #testInfo() From 3f14d3d20082b672791768a4a317376784a62993 Mon Sep 17 00:00:00 2001 From: Matthew Bell Date: Wed, 17 Jun 2015 23:01:56 +0100 Subject: [PATCH 08/16] partial cleanup of Peer.py --- src/Peer/Peer.py | 342 +++++++++++++++++++++++------------------------ 1 file changed, 165 insertions(+), 177 deletions(-) diff --git a/src/Peer/Peer.py b/src/Peer/Peer.py index 92e49b9f..72824a1b 100644 --- a/src/Peer/Peer.py +++ b/src/Peer/Peer.py @@ -5,207 +5,195 @@ from Debug import Debug # Communicate remote peers class Peer(object): - __slots__ = ("ip", "port", "site", "key", "connection_server", "connection", "last_found", "last_response", "last_ping", "added", "connection_error", "hash_failed", "download_bytes", "download_time") + __slots__ = ("ip", "port", "site", "key", "connection_server", "connection", "last_found", "last_response", "last_ping", "added", "connection_error", "hash_failed", "download_bytes", "download_time") - def __init__(self, ip, port, site=None): - self.ip = ip - self.port = port - self.site = site - self.key = "%s:%s" % (ip, port) - self.connection_server = sys.modules["main"].file_server + def __init__(self, ip, port, site=None): + self.ip = ip + self.port = port + self.site = site + self.key = "%s:%s" % (ip, port) + self.connection_server = sys.modules["main"].file_server - self.connection = None - self.last_found = None # Time of last found in the torrent tracker - self.last_response = None # Time of last successfull response from peer - self.last_ping = None # Last response time for ping - self.added = time.time() + self.connection = None + self.last_found = None # Time of last found in the torrent tracker + self.last_response = None # Time of last successfull response from peer + self.last_ping = None # Last response time for ping + self.added = time.time() - self.connection_error = 0 # Series of connection error 
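+        # Peer health counters: onConnectionError() drops the peer after
+        # 3 consecutive connection errors (see the - EVENTS - section below)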
- self.hash_failed = 0 # Number of bad files from peer - self.download_bytes = 0 # Bytes downloaded - self.download_time = 0 # Time spent to download + self.connection_error = 0 # Series of connection error + self.hash_failed = 0 # Number of bad files from peer + self.download_bytes = 0 # Bytes downloaded + self.download_time = 0 # Time spent to download + def log(self, text): + if self.site: + self.site.log.debug("%s:%s %s" % (self.ip, self.port, text)) + else: + logging.debug("%s:%s %s" % (self.ip, self.port, text)) - def log(self, text): - if self.site: - self.site.log.debug("%s:%s %s" % (self.ip, self.port, text)) - else: - logging.debug("%s:%s %s" % (self.ip, self.port, text)) + # Connect to host + def connect(self, connection=None): + if self.connection: + self.log("Getting connection (Closing %s)..." % self.connection) + self.connection.close() + else: + self.log("Getting connection...") + if connection: # Connection specified + self.connection = connection + else: # Try to find from connection pool or create new connection + self.connection = None - # Connect to host - def connect(self, connection = None): - if self.connection: - self.log("Getting connection (Closing %s)..." % self.connection) - self.connection.close() - else: - self.log("Getting connection...") - - if connection: # Connection specificed - self.connection = connection - else: # Try to find from connection pool or create new connection - self.connection = None + try: + self.connection = self.connection_server.getConnection(self.ip, self.port) + except Exception, err: + self.onConnectionError() + self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" % (Debug.formatException(err), self.connection_error, self.hash_failed)) + self.connection = None - try: - self.connection = self.connection_server.getConnection(self.ip, self.port) - except Exception, err: - self.onConnectionError() - self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" % (Debug.formatException(err), self.connection_error, self.hash_failed)) - self.connection = None + # Check if we have connection to peer + def findConnection(self): + if self.connection and self.connection.connected: # We have connection to peer + return self.connection + else: # Try to find from other sites connections + self.connection = self.connection_server.getConnection(self.ip, self.port, create=False) # Do not create new connection if not found + return self.connection + def __str__(self): + return "Peer %-12s" % self.ip - # Check if we have connection to peer - def findConnection(self): - if self.connection and self.connection.connected: # We have connection to peer - return self.connection - else: # Try to find from other sites connections - self.connection = self.connection_server.getConnection(self.ip, self.port, create=False) # Do not create new connection if not found - return self.connection + def __repr__(self): + return "<%s>" % self.__str__() - - def __str__(self): - return "Peer %-12s" % self.ip + # Peer ip:port to packed 6byte format + def packAddress(self): + return socket.inet_aton(self.ip)+struct.pack("H", self.port) - def __repr__(self): - return "<%s>" % self.__str__() + def unpackAddress(self, packed): + return socket.inet_ntoa(packed[0:4]), struct.unpack_from("H", packed, 4)[0] + # Found a peer on tracker + def found(self): + self.last_found = time.time() - # Peer ip:port to packed 6byte format - def packAddress(self): - return socket.inet_aton(self.ip)+struct.pack("H", self.port) + # Send a command to peer + 
def request(self, cmd, params = {}): + if not self.connection or self.connection.closed: + self.connect() + if not self.connection: + self.onConnectionError() + return None # Connection failed + #if cmd != "ping" and self.last_response and time.time() - self.last_response > 20*60: # If last response if older than 20 minute, ping first to see if still alive + # if not self.ping(): return None - def unpackAddress(self, packed): - return (socket.inet_ntoa(packed[0:4]), struct.unpack_from("H", packed, 4)[0]) + for retry in range(1,3): # Retry 3 times + #if config.debug_socket: self.log.debug("sendCmd: %s %s" % (cmd, params.get("inner_path"))) + try: + response = self.connection.request(cmd, params) + if not response: + raise Exception("Send error") + #if config.debug_socket: self.log.debug("Got response to: %s" % cmd) + if "error" in response: + self.log("%s error: %s" % (cmd, response["error"])) + self.onConnectionError() + else: # Successful request, reset connection error num + self.connection_error = 0 + self.last_response = time.time() + return response + except Exception, err: + if type(err).__name__ == "Notify": # Greenlet kill by worker + self.log("Peer worker got killed: %s, aborting cmd: %s" % (err.message, cmd)) + break + else: + self.onConnectionError() + self.log("%s (connection_error: %s, hash_failed: %s, retry: %s)" % (Debug.formatException(err), self.connection_error, self.hash_failed, retry)) + time.sleep(1*retry) + self.connect() + return None # Failed after 4 retry + # Get a file content from peer + def getFile(self, site, inner_path): + location = 0 + buff = StringIO() + s = time.time() + while 1: # Read in 512k parts + back = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location}) # Get file content from last location + if not back or "body" not in back: # Error + return False - # Found a peer on tracker - def found(self): - self.last_found = time.time() + buff.write(back["body"]) + back["body"] = None # Save memory + if back["location"] == back["size"]: # End of file + break + else: + location = back["location"] + self.download_bytes += back["location"] + self.download_time += (time.time() - s) + buff.seek(0) + return buff + # Send a ping request + def ping(self): + response_time = None + for retry in range(1,3): # Retry 3 times + s = time.time() + with gevent.Timeout(10.0, False): # 10 sec timeout, dont raise exception + response = self.request("ping") - # Send a command to peer - def request(self, cmd, params = {}): - if not self.connection or self.connection.closed: - self.connect() - if not self.connection: - self.onConnectionError() - return None # Connection failed + if response and "body" in response and response["body"] == "Pong!": + response_time = time.time()-s + break # All fine, exit from for loop + # Timeout reached or bad response + self.onConnectionError() + self.connect() + time.sleep(1) - #if cmd != "ping" and self.last_response and time.time() - self.last_response > 20*60: # If last response if older than 20 minute, ping first to see if still alive - # if not self.ping(): return None + if response_time: + self.log("Ping: %.3f" % response_time) + else: + self.log("Ping failed") + self.last_ping = response_time + return response_time - for retry in range(1,3): # Retry 3 times - #if config.debug_socket: self.log.debug("sendCmd: %s %s" % (cmd, params.get("inner_path"))) - try: - response = self.connection.request(cmd, params) - if not response: raise Exception("Send error") - #if config.debug_socket: self.log.debug("Got 
response to: %s" % cmd) - if "error" in response: - self.log("%s error: %s" % (cmd, response["error"])) - self.onConnectionError() - else: # Successful request, reset connection error num - self.connection_error = 0 - self.last_response = time.time() - return response - except Exception, err: - if type(err).__name__ == "Notify": # Greenlet kill by worker - self.log("Peer worker got killed: %s, aborting cmd: %s" % (err.message, cmd)) - break - else: - self.onConnectionError() - self.log("%s (connection_error: %s, hash_failed: %s, retry: %s)" % (Debug.formatException(err), self.connection_error, self.hash_failed, retry)) - time.sleep(1*retry) - self.connect() - return None # Failed after 4 retry + # Request peer exchange from peer + def pex(self, site=None, need_num=5): + if not site: + site = self.site # If no site defined request peers for this site + # give him/her 5 connectable peers + packed_peers = [peer.packAddress() for peer in self.site.getConnectablePeers(5)] + response = self.request("pex", {"site": site.address, "peers": packed_peers, "need": need_num}) + if not response or "error" in response: + return False + added = 0 + for peer in response.get("peers", []): + address = self.unpackAddress(peer) + if site.addPeer(*address): + added += 1 + if added: + self.log("Added peers using pex: %s" % added) + return added + # List modified files since the date + # Return: {inner_path: modification date,...} + def listModified(self, since): + return self.request("listModified", {"since": since, "site": self.site.address}) - # Get a file content from peer - def getFile(self, site, inner_path): - location = 0 - buff = StringIO() - s = time.time() - while 1: # Read in 512k parts - back = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location}) # Get file content from last location - if not back or "body" not in back: # Error - return False + # Stop and remove from site + def remove(self): + self.log("Removing peer...Connection error: %s, Hash failed: %s" % (self.connection_error, self.hash_failed)) + if self.key in self.site.peers: del(self.site.peers[self.key]) + if self.connection: + self.connection.close() - buff.write(back["body"]) - back["body"] = None # Save memory - if back["location"] == back["size"]: # End of file - break - else: - location = back["location"] - self.download_bytes += back["location"] - self.download_time += (time.time() - s) - buff.seek(0) - return buff + # - EVENTS - + # On connection error + def onConnectionError(self): + self.connection_error += 1 + if self.connection_error >= 3: # Dead peer + self.remove() - # Send a ping request - def ping(self): - response_time = None - for retry in range(1,3): # Retry 3 times - s = time.time() - with gevent.Timeout(10.0, False): # 10 sec timeout, dont raise exception - response = self.request("ping") - - if response and "body" in response and response["body"] == "Pong!": - response_time = time.time()-s - break # All fine, exit from for loop - # Timeout reached or bad response - self.onConnectionError() - self.connect() - time.sleep(1) - - if response_time: - self.log("Ping: %.3f" % response_time) - else: - self.log("Ping failed") - self.last_ping = response_time - return response_time - - - # Request peer exchange from peer - def pex(self, site=None, need_num=5): - if not site: site = self.site # If no site definied request peers for this site - packed_peers = [peer.packAddress() for peer in self.site.getConnectablePeers(5)] # give him/her 5 connectable peers - response = self.request("pex", 
{"site": site.address, "peers": packed_peers, "need": need_num}) - if not response or "error" in response: - return False - added = 0 - for peer in response.get("peers", []): - address = self.unpackAddress(peer) - if (site.addPeer(*address)): added += 1 - if added: - self.log("Added peers using pex: %s" % added) - return added - - - # List modified files since the date - # Return: {inner_path: modification date,...} - def listModified(self, since): - response = self.request("listModified", {"since": since, "site": self.site.address}) - return response - - - # Stop and remove from site - def remove(self): - self.log("Removing peer...Connection error: %s, Hash failed: %s" % (self.connection_error, self.hash_failed)) - if self.key in self.site.peers: del(self.site.peers[self.key]) - if self.connection: - self.connection.close() - - - # - EVENTS - - - # On connection error - def onConnectionError(self): - self.connection_error += 1 - if self.connection_error >= 3: # Dead peer - self.remove() - - - # Done working with peer - def onWorkerDone(self): - pass + # Done working with peer + def onWorkerDone(self): + pass From 82b8a9f6c3fb6eeae01e309bfd30e27725485e96 Mon Sep 17 00:00:00 2001 From: Matthew Bell Date: Wed, 17 Jun 2015 23:03:11 +0100 Subject: [PATCH 09/16] partial cleanup of Peer.py --- src/Peer/Peer.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/src/Peer/Peer.py b/src/Peer/Peer.py index 72824a1b..640b04b9 100644 --- a/src/Peer/Peer.py +++ b/src/Peer/Peer.py @@ -15,15 +15,15 @@ class Peer(object): self.connection_server = sys.modules["main"].file_server self.connection = None - self.last_found = None # Time of last found in the torrent tracker - self.last_response = None # Time of last successfull response from peer - self.last_ping = None # Last response time for ping + self.last_found = None # Time of last found in the torrent tracker + self.last_response = None # Time of last successful response from peer + self.last_ping = None # Last response time for ping self.added = time.time() - self.connection_error = 0 # Series of connection error - self.hash_failed = 0 # Number of bad files from peer - self.download_bytes = 0 # Bytes downloaded - self.download_time = 0 # Time spent to download + self.connection_error = 0 # Series of connection error + self.hash_failed = 0 # Number of bad files from peer + self.download_bytes = 0 # Bytes downloaded + self.download_time = 0 # Time spent to download def log(self, text): if self.site: @@ -39,7 +39,7 @@ class Peer(object): else: self.log("Getting connection...") - if connection: # Connection specified + if connection: # Connection specified self.connection = connection else: # Try to find from connection pool or create new connection self.connection = None @@ -53,9 +53,9 @@ class Peer(object): # Check if we have connection to peer def findConnection(self): - if self.connection and self.connection.connected: # We have connection to peer + if self.connection and self.connection.connected: # We have connection to peer return self.connection - else: # Try to find from other sites connections + else: # Try to find from other sites connections self.connection = self.connection_server.getConnection(self.ip, self.port, create=False) # Do not create new connection if not found return self.connection @@ -77,7 +77,7 @@ class Peer(object): self.last_found = time.time() # Send a command to peer - def request(self, cmd, params = {}): + def request(self, cmd, params={}): if not self.connection or 
self.connection.closed: self.connect() if not self.connection: @@ -102,7 +102,7 @@ class Peer(object): self.last_response = time.time() return response except Exception, err: - if type(err).__name__ == "Notify": # Greenlet kill by worker + if type(err).__name__ == "Notify": # Greenlet killed by worker self.log("Peer worker got killed: %s, aborting cmd: %s" % (err.message, cmd)) break else: From 05e15e025507be5ac2aeff7ca24245743bb427d2 Mon Sep 17 00:00:00 2001 From: Matthew Bell Date: Wed, 17 Jun 2015 23:04:49 +0100 Subject: [PATCH 10/16] partial cleanup of Peer.py --- src/Peer/Peer.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/Peer/Peer.py b/src/Peer/Peer.py index 640b04b9..5c31fdda 100644 --- a/src/Peer/Peer.py +++ b/src/Peer/Peer.py @@ -136,14 +136,14 @@ class Peer(object): # Send a ping request def ping(self): response_time = None - for retry in range(1,3): # Retry 3 times + for retry in range(1, 3): # Retry 3 times s = time.time() with gevent.Timeout(10.0, False): # 10 sec timeout, dont raise exception response = self.request("ping") if response and "body" in response and response["body"] == "Pong!": response_time = time.time()-s - break # All fine, exit from for loop + break # All fine, exit from for loop # Timeout reached or bad response self.onConnectionError() self.connect() @@ -160,7 +160,7 @@ class Peer(object): def pex(self, site=None, need_num=5): if not site: site = self.site # If no site defined request peers for this site - # give him/her 5 connectable peers + # give him/her 5 connectible peers packed_peers = [peer.packAddress() for peer in self.site.getConnectablePeers(5)] response = self.request("pex", {"site": site.address, "peers": packed_peers, "need": need_num}) if not response or "error" in response: From e4ba0fdb53aa22f7fb40745c8b31ad1439e6acc4 Mon Sep 17 00:00:00 2001 From: Matthew Bell Date: Wed, 17 Jun 2015 23:06:41 +0100 Subject: [PATCH 11/16] partial cleanup of Peer.py --- src/Peer/Peer.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/src/Peer/Peer.py b/src/Peer/Peer.py index 5c31fdda..5c57d2df 100644 --- a/src/Peer/Peer.py +++ b/src/Peer/Peer.py @@ -5,7 +5,8 @@ from Debug import Debug # Communicate remote peers class Peer(object): - __slots__ = ("ip", "port", "site", "key", "connection_server", "connection", "last_found", "last_response", "last_ping", "added", "connection_error", "hash_failed", "download_bytes", "download_time") + __slots__ = ("ip", "port", "site", "key", "connection_server", "connection", "last_found", "last_response", + "last_ping", "added", "connection_error", "hash_failed", "download_bytes", "download_time") def __init__(self, ip, port, site=None): self.ip = ip @@ -56,7 +57,7 @@ class Peer(object): if self.connection and self.connection.connected: # We have connection to peer return self.connection else: # Try to find from other sites connections - self.connection = self.connection_server.getConnection(self.ip, self.port, create=False) # Do not create new connection if not found + self.connection = self.connection_server.getConnection(self.ip, self.port, create=False) # Do not create new connection if not found return self.connection def __str__(self): @@ -107,7 +108,9 @@ class Peer(object): break else: self.onConnectionError() - self.log("%s (connection_error: %s, hash_failed: %s, retry: %s)" % (Debug.formatException(err), self.connection_error, self.hash_failed, retry)) + self.log("%s (connection_error: %s, hash_failed: %s, retry: %s)" % (Debug.formatException(err), 
+ self.connection_error, + self.hash_failed, retry)) time.sleep(1*retry) self.connect() return None # Failed after 4 retry @@ -117,14 +120,14 @@ class Peer(object): location = 0 buff = StringIO() s = time.time() - while 1: # Read in 512k parts + while True: # Read in 512k parts back = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location}) # Get file content from last location - if not back or "body" not in back: # Error + if not back or "body" not in back: # Error return False buff.write(back["body"]) - back["body"] = None # Save memory - if back["location"] == back["size"]: # End of file + back["body"] = None # Save memory + if back["location"] == back["size"]: # End of file break else: location = back["location"] @@ -138,7 +141,7 @@ class Peer(object): response_time = None for retry in range(1, 3): # Retry 3 times s = time.time() - with gevent.Timeout(10.0, False): # 10 sec timeout, dont raise exception + with gevent.Timeout(10.0, False): # 10 sec timeout, don't raise exception response = self.request("ping") if response and "body" in response and response["body"] == "Pong!": From 371c74498d01be375eddb09376bc41bdac136d5e Mon Sep 17 00:00:00 2001 From: Matthew Bell Date: Wed, 17 Jun 2015 23:08:45 +0100 Subject: [PATCH 12/16] inherit from object --- src/Content/ContentManager.py | 2 +- src/Worker/Worker.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Content/ContentManager.py b/src/Content/ContentManager.py index 8b4de163..f77f81c3 100644 --- a/src/Content/ContentManager.py +++ b/src/Content/ContentManager.py @@ -3,7 +3,7 @@ from Debug import Debug from Crypt import CryptHash from Config import config -class ContentManager: +class ContentManager(object): def __init__(self, site): self.site = site self.log = self.site.log diff --git a/src/Worker/Worker.py b/src/Worker/Worker.py index 96ff7801..242ca9ef 100644 --- a/src/Worker/Worker.py +++ b/src/Worker/Worker.py @@ -2,7 +2,7 @@ import gevent, time, logging, shutil, os from Peer import Peer from Debug import Debug -class Worker: +class Worker(object): def __init__(self, manager, peer): self.manager = manager self.peer = peer From f58aa5f78e6f223d55d27560a4c034c247ca3ae4 Mon Sep 17 00:00:00 2001 From: HelloZeroNet Date: Thu, 18 Jun 2015 02:53:01 +0200 Subject: [PATCH 13/16] Fix for importing libs before monkey patching breaks tor support --- src/main.py | 7 ++----- zeronet.py | 8 +++----- 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/src/main.py b/src/main.py index fffab240..1f158669 100644 --- a/src/main.py +++ b/src/main.py @@ -3,15 +3,12 @@ import os import sys import time -import urllib2 - +import logging # Third party modules import gevent from gevent import monkey -# ZeroNet modules -import logging update_after_shutdown = False # If set True then update and restart zeronet after main loop ended @@ -68,7 +65,7 @@ logging.debug("Config: %s" % config) # Socks Proxy monkey patch if config.proxy: from util import SocksProxy - + import urllib2 logging.info("Patching sockets to socks proxy: %s" % config.proxy) config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost SocksProxy.monkeyPath(*config.proxy.split(":")) diff --git a/zeronet.py b/zeronet.py index 3ed0efe2..a287d1f1 100644 --- a/zeronet.py +++ b/zeronet.py @@ -2,12 +2,7 @@ # Included modules import os -import gc import sys -import traceback - -# ZeroNet Modules -import update def main(): @@ -19,6 +14,8 @@ def main(): import main main.start() if main.update_after_shutdown: # 
+            import gc
+            import update
             # Try cleanup openssl
             try:
                 if "lib.opensslVerify" in sys.modules:
@@ -38,6 +35,7 @@ def main():
                 logger.removeHandler(handler)

     except (Exception, ):  # Prevent closing
+        import traceback
         traceback.print_exc()
         traceback.print_exc(file=open("log/error.log", "a"))

From 4ceab7820fc291bd81a23d80f173e1d267932759 Mon Sep 17 00:00:00 2001
From: Rarbg
Date: Thu, 18 Jun 2015 14:21:16 +0300
Subject: [PATCH 14/16] Typo fix asyc -> async

Typo fix asyc -> async
---
 src/Ui/template/wrapper.html | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/Ui/template/wrapper.html b/src/Ui/template/wrapper.html
index 4099629d..dfa00aad 100644
--- a/src/Ui/template/wrapper.html
+++ b/src/Ui/template/wrapper.html
@@ -57,7 +57,7 @@
 	permissions = {permissions}
 	show_loadingscreen = {show_loadingscreen}
 	server_url = '{server_url}'
-
+
-
\ No newline at end of file
+

From 3c3b381cccb60d84cd2c946344a3777eb45e1533 Mon Sep 17 00:00:00 2001
From: Rarbg
Date: Thu, 18 Jun 2015 14:21:43 +0300
Subject: [PATCH 15/16] Typo fix asyc -> async

Typo fix asyc -> async
---
 .../testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/index.html | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/index.html b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/index.html
index 42285031..9feb328b 100644
--- a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/index.html
+++ b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/index.html
@@ -131,7 +131,7 @@
-
+
-
\ No newline at end of file
+

From d908a3a456653bbf0ba160b163bae1689838a063 Mon Sep 17 00:00:00 2001
From: HelloZeroNet
Date: Thu, 18 Jun 2015 20:31:33 +0200
Subject: [PATCH 16/16] rev245, Fix for gevent 1.0.2 ssl, Disable async js load (it's slower on my browser)

---
 src/Config.py                | 2 +-
 src/Crypt/CryptConnection.py | 13 +++++++++----
 src/Ui/template/wrapper.html | 2 +-
 src/main.py                  | 2 +-
 4 files changed, 12 insertions(+), 7 deletions(-)

diff --git a/src/Config.py b/src/Config.py
index c3398a64..a3255e30 100644
--- a/src/Config.py
+++ b/src/Config.py
@@ -4,7 +4,7 @@ import ConfigParser
 class Config(object):
     def __init__(self):
         self.version = "0.3.1"
-        self.rev = 242
+        self.rev = 245
         self.parser = self.createArguments()
         argv = sys.argv[:]  # Copy command line arguments
         argv = self.parseConfig(argv)  # Add arguments from config file
diff --git a/src/Crypt/CryptConnection.py b/src/Crypt/CryptConnection.py
index 9dd33741..764739ce 100644
--- a/src/Crypt/CryptConnection.py
+++ b/src/Crypt/CryptConnection.py
@@ -1,8 +1,13 @@
-import sys, logging, os
+import sys
+import logging
+import os
+import ssl
+
 from Config import config
 import gevent
 from util import SslPatch

+
 class CryptConnectionManager:
     def __init__(self):
         # OpenSSL params
@@ -30,9 +35,9 @@ class CryptConnectionManager:
         if crypt == "tls-rsa":
             ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:HIGH:!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"
             if server:
-                return gevent.ssl.wrap_socket(sock, server_side=server, keyfile='%s/key-rsa.pem' % config.data_dir, certfile='%s/cert-rsa.pem' % config.data_dir, ciphers=ciphers)
+                return ssl.wrap_socket(sock, server_side=server, keyfile='%s/key-rsa.pem' % config.data_dir, certfile='%s/cert-rsa.pem' % config.data_dir, ciphers=ciphers)
             else:
-                return gevent.ssl.wrap_socket(sock, ciphers=ciphers)
+                return ssl.wrap_socket(sock, ciphers=ciphers)
         else:
             return sock
@@ -43,7 +48,7 @@ class CryptConnectionManager:
         if os.path.isfile(file_path): os.unlink(file_path)

-    # Loand and create cert files is necessary
+    # Load and create cert files if necessary
     def loadCerts(self):
         if config.disable_encryption: return False
diff --git a/src/Ui/template/wrapper.html b/src/Ui/template/wrapper.html
index dfa00aad..dcbc5e31 100644
--- a/src/Ui/template/wrapper.html
+++ b/src/Ui/template/wrapper.html
@@ -57,7 +57,7 @@
 	permissions = {permissions}
 	show_loadingscreen = {show_loadingscreen}
 	server_url = '{server_url}'
-
+
diff --git a/src/main.py b/src/main.py
index 1f158669..b1580662 100644
--- a/src/main.py
+++ b/src/main.py
@@ -54,7 +54,7 @@ if config.debug:
 else:
     console_log.setLevel(logging.INFO)  # Display only important info to console

-monkey.patch_all(thread=False, ssl=False)  # Make time, socket gevent compatible. Not thread: pyfilesystem and system tray icon not compatible, Not ssl: broken in 2.7.9
+monkey.patch_all(thread=False)  # Make time, socket gevent compatible. Not thread: pyfilesystem and system tray icon not compatible
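
Taken together, patches 13 and 16 revolve around one rule: `monkey.patch_all()` only makes code cooperative if it runs before the modules that touch sockets are imported, which is why patch 13 defers the `urllib2`, `gc`, `update`, and `traceback` imports and blames early imports for the broken Tor support. Below is a minimal sketch of that ordering rule, using only the public gevent and stdlib APIs of the Python 2 era this codebase targets; the `fetch` helper and the example URL are illustrative and not part of ZeroNet:

```python
# Sketch: gevent monkey patching must run before importing modules that
# use sockets, otherwise those modules may keep blocking behaviour.
from gevent import monkey
monkey.patch_all(thread=False)  # same flags as src/main.py after patch 16

import gevent
import urllib2  # imported *after* patch_all(), so it sees the patched socket


def fetch(url):
    # Blocks only this greenlet; other greenlets keep running meanwhile.
    return urllib2.urlopen(url, timeout=10).read()


# Three downloads run concurrently on one OS thread. Moving the urllib2
# import above monkey.patch_all() risks it binding unpatched pieces of the
# socket/ssl machinery -- the import-order breakage patch 13 fixes for the
# socks/Tor proxy path.
jobs = [gevent.spawn(fetch, "http://example.com/") for _ in range(3)]
gevent.joinall(jobs, timeout=30)
```

Patch 16 leans on the same mechanism from the other side: with `ssl=False` dropped from `patch_all()`, the standard library `ssl` module is itself cooperative by the time `CryptConnectionManager` runs, so the plain `ssl.wrap_socket()` calls (a Python 2-era API) no longer need to go through `gevent.ssl`, which is presumably what broke under gevent 1.0.2 per the commit subject.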