From efb1dc32105e3cd75eb112788d0def0303cac5b2 Mon Sep 17 00:00:00 2001 From: HelloZeroNet Date: Wed, 14 Jan 2015 02:41:13 +0100 Subject: [PATCH 01/12] file download queue priority by browser request, newer content json log, peer remove key error fix, peer request error also a connection error, new sites created with own flag --- src/Peer/Peer.py | 3 +++ src/Site/Site.py | 7 ++++--- src/Ui/UiRequest.py | 2 +- src/Ui/UiWebsocket.py | 14 +++++++++----- src/Worker/WorkerManager.py | 31 +++++++++++++++++++------------ src/main.py | 2 ++ 6 files changed, 38 insertions(+), 21 deletions(-) diff --git a/src/Peer/Peer.py b/src/Peer/Peer.py index af530f88..1c308b60 100644 --- a/src/Peer/Peer.py +++ b/src/Peer/Peer.py @@ -11,6 +11,8 @@ class Peer: self.ip = ip self.port = port self.site = site + self.key = "%s:%s" % (ip, port) + self.socket = None self.last_found = None self.added = time.time() @@ -43,6 +45,7 @@ class Peer: response = msgpack.unpackb(self.socket.recv()) if "error" in response: self.log.debug("%s %s error: %s" % (cmd, params, response["error"])) + self.onConnectionError() else: # Successful request, reset connection error num self.connection_error = 0 return response diff --git a/src/Site/Site.py b/src/Site/Site.py index 73c6423b..44c73df3 100644 --- a/src/Site/Site.py +++ b/src/Site/Site.py @@ -179,7 +179,7 @@ class Site: # Check and download if file not exits - def needFile(self, inner_path, update=False, blocking=True, peer=None): + def needFile(self, inner_path, update=False, blocking=True, peer=None, priority=0): if os.path.isfile(self.getPath(inner_path)) and not update: # File exits, no need to do anything return True elif self.settings["serving"] == False: # Site not serving @@ -189,12 +189,12 @@ class Site: self.log.debug("Need content.json first") self.announce() if inner_path != "content.json": # Prevent double download - task = self.worker_manager.addTask("content.json", peer) + task = self.worker_manager.addTask("content.json", peer, priority=99999) task.get() self.loadContent() if not self.content: return False - task = self.worker_manager.addTask(inner_path, peer) + task = self.worker_manager.addTask(inner_path, peer, priority=priority) if blocking: return task.get() else: @@ -330,6 +330,7 @@ class Site: if self.content["modified"] == content["modified"]: # Ignore, have the same content.json return None elif self.content["modified"] > content["modified"]: # We have newer + self.log.debug("We have newer content.json (Our: %s, Sent: %s)" % (self.content["modified"], content["modified"])) return False if content["modified"] > time.time()+60*60*24: # Content modified in the far future (allow 1 day window) self.log.error("Content.json modify is in the future!") diff --git a/src/Ui/UiRequest.py b/src/Ui/UiRequest.py index 84767abf..c870e157 100644 --- a/src/Ui/UiRequest.py +++ b/src/Ui/UiRequest.py @@ -158,7 +158,7 @@ class UiRequest: else: # File not exits, try to download site = SiteManager.need(match.group("site"), all_file=False) self.sendHeader(content_type=self.getContentType(file_path)) # ?? 
Get Exception without this - result = site.needFile(match.group("inner_path")) # Wait until file downloads + result = site.needFile(match.group("inner_path"), priority=1) # Wait until file downloads return self.actionFile(file_path) else: # Bad url diff --git a/src/Ui/UiWebsocket.py b/src/Ui/UiWebsocket.py index 8056fc9f..6b7b2520 100644 --- a/src/Ui/UiWebsocket.py +++ b/src/Ui/UiWebsocket.py @@ -6,6 +6,7 @@ class UiWebsocket: def __init__(self, ws, site, server): self.ws = ws self.site = site + self.log = site.log self.server = server self.next_message_id = 1 self.waiting_cb = {} # Waiting for callback. Key: message_id, Value: function pointer @@ -35,7 +36,7 @@ class UiWebsocket: if config.debug: # Allow websocket errors to appear on /Debug import sys sys.modules["src.main"].DebugHook.handleError() - self.site.log.error("WebSocket error: %s" % err) + self.log.error("WebSocket error: %s" % err) return "Bye." @@ -64,9 +65,12 @@ class UiWebsocket: def send(self, message, cb = None): message["id"] = self.next_message_id # Add message id to allow response self.next_message_id += 1 - self.ws.send(json.dumps(message)) - if cb: # Callback after client responsed - self.waiting_cb[message["id"]] = cb + try: + self.ws.send(json.dumps(message)) + if cb: # Callback after client responsed + self.waiting_cb[message["id"]] = cb + except Exception, err: + self.log.debug("Websocket send error: %s" % err) # Handle incoming messages @@ -107,7 +111,7 @@ class UiWebsocket: if req["to"] in self.waiting_cb: self.waiting_cb(req["result"]) # Call callback function else: - self.site.log.error("Websocket callback not found: %s" % req) + self.log.error("Websocket callback not found: %s" % req) # Send a simple pong answer diff --git a/src/Worker/WorkerManager.py b/src/Worker/WorkerManager.py index f54bc448..d5e63fe8 100644 --- a/src/Worker/WorkerManager.py +++ b/src/Worker/WorkerManager.py @@ -8,7 +8,7 @@ class WorkerManager: def __init__(self, site): self.site = site self.workers = {} # Key: ip:port, Value: Worker.Worker - self.tasks = [] # {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "time_start": time.time(), "peers": peers} + self.tasks = [] # {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "time_start": time.time(), "peers": peers, "priority": 0} self.log = logging.getLogger("WorkerManager:%s" % self.site.address_short) self.process_taskchecker = gevent.spawn(self.checkTasks) @@ -40,15 +40,19 @@ class WorkerManager: continue # One reannounce per loop + # Tasks sorted by this + def taskSorter(self, task): + if task["inner_path"] == "content.json": return 9999 # Content.json always prority + if task["inner_path"] == "index.html": return 9998 # index.html also important + return task["priority"]-task["workers_num"] # Prefer more priority and less workers + + # Returns the next free or less worked task - def getTask(self, peer, only_free=False): - best_task = None - for task in self.tasks: # Find out the task with lowest worker number + def getTask(self, peer): + self.tasks.sort(key=self.taskSorter, reverse=True) # Sort tasks by priority and worker numbers + for task in self.tasks: # Find a task if task["peers"] and peer not in task["peers"]: continue # This peer not allowed to pick this task - if task["inner_path"] == "content.json": return task # Content.json always prority - if not best_task or task["workers_num"] < best_task["workers_num"]: # If task has lower worker number then its better - best_task = task - return 
best_task + return task # New peers added to site @@ -76,21 +80,24 @@ class WorkerManager: if worker.task == task: workers.append(worker) return workers + # Ends and remove a worker def removeWorker(self, worker): worker.running = False - del(self.workers[worker.key]) + if worker.key in self.workers: del(self.workers[worker.key]) self.log.debug("Removed worker, workers: %s/%s" % (len(self.workers), MAX_WORKERS)) # Create new task and return asyncresult - def addTask(self, inner_path, peer=None): + def addTask(self, inner_path, peer=None, priority = 0): self.site.onFileStart(inner_path) # First task, trigger site download started task = self.findTask(inner_path) if task: # Already has task for that file - if peer and task["peers"]: # This peer has new version too + if peer and task["peers"]: # This peer also has new version, add it to task possible peers task["peers"].append(peer) self.startWorkers() + if priority: + task["priority"] += priority # Boost on priority return task["evt"] else: # No task for that file yet evt = gevent.event.AsyncResult() @@ -98,7 +105,7 @@ class WorkerManager: peers = [peer] # Only download from this peer else: peers = None - task = {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "time_start": time.time(), "peers": peers} + task = {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "time_start": time.time(), "peers": peers, "priority": priority} self.tasks.append(task) self.log.debug("New task: %s" % task) self.startWorkers() diff --git a/src/main.py b/src/main.py index 4b3dab92..f031a743 100644 --- a/src/main.py +++ b/src/main.py @@ -81,6 +81,8 @@ def siteCreate(): logging.info("Creating content.json...") site = Site(address) site.signContent(privatekey) + site.settings["own"] = True + site.saveSettings() logging.info("Site created!") From 79f26e293c1c9977a22018121d9ffcbccd714d42 Mon Sep 17 00:00:00 2001 From: Nodeswitch Date: Wed, 14 Jan 2015 07:45:06 +0000 Subject: [PATCH 02/12] Update README.md Minor changes to wording to give better understanding. --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index ee2c02d7..43b4b00f 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Decentralized websites using Bitcoin crypto and BitTorrent network - When you visit a new zeronet site, it's trying to find peers using BitTorrent network and download the site files (html, css, js...) from them. - Each visited sites become also served by You. - Every site containing a `site.json` which holds all other files sha1 hash and a sign generated using site's private key. - - If the site owner (who has the private key for the site address) modifies the site, then he/she signs the new `content.json` and publish it to the peers. After the peers verified the `content.json` integrity using the sign they download the modified files and publish the new content to other peers. + - If the site owner (who has the private key for the site address) modifies the site, then he/she signs the new `content.json` and publish it to the peers. After the peers have verified the `content.json` integrity using the sign they download the modified files and publish the new content to other peers. 
## Screenshot @@ -35,7 +35,7 @@ Windows: Linux (Debian): - `apt-get install python-pip` - - `pip install pyzmq` (if drops compile error then `apt-get install python-dev` and try again) + - `pip install pyzmq` (if it drops a compile error then run `apt-get install python-dev` and try again) - `pip install gevent` - `pip install msgpack-python` - start using `python zeronet.py` @@ -76,4 +76,4 @@ Site:13DNDk..bhC2 Successfuly published to 3 peers Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX -#### Thank you! \ No newline at end of file +#### Thank you! From 77dd59b2459f4f52ece49dfa0471db4ac56d9989 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=C3=A9di-R=C3=A9mi=20Hashim?= Date: Wed, 14 Jan 2015 17:35:40 +0000 Subject: [PATCH 03/12] Fixed typo --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 43b4b00f..bceb0446 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ Decentralized websites using Bitcoin crypto and BitTorrent network - We believe in open, free and uncensored network and communication. - No single point of failure: Site goes on until at least 1 peer serving it. - No hosting costs: Site served by visitors. - - Imposible to shut down: It's nowhere because it's everywhere. + - Impossible to shut down: It's nowhere because it's everywhere. - Fast and works offline: You can access the site even if your internet is gone. From bcd0c0025aa5c8d23dba4362c75f8dbaf236dd27 Mon Sep 17 00:00:00 2001 From: HelloZeroNet Date: Wed, 14 Jan 2015 22:57:43 +0100 Subject: [PATCH 04/12] no need to specify content.json priority, its always downloaded first, peer broken after 3 hash failes, extra priority for css and js files --- README.md | 1 + src/Site/Site.py | 2 +- src/Worker/Worker.py | 4 +++- src/Worker/WorkerManager.py | 32 +++++++++++++++++--------------- 4 files changed, 22 insertions(+), 17 deletions(-) diff --git a/README.md b/README.md index bceb0446..b96c85f2 100644 --- a/README.md +++ b/README.md @@ -42,6 +42,7 @@ Linux (Debian): ## How can I create a ZeroNet site? +Shut down zeronet.py if you are running it already ``` $ zeronet.py siteCreate ... diff --git a/src/Site/Site.py b/src/Site/Site.py index 44c73df3..b25affca 100644 --- a/src/Site/Site.py +++ b/src/Site/Site.py @@ -189,7 +189,7 @@ class Site: self.log.debug("Need content.json first") self.announce() if inner_path != "content.json": # Prevent double download - task = self.worker_manager.addTask("content.json", peer, priority=99999) + task = self.worker_manager.addTask("content.json", peer) task.get() self.loadContent() if not self.content: return False diff --git a/src/Worker/Worker.py b/src/Worker/Worker.py index 018f98c7..239d2394 100644 --- a/src/Worker/Worker.py +++ b/src/Worker/Worker.py @@ -24,8 +24,10 @@ class Worker: if task["workers_num"] > 0: # Wait a bit if someone already working on it self.manager.log.debug("%s: Someone already working on %s, sleeping 1 sec..." 
% (self.key, task["inner_path"])) time.sleep(1) + self.manager.log.debug("%s: %s, task done after sleep: %s" % (self.key, task["inner_path"], task["done"])) if task["done"] == False: + if not task["time_started"]: task["time_started"] = time.time() # Task started now self.task = task task["workers_num"] += 1 buff = self.peer.getFile(task["site"].address, task["inner_path"]) @@ -49,7 +51,7 @@ class Worker: else: # Hash failed self.task = None self.peer.hash_failed += 1 - if self.peer.hash_failed > 5: # Broken peer + if self.peer.hash_failed >= 3: # Broken peer break task["workers_num"] -= 1 self.manager.log.error("%s: Hash failed: %s" % (self.key, task["inner_path"])) diff --git a/src/Worker/WorkerManager.py b/src/Worker/WorkerManager.py index d5e63fe8..4409fbf4 100644 --- a/src/Worker/WorkerManager.py +++ b/src/Worker/WorkerManager.py @@ -8,7 +8,7 @@ class WorkerManager: def __init__(self, site): self.site = site self.workers = {} # Key: ip:port, Value: Worker.Worker - self.tasks = [] # {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "time_start": time.time(), "peers": peers, "priority": 0} + self.tasks = [] # {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "time_started": None, "time_added": time.time(), "peers": peers, "priority": 0} self.log = logging.getLogger("WorkerManager:%s" % self.site.address_short) self.process_taskchecker = gevent.spawn(self.checkTasks) @@ -20,31 +20,33 @@ class WorkerManager: if not self.tasks: continue tasks = self.tasks[:] # Copy it so removing elements wont cause any problem for task in tasks: - if time.time() >= task["time_start"]+60: # Task timed out - self.log.debug("Cleaning up task: %s" % task) - + if (task["time_started"] and time.time() >= task["time_started"]+60) or (time.time() >= task["time_added"]+60 and not self.workers): # Task taking too long time, kill it + self.log.debug("Timeout, Cleaning up task: %s" % task) # Clean up workers workers = self.findWorkers(task) for worker in workers: worker.stop() - # Remove task self.failTask(task) - elif time.time() >= task["time_start"]+15: # Task taking long time - self.log.debug("Task taking long time, find more peers: %s" % task["inner_path"]) - task["site"].announce() # Find more peers - if task["peers"]: # Release the peer olck - self.log.debug("Task peer lock release: %s" % task["inner_path"]) - task["peers"] = [] - self.startWorkers() - continue # One reannounce per loop + + elif (task["time_started"] and time.time() >= task["time_started"]+15) or not self.workers: # Task started more than 15 sec ago or no workers + self.log.debug("Task taking more than 15 secs, find more peers: %s" % task["inner_path"]) + task["site"].announce() # Find more peers + if task["peers"]: # Release the peer olck + self.log.debug("Task peer lock release: %s" % task["inner_path"]) + task["peers"] = [] + self.startWorkers() + break # One reannounce per loop + # Tasks sorted by this def taskSorter(self, task): if task["inner_path"] == "content.json": return 9999 # Content.json always prority if task["inner_path"] == "index.html": return 9998 # index.html also important - return task["priority"]-task["workers_num"] # Prefer more priority and less workers + priority = task["priority"] + if task["inner_path"].endswith(".js") or task["inner_path"].endswith(".css"): priority += 1 # download js and css files first + return priority-task["workers_num"] # Prefer more priority and less workers # Returns the next free or less worked task @@ -105,7 
+107,7 @@ class WorkerManager: peers = [peer] # Only download from this peer else: peers = None - task = {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "time_start": time.time(), "peers": peers, "priority": priority} + task = {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "time_added": time.time(), "time_started": None, "peers": peers, "priority": priority} self.tasks.append(task) self.log.debug("New task: %s" % task) self.startWorkers() From aae1022c79c2956cc26809a330a087c8a4397294 Mon Sep 17 00:00:00 2001 From: HelloZeroNet Date: Thu, 15 Jan 2015 23:24:51 +0100 Subject: [PATCH 05/12] more logging on update requests, only add peer to already peer locked tasks, no traceback on peer send error, only allow 1 sec timeout when publishing, request 50 peers from tracker, peer limited startworkers, sitePublish to 20 peers --- README.md | 3 ++- src/File/FileRequest.py | 7 +++++-- src/Peer/Peer.py | 3 --- src/Site/Site.py | 6 +++--- src/Worker/WorkerManager.py | 33 ++++++++++++++++++++++----------- src/main.py | 2 +- 6 files changed, 33 insertions(+), 21 deletions(-) diff --git a/README.md b/README.md index b96c85f2..21c9736f 100644 --- a/README.md +++ b/README.md @@ -72,9 +72,10 @@ Site:13DNDk..bhC2 Successfuly published to 3 peers ``` - That's it! You successfuly signed and published your modifications. - ## If you want to help keep this project alive Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX #### Thank you! + +More info, help, changelog, zeronet sites: http://www.reddit.com/r/zeronet/ \ No newline at end of file diff --git a/src/File/FileRequest.py b/src/File/FileRequest.py index df655bda..6bb4a29b 100644 --- a/src/File/FileRequest.py +++ b/src/File/FileRequest.py @@ -43,6 +43,7 @@ class FileRequest: buff = StringIO(params["body"]) valid = site.verifyFile(params["inner_path"], buff) if valid == True: # Valid and changed + self.log.debug("Update for %s looks valid, saving..." % params["inner_path"]) buff.seek(0) file = open(site.getPath(params["inner_path"]), "wb") shutil.copyfileobj(buff, file) # Write buff to disk @@ -60,12 +61,14 @@ class FileRequest: elif valid == None: # Not changed peer = site.addPeer(*params["peer"], return_peer = True) # Add or get peer + self.log.debug("New peer for locked files: %s, tasks: %s" % (peer.key, len(site.worker_manager.tasks)) ) for task in site.worker_manager.tasks: # New peer add to every ongoing task - site.needFile(task["inner_path"], peer=peer, update=True, blocking=False) # Download file from peer + if task["peers"]: site.needFile(task["inner_path"], peer=peer, update=True, blocking=False) # Download file from this peer too if its peer locked - self.send({"ok": "File file not changed"}) + self.send({"ok": "File not changed"}) else: # Invalid sign or sha1 hash + self.log.debug("Update for %s is invalid" % params["inner_path"]) self.send({"error": "File invalid"}) diff --git a/src/Peer/Peer.py b/src/Peer/Peer.py index 1c308b60..002f9111 100644 --- a/src/Peer/Peer.py +++ b/src/Peer/Peer.py @@ -52,9 +52,6 @@ class Peer: except Exception, err: self.onConnectionError() self.log.error("%s" % err) - if config.debug: - import traceback - traceback.print_exc() self.socket.close() time.sleep(1) self.connect() diff --git a/src/Site/Site.py b/src/Site/Site.py index b25affca..6d7f818a 100644 --- a/src/Site/Site.py +++ b/src/Site/Site.py @@ -152,12 +152,12 @@ class Site: # Update content.json on peers def publish(self, limit=3): - self.log.info( "Publishing to %s/%s peers..." 
% (len(self.peers), limit) ) + self.log.info( "Publishing to %s/%s peers..." % (limit, len(self.peers)) ) published = 0 for key, peer in self.peers.items(): # Send update command to each peer result = {"exception": "Timeout"} try: - with gevent.Timeout(2, False): # 2 sec timeout + with gevent.Timeout(1, False): # 1 sec timeout result = peer.sendCmd("update", { "site": self.address, "inner_path": "content.json", @@ -229,7 +229,7 @@ class Site: try: tracker.connect() tracker.poll_once() - tracker.announce(info_hash=hashlib.sha1(self.address).hexdigest()) + tracker.announce(info_hash=hashlib.sha1(self.address).hexdigest(), num_want=50) back = tracker.poll_once() except Exception, err: self.log.error("Tracker error: %s" % err) diff --git a/src/Worker/WorkerManager.py b/src/Worker/WorkerManager.py index 4409fbf4..6e42dc7e 100644 --- a/src/Worker/WorkerManager.py +++ b/src/Worker/WorkerManager.py @@ -20,7 +20,7 @@ class WorkerManager: if not self.tasks: continue tasks = self.tasks[:] # Copy it so removing elements wont cause any problem for task in tasks: - if (task["time_started"] and time.time() >= task["time_started"]+60) or (time.time() >= task["time_added"]+60 and not self.workers): # Task taking too long time, kill it + if (task["time_started"] and time.time() >= task["time_started"]+60) or (time.time() >= task["time_added"]+60 and not self.workers): # Task taking too long time, or no peer after 60sec kill it self.log.debug("Timeout, Cleaning up task: %s" % task) # Clean up workers workers = self.findWorkers(task) @@ -62,17 +62,27 @@ class WorkerManager: self.startWorkers() + # Add new worker + def addWorker(self, peer): + key = peer.key + if key not in self.workers and len(self.workers) < MAX_WORKERS: # We dont have worker for that peer and workers num less than max + worker = Worker(self, peer) + self.workers[key] = worker + worker.key = key + worker.start() + return worker + else: # We have woker for this peer or its over the limit + return False + + # Start workers to process tasks - def startWorkers(self): + def startWorkers(self, peers=None): if len(self.workers) >= MAX_WORKERS: return False # Workers number already maxed if not self.tasks: return False # No task for workers for key, peer in self.site.peers.iteritems(): # One worker for every peer - if key not in self.workers and len(self.workers) < MAX_WORKERS: # We dont have worker for that peer and workers num less than max - worker = Worker(self, peer) - self.workers[key] = worker - worker.key = key - worker.start() - self.log.debug("Added worker: %s, workers: %s/%s" % (key, len(self.workers), MAX_WORKERS)) + if peers and peer not in peers: continue # If peers definied and peer not valid + worker = self.addWorker(peer) + if worker: self.log.debug("Added worker: %s, workers: %s/%s" % (key, len(self.workers), MAX_WORKERS)) # Find workers by task @@ -97,7 +107,8 @@ class WorkerManager: if task: # Already has task for that file if peer and task["peers"]: # This peer also has new version, add it to task possible peers task["peers"].append(peer) - self.startWorkers() + self.log.debug("Added peer %s to %s" % (peer.key, task["inner_path"])) + self.startWorkers([peer]) if priority: task["priority"] += priority # Boost on priority return task["evt"] @@ -109,8 +120,8 @@ class WorkerManager: peers = None task = {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "time_added": time.time(), "time_started": None, "peers": peers, "priority": priority} self.tasks.append(task) - self.log.debug("New task: %s" 
% task) - self.startWorkers() + self.log.debug("New task: %s, peer lock: %s" % (task, peers)) + self.startWorkers(peers) return evt diff --git a/src/main.py b/src/main.py index f031a743..21955a81 100644 --- a/src/main.py +++ b/src/main.py @@ -148,7 +148,7 @@ def sitePublish(address): site = file_server.sites[address] site.settings["serving"] = True # Serving the site even if its disabled site.announce() # Gather peers - site.publish(10) # Push to 10 peers + site.publish(20) # Push to 20 peers logging.info("Serving files....") gevent.joinall([file_server_thread]) From 185424b815ae3c461d984a1f5a76b3f1f37ab4f3 Mon Sep 17 00:00:00 2001 From: HelloZeroNet Date: Fri, 16 Jan 2015 11:52:42 +0100 Subject: [PATCH 06/12] version bump, allow to publihs to specified peer, log sendCmd, add zeronet version to content.js, version to websocket api, force start worker specificed for specificed peer --- src/Config.py | 4 +++- src/File/FileRequest.py | 2 +- src/File/FileServer.py | 2 +- src/Peer/Peer.py | 1 + src/Site/Site.py | 5 +++-- src/Ui/UiWebsocket.py | 1 + src/Worker/Worker.py | 2 +- src/Worker/WorkerManager.py | 2 +- src/main.py | 8 ++++++-- 9 files changed, 18 insertions(+), 9 deletions(-) diff --git a/src/Config.py b/src/Config.py index dc455d91..11592e2f 100644 --- a/src/Config.py +++ b/src/Config.py @@ -3,7 +3,7 @@ import ConfigParser class Config(object): def __init__(self): - self.version = "0.1" + self.version = "0.1.1" self.parser = self.createArguments() argv = sys.argv[:] # Copy command line arguments argv = self.parseConfig(argv) # Add arguments from config file @@ -43,6 +43,8 @@ class Config(object): # SitePublish action = subparsers.add_parser("sitePublish", help='Publish site to other peers: address') action.add_argument('address', help='Site to publish') + action.add_argument('peer_ip', help='Peer ip to publish (default: random peers ip from tracker)', default=None, nargs='?') + action.add_argument('peer_port', help='Peer port to publish (default: random peer port from tracker)', default=15441, nargs='?') # SiteVerify action = subparsers.add_parser("siteVerify", help='Verify site files using md5: address') diff --git a/src/File/FileRequest.py b/src/File/FileRequest.py index 6bb4a29b..e9c5926a 100644 --- a/src/File/FileRequest.py +++ b/src/File/FileRequest.py @@ -61,7 +61,7 @@ class FileRequest: elif valid == None: # Not changed peer = site.addPeer(*params["peer"], return_peer = True) # Add or get peer - self.log.debug("New peer for locked files: %s, tasks: %s" % (peer.key, len(site.worker_manager.tasks)) ) + self.log.debug("Same version, adding new peer for locked files: %s, tasks: %s" % (peer.key, len(site.worker_manager.tasks)) ) for task in site.worker_manager.tasks: # New peer add to every ongoing task if task["peers"]: site.needFile(task["inner_path"], peer=peer, update=True, blocking=False) # Download file from this peer too if its peer locked diff --git a/src/File/FileServer.py b/src/File/FileServer.py index dce3d8b7..9899460d 100644 --- a/src/File/FileServer.py +++ b/src/File/FileServer.py @@ -149,7 +149,7 @@ class FileServer: return if check_sites: # Open port, Update sites, Check files integrity gevent.spawn(self.checkSites) - + gevent.spawn(self.announceSites) while True: diff --git a/src/Peer/Peer.py b/src/Peer/Peer.py index 002f9111..43c31659 100644 --- a/src/Peer/Peer.py +++ b/src/Peer/Peer.py @@ -40,6 +40,7 @@ class Peer: # Send a command to peer def sendCmd(self, cmd, params = {}): if not self.socket: self.connect() + self.log.debug("sendCmd: %s" % cmd) try: 
self.socket.send(msgpack.packb({"cmd": cmd, "params": params}, use_bin_type=True)) response = msgpack.unpackb(self.socket.recv()) diff --git a/src/Site/Site.py b/src/Site/Site.py index 6d7f818a..163862e1 100644 --- a/src/Site/Site.py +++ b/src/Site/Site.py @@ -384,7 +384,7 @@ class Site: def signContent(self, privatekey=None): if not self.content: # New site self.log.info("Site not exits yet, loading default content.json values...") - self.content = {"files": {}, "title": "%s - ZeroNet_" % self.address, "sign": "", "modified": 0.0, "description": "", "address": self.address, "ignore": ""} # Default content.json + self.content = {"files": {}, "title": "%s - ZeroNet_" % self.address, "sign": "", "modified": 0.0, "description": "", "address": self.address, "ignore": "", "zeronet_version": config.version} # Default content.json self.log.info("Opening site data directory: %s..." % self.directory) @@ -409,7 +409,8 @@ class Site: content["address"] = self.address # Add files sha1 hash content["files"] = hashed_files # Add files sha1 hash content["modified"] = time.time() # Add timestamp - del(content["sign"]) # Delete old site + content["zeronet_version"] = config.version # Signer's zeronet version + del(content["sign"]) # Delete old sign # Signing content from Crypt import CryptBitcoin diff --git a/src/Ui/UiWebsocket.py b/src/Ui/UiWebsocket.py index 6b7b2520..e5736287 100644 --- a/src/Ui/UiWebsocket.py +++ b/src/Ui/UiWebsocket.py @@ -158,6 +158,7 @@ class UiWebsocket: "fileserver_port": config.fileserver_port, "ui_ip": config.ui_ip, "ui_port": config.ui_port, + "version": config.version, "debug": config.debug } self.response(to, ret) diff --git a/src/Worker/Worker.py b/src/Worker/Worker.py index 239d2394..797bc35c 100644 --- a/src/Worker/Worker.py +++ b/src/Worker/Worker.py @@ -20,6 +20,7 @@ class Worker: if not task: # Die, no more task self.manager.log.debug("%s: No task found, stopping" % self.key) break + if not task["time_started"]: task["time_started"] = time.time() # Task started now if task["workers_num"] > 0: # Wait a bit if someone already working on it self.manager.log.debug("%s: Someone already working on %s, sleeping 1 sec..." 
% (self.key, task["inner_path"])) @@ -27,7 +28,6 @@ class Worker: self.manager.log.debug("%s: %s, task done after sleep: %s" % (self.key, task["inner_path"], task["done"])) if task["done"] == False: - if not task["time_started"]: task["time_started"] = time.time() # Task started now self.task = task task["workers_num"] += 1 buff = self.peer.getFile(task["site"].address, task["inner_path"]) diff --git a/src/Worker/WorkerManager.py b/src/Worker/WorkerManager.py index 6e42dc7e..a2ea1dac 100644 --- a/src/Worker/WorkerManager.py +++ b/src/Worker/WorkerManager.py @@ -77,7 +77,7 @@ class WorkerManager: # Start workers to process tasks def startWorkers(self, peers=None): - if len(self.workers) >= MAX_WORKERS: return False # Workers number already maxed + if len(self.workers) >= MAX_WORKERS and not peers: return False # Workers number already maxed if not self.tasks: return False # No task for workers for key, peer in self.site.peers.iteritems(): # One worker for every peer if peers and peer not in peers: continue # If peers definied and peer not valid diff --git a/src/main.py b/src/main.py index 21955a81..ab8fc25f 100644 --- a/src/main.py +++ b/src/main.py @@ -135,7 +135,7 @@ def siteNeedFile(address, inner_path): print site.needFile(inner_path, update=True) -def sitePublish(address): +def sitePublish(address, peer_ip=None, peer_port=15441): from Site import Site from File import FileServer # We need fileserver to handle incoming file requests logging.info("Creating FileServer....") @@ -147,7 +147,11 @@ def sitePublish(address): return site = file_server.sites[address] site.settings["serving"] = True # Serving the site even if its disabled - site.announce() # Gather peers + if peer_ip: # Announce ip specificed + site.addPeer(peer_ip, peer_port) + else: # Just ask the tracker + logging.info("Gathering peers from tracker") + site.announce() # Gather peers site.publish(20) # Push to 20 peers logging.info("Serving files....") gevent.joinall([file_server_thread]) From b37e309eda8591dcd126d169e2ab4621bfdc3326 Mon Sep 17 00:00:00 2001 From: HelloZeroNet Date: Sat, 17 Jan 2015 18:50:56 +0100 Subject: [PATCH 07/12] limitations and irc to readme, version 0.1.2, socket debugging option, Notify exceptions support, better error logging, retry on socket error, dont expose external ip to websocket api, kill workers if no task, log time to console --- README.md | 13 +++++++++++- src/Config.py | 7 ++++--- src/Debug/Debug.py | 39 +++++++++++++++++++++++++++++++++++ src/Debug/DebugHook.py | 4 ++-- src/File/FileRequest.py | 3 ++- src/File/FileServer.py | 7 ++++--- src/Peer/Peer.py | 41 ++++++++++++++++++++++--------------- src/Site/Site.py | 41 ++++++++++++++++++------------------- src/Ui/UiRequest.py | 1 + src/Ui/UiServer.py | 14 +------------ src/Ui/UiWebsocket.py | 7 ++++--- src/Worker/Worker.py | 6 ++++++ src/Worker/WorkerManager.py | 13 +++++++++--- src/main.py | 8 +++++++- 14 files changed, 136 insertions(+), 68 deletions(-) create mode 100644 src/Debug/Debug.py diff --git a/README.md b/README.md index 21c9736f..b2ac7429 100644 --- a/README.md +++ b/README.md @@ -41,6 +41,13 @@ Linux (Debian): - start using `python zeronet.py` +## Current limitations + - No torrent-like, file splitting big file support + - Just as anonymous as the bittorrent + - File transactions not compressed or encrypted yet + - No private sites + + ## How can I create a ZeroNet site? Shut down zeronet.py if you are running it already ``` @@ -55,6 +62,7 @@ $ zeronet.py ``` Congratulations, you are done! 
Now anyone can access your site using http://localhost:43110/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 + ## How can I modify a ZeroNet site? - Modify files located in data/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 directory. After you done: ``` @@ -72,10 +80,13 @@ Site:13DNDk..bhC2 Successfuly published to 3 peers ``` - That's it! You successfuly signed and published your modifications. + ## If you want to help keep this project alive Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX #### Thank you! -More info, help, changelog, zeronet sites: http://www.reddit.com/r/zeronet/ \ No newline at end of file + +More info, help, changelog, zeronet sites: http://www.reddit.com/r/zeronet/ +Come, chat with us: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) diff --git a/src/Config.py b/src/Config.py index 11592e2f..6b2d5026 100644 --- a/src/Config.py +++ b/src/Config.py @@ -3,7 +3,7 @@ import ConfigParser class Config(object): def __init__(self): - self.version = "0.1.1" + self.version = "0.1.2" self.parser = self.createArguments() argv = sys.argv[:] # Copy command line arguments argv = self.parseConfig(argv) # Add arguments from config file @@ -53,13 +53,14 @@ class Config(object): # Config parameters parser.add_argument('--debug', help='Debug mode', action='store_true') + parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true') - parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='host') + parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='ip') parser.add_argument('--ui_port', help='Web interface bind port', default=43110, metavar='port') parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip') parser.add_argument('--homepage', help='Web interface Homepage', default='1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr', metavar='address') - parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='host') + parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip') parser.add_argument('--fileserver_port',help='FileServer bind port', default=15441, metavar='port') parser.add_argument('--ip_external', help='External ip (tested on start if None)', metavar='ip') diff --git a/src/Debug/Debug.py b/src/Debug/Debug.py new file mode 100644 index 00000000..3ab4c48b --- /dev/null +++ b/src/Debug/Debug.py @@ -0,0 +1,39 @@ +import sys, os, traceback + +# Non fatal exception +class Notify(Exception): + def __init__(self, message): + self.message = message + + def __str__(self): + return self.message + + +def formatException(err=None): + exc_type, exc_obj, exc_tb = sys.exc_info() + if not err: err = exc_obj.message + tb = [] + for frame in traceback.extract_tb(exc_tb): + path, line, function, text = frame + file = os.path.split(path)[1] + tb.append("%s line %s" % (file, line)) + return "%s: %s in %s" % (exc_type.__name__, err, " > ".join(tb)) + + +if __name__ == "__main__": + try: + print 1/0 + except Exception, err: + print type(err).__name__ + print "1/0 error: %s" % formatException(err) + + def loadJson(): + json.loads("Errr") + + import json + try: + loadJson() + except Exception, err: + print err + print "Json load error: %s" % formatException(err) + loadJson() diff --git a/src/Debug/DebugHook.py b/src/Debug/DebugHook.py index d03e7fb9..60229f61 100644 --- a/src/Debug/DebugHook.py +++ b/src/Debug/DebugHook.py @@ -3,14 +3,14 @@ import gevent, sys last_error = None def handleError(*args): 
global last_error - if not args: # Get last error + if not args: # Called explicitly args = sys.exc_info() silent = True else: silent = False print "Error catched", args last_error = args - if not silent: sys.__excepthook__(*args) + if not silent and args[0].__name__ != "Notify": sys.__excepthook__(*args) OriginalGreenlet = gevent.Greenlet class ErrorhookedGreenlet(OriginalGreenlet): diff --git a/src/File/FileRequest.py b/src/File/FileRequest.py index e9c5926a..973b6fca 100644 --- a/src/File/FileRequest.py +++ b/src/File/FileRequest.py @@ -1,6 +1,7 @@ import os, msgpack, shutil from Site import SiteManager from cStringIO import StringIO +from Debug import Debug FILE_BUFF = 1024*512 @@ -87,7 +88,7 @@ class FileRequest: back["size"] = os.fstat(file.fileno()).st_size self.send(back) except Exception, err: - self.send({"error": "File read error: %s" % err}) + self.send({"error": "File read error: %s" % Debug.formatException(err)}) return False diff --git a/src/File/FileServer.py b/src/File/FileServer.py index 9899460d..34ce5d42 100644 --- a/src/File/FileServer.py +++ b/src/File/FileServer.py @@ -4,6 +4,7 @@ import zmq.green as zmq from Config import config from FileRequest import FileRequest from Site import SiteManager +from Debug import Debug class FileServer: @@ -53,7 +54,7 @@ class FileServer: else: upnpc_success = False except Exception, err: - self.log.error("Upnpc run error: %s" % err) + self.log.error("Upnpc run error: %s" % Debug.formatException(err)) upnpc_success = False if upnpc_success and self.testOpenport(port)["result"] == True: @@ -73,7 +74,7 @@ class FileServer: message = re.match('.*

<p style="padding-left:18px">(.*?)</p>', data, re.DOTALL).group(1)
 message = re.sub("<.*?>", "", message.replace("<br>
", " ").replace(" ", " ")) # Strip http tags except Exception, err: - message = "Error: %s" % err + message = "Error: %s" % Debug.formatException(err) if "Error" in message: self.log.info("[BAD :(] Port closed: %s" % message) if port == self.port: @@ -159,7 +160,7 @@ class FileServer: self.handleRequest(req) except Exception, err: self.log.error(err) - self.socket.send(msgpack.packb({"error": "%s" % err}, use_bin_type=True)) + self.socket.send(msgpack.packb({"error": "%s" % Debug.formatException(err)}, use_bin_type=True)) if config.debug: # Raise exception import sys sys.excepthook(*sys.exc_info()) diff --git a/src/Peer/Peer.py b/src/Peer/Peer.py index 43c31659..6a4bb6f9 100644 --- a/src/Peer/Peer.py +++ b/src/Peer/Peer.py @@ -2,6 +2,7 @@ import os, logging, gevent, time, msgpack import zmq.green as zmq from cStringIO import StringIO from Config import config +from Debug import Debug context = zmq.Context() @@ -40,23 +41,29 @@ class Peer: # Send a command to peer def sendCmd(self, cmd, params = {}): if not self.socket: self.connect() - self.log.debug("sendCmd: %s" % cmd) - try: - self.socket.send(msgpack.packb({"cmd": cmd, "params": params}, use_bin_type=True)) - response = msgpack.unpackb(self.socket.recv()) - if "error" in response: - self.log.debug("%s %s error: %s" % (cmd, params, response["error"])) + for retry in range(1,5): + if config.debug_socket: self.log.debug("sendCmd: %s" % cmd) + try: + self.socket.send(msgpack.packb({"cmd": cmd, "params": params}, use_bin_type=True)) + if config.debug_socket: self.log.debug("Sent command: %s" % cmd) + response = msgpack.unpackb(self.socket.recv()) + if config.debug_socket: self.log.debug("Got response to: %s" % cmd) + if "error" in response: + self.log.debug("%s error: %s" % (cmd, response["error"])) + self.onConnectionError() + else: # Successful request, reset connection error num + self.connection_error = 0 + return response + except Exception, err: self.onConnectionError() - else: # Successful request, reset connection error num - self.connection_error = 0 - return response - except Exception, err: - self.onConnectionError() - self.log.error("%s" % err) - self.socket.close() - time.sleep(1) - self.connect() - return None + self.log.debug("%s (connection_error: %s, hash_failed: %s, retry: %s)" % (Debug.formatException(err), self.connection_error, self.hash_failed, retry)) + self.socket.close() + time.sleep(1*retry) + self.connect() + if type(err).__name__ == "Notify" and err.message == "Worker stopped": # Greenlet kill by worker + self.log.debug("Peer worker got killed, aborting cmd: %s" % cmd) + break + return None # Failed after 4 retry # Get a file content from peer @@ -97,7 +104,7 @@ class Peer: # On connection error def onConnectionError(self): self.connection_error += 1 - if self.connection_error > 5: # Dead peer + if self.connection_error >= 5: # Dead peer self.remove() diff --git a/src/Site/Site.py b/src/Site/Site.py index 163862e1..6443b9db 100644 --- a/src/Site/Site.py +++ b/src/Site/Site.py @@ -6,6 +6,7 @@ from Config import config from Peer import Peer from Worker import WorkerManager from Crypt import CryptHash +from Debug import Debug import SiteManager class Site: @@ -53,7 +54,7 @@ class Site: try: new_content = json.load(open(content_path)) except Exception, err: - self.log.error("Content.json load error: %s" % err) + self.log.error("Content.json load error: %s" % Debug.formatException(err)) return None else: return None # Content.json not exits @@ -69,7 +70,7 @@ class Site: if old_sha1 != new_sha1: 
changed.append(inner_path) self.content = new_content except Exception, err: - self.log.error("Content.json parse error: %s" % err) + self.log.error("Content.json parse error: %s" % Debug.formatException(err)) return None # Content.json parse error # Add to bad files if not init: @@ -114,7 +115,7 @@ class Site: # Start downloading site @util.Noparallel(blocking=False) def download(self): - self.log.debug("Start downloading...") + self.log.debug("Start downloading...%s" % self.bad_files) self.announce() found = self.needFile("content.json", update=self.bad_files.get("content.json")) if not found: return False # Could not download content.json @@ -165,7 +166,7 @@ class Site: "peer": (config.ip_external, config.fileserver_port) }) except Exception, err: - result = {"exception": err} + result = {"exception": Debug.formatException(err)} if result and "ok" in result: published += 1 @@ -231,24 +232,22 @@ class Site: tracker.poll_once() tracker.announce(info_hash=hashlib.sha1(self.address).hexdigest(), num_want=50) back = tracker.poll_once() - except Exception, err: - self.log.error("Tracker error: %s" % err) - continue - if back: # Tracker announce success peers = back["response"]["peers"] - added = 0 - for peer in peers: - if (peer["addr"], peer["port"]) in self.peer_blacklist: # Ignore blacklist (eg. myself) - continue - if self.addPeer(peer["addr"], peer["port"]): added += 1 - if added: - self.worker_manager.onPeers() - self.updateWebsocket(peers_added=added) - self.log.debug("Found %s peers, new: %s" % (len(peers), added)) - break # Successful announcing, break the list - else: - self.log.error("Tracker bad response, trying next in list...") # Failed to announce, go to next + except Exception, err: + self.log.error("Tracker error: %s" % Debug.formatException(err)) time.sleep(1) + continue + + added = 0 + for peer in peers: + if (peer["addr"], peer["port"]) in self.peer_blacklist: # Ignore blacklist (eg. 
myself) + continue + if self.addPeer(peer["addr"], peer["port"]): added += 1 + if added: + self.worker_manager.onPeers() + self.updateWebsocket(peers_added=added) + self.log.debug("Found %s peers, new: %s" % (len(peers), added)) + break # Successful announcing, break the list else: pass # TODO: http tracker support @@ -342,7 +341,7 @@ class Site: return CryptBitcoin.verify(sign_content, self.address, sign) except Exception, err: - self.log.error("Verify sign error: %s" % err) + self.log.error("Verify sign error: %s" % Debug.formatException(err)) return False else: # Check using sha1 hash diff --git a/src/Ui/UiRequest.py b/src/Ui/UiRequest.py index c870e157..611173c6 100644 --- a/src/Ui/UiRequest.py +++ b/src/Ui/UiRequest.py @@ -245,6 +245,7 @@ class UiRequest: # Just raise an error to get console def actionConsole(self): + sites = self.server.sites raise Exception("Here is your console") diff --git a/src/Ui/UiServer.py b/src/Ui/UiServer.py index 61f32a8f..e14a72bc 100644 --- a/src/Ui/UiServer.py +++ b/src/Ui/UiServer.py @@ -6,6 +6,7 @@ from lib.geventwebsocket.handler import WebSocketHandler from Ui import UiRequest from Site import SiteManager from Config import config +from Debug import Debug # Skip websocket handler if not necessary class UiWSGIHandler(WSGIHandler): @@ -48,19 +49,6 @@ class UiServer: return self.ui_request.route(path) - # Send a message to all connected client - def sendMessage(self, message): - sent = 0 - for ws in self.websockets: - try: - ws.send(message) - sent += 1 - except Exception, err: - self.log.error("addMessage error: %s" % err) - self.server.websockets.remove(ws) - return sent - - # Reload the UiRequest class to prevent restarts in debug mode def reload(self): import imp diff --git a/src/Ui/UiWebsocket.py b/src/Ui/UiWebsocket.py index e5736287..68d3ce77 100644 --- a/src/Ui/UiWebsocket.py +++ b/src/Ui/UiWebsocket.py @@ -1,6 +1,7 @@ import json, gevent, time, sys, hashlib from Config import config from Site import SiteManager +from Debug import Debug class UiWebsocket: def __init__(self, ws, site, server): @@ -36,7 +37,7 @@ class UiWebsocket: if config.debug: # Allow websocket errors to appear on /Debug import sys sys.modules["src.main"].DebugHook.handleError() - self.log.error("WebSocket error: %s" % err) + self.log.error("WebSocket error: %s" % Debug.formatException(err)) return "Bye." 
@@ -70,7 +71,7 @@ class UiWebsocket: if cb: # Callback after client responsed self.waiting_cb[message["id"]] = cb except Exception, err: - self.log.debug("Websocket send error: %s" % err) + self.log.debug("Websocket send error: %s" % Debug.formatException(err)) # Handle incoming messages @@ -152,7 +153,7 @@ class UiWebsocket: # Server variables def actionServerInfo(self, to, params): ret = { - "ip_external": config.ip_external, + "ip_external": bool(config.ip_external), "platform": sys.platform, "fileserver_ip": config.fileserver_ip, "fileserver_port": config.fileserver_port, diff --git a/src/Worker/Worker.py b/src/Worker/Worker.py index 797bc35c..bbe9486c 100644 --- a/src/Worker/Worker.py +++ b/src/Worker/Worker.py @@ -1,5 +1,6 @@ import gevent, time, logging, shutil, os from Peer import Peer +from Debug import Debug class Worker: def __init__(self, manager, peer): @@ -66,6 +67,11 @@ class Worker: self.running = True self.thread = gevent.spawn(self.downloader) + + # Force stop the worker def stop(self): + self.manager.log.debug("%s: Force stopping, thread: %s" % (self.key, self.thread)) self.running = False + if self.thread: + self.thread.kill(exception=Debug.Notify("Worker stopped")) self.manager.removeWorker(self) diff --git a/src/Worker/WorkerManager.py b/src/Worker/WorkerManager.py index a2ea1dac..dd2ba7b3 100644 --- a/src/Worker/WorkerManager.py +++ b/src/Worker/WorkerManager.py @@ -16,7 +16,12 @@ class WorkerManager: # Check expired tasks def checkTasks(self): while 1: - time.sleep(15) # Check every 30 sec + time.sleep(15) # Check every 15 sec + + # Clean up workers + if not self.tasks and self.workers: # No task but workers still running + for worker in self.workers.values(): worker.stop() + if not self.tasks: continue tasks = self.tasks[:] # Copy it so removing elements wont cause any problem for task in tasks: @@ -40,6 +45,7 @@ class WorkerManager: + # Tasks sorted by this def taskSorter(self, task): if task["inner_path"] == "content.json": return 9999 # Content.json always prority @@ -96,8 +102,9 @@ class WorkerManager: # Ends and remove a worker def removeWorker(self, worker): worker.running = False - if worker.key in self.workers: del(self.workers[worker.key]) - self.log.debug("Removed worker, workers: %s/%s" % (len(self.workers), MAX_WORKERS)) + if worker.key in self.workers: + del(self.workers[worker.key]) + self.log.debug("Removed worker, workers: %s/%s" % (len(self.workers), MAX_WORKERS)) # Create new task and return asyncresult diff --git a/src/main.py b/src/main.py index ab8fc25f..791e952c 100644 --- a/src/main.py +++ b/src/main.py @@ -19,8 +19,14 @@ if config.action == "main": else: logging.basicConfig(level=logging.DEBUG, stream=open(os.devnull,"w")) # No file logging if action is not main +# Console logger console_log = logging.StreamHandler() -console_log.setFormatter(logging.Formatter('%(name)s %(message)s', "%H:%M:%S")) +if config.action == "main": # Add time if main action + console_log.setFormatter(logging.Formatter('[%(asctime)s] %(name)s %(message)s', "%H:%M:%S")) +else: + console_log.setFormatter(logging.Formatter('%(name)s %(message)s', "%H:%M:%S")) + + logging.getLogger('').addHandler(console_log) # Add console logger logging.getLogger('').name = "-" # Remove root prefix From 014a79912fbe7fb2a7f9740a76518e4e2ca81bbf Mon Sep 17 00:00:00 2001 From: HelloZeroNet Date: Sun, 18 Jan 2015 22:52:19 +0100 Subject: [PATCH 08/12] version 0.1.3, tructate sha512 to 256bits, retry peer cmd only 3 times, ping peer before cmd to find stucked sockets, ping with timeout and 
retry, separate wrapper_key and auth_key, changed sha1 to sha512, backward compatibility to sha1, reduce websocket bw usage on events, removed wrapper hash from wrapper iframe url --- README.md | 10 +++++--- src/Config.py | 2 +- src/Crypt/CryptHash.py | 2 +- src/Peer/Peer.py | 47 +++++++++++++++++++++++++++++------- src/Site/Site.py | 35 ++++++++++++++++++--------- src/Ui/UiRequest.py | 14 +++++------ src/Ui/UiWebsocket.py | 26 ++++++++++++-------- src/Ui/media/Wrapper.coffee | 6 ++--- src/Ui/media/all.js | 6 ++--- src/Ui/template/wrapper.html | 4 +-- src/Worker/Worker.py | 2 +- src/main.py | 2 +- 12 files changed, 102 insertions(+), 54 deletions(-) diff --git a/README.md b/README.md index b2ac7429..86d25d36 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Decentralized websites using Bitcoin crypto and BitTorrent network - When you visit a new zeronet site, it's trying to find peers using BitTorrent network and download the site files (html, css, js...) from them. - Each visited sites become also served by You. - Every site containing a `site.json` which holds all other files sha1 hash and a sign generated using site's private key. - - If the site owner (who has the private key for the site address) modifies the site, then he/she signs the new `content.json` and publish it to the peers. After the peers have verified the `content.json` integrity using the sign they download the modified files and publish the new content to other peers. + - If the site owner (who has the private key for the site address) modifies the site, then he/she signs the new `content.json` and publish it to the peers. After the peers have verified the `content.json` integrity (using the sign), they download the modified files and publish the new content to other peers. ## Screenshot @@ -62,6 +62,8 @@ $ zeronet.py ``` Congratulations, you are done! Now anyone can access your site using http://localhost:43110/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 +Next steps: [ZeroNet Developer Documentation](https://github.com/HelloZeroNet/ZeroNet/wiki/ZeroNet-Developer-Documentation) + ## How can I modify a ZeroNet site? - Modify files located in data/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 directory. After you done: @@ -85,8 +87,8 @@ Site:13DNDk..bhC2 Successfuly published to 3 peers Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX + #### Thank you! 
- -More info, help, changelog, zeronet sites: http://www.reddit.com/r/zeronet/ -Come, chat with us: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) +- More info, help, changelog, zeronet sites: http://www.reddit.com/r/zeronet/ +- Come, chat with us: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) diff --git a/src/Config.py b/src/Config.py index 6b2d5026..ec7a470d 100644 --- a/src/Config.py +++ b/src/Config.py @@ -3,7 +3,7 @@ import ConfigParser class Config(object): def __init__(self): - self.version = "0.1.2" + self.version = "0.1.3" self.parser = self.createArguments() argv = sys.argv[:] # Copy command line arguments argv = self.parseConfig(argv) # Add arguments from config file diff --git a/src/Crypt/CryptHash.py b/src/Crypt/CryptHash.py index 57932acc..d9de55f9 100644 --- a/src/Crypt/CryptHash.py +++ b/src/Crypt/CryptHash.py @@ -15,7 +15,7 @@ def sha512sum(file, blocksize=65536): hash = hashlib.sha512() for block in iter(lambda: file.read(blocksize), ""): hash.update(block) - return hash.hexdigest() + return hash.hexdigest()[0:64] # Truncate to 256bits is good enough if __name__ == "__main__": diff --git a/src/Peer/Peer.py b/src/Peer/Peer.py index 6a4bb6f9..7641e571 100644 --- a/src/Peer/Peer.py +++ b/src/Peer/Peer.py @@ -13,23 +13,32 @@ class Peer: self.port = port self.site = site self.key = "%s:%s" % (ip, port) + self.log = None self.socket = None - self.last_found = None + self.last_found = None # Time of last found in the torrent tracker + self.last_response = None # Time of last successfull response from peer + self.last_ping = None # Last response time for ping self.added = time.time() - self.connection_error = 0 - self.hash_failed = 0 - self.download_bytes = 0 - self.download_time = 0 + self.connection_error = 0 # Series of connection error + self.hash_failed = 0 # Number of bad files from peer + self.download_bytes = 0 # Bytes downloaded + self.download_time = 0 # Time spent to download # Connect to host def connect(self): - self.log = logging.getLogger("Peer:%s:%s" % (self.ip, self.port)) + if not self.log: self.log = logging.getLogger("Peer:%s:%s" % (self.ip, self.port)) + if self.socket: self.socket.close() + self.socket = context.socket(zmq.REQ) self.socket.setsockopt(zmq.SNDTIMEO, 5000) # Wait for data send self.socket.setsockopt(zmq.LINGER, 500) # Wait for socket close + #self.socket.setsockopt(zmq.TCP_KEEPALIVE, 1) # Enable keepalive + #self.socket.setsockopt(zmq.TCP_KEEPALIVE_IDLE, 4*60) # Send after 4 minute idle + #self.socket.setsockopt(zmq.TCP_KEEPALIVE_INTVL, 15) # Wait 15 sec to response + #self.socket.setsockopt(zmq.TCP_KEEPALIVE_CNT, 4) # 4 Probes self.socket.connect('tcp://%s:%s' % (self.ip, self.port)) @@ -41,7 +50,10 @@ class Peer: # Send a command to peer def sendCmd(self, cmd, params = {}): if not self.socket: self.connect() - for retry in range(1,5): + if cmd != "ping" and self.last_response and time.time() - self.last_response > 20*60: # If last response if older than 20 minute, ping first to see if still alive + if not self.ping(): return None + + for retry in range(1,3): # Retry 3 times if config.debug_socket: self.log.debug("sendCmd: %s" % cmd) try: self.socket.send(msgpack.packb({"cmd": cmd, "params": params}, use_bin_type=True)) @@ -53,11 +65,11 @@ class Peer: self.onConnectionError() else: # Successful request, reset connection error num self.connection_error = 0 + self.last_response = time.time() return response except Exception, err: self.onConnectionError() self.log.debug("%s 
(connection_error: %s, hash_failed: %s, retry: %s)" % (Debug.formatException(err), self.connection_error, self.hash_failed, retry)) - self.socket.close() time.sleep(1*retry) self.connect() if type(err).__name__ == "Notify" and err.message == "Worker stopped": # Greenlet kill by worker @@ -89,7 +101,24 @@ class Peer: # Send a ping request def ping(self): - return self.sendCmd("ping") + response_time = None + for retry in range(1,3): # Retry 3 times + s = time.time() + with gevent.Timeout(10.0, False): # 10 sec timeout, dont raise exception + response = self.sendCmd("ping") + if response and "body" in response and response["body"] == "Pong!": + response_time = time.time()-s + break # All fine, exit from for loop + # Timeout reached or bad response + self.onConnectionError() + time.sleep(1) + + if response_time: + self.log.debug("Ping: %.3f" % response_time) + else: + self.log.debug("Ping failed") + self.last_ping = response_time + return response_time # Stop and remove from site diff --git a/src/Site/Site.py b/src/Site/Site.py index 6443b9db..44fbe3a7 100644 --- a/src/Site/Site.py +++ b/src/Site/Site.py @@ -27,7 +27,7 @@ class Site: self.peer_blacklist = SiteManager.peer_blacklist # Ignore this peers (eg. myself) self.last_announce = 0 # Last announce time to tracker self.worker_manager = WorkerManager(self) # Handle site download from other peers - self.bad_files = {} # SHA1 check failed files, need to redownload + self.bad_files = {} # SHA512 check failed files, need to redownload self.content_updated = None # Content.js update time self.last_downloads = [] # Files downloaded in run of self.download() self.notifications = [] # Pending notifications displayed once on page load [error|ok|info, message, timeout] @@ -36,10 +36,16 @@ class Site: self.loadContent(init=True) # Load content.json self.loadSettings() # Load settings from sites.json - if not self.settings.get("auth_key"): - self.settings["auth_key"] = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(12)) # To auth websocket + if not self.settings.get("auth_key"): # To auth user in site + self.settings["auth_key"] = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(24)) self.log.debug("New auth key: %s" % self.settings["auth_key"]) self.saveSettings() + + if not self.settings.get("wrapper_key"): # To auth websocket permissions + self.settings["wrapper_key"] = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(12)) + self.log.debug("New wrapper key: %s" % self.settings["wrapper_key"]) + self.saveSettings() + self.websockets = [] # Active site websocket connections # Add event listeners @@ -224,7 +230,7 @@ class Site: for protocol, ip, port in SiteManager.TRACKERS: if protocol == "udp": - self.log.debug("Announing to %s://%s:%s..." % (protocol, ip, port)) + self.log.debug("Announcing to %s://%s:%s..." 
% (protocol, ip, port)) tracker = UdpTrackerClient(ip, port) tracker.peer_port = config.fileserver_port try: @@ -346,13 +352,17 @@ class Site: else: # Check using sha1 hash if self.content and inner_path in self.content["files"]: - return CryptHash.sha1sum(file) == self.content["files"][inner_path]["sha1"] + if "sha512" in self.content["files"][inner_path]: # Use sha512 to verify if possible + return CryptHash.sha512sum(file) == self.content["files"][inner_path]["sha512"] + else: # Backward compatibility + return CryptHash.sha1sum(file) == self.content["files"][inner_path]["sha1"] + else: # File not in content.json self.log.error("File not in content.json: %s" % inner_path) return False - # Verify all files sha1sum using content.json + # Verify all files sha512sum using content.json def verifyFiles(self, quick_check=False): # Fast = using file size bad_files = [] if not self.content: # No content.json, download it first @@ -396,17 +406,18 @@ class Site: if file_name == "content.json" or (self.content["ignore"] and re.match(self.content["ignore"], file_path.replace(self.directory+"/", "") )): # Dont add content.json and ignore regexp pattern definied in content.json self.log.info("- [SKIPPED] %s" % file_path) else: - sha1sum = CryptHash.sha1sum(file_path) # Calculate sha sum of file + sha1sum = CryptHash.sha1sum(file_path) # Calculate sha1 sum of file + sha512sum = CryptHash.sha512sum(file_path) # Calculate sha512 sum of file inner_path = re.sub("^%s/" % re.escape(self.directory), "", file_path) - self.log.info("- %s (SHA1: %s)" % (file_path, sha1sum)) - hashed_files[inner_path] = {"sha1": sha1sum, "size": os.path.getsize(file_path)} + self.log.info("- %s (SHA512: %s)" % (file_path, sha512sum)) + hashed_files[inner_path] = {"sha1": sha1sum, "sha512": sha512sum, "size": os.path.getsize(file_path)} # Generate new content.json - self.log.info("Adding timestamp and sha1sums to new content.json...") + self.log.info("Adding timestamp and sha512sums to new content.json...") content = self.content.copy() # Create a copy of current content.json - content["address"] = self.address # Add files sha1 hash - content["files"] = hashed_files # Add files sha1 hash + content["address"] = self.address + content["files"] = hashed_files # Add files sha512 hash content["modified"] = time.time() # Add timestamp content["zeronet_version"] = config.version # Signer's zeronet version del(content["sign"]) # Delete old sign diff --git a/src/Ui/UiRequest.py b/src/Ui/UiRequest.py index 611173c6..4c57c916 100644 --- a/src/Ui/UiRequest.py +++ b/src/Ui/UiRequest.py @@ -122,7 +122,7 @@ class UiRequest: inner_path=inner_path, address=match.group("site"), title=title, - auth_key=site.settings["auth_key"], + wrapper_key=site.settings["wrapper_key"], permissions=json.dumps(site.settings["permissions"]), show_loadingscreen=json.dumps(not os.path.isfile(site.getPath(inner_path))), homepage=config.homepage @@ -209,13 +209,13 @@ class UiRequest: def actionWebsocket(self): ws = self.env.get("wsgi.websocket") if ws: - auth_key = self.get["auth_key"] - # Find site by auth_key + wrapper_key = self.get["wrapper_key"] + # Find site by wrapper_key site = None for site_check in self.server.sites.values(): - if site_check.settings["auth_key"] == auth_key: site = site_check + if site_check.settings["wrapper_key"] == wrapper_key: site = site_check - if site: # Correct auth key + if site: # Correct wrapper key ui_websocket = UiWebsocket(ws, site, self.server) site.websockets.append(ui_websocket) # Add to site websockets to allow notify on events 
ui_websocket.start() @@ -223,8 +223,8 @@ class UiRequest: if ui_websocket in site_check.websockets: site_check.websockets.remove(ui_websocket) return "Bye." - else: # No site found by auth key - self.log.error("Auth key not found: %s" % auth_key) + else: # No site found by wrapper key + self.log.error("Wrapper key not found: %s" % wrapper_key) return self.error403() else: start_response("400 Bad Request", []) diff --git a/src/Ui/UiWebsocket.py b/src/Ui/UiWebsocket.py index 68d3ce77..634cd638 100644 --- a/src/Ui/UiWebsocket.py +++ b/src/Ui/UiWebsocket.py @@ -46,7 +46,7 @@ class UiWebsocket: if channel in self.channels: # We are joined to channel if channel == "siteChanged": site = params[0] # Triggerer site - site_info = self.siteInfo(site) + site_info = self.formatSiteInfo(site) if len(params) > 1 and params[1]: # Extra data site_info.update(params[1]) self.cmd("setSiteInfo", site_info) @@ -121,18 +121,24 @@ # Format site info - def siteInfo(self, site): + def formatSiteInfo(self, site): + content = site.content + if content and "files" in content: # Remove unnecessary data transfer + content = site.content.copy() + content["files"] = len(content["files"]) + del(content["sign"]) + ret = { - "auth_id": self.site.settings["auth_key"][0:10], - "auth_id_md5": hashlib.md5(self.site.settings["auth_key"][0:10]).hexdigest(), + "auth_key": self.site.settings["auth_key"], + "auth_key_sha512": hashlib.sha512(self.site.settings["auth_key"]).hexdigest()[0:64], "address": site.address, "settings": site.settings, "content_updated": site.content_updated, - "bad_files": site.bad_files.keys(), - "last_downloads": site.last_downloads, + "bad_files": len(site.bad_files), + "last_downloads": len(site.last_downloads), "peers": len(site.peers), - "tasks": [task["inner_path"] for task in site.worker_manager.tasks], - "content": site.content + "tasks": len([task["inner_path"] for task in site.worker_manager.tasks]), + "content": content } if site.settings["serving"] and site.content: ret["peers"] += 1 # Add myself if serving return ret @@ -140,7 +146,7 @@ # Send site details def actionSiteInfo(self, to, params): - ret = self.siteInfo(self.site) + ret = self.formatSiteInfo(self.site) self.response(to, ret) @@ -173,7 +179,7 @@ SiteManager.load() # Reload sites for site in self.server.sites.values(): if not site.content: continue # Broken site - ret.append(self.siteInfo(site)) + ret.append(self.formatSiteInfo(site)) self.response(to, ret) diff --git a/src/Ui/media/Wrapper.coffee b/src/Ui/media/Wrapper.coffee index 6dfdffc2..b99a326e 100644 --- a/src/Ui/media/Wrapper.coffee +++ b/src/Ui/media/Wrapper.coffee @@ -118,8 +118,8 @@ class Wrapper setSiteInfo: (site_info) -> if site_info.event? # If loading screen visible add event to it # File started downloading - if site_info.event[0] == "file_added" and site_info.bad_files.length - @loading.printLine("#{site_info.bad_files.length} files needs to be downloaded") + if site_info.event[0] == "file_added" and site_info.bad_files + @loading.printLine("#{site_info.bad_files} files needs to be downloaded") # File finished downloading else if site_info.event[0] == "file_done" @loading.printLine("#{site_info.event[1]} downloaded") @@ -148,5 +148,5 @@ class Wrapper console.log "[Wrapper]", args... 
-ws_url = "ws://#{window.location.hostname}:#{window.location.port}/Websocket?auth_key=#{window.auth_key}" +ws_url = "ws://#{window.location.hostname}:#{window.location.port}/Websocket?wrapper_key=#{window.wrapper_key}" window.wrapper = new Wrapper(ws_url) diff --git a/src/Ui/media/all.js b/src/Ui/media/all.js index 16923085..8c18be78 100644 --- a/src/Ui/media/all.js +++ b/src/Ui/media/all.js @@ -852,8 +852,8 @@ jQuery.extend( jQuery.easing, Wrapper.prototype.setSiteInfo = function(site_info) { if (site_info.event != null) { - if (site_info.event[0] === "file_added" && site_info.bad_files.length) { - this.loading.printLine("" + site_info.bad_files.length + " files needs to be downloaded"); + if (site_info.event[0] === "file_added" && site_info.bad_files) { + this.loading.printLine("" + site_info.bad_files + " files needs to be downloaded"); } else if (site_info.event[0] === "file_done") { this.loading.printLine("" + site_info.event[1] + " downloaded"); if (site_info.event[1] === window.inner_path) { @@ -890,7 +890,7 @@ jQuery.extend( jQuery.easing, })(); - ws_url = "ws://" + window.location.hostname + ":" + window.location.port + "/Websocket?auth_key=" + window.auth_key; + ws_url = "ws://" + window.location.hostname + ":" + window.location.port + "/Websocket?wrapper_key=" + window.wrapper_key; window.wrapper = new Wrapper(ws_url); diff --git a/src/Ui/template/wrapper.html b/src/Ui/template/wrapper.html index 46c0cc0d..7de9dac0 100644 --- a/src/Ui/template/wrapper.html +++ b/src/Ui/template/wrapper.html @@ -35,12 +35,12 @@ - + - + diff --git a/src/Worker/Worker.py b/src/Worker/Worker.py index bbe9486c..b648f0da 100644 --- a/src/Worker/Worker.py +++ b/src/Worker/Worker.py @@ -50,12 +50,12 @@ class Worker: self.manager.doneTask(task) self.task = None else: # Hash failed + self.manager.log.debug("%s: Hash failed: %s" % (self.key, task["inner_path"])) self.task = None self.peer.hash_failed += 1 if self.peer.hash_failed >= 3: # Broken peer break task["workers_num"] -= 1 - self.manager.log.error("%s: Hash failed: %s" % (self.key, task["inner_path"])) time.sleep(1) self.peer.onWorkerDone() self.running = False diff --git a/src/main.py b/src/main.py index 791e952c..cc324e08 100644 --- a/src/main.py +++ b/src/main.py @@ -118,7 +118,7 @@ def siteVerify(address): logging.info("Verifying site files...") bad_files = site.verifyFiles() if not bad_files: - logging.info("[OK] All file sha1sum matches!") + logging.info("[OK] All file sha512sum matches!") else: logging.error("[ERROR] Error during verifying site files!") From 2000f5b38eb5d7e6f29bc29232b8493d31b6817a Mon Sep 17 00:00:00 2001 From: HelloZeroNet Date: Sun, 18 Jan 2015 23:29:53 +0100 Subject: [PATCH 09/12] fix for fileserver_port arg problem --- src/Config.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Config.py b/src/Config.py index ec7a470d..c7e92cee 100644 --- a/src/Config.py +++ b/src/Config.py @@ -56,12 +56,12 @@ class Config(object): parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true') parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='ip') - parser.add_argument('--ui_port', help='Web interface bind port', default=43110, metavar='port') + parser.add_argument('--ui_port', help='Web interface bind port', default=43110, type=int, metavar='port') parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip') parser.add_argument('--homepage', help='Web interface Homepage', 
default='1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr', metavar='address') parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip') - parser.add_argument('--fileserver_port',help='FileServer bind port', default=15441, metavar='port') + parser.add_argument('--fileserver_port',help='FileServer bind port', default=15441, type=int, metavar='port') parser.add_argument('--ip_external', help='External ip (tested on start if None)', metavar='ip') parser.add_argument('--upnpc', help='MiniUPnP binary for open port on router', default=upnpc, metavar='executable_path') From 59ceb438c4cd6233c710ff5a8c596beb113b337f Mon Sep 17 00:00:00 2001 From: James Milne Date: Mon, 19 Jan 2015 09:56:03 -0500 Subject: [PATCH 10/12] Pip Requirements File Added --- requirements.txt | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 requirements.txt diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..2b9f1afd --- /dev/null +++ b/requirements.txt @@ -0,0 +1,3 @@ +gevent==1.0.1 +pyzmq==14.4.1 +msgpack-python==0.4.4 From 3bec7385956bd00391e0e10c0c671d8d82653c8d Mon Sep 17 00:00:00 2001 From: HelloZeroNet Date: Tue, 20 Jan 2015 02:47:00 +0100 Subject: [PATCH 11/12] version 0.1.4, WIF compatible new private keys, proper bitcoin address verification, worker killing does not drop hash error, private key saved confirmation on site create --- src/Config.py | 4 +- src/Crypt/CryptBitcoin.py | 11 +- src/Site/SiteManager.py | 2 +- src/Worker/Worker.py | 3 + src/lib/BitcoinECC/newBitcoinECC.py | 460 ++++++++++++++++++++++++++++ src/main.py | 13 +- 6 files changed, 481 insertions(+), 12 deletions(-) create mode 100644 src/lib/BitcoinECC/newBitcoinECC.py diff --git a/src/Config.py b/src/Config.py index c7e92cee..f44c3758 100644 --- a/src/Config.py +++ b/src/Config.py @@ -3,7 +3,7 @@ import ConfigParser class Config(object): def __init__(self): - self.version = "0.1.3" + self.version = "0.1.4" self.parser = self.createArguments() argv = sys.argv[:] # Copy command line arguments argv = self.parseConfig(argv) # Add arguments from config file @@ -47,7 +47,7 @@ class Config(object): action.add_argument('peer_port', help='Peer port to publish (default: random peer port from tracker)', default=15441, nargs='?') # SiteVerify - action = subparsers.add_parser("siteVerify", help='Verify site files using md5: address') + action = subparsers.add_parser("siteVerify", help='Verify site files using sha512: address') action.add_argument('address', help='Site to verify') diff --git a/src/Crypt/CryptBitcoin.py b/src/Crypt/CryptBitcoin.py index f7ba209f..63199ffe 100644 --- a/src/Crypt/CryptBitcoin.py +++ b/src/Crypt/CryptBitcoin.py @@ -1,11 +1,12 @@ from src.lib.BitcoinECC import BitcoinECC -import hashlib -def newPrivatekey(): # Return new private key - bitcoin = BitcoinECC.Bitcoin() - bitcoin.GeneratePrivateKey() - return bitcoin.PrivateEncoding() +def newPrivatekey(uncompressed=True): # Return new private key + from src.lib.BitcoinECC import newBitcoinECC # Use new lib to generate WIF compatible addresses, but keep using the old one for backward compatibility + bitcoin = newBitcoinECC.Bitcoin() + d = bitcoin.GenerateD() + bitcoin.AddressFromD(d, uncompressed) + return bitcoin.PrivFromD(d, uncompressed) def privatekeyToAddress(privatekey): # Return address from private key diff --git a/src/Site/SiteManager.py b/src/Site/SiteManager.py index daf5479d..80d5a8e9 100644 --- a/src/Site/SiteManager.py +++ b/src/Site/SiteManager.py @@ -37,7 +37,7 @@ def load(): # Checks if its a 
valid address def isAddress(address): - return re.match("^[A-Za-z0-9]{34}$", address) + return re.match("^[A-Za-z0-9]{26,35}$", address) # Return site and start download site files diff --git a/src/Worker/Worker.py b/src/Worker/Worker.py index b648f0da..7bb5ea66 100644 --- a/src/Worker/Worker.py +++ b/src/Worker/Worker.py @@ -32,6 +32,9 @@ class Worker: self.task = task task["workers_num"] += 1 buff = self.peer.getFile(task["site"].address, task["inner_path"]) + if self.running == False: # Worker no longer needed or got killed + self.manager.log.debug("%s: No longer needed, returning: %s" % (self.key, task["inner_path"])) + return None if buff: # Download ok correct = task["site"].verifyFile(task["inner_path"], buff) else: # Download error diff --git a/src/lib/BitcoinECC/newBitcoinECC.py b/src/lib/BitcoinECC/newBitcoinECC.py new file mode 100644 index 00000000..b09386bc --- /dev/null +++ b/src/lib/BitcoinECC/newBitcoinECC.py @@ -0,0 +1,460 @@ +import random +import hashlib +import base64 + +class GaussInt: + def __init__(self,x,y,n,p=0): + if p: + self.x=x%p + self.y=y%p + self.n=n%p + else: + self.x=x + self.y=y + self.n=n + + self.p=p + + def __add__(self,b): + return GaussInt(self.x+b.x,self.y+b.y,self.n,self.p) + + def __sub__(self,b): + return GaussInt(self.x-b.x,self.y-b.y,self.n,self.p) + + def __mul__(self,b): + return GaussInt(self.x*b.x+self.n*self.y*b.y,self.x*b.y+self.y*b.x,self.n,self.p) + + def __div__(self,b): + return GaussInt((self.x*b.x-self.n*self.y*b.y)/(b.x*b.x-self.n*b.y*b.y),(-self.x*b.y+self.y*b.x)/(b.x*b.x-self.n*b.y*b.y),self.n,self.p) + + def __eq__(self,b): + return self.x==b.x and self.y==b.y + + def __repr__(self): + if self.p: + return "%s+%s (%d,%d)"%(self.x,self.y,self.n,self.p) + else: + return "%s+%s (%d)"%(self.x,self.y,self.n) + + def __pow__(self,n): + b=Base(n,2) + t=GaussInt(1,0,self.n) + while b: + t=t*t + if b.pop(): + t=self*t + + return t + + def Inv(self): + return GaussInt(self.x/(self.x*self.x-self.n*self.y*self.y),-self.y/(self.x*self.x-self.n*self.y*self.y),self.n,self.p) + + def Eval(self): + return self.x.Eval()+self.y.Eval()*math.sqrt(self.n) + +def Cipolla(a,p): + b=0 + while pow((b*b-a)%p,(p-1)/2,p)==1: + b+=1 + + return (GaussInt(b,1,b**2-a,p)**((p+1)/2)).x + +def InvMod(a,n): + m=[] + + s=n + while n: + m.append(a/n) + (a,n)=(n,a%n) + + u=1 + v=0 + while m: + (u,v)=(v,u-m.pop()*v) + + return u%s + +def Base(n,b): + l=[] + while n: + l.append(n%b) + n/=b + + return l + +def MsgMagic(message): + return "\x18Bitcoin Signed Message:\n"+chr(len(message))+message + +def Hash(m,method): + h=hashlib.new(method) + h.update(m) + + return h.digest() + +def b58encode(v): + #Encode a byte string to the Base58 + digit="123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" + base=len(digit) + val=0 + for c in v: + val*=256 + val+=ord(c) + + result="" + while val: + (val,mod)=divmod(val,base) + result=digit[mod]+result + + pad=0 + for c in v: + if c=="\x00": + pad+=1 + else: + break + + return (digit[0]*pad)+result + +def b58decode(v): + #Decode a Base58 string to byte string + digit="123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" + base=len(digit) + val=0 + for c in v: + val*=base + val+=digit.find(c) + + result="" + while val: + (val,mod)=divmod(val,256) + result=chr(mod)+result + + pad=0 + for c in v: + if c==digit[0]: + pad+=1 + else: + break + + return "\x00"*pad+result + +def Byte2Int(b): + n=0 + for x in b: + n*=256 + n+=ord(x) + + return n + +def Byte2Hex(b): + #Convert a byte string to hex number + out="" + for x in b: + 
y=hex(ord(x))[2:] + if len(y)==1: + y="0"+y + out+="%2s"%y + + return out + +def Int2Byte(n,b): + #Convert a integer to a byte string of length b + out="" + + for _ in range(b): + (n,m)=divmod(n,256) + out=chr(m)+out + + return out + +class EllipticCurvePoint: + #Main class + #It's a point on an Elliptic Curve + + def __init__(self,x,a,b,p,n=0): + #We store the coordinate in x and the elliptic curve parameter. + #x is of length 3. This is the 3 projective coordinates of the point. + self.x=x[:] + self.a=a + self.b=b + self.p=p + self.n=n + + def __add__(self,y): + #The main function to add self and y + #It uses the formulas I derived in projective coordinates. + #Projectives coordinates are more efficient than the usual (x,y) coordinates + #because we don't need to compute inverse mod p, which is faster. + z=EllipticCurvePoint([0,0,0],self.a,self.b,self.p) + + if self==y: + d=(2*self.x[1]*self.x[2])%self.p + d3=pow(d,3,self.p) + n=(3*pow(self.x[0],2,self.p)+self.a*pow(self.x[2],2,self.p))%self.p + + z.x[0]=(pow(n,2,self.p)*d*self.x[2]-2*d3*self.x[0])%self.p + z.x[1]=(3*self.x[0]*n*pow(d,2,self.p)-pow(n,3,self.p)*self.x[2]-self.x[1]*d3)%self.p + z.x[2]=(self.x[2]*d3)%self.p + else: + d=(y.x[0]*self.x[2]-y.x[2]*self.x[0])%self.p + d3=pow(d,3,self.p) + n=(y.x[1]*self.x[2]-self.x[1]*y.x[2])%self.p + + z.x[0]=(y.x[2]*self.x[2]*pow(n,2,self.p)*d-d3*(y.x[2]*self.x[0]+y.x[0]*self.x[2]))%self.p + z.x[1]=(pow(d,2,self.p)*n*(2*self.x[0]*y.x[2]+y.x[0]*self.x[2])-pow(n,3,self.p)*self.x[2]*y.x[2]-self.x[1]*d3*y.x[2])%self.p + z.x[2]=(self.x[2]*d3*y.x[2])%self.p + + return z + + def __mul__(self,n): + #The fast multiplication of point n times by itself. + b=Base(n,2) + t=EllipticCurvePoint(self.x,self.a,self.b,self.p) + b.pop() + while b: + t+=t + if b.pop(): + t+=self + + return t + + def __repr__(self): + #print a point in (x,y) coordinate. + return "x=%d\ny=%d\n"%((self.x[0]*InvMod(self.x[2],self.p))%self.p,(self.x[1]*InvMod(self.x[2],self.p))%self.p) + + def __eq__(self,y): + #Does self==y ? + #It computes self cross product with x and check if the result is 0. + return self.x[0]*y.x[1]==self.x[1]*y.x[0] and self.x[1]*y.x[2]==self.x[2]*y.x[1] and self.x[2]*y.x[0]==self.x[0]*y.x[2] and self.a==y.a and self.b==y.b and self.p==y.p + + def __ne__(self,y): + #Does self!=x ? + return not (self == y) + + def Normalize(self): + #Transform projective coordinates of self to the usual (x,y) coordinates. + if self.x[2]: + self.x[0]=(self.x[0]*InvMod(self.x[2],self.p))%self.p + self.x[1]=(self.x[1]*InvMod(self.x[2],self.p))%self.p + self.x[2]=1 + elif self.x[1]: + self.x[0]=(self.x[0]*InvMod(self.x[1],self.p))%self.p + self.x[1]=1 + elif self.x[0]: + self.x[0]=1 + else: + raise Exception + + def Check(self): + #Is self on the curve ? + return (self.x[0]**3+self.a*self.x[0]*self.x[2]**2+self.b*self.x[2]**3-self.x[1]**2*self.x[2])%self.p==0 + + + def CryptAddr(self,filename,password,Address): + txt="" + for tag in Address: + (addr,priv)=Address[tag] + if priv: + txt+="%s\t%s\t%s\n"%(tag,addr,priv) + else: + txt+="%s\t%s\t\n"%(tag,addr) + + txt+="\x00"*(15-(len(txt)-1)%16) + + password+="\x00"*(15-(len(password)-1)%16) + crypt=twofish.Twofish(password).encrypt(txt) + + f=open(filename,"wb") + f.write(crypt) + f.close() + + def GenerateD(self): + #Generate a private key. It's just a random number between 1 and n-1. + #Of course, this function isn't cryptographically secure. + #Don't use it to generate your key. Use a cryptographically secure source of randomness instead. 
+ #return random.randint(1,self.n-1) + return random.SystemRandom().randint(1,self.n-1) # Better random fix + + def CheckECDSA(self,sig,message,Q): + #Check a signature (r,s) of the message m using the public key self.Q + # and the generator which is self. + #This is not the one used by Bitcoin because the public key isn't known; + # only a hash of the public key is known. See the function VerifyMessageFromAddress. + (r,s)=sig + + if Q.x[2]==0: + return False + if not Q.Check(): + return False + if (Q*self.n).x[2]!=0: + return False + if r<1 or r>self.n-1 or s<1 or s>self.n-1: + return False + + z=Byte2Int(Hash(Hash(MsgMagic(message),"SHA256"),"SHA256")) + + w=InvMod(s,self.n) + u1=(z*w)%self.n + u2=(r*w)%self.n + R=self*u1+Q*u2 + R.Normalize() + + return (R.x[0]-r)%self.n==0 + + def SignMessage(self,message,priv): + #Sign a message. The private key is self.d. + (d,uncompressed)=self.DFromPriv(priv) + + z=Byte2Int(Hash(Hash(MsgMagic(message),"SHA256"),"SHA256")) + + r=0 + s=0 + while not r or not s: + #k=random.randint(1,self.n-1) + k=random.SystemRandom().randint(1,self.n-1) # Better random fix + R=self*k + R.Normalize() + r=R.x[0]%self.n + s=(InvMod(k,self.n)*(z+r*d))%self.n + + val=27 + if not uncompressed: + val+=4 + + return base64.standard_b64encode(chr(val)+Int2Byte(r,32)+Int2Byte(s,32)) + + def VerifyMessageFromAddress(self,addr,message,sig): + #Check a signature (r,s) for the message m signed by the Bitcoin + # address "addr". + + sign=base64.standard_b64decode(sig) + (r,s)=(Byte2Int(sign[1:33]),Byte2Int(sign[33:65])) + + z=Byte2Int(Hash(Hash(MsgMagic(message),"SHA256"),"SHA256")) + + val=ord(sign[0]) + if val<27 or val>=35: + return False + + if val>=31: + uncompressed=False + val-=4 + else: + uncompressed=True + + x=r + y2=(pow(x,3,self.p) + self.a*x + self.b) % self.p + y=Cipolla(y2,self.p) + + for _ in range(2): + kG=EllipticCurvePoint([x,y,1],self.a,self.b,self.p,self.n) + mzG=self*((-z)%self.n) + Q=(kG*s+mzG)*InvMod(r,self.n) + + if self.AddressFromPublicKey(Q,uncompressed)==addr: + return True + + y=self.p-y + + return False + + def AddressFromPrivate(self,priv): + #Transform a private key to a bitcoin address. + (d,uncompressed)=self.DFromPriv(priv) + + return self.AddressFromD(d,uncompressed) + + def PrivFromD(self,d,uncompressed): + #Encode a private key self.d to base58 encoding. + p=Int2Byte(d,32) + p="\x80"+p + + if not uncompressed: + p+=chr(1) + + cs=Hash(Hash(p,"SHA256"),"SHA256")[:4] + + return b58encode(p+cs) + + def DFromPriv(self,priv): + uncompressed=(len(priv)==51) + priv=b58decode(priv) + + if uncompressed: + priv=priv[:-4] + else: + priv=priv[:-5] + + return (Byte2Int(priv[1:]),uncompressed) + + def AddressFromPublicKey(self,Q,uncompressed): + #Find the bitcoin address from the public key self.Q + #We do normalization to go from the projective coordinates to the usual + # (x,y) coordinates. + Q.Normalize() + if uncompressed: + pk=chr(4)+Int2Byte(Q.x[0],32)+Int2Byte(Q.x[1],32) + else: + pk=chr(2+Q.x[1]%2)+Int2Byte(Q.x[0],32) + + kh=chr(0)+Hash(Hash(pk,"SHA256"),"RIPEMD160") + cs=Hash(Hash(kh,"SHA256"),"SHA256")[:4] + + return b58encode(kh+cs) + + def AddressFromD(self,d,uncompressed): + #Computes a bitcoin address given the private key self.d. + return self.AddressFromPublicKey(self*d,uncompressed) + + def IsValid(self,addr): + adr=b58decode(addr) + kh=adr[:-4] + cs=adr[-4:] + + verif=Hash(Hash(kh,"SHA256"),"SHA256")[:4] + + return cs==verif + + def AddressGenerator(self,k,uncompressed=True): + #Generate Bitcoin address and write them in the multibit format. 
+ #Change the date as you like. + liste={} + for i in range(k): + d=self.GenerateD() + addr=self.AddressFromD(d,uncompressed) + priv=self.PrivFromD(d,uncompressed) + liste[i]=[addr,priv] + print "%s %s"%(addr, priv) + + return liste + +def Bitcoin(): + a=0 + b=7 + p=2**256-2**32-2**9-2**8-2**7-2**6-2**4-1 + Gx=int("79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798",16) + Gy=int("483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8",16) + n=int("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141",16) + + return EllipticCurvePoint([Gx,Gy,1],a,b,p,n) + +def main(): + bitcoin=Bitcoin() + + #Generate an adress from the private key + privkey = "PrivatekeyinBase58" + adr = bitcoin.AddressFromPrivate(privkey) + print "Address : ", adr + + #Sign a message with the current address + m="Hello World" + sig=bitcoin.SignMessage("Hello World", privkey) + #Verify the message using only the bitcoin adress, the signature and the message. + #Not using the public key as it is not needed. + if bitcoin.VerifyMessageFromAddress(adr,m,sig): + print "Message verified" + + #Generate some addresses + print "Here are some adresses and associated private keys" + bitcoin.AddressGenerator(10) + +if __name__ == "__main__": main() diff --git a/src/main.py b/src/main.py index cc324e08..46d576a7 100644 --- a/src/main.py +++ b/src/main.py @@ -73,11 +73,16 @@ def siteCreate(): logging.info("Generating new privatekey...") from src.Crypt import CryptBitcoin privatekey = CryptBitcoin.newPrivatekey() - logging.info("-----------------------------------------------------------") - logging.info("Site private key: %s (save it, required to modify the site)" % privatekey) + logging.info("----------------------------------------------------------------------") + logging.info("Site private key: %s" % privatekey) + logging.info(" !!! ^ Save it now, required to modify the site ^ !!!") address = CryptBitcoin.privatekeyToAddress(privatekey) - logging.info("Site address: %s" % address) - logging.info("-----------------------------------------------------------") + logging.info("Site address: %s" % address) + logging.info("----------------------------------------------------------------------") + + while True: + if raw_input("? Have you secured your private key? 
(yes, no) > ").lower() == "yes": break + else: logging.info("Please secure it now, you are going to need it to modify your site!") logging.info("Creating directory structure...") from Site import Site From a977feec330a024686518ce6c374167d63d564f2 Mon Sep 17 00:00:00 2001 From: HelloZeroNet Date: Wed, 21 Jan 2015 12:58:26 +0100 Subject: [PATCH 12/12] version 0.1.5, install as user to readme, more debug on filerequests, if upnpc fails try without -e, announce interval from 10 to 20 min, detect computer wakeup and act as startup, delete site files websocket command support, pause stops all current downloads, wrapper confirmation dialog support --- README.md | 7 ++++- src/Config.py | 2 +- src/File/FileRequest.py | 7 ++++- src/File/FileServer.py | 25 +++++++++++++--- src/Site/Site.py | 31 +++++++++++++++++-- src/Site/SiteManager.py | 8 +++++ src/Ui/UiWebsocket.py | 18 ++++++++++++ src/Ui/media/Notifications.coffee | 12 ++++++-- src/Ui/media/Wrapper.coffee | 26 +++++++++++++++- src/Ui/media/Wrapper.css | 7 +++++ src/Ui/media/all.css | 7 +++++ src/Ui/media/all.js | 49 +++++++++++++++++++++++++++++-- src/Worker/WorkerManager.py | 14 ++++++++- 13 files changed, 197 insertions(+), 16 deletions(-) diff --git a/README.md b/README.md index 86d25d36..c5ac0b2f 100644 --- a/README.md +++ b/README.md @@ -40,6 +40,11 @@ Linux (Debian): - `pip install msgpack-python` - start using `python zeronet.py` +Linux (Without root access): + - `wget https://bootstrap.pypa.io/get-pip.py` + - `python get-pip.py --user pyzmq gevent msgpack-python` + - start using `python zeronet.py` + ## Current limitations - No torrent-like, file splitting big file support @@ -91,4 +96,4 @@ Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX #### Thank you! - More info, help, changelog, zeronet sites: http://www.reddit.com/r/zeronet/ -- Come, chat with us: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) +- Come, chat with us: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) \ No newline at end of file diff --git a/src/Config.py b/src/Config.py index f44c3758..df512c92 100644 --- a/src/Config.py +++ b/src/Config.py @@ -3,7 +3,7 @@ import ConfigParser class Config(object): def __init__(self): - self.version = "0.1.4" + self.version = "0.1.5" self.parser = self.createArguments() argv = sys.argv[:] # Copy command line arguments argv = self.parseConfig(argv) # Add arguments from config file diff --git a/src/File/FileRequest.py b/src/File/FileRequest.py index 973b6fca..f70671b8 100644 --- a/src/File/FileRequest.py +++ b/src/File/FileRequest.py @@ -2,6 +2,7 @@ import os, msgpack, shutil from Site import SiteManager from cStringIO import StringIO from Debug import Debug +from Config import config FILE_BUFF = 1024*512 @@ -80,13 +81,17 @@ class FileRequest: self.send({"error": "Unknown site"}) return False try: - file = open(site.getPath(params["inner_path"]), "rb") + file_path = site.getPath(params["inner_path"]) + if config.debug_socket: self.log.debug("Opening file: %s" % file_path) + file = open(file_path, "rb") file.seek(params["location"]) back = {} back["body"] = file.read(FILE_BUFF) back["location"] = file.tell() back["size"] = os.fstat(file.fileno()).st_size + if config.debug_socket: self.log.debug("Sending file %s from position %s to %s" % (file_path, params["location"], back["location"])) self.send(back) + if config.debug_socket: self.log.debug("File %s sent" % file_path) except Exception, err: self.send({"error": "File read error: %s" % Debug.formatException(err)}) return False diff --git 
a/src/File/FileServer.py b/src/File/FileServer.py index 34ce5d42..4a5e4f86 100644 --- a/src/File/FileServer.py +++ b/src/File/FileServer.py @@ -49,10 +49,14 @@ class FileServer: self.log.info("Try to open port using upnpc...") try: exit = os.system("%s -e ZeroNet -r %s tcp" % (config.upnpc, self.port)) - if exit == 0: + if exit == 0: # Success upnpc_success = True - else: - upnpc_success = False + else: # Failed + exit = os.system("%s -r %s tcp" % (config.upnpc, self.port)) # Try without -e option + if exit == 0: + upnpc_success = True + else: + upnpc_success = False except Exception, err: self.log.error("Upnpc run error: %s" % Debug.formatException(err)) upnpc_success = False @@ -122,13 +126,25 @@ class FileServer: # Announce sites every 10 min def announceSites(self): while 1: - time.sleep(10*60) # Announce sites every 10 min + time.sleep(20*60) # Announce sites every 20 min for address, site in self.sites.items(): if site.settings["serving"]: site.announce() # Announce site to tracker time.sleep(2) # Prevent too quick request + # Detects if computer back from wakeup + def wakeupWatcher(self): + last_time = time.time() + while 1: + time.sleep(30) + if time.time()-last_time > 60: # If taken more than 60 second then the computer was in sleep mode + self.log.info("Wakeup detected: time wrap from %s to %s (%s sleep seconds), acting like startup..." % (last_time, time.time(), time.time()-last_time)) + self.port_opened = None # Check if we still has the open port on router + self.checkSites() + last_time = time.time() + + # Bind and start serving sites def start(self, check_sites = True): self.log = logging.getLogger(__name__) @@ -152,6 +168,7 @@ class FileServer: gevent.spawn(self.checkSites) gevent.spawn(self.announceSites) + gevent.spawn(self.wakeupWatcher) while True: try: diff --git a/src/Site/Site.py b/src/Site/Site.py index 44fbe3a7..ed7a1a84 100644 --- a/src/Site/Site.py +++ b/src/Site/Site.py @@ -267,6 +267,32 @@ class Site: self.bad_files[bad_file] = True + def deleteFiles(self): + self.log.debug("Deleting files from content.json...") + files = self.content["files"].keys() # Make a copy + files.append("content.json") + for inner_path in files: + path = self.getPath(inner_path) + if os.path.isfile(path): os.unlink(path) + + self.log.debug("Deleting empty dirs...") + for root, dirs, files in os.walk(self.directory, topdown=False): + for dir in dirs: + path = os.path.join(root,dir) + if os.path.isdir(path) and os.listdir(path) == []: + os.removedirs(path) + self.log.debug("Removing %s" % path) + if os.path.isdir(self.directory) and os.listdir(self.directory) == []: os.removedirs(self.directory) # Remove sites directory if empty + + if os.path.isdir(self.directory): + self.log.debug("Some unknown file remained in site data dir: %s..." % self.directory) + return False # Some files not deleted + else: + self.log.debug("Site data directory deleted: %s..." 
% self.directory) + return True # All clean + + + # - Events - # Add event listeners @@ -380,11 +406,10 @@ class Site: else: ok = self.verifyFile(inner_path, open(file_path, "rb")) - if ok: - self.log.debug("[OK] %s" % inner_path) - else: + if not ok: self.log.error("[ERROR] %s" % inner_path) bad_files.append(inner_path) + self.log.debug("Site verified: %s files, quick_check: %s, bad files: %s" % (len(self.content["files"]), quick_check, bad_files)) return bad_files diff --git a/src/Site/SiteManager.py b/src/Site/SiteManager.py index 80d5a8e9..9779a639 100644 --- a/src/Site/SiteManager.py +++ b/src/Site/SiteManager.py @@ -45,12 +45,20 @@ def need(address, all_file=True): from Site import Site if address not in sites: # Site not exits yet if not isAddress(address): raise Exception("Not address: %s" % address) + logging.debug("Added new site: %s" % address) sites[address] = Site(address) + sites[address].settings["serving"] = True # Maybe it was deleted before site = sites[address] if all_file: site.download() return site +def delete(address): + global sites + logging.debug("SiteManager deleted site: %s" % address) + del(sites[address]) + + # Lazy load sites def list(): if sites == None: # Not loaded yet diff --git a/src/Ui/UiWebsocket.py b/src/Ui/UiWebsocket.py index 634cd638..44c8e593 100644 --- a/src/Ui/UiWebsocket.py +++ b/src/Ui/UiWebsocket.py @@ -96,6 +96,8 @@ class UiWebsocket: self.actionSitePause(req["id"], req["params"]) elif cmd == "siteResume" and "ADMIN" in permissions: self.actionSiteResume(req["id"], req["params"]) + elif cmd == "siteDelete" and "ADMIN" in permissions: + self.actionSiteDelete(req["id"], req["params"]) elif cmd == "siteList" and "ADMIN" in permissions: self.actionSiteList(req["id"], req["params"]) elif cmd == "channelJoinAllsite" and "ADMIN" in permissions: @@ -211,6 +213,7 @@ class UiWebsocket: site.settings["serving"] = False site.saveSettings() site.updateWebsocket() + site.worker_manager.stopWorkers() else: self.response(to, {"error": "Unknown site: %s" % address}) @@ -227,3 +230,18 @@ class UiWebsocket: site.updateWebsocket() else: self.response(to, {"error": "Unknown site: %s" % address}) + + + def actionSiteDelete(self, to, params): + address = params.get("address") + site = self.server.sites.get(address) + if site: + site.settings["serving"] = False + site.saveSettings() + site.worker_manager.running = False + site.worker_manager.stopWorkers() + site.deleteFiles() + SiteManager.delete(address) + site.updateWebsocket() + else: + self.response(to, {"error": "Unknown site: %s" % address}) diff --git a/src/Ui/media/Notifications.coffee b/src/Ui/media/Notifications.coffee index d5190c5a..ec26f425 100644 --- a/src/Ui/media/Notifications.coffee +++ b/src/Ui/media/Notifications.coffee @@ -27,10 +27,15 @@ class Notifications $(".notification-icon", elem).html("!") else if type == "done" $(".notification-icon", elem).html("
") + else if type == "ask" + $(".notification-icon", elem).html("?") else $(".notification-icon", elem).html("i") - $(".body", elem).html(body) + if typeof(body) == "string" + $(".body", elem).html(body) + else + $(".body", elem).html("").append(body) elem.appendTo(@elem) @@ -53,7 +58,10 @@ class Notifications @close elem return false - @ + # Close on button click within body (confirm dialog) + $(".button", elem).on "click", => + @close elem + return false close: (elem) -> diff --git a/src/Ui/media/Wrapper.coffee b/src/Ui/media/Wrapper.coffee index b99a326e..a43ddbb2 100644 --- a/src/Ui/media/Wrapper.coffee +++ b/src/Ui/media/Wrapper.coffee @@ -55,12 +55,31 @@ class Wrapper if @ws.ws.readyState == 1 and not @wrapperWsInited # If ws already opened @sendInner {"cmd": "wrapperOpenedWebsocket"} @wrapperWsInited = true - else if cmd == "wrapperNotification" + else if cmd == "wrapperNotification" # Display notification + message.params = @toHtmlSafe(message.params) # Escape html @notifications.add("notification-#{message.id}", message.params[0], message.params[1], message.params[2]) + else if cmd == "wrapperConfirm" # Display confirm message + @actionWrapperConfirm(message) else # Send to websocket @ws.send(message) # Pass message to websocket + # - Actions - + + actionWrapperConfirm: (message) -> + message.params = @toHtmlSafe(message.params) # Escape html + if message.params[1] then caption = message.params[1] else caption = "ok" + + body = $(""+message.params[0]+"") + button = $("#{caption}") # Add confirm button + button.on "click", => # Response on button click + @sendInner {"cmd": "response", "to": message.id, "result": "boom"} # Response to confirm + return false + body.append(button) + + @notifications.add("notification-#{message.id}", "ask", body) + + onOpenWebsocket: (e) => @ws.cmd "channelJoin", {"channel": "siteChanged"} # Get info on modifications @log "onOpenWebsocket", @inner_ready, @wrapperWsInited @@ -125,6 +144,7 @@ class Wrapper @loading.printLine("#{site_info.event[1]} downloaded") if site_info.event[1] == window.inner_path # File downloaded we currently on @loading.hideScreen() + if not @site_info then @reloadSiteInfo() if not $(".loadingscreen").length # Loading screen already removed (loaded +2sec) @notifications.add("modified", "info", "New version of this page has just released.
Reload to see the modified content.") # File failed downloading @@ -144,6 +164,10 @@ class Wrapper @site_info = site_info + toHtmlSafe: (unsafe) -> + return unsafe + + log: (args...) -> console.log "[Wrapper]", args... diff --git a/src/Ui/media/Wrapper.css b/src/Ui/media/Wrapper.css index 1f588186..cf50fe81 100644 --- a/src/Ui/media/Wrapper.css +++ b/src/Ui/media/Wrapper.css @@ -7,6 +7,12 @@ a { color: black } #inner-iframe { width: 100%; height: 100%; position: absolute; border: 0px; transition: all 0.8s cubic-bezier(0.68, -0.55, 0.265, 1.55), opacity 0.8s ease-in-out } #inner-iframe.back { transform: scale(0.95) translate(-300px, 0px); opacity: 0.4 } +.button { padding: 5px 10px; margin-left: 10px; background-color: #FFF85F; border-bottom: 2px solid #CDBD1E; border-radius: 2px; text-decoration: none; transition: all 0.5s; } +.button:hover { background-color: #FFF400; border-bottom: 2px solid #4D4D4C; transition: none } +.button:active { position: relative; top: 1px } + +.button-Delete { background-color: #e74c3c; border-bottom-color: #c0392b; color: white } +.button-Delete:hover { background-color: #FF5442; border-bottom-color: #8E2B21 } /* Fixbutton */ @@ -43,6 +49,7 @@ a { color: black } .notification .close:active, .notification .close:focus { color: #AF3BFF } /* Notification types */ +.notification-ask .notification-icon { background-color: #f39c12; } .notification-info .notification-icon { font-size: 22px; font-weight: bold; background-color: #2980b9; line-height: 48px } .notification-done .notification-icon { font-size: 22px; background-color: #27ae60 } diff --git a/src/Ui/media/all.css b/src/Ui/media/all.css index 16df57ea..01e91c31 100644 --- a/src/Ui/media/all.css +++ b/src/Ui/media/all.css @@ -12,6 +12,12 @@ a { color: black } #inner-iframe { width: 100%; height: 100%; position: absolute; border: 0px; -webkit-transition: all 0.8s cubic-bezier(0.68, -0.55, 0.265, 1.55), opacity 0.8s ease-in-out ; -moz-transition: all 0.8s cubic-bezier(0.68, -0.55, 0.265, 1.55), opacity 0.8s ease-in-out ; -o-transition: all 0.8s cubic-bezier(0.68, -0.55, 0.265, 1.55), opacity 0.8s ease-in-out ; -ms-transition: all 0.8s cubic-bezier(0.68, -0.55, 0.265, 1.55), opacity 0.8s ease-in-out ; transition: all 0.8s cubic-bezier(0.68, -0.55, 0.265, 1.55), opacity 0.8s ease-in-out } #inner-iframe.back { -webkit-transform: scale(0.95) translate(-300px, 0px); -moz-transform: scale(0.95) translate(-300px, 0px); -o-transform: scale(0.95) translate(-300px, 0px); -ms-transform: scale(0.95) translate(-300px, 0px); transform: scale(0.95) translate(-300px, 0px) ; opacity: 0.4 } +.button { padding: 5px 10px; margin-left: 10px; background-color: #FFF85F; border-bottom: 2px solid #CDBD1E; -webkit-border-radius: 2px; -moz-border-radius: 2px; -o-border-radius: 2px; -ms-border-radius: 2px; border-radius: 2px ; text-decoration: none; -webkit-transition: all 0.5s; -moz-transition: all 0.5s; -o-transition: all 0.5s; -ms-transition: all 0.5s; transition: all 0.5s ; } +.button:hover { background-color: #FFF400; border-bottom: 2px solid #4D4D4C; -webkit-transition: none ; -moz-transition: none ; -o-transition: none ; -ms-transition: none ; transition: none } +.button:active { position: relative; top: 1px } + +.button-Delete { background-color: #e74c3c; border-bottom-color: #c0392b; color: white } +.button-Delete:hover { background-color: #FF5442; border-bottom-color: #8E2B21 } /* Fixbutton */ @@ -48,6 +54,7 @@ a { color: black } .notification .close:active, .notification .close:focus { color: #AF3BFF } /* Notification types */ 
+.notification-ask .notification-icon { background-color: #f39c12; } .notification-info .notification-icon { font-size: 22px; font-weight: bold; background-color: #2980b9; line-height: 48px } .notification-done .notification-icon { font-size: 22px; background-color: #27ae60 } diff --git a/src/Ui/media/all.js b/src/Ui/media/all.js index 8c18be78..0d6ffc11 100644 --- a/src/Ui/media/all.js +++ b/src/Ui/media/all.js @@ -571,10 +571,16 @@ jQuery.extend( jQuery.easing, $(".notification-icon", elem).html("!"); } else if (type === "done") { $(".notification-icon", elem).html("
"); + } else if (type === "ask") { + $(".notification-icon", elem).html("?"); } else { $(".notification-icon", elem).html("i"); } - $(".body", elem).html(body); + if (typeof body === "string") { + $(".body", elem).html(body); + } else { + $(".body", elem).html("").append(body); + } elem.appendTo(this.elem); if (timeout) { $(".close", elem).remove(); @@ -604,7 +610,12 @@ jQuery.extend( jQuery.easing, return false; }; })(this)); - return this; + return $(".button", elem).on("click", (function(_this) { + return function() { + _this.close(elem); + return false; + }; + })(this)); }; Notifications.prototype.close = function(elem) { @@ -771,12 +782,39 @@ jQuery.extend( jQuery.easing, return this.wrapperWsInited = true; } } else if (cmd === "wrapperNotification") { + message.params = this.toHtmlSafe(message.params); return this.notifications.add("notification-" + message.id, message.params[0], message.params[1], message.params[2]); + } else if (cmd === "wrapperConfirm") { + return this.actionWrapperConfirm(message); } else { return this.ws.send(message); } }; + Wrapper.prototype.actionWrapperConfirm = function(message) { + var body, button, caption; + message.params = this.toHtmlSafe(message.params); + if (message.params[1]) { + caption = message.params[1]; + } else { + caption = "ok"; + } + body = $("" + message.params[0] + ""); + button = $("" + caption + ""); + button.on("click", (function(_this) { + return function() { + _this.sendInner({ + "cmd": "response", + "to": message.id, + "result": "boom" + }); + return false; + }; + })(this)); + body.append(button); + return this.notifications.add("notification-" + message.id, "ask", body); + }; + Wrapper.prototype.onOpenWebsocket = function(e) { this.ws.cmd("channelJoin", { "channel": "siteChanged" @@ -858,6 +896,9 @@ jQuery.extend( jQuery.easing, this.loading.printLine("" + site_info.event[1] + " downloaded"); if (site_info.event[1] === window.inner_path) { this.loading.hideScreen(); + if (!this.site_info) { + this.reloadSiteInfo(); + } if (!$(".loadingscreen").length) { this.notifications.add("modified", "info", "New version of this page has just released.
Reload to see the modified content."); } @@ -880,6 +921,10 @@ jQuery.extend( jQuery.easing, return this.site_info = site_info; }; + Wrapper.prototype.toHtmlSafe = function(unsafe) { + return unsafe; + }; + Wrapper.prototype.log = function() { var args; args = 1 <= arguments.length ? __slice.call(arguments, 0) : []; diff --git a/src/Worker/WorkerManager.py b/src/Worker/WorkerManager.py index dd2ba7b3..c11887ed 100644 --- a/src/Worker/WorkerManager.py +++ b/src/Worker/WorkerManager.py @@ -9,13 +9,14 @@ class WorkerManager: self.site = site self.workers = {} # Key: ip:port, Value: Worker.Worker self.tasks = [] # {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "time_started": None, "time_added": time.time(), "peers": peers, "priority": 0} + self.running = True self.log = logging.getLogger("WorkerManager:%s" % self.site.address_short) self.process_taskchecker = gevent.spawn(self.checkTasks) # Check expired tasks def checkTasks(self): - while 1: + while self.running: time.sleep(15) # Check every 15 sec # Clean up workers @@ -42,6 +43,7 @@ class WorkerManager: task["peers"] = [] self.startWorkers() break # One reannounce per loop + self.log.debug("checkTasks stopped running") @@ -91,6 +93,16 @@ class WorkerManager: if worker: self.log.debug("Added worker: %s, workers: %s/%s" % (key, len(self.workers), MAX_WORKERS)) + # Stop all worker + def stopWorkers(self): + for worker in self.workers.values(): + worker.stop() + tasks = self.tasks[:] # Copy + for task in tasks: # Mark all current task as failed + self.failTask(task) + + + # Find workers by task def findWorkers(self, task): workers = []