version 0.2.7, plugin system, multiuser plugin for zeroproxies, reworked imports, cookie parse, stats moved to plugin, usermanager class, dont generate site auth on listing, multiline notifications, allow server side prompt from user, update script keep plugins disabled status
2015-03-24 01:33:09 +01:00
|
|
|
import os, logging, urllib2, re, time
|
2015-01-12 02:03:45 +01:00
|
|
|
import gevent, msgpack
|
|
|
|
import zmq.green as zmq
|
|
|
|
from Config import config
|
|
|
|
from FileRequest import FileRequest
|
|
|
|
from Site import SiteManager
|
2015-01-17 18:50:56 +01:00
|
|
|
from Debug import Debug
|
version 0.2.4, peerPing and peerGetFile commands, old content update bugfix, new network code and protocol, connection share between sites, connection reuse, dont retry bad file more than 3 times in 20 min, multi threaded include file download, shuffle peers before publish, simple internal stats page, dont retry on failed peers, more than 10 peers publish bugfix
2015-02-23 23:33:31 +01:00
|
|
|
from Connection import ConnectionServer
|
2015-02-24 14:02:05 +01:00
|
|
|
from util import UpnpPunch
|
2015-01-12 02:03:45 +01:00
|
|
|
|
|
|
|
|
version 0.2.4, peerPing and peerGetFile commands, old content update bugfix, new network code and protocol, connection share between sites, connection reuse, dont retry bad file more than 3 times in 20 min, multi threaded include file download, shuffle peers before publish, simple internal stats page, dont retry on failed peers, more than 10 peers publish bugfix
2015-02-23 23:33:31 +01:00
|
|
|
class FileServer(ConnectionServer):
    """ZeroNet file server: serves site files to peers over the connection
    server, opens the listening port on the router via UPnP, and keeps the
    locally stored sites announced and up to date.
    """

    def __init__(self):
        ConnectionServer.__init__(self, config.fileserver_ip, config.fileserver_port, self.handleRequest)
        if config.ip_external:  # External ip defined in command line arguments
            self.port_opened = True
            SiteManager.peer_blacklist.append((config.ip_external, self.port))  # Add myself to peer blacklist
        else:
            self.port_opened = None  # Is file server port opened on router? (None = not tested yet)
        self.sites = SiteManager.list()

    # Handle a request arriving to the fileserver
    def handleRequest(self, connection, message):
        if "params" in message:
            self.log.debug(
                "FileRequest: %s %s %s %s" %
                (str(connection), message["cmd"], message["params"].get("site"), message["params"].get("inner_path"))
            )
        else:
            # Bugfix: referenced req["cmd"] before req was defined (NameError); log from the incoming message
            self.log.debug("FileRequest: %s %s" % (str(connection), message["cmd"]))
        req = FileRequest(self, connection)
        req.route(message["cmd"], message.get("req_id"), message.get("params"))

    # Reload the FileRequest class to prevent restarts in debug mode
    def reload(self):
        global FileRequest
        import imp
        FileRequest = imp.load_source("FileRequest", "src/File/FileRequest.py").FileRequest

    # Try to open the fileserver port on the router using UPnP.
    # Returns True if the port is (or becomes) reachable from outside, False otherwise.
    def openport(self, port=None, check=True):
        if not port:
            port = self.port
        if self.port_opened:
            return True  # Port already opened
        if check:  # Check first if its already opened
            if self.testOpenport(port)["result"] == True:
                return True  # Port already opened

        self.log.info("Trying to open port using UpnpPunch...")
        try:
            # Bugfix: the return value was stored then immediately overwritten, so don't keep it
            UpnpPunch.open_port(self.port, 'ZeroNet')
            upnp_punch = True
        except Exception as err:
            self.log.error("UpnpPunch run error: %s" % Debug.formatException(err))
            upnp_punch = False

        if upnp_punch and self.testOpenport(port)["result"] == True:
            return True

        self.log.info("Upnp mapping failed :( Please forward port %s on your router to your ipaddress" % port)
        return False

    # Test if the port is reachable from the internet using canyouseeme.org.
    # Returns {"result": bool, "message": human readable status}; as a side effect
    # updates self.port_opened and config.ip_external when testing our own port.
    def testOpenport(self, port=None):
        time.sleep(1)  # Wait for port open
        if not port:
            port = self.port
        self.log.info("Checking port %s using canyouseeme.org..." % port)
        try:
            data = urllib2.urlopen("http://www.canyouseeme.org/", "port=%s" % port, timeout=20.0).read()
            message = re.match('.*<p style="padding-left:15px">(.*?)</p>', data, re.DOTALL).group(1)
            # Bugfix: second replace was a no-op (" " -> " "); the page uses &nbsp; entities
            message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " "))  # Strip http tags
        except Exception as err:
            message = "Error: %s" % Debug.formatException(err)

        if "Error" in message:
            self.log.info("[BAD :(] Port closed: %s" % message)
            if port == self.port:
                self.port_opened = False  # Self port, update port_opened status
                config.ip_external = False
            return {"result": False, "message": message}
        else:
            self.log.info("[OK :)] Port open: %s" % message)
            if port == self.port:  # Self port, update port_opened status
                self.port_opened = True
            match = re.match(".*?([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)", message)  # Try find my external ip in message
            if match:  # Found my ip in message
                config.ip_external = match.group(1)
                SiteManager.peer_blacklist.append((config.ip_external, self.port))  # Add myself to peer blacklist
            else:
                config.ip_external = False
            return {"result": True, "message": message}

    # Set external ip without testing
    def setIpExternal(self, ip_external):
        # Uses the root logger: may be called before start() creates self.log
        logging.info("Setting external ip without testing: %s..." % ip_external)
        config.ip_external = ip_external
        self.port_opened = True

    # Check site file integrity
    def checkSite(self, site):
        if site.settings["serving"]:
            site.announce()  # Announce site to tracker
            site.update()  # Update site's content.json and download changed files

    # Check all sites integrity (opens the port first if not tested yet)
    def checkSites(self):
        if self.port_opened is None:  # Test and open port if not tested yet
            self.openport()

        self.log.debug("Checking sites integrity..")
        for site in self.sites.values():  # Check sites integrity
            gevent.spawn(self.checkSite, site)  # Check in new thread
            time.sleep(2)  # Prevent too quick request

    # Announce sites every 20 min
    def announceSites(self):
        while 1:
            time.sleep(20 * 60)  # Announce sites every 20 min
            for site in self.sites.values():
                if site.settings["serving"]:
                    site.announce()  # Announce site to tracker
                    for inner_path in site.bad_files:  # Reset bad file retry counter
                        site.bad_files[inner_path] = 0
                    time.sleep(2)  # Prevent too quick request

    # Detects if computer back from wakeup
    def wakeupWatcher(self):
        last_time = time.time()
        while 1:
            time.sleep(30)
            if time.time() - last_time > 60:  # If taken more than 60 second then the computer was in sleep mode
                self.log.info(
                    "Wakeup detected: time wrap from %s to %s (%s sleep seconds), acting like startup..." %
                    (last_time, time.time(), time.time() - last_time)
                )
                self.port_opened = None  # Check if we still has the open port on router
                self.checkSites()
            last_time = time.time()

    # Bind and start serving sites
    def start(self, check_sites=True):
        self.log = logging.getLogger(__name__)

        if config.debug:
            # Auto reload FileRequest on change
            from Debug import DebugReloader
            DebugReloader(self.reload)

        if check_sites:  # Open port, Update sites, Check files integrity
            gevent.spawn(self.checkSites)

        thread_announce_sites = gevent.spawn(self.announceSites)
        thread_wakeup_watcher = gevent.spawn(self.wakeupWatcher)

        # Blocks until the connection server stops
        ConnectionServer.start(self)
        # thread_wakeup_watcher.kill(exception=Debug.Notify("Stopping FileServer"))
        # thread_announce_sites.kill(exception=Debug.Notify("Stopping FileServer"))
        self.log.debug("Stopped.")
|