2015-07-12 20:36:46 +02:00
|
|
|
import logging
|
|
|
|
import time
|
2017-08-15 19:17:42 +02:00
|
|
|
import random
|
2019-01-23 02:13:13 +01:00
|
|
|
import socket
|
2019-09-19 16:33:45 +02:00
|
|
|
import sys
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
import gevent
|
2018-08-29 19:54:31 +02:00
|
|
|
import gevent.pool
|
2019-01-23 02:13:13 +01:00
|
|
|
from gevent.server import StreamServer
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2016-03-18 20:03:38 +01:00
|
|
|
import util
|
2019-01-20 03:13:54 +01:00
|
|
|
from util import helper
|
2015-01-12 02:03:45 +01:00
|
|
|
from Config import config
|
2019-03-15 21:06:59 +01:00
|
|
|
from .FileRequest import FileRequest
|
2019-01-20 03:13:54 +01:00
|
|
|
from Peer import PeerPortchecker
|
2015-01-12 02:03:45 +01:00
|
|
|
from Site import SiteManager
|
version 0.2.4, peerPing and peerGetFile commands, old content update bugfix, new network code and protocol, connection share between sites, connection reuse, dont retry bad file more than 3 times in 20 min, multi threaded include file download, shuffle peers before publish, simple internal stats page, dont retry on failed peers, more than 10 peers publish bugfix
2015-02-23 23:33:31 +01:00
|
|
|
from Connection import ConnectionServer
|
2018-02-08 18:09:57 +01:00
|
|
|
from Plugin import PluginManager
|
2019-01-23 02:13:13 +01:00
|
|
|
from Debug import Debug
|
2015-01-12 02:03:45 +01:00
|
|
|
|
|
|
|
|
2018-02-08 18:09:57 +01:00
|
|
|
@PluginManager.acceptPlugins
class FileServer(ConnectionServer):
    """Serves site files to peers and dispatches incoming file requests.

    Extends ConnectionServer with external port checking, periodic site
    maintenance (checkSites, cleanupSites, announceSites) and wakeup detection.
    """

    def __init__(self, ip=config.fileserver_ip, port=config.fileserver_port, ip_type=config.fileserver_ip_type):
        """Resolve bind parameters; actual listening happens in start().

        :param ip: IP to bind to; may contain a "*" wildcard
        :param port: TCP port; 0 picks a random one from config.fileserver_port_range
        :param ip_type: "ipv4", "ipv6" or "dual"
        """
        self.site_manager = SiteManager.site_manager
        self.portchecker = PeerPortchecker.PeerPortchecker(self)
        self.log = logging.getLogger("FileServer")
        self.ip_type = ip_type
        self.ip_external_list = []  # External IPs discovered by port check / interface scan

        self.supported_ip_types = ["ipv4"]  # Outgoing ip_type support
        if helper.getIpType(ip) == "ipv6" or self.isIpv6Supported():
            self.supported_ip_types.append("ipv6")

        # Resolve the "*" wildcard to a concrete bind address
        if ip_type == "ipv6" or (ip_type == "dual" and "ipv6" in self.supported_ip_types):
            ip = ip.replace("*", "::")
        else:
            ip = ip.replace("*", "0.0.0.0")

        if config.tor == "always":
            port = config.tor_hs_port
            config.fileserver_port = port
        elif port == 0:  # Use random port
            port_range_from, port_range_to = list(map(int, config.fileserver_port_range.split("-")))
            port = self.getRandomPort(ip, port_range_from, port_range_to)
            config.fileserver_port = port
            if not port:
                raise Exception("Can't find bindable port")
            if not config.tor == "always":
                config.saveValue("fileserver_port", port)  # Save random port value for next restart
                config.arguments.fileserver_port = port

        ConnectionServer.__init__(self, ip, port, self.handleRequest)
        self.log.debug("Supported IP types: %s" % self.supported_ip_types)

        if ip_type == "dual" and ip == "::":
            # Also bind to ipv4 addres in dual mode
            try:
                # Fix: log the address the proxy actually binds to (0.0.0.0);
                # the main stream server already has "::"
                self.log.debug("Binding proxy to %s:%s" % ("0.0.0.0", self.port))
                self.stream_server_proxy = StreamServer(
                    ("0.0.0.0", self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100
                )
            except Exception as err:
                self.log.info("StreamServer proxy create error: %s" % Debug.formatException(err))

        self.port_opened = {}  # Last port check result per ip_type ("ipv4"/"ipv6" -> bool or None)

        self.sites = self.site_manager.sites
        self.last_request = time.time()  # Timestamp of last served request (used by wakeupWatcher)
        self.files_parsing = {}
        self.ui_server = None  # NOTE(review): presumably assigned by the UI server after startup — confirm
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2018-04-03 14:45:04 +02:00
|
|
|
def getRandomPort(self, ip, port_range_from, port_range_to):
    """Find a bindable TCP port by random probing within the given range.

    Tries up to 100 distinct candidates; returns the first port that binds
    successfully, or False when none could be found.
    """
    self.log.info("Getting random port in range %s-%s..." % (port_range_from, port_range_to))
    attempted = []
    for _ in range(100):
        candidate = random.randint(port_range_from, port_range_to)
        if candidate in attempted:
            continue
        attempted.append(candidate)
        sock = helper.createSocket(ip)
        bound = False
        try:
            sock.bind((ip, candidate))
            bound = True
        except Exception as err:
            self.log.warning("Error binding to port %s: %s" % (candidate, err))
        sock.close()
        if not bound:
            # Short pause before the next probe (port may be transiently busy)
            time.sleep(0.1)
            continue
        self.log.info("Found unused random port: %s" % candidate)
        return candidate
    return False
|
|
|
|
|
2019-01-23 02:13:13 +01:00
|
|
|
def isIpv6Supported(self):
    """Return True if this host appears to have outgoing IPv6 connectivity.

    Uses a UDP "connect" (no packet is sent) to learn which local source
    address the kernel would pick for an IPv6 destination.
    """
    if config.tor == "always":
        return True
    # Test if we can connect to ipv6 address
    test_ip = "fcec:ae97:8902:d810:6c92:ec67:efb2:3ec5"
    try:
        probe = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
        probe.connect((test_ip, 80))
        source_ip = probe.getsockname()[0]
    except socket.error as err:
        self.log.warning("IPv6 not supported: %s" % err)
        return False
    except Exception as err:
        self.log.error("IPv6 check error: %s" % err)
        return False
    if source_ip == "::1":
        # Loopback as source means no routable IPv6 address is configured
        self.log.debug("IPv6 not supported, no local IPv6 address")
        return False
    self.log.debug("IPv6 supported on IP %s" % source_ip)
    return True
|
|
|
|
|
|
|
|
def listenProxy(self):
    """Run the auxiliary IPv4 stream server (dual-stack mode) until it stops.

    Errors are logged rather than raised: "address already in use" is only
    debug-logged, anything else is info-logged.
    """
    try:
        self.stream_server_proxy.serve_forever()
    except Exception as err:
        # Fix: not every Exception carries .errno (e.g. ValueError); the old
        # `err.errno` access raised AttributeError and hid the real error.
        if getattr(err, "errno", None) == 98:  # Address already in use error
            self.log.debug("StreamServer proxy listen error: %s" % err)
        else:
            self.log.info("StreamServer proxy listen error: %s" % err)
|
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
# Handle request to fileserver
def handleRequest(self, connection, message):
    """Route one incoming fileserver message to a FileRequest handler."""
    cmd = message["cmd"]
    if config.verbose:
        if "params" in message:
            params = message["params"]
            self.log.debug(
                "FileRequest: %s %s %s %s" %
                (str(connection), cmd, params.get("site"), params.get("inner_path"))
            )
        else:
            self.log.debug("FileRequest: %s %s" % (str(connection), cmd))
    request = FileRequest(self, connection)
    request.route(cmd, message.get("req_id"), message.get("params"))
    # A request from a non-private peer is evidence that we are online
    if not self.has_internet and not connection.is_private_ip:
        self.has_internet = True
        self.onInternetOnline()
|
|
|
|
|
|
|
|
def onInternetOnline(self):
    """React to regaining internet connectivity: re-check ports and sites."""
    self.log.info("Internet online")
    gevent.spawn(self.checkSites, force_port_check=True, check_files=False)
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# Reload the FileRequest class to prevent restarts in debug mode
def reload(self):
    """Re-import FileRequest from disk and rebind the module-level name.

    Modernized from the deprecated `imp.load_source` (removed in Python 3.12)
    to the equivalent importlib machinery.
    """
    global FileRequest
    import importlib.util
    spec = importlib.util.spec_from_file_location("FileRequest", "src/File/FileRequest.py")
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    FileRequest = module.FileRequest
|
|
|
|
|
2019-01-20 03:13:54 +01:00
|
|
|
def portCheck(self):
    """Determine whether our fileserver port is reachable from outside.

    Three paths:
      1. Offline mode: report unknown (None) for both ip types.
      2. Externally configured IPs (config.ip_external): trust configuration.
      3. Active check via self.portchecker, ipv6 in parallel with ipv4.
    Updates self.port_opened and self.ip_external_list as side effects and
    returns a dict {"ipv4": opened, "ipv6": opened}.
    """
    if config.offline:
        self.log.info("Offline mode: port check disabled")
        res = {"ipv4": None, "ipv6": None}
        self.port_opened = res
        return res

    if config.ip_external:
        # External IPs come from configuration; no network check needed
        for ip_external in config.ip_external:
            SiteManager.peer_blacklist.append((ip_external, self.port))  # Add myself to peer blacklist

        ip_external_types = set([helper.getIpType(ip) for ip in config.ip_external])
        res = {
            "ipv4": "ipv4" in ip_external_types,
            "ipv6": "ipv6" in ip_external_types
        }
        self.ip_external_list = config.ip_external
        self.port_opened.update(res)
        self.log.info("Server port opened based on configuration ipv4: %s, ipv6: %s" % (res["ipv4"], res["ipv6"]))
        return res

    # Reset state and let the UI know a check is in progress
    self.port_opened = {}
    if self.ui_server:
        self.ui_server.updateWebsocket()

    # Run the ipv6 check concurrently with the ipv4 one
    if "ipv6" in self.supported_ip_types:
        res_ipv6_thread = gevent.spawn(self.portchecker.portCheck, self.port, "ipv6")
    else:
        res_ipv6_thread = None

    res_ipv4 = self.portchecker.portCheck(self.port, "ipv4")
    if not res_ipv4["opened"] and config.tor != "always":
        # Try to open the port (e.g. via UPnP), then re-check
        if self.portchecker.portOpen(self.port):
            res_ipv4 = self.portchecker.portCheck(self.port, "ipv4")

    if res_ipv6_thread is None:
        res_ipv6 = {"ip": None, "opened": None}
    else:
        res_ipv6 = res_ipv6_thread.get()
        # Sanity check: the reported address must really be ipv6
        if res_ipv6["opened"] and not helper.getIpType(res_ipv6["ip"]) == "ipv6":
            self.log.info("Invalid IPv6 address from port check: %s" % res_ipv6["ip"])
            res_ipv6["opened"] = False

    # Record discovered external IPs and blacklist ourselves as a peer
    self.ip_external_list = []
    for res_ip in [res_ipv4, res_ipv6]:
        if res_ip["ip"] and res_ip["ip"] not in self.ip_external_list:
            self.ip_external_list.append(res_ip["ip"])
            SiteManager.peer_blacklist.append((res_ip["ip"], self.port))

    self.log.info("Server port opened ipv4: %s, ipv6: %s" % (res_ipv4["opened"], res_ipv6["opened"]))

    res = {"ipv4": res_ipv4["opened"], "ipv6": res_ipv6["opened"]}

    # Add external IPs from local interfaces
    interface_ips = helper.getInterfaceIps("ipv4")
    if "ipv6" in self.supported_ip_types:
        interface_ips += helper.getInterfaceIps("ipv6")
    for ip in interface_ips:
        if not helper.isPrivateIp(ip) and ip not in self.ip_external_list:
            self.ip_external_list.append(ip)
            res[helper.getIpType(ip)] = True  # We have opened port if we have external ip
            SiteManager.peer_blacklist.append((ip, self.port))
            self.log.debug("External ip found on interfaces: %s" % ip)

    self.port_opened.update(res)

    if self.ui_server:
        self.ui_server.updateWebsocket()

    return res
|
2019-01-20 03:13:54 +01:00
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
# Check site file integrity
def checkSite(self, site, check_files=False):
    """Run the startup health routine for a single served site."""
    if not site.isServing():
        return
    site.announce(mode="startup")  # Make the site known to its trackers
    site.update(check_files=check_files)  # Refresh content.json and fetch changed files
    site.sendMyHashfield()
    site.updateHashfield()
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# Check sites integrity
@util.Noparallel()
def checkSites(self, check_files=False, force_port_check=False):
    """Check every served site, optionally (re-)running the port check first.

    On first startup (very few sites) the site checks are spawned before the
    port check so the UI is not blocked; otherwise sites are checked in a
    small pool after the port status is known.
    """
    self.log.debug("Checking sites...")
    s = time.time()
    sites_checking = False
    if not self.port_opened or force_port_check:  # Test and open port if not tested yet
        if len(self.sites) <= 2:  # Don't wait port opening on first startup
            sites_checking = True
            for address, site in list(self.sites.items()):
                gevent.spawn(self.checkSite, site, check_files)

        self.portCheck()

        # No reachable ipv4 port: fall back to Tor onion services
        if not self.port_opened["ipv4"]:
            self.tor_manager.startOnions()

    if not sites_checking:
        check_pool = gevent.pool.Pool(5)
        # Check sites integrity, most recently modified sites first
        for site in sorted(list(self.sites.values()), key=lambda site: site.settings.get("modified", 0), reverse=True):
            if not site.isServing():
                continue
            check_thread = check_pool.spawn(self.checkSite, site, check_files)  # Check in new thread
            time.sleep(2)  # Throttle the spawn rate
            if site.settings.get("modified", 0) < time.time() - 60 * 60 * 24:  # Not so active site, wait some sec to finish
                check_thread.join(timeout=5)
    self.log.debug("Checksites done in %.3fs" % (time.time() - s))
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2017-02-27 00:07:29 +01:00
|
|
|
def cleanupSites(self):
    """Periodic (every 20 min) maintenance loop for all served sites.

    Each round: close surplus peers (except protected ones), exchange peers
    via PEX, retry failed updates/files, and keep connections alive to
    recently modified sites. Runs forever; meant to be spawned as a greenlet.
    """
    import gc
    startup = True
    time.sleep(5 * 60)  # Sites already cleaned up on startup
    peers_protected = set([])
    while 1:
        # Sites health care every 20 min
        self.log.debug(
            "Running site cleanup, connections: %s, internet: %s, protected peers: %s" %
            (len(self.connections), self.has_internet, len(peers_protected))
        )

        for address, site in list(self.sites.items()):
            if not site.isServing():
                continue

            if not startup:
                site.cleanupPeers(peers_protected)

            time.sleep(1)  # Prevent too quick request

        peers_protected = set([])
        for address, site in list(self.sites.items()):
            if not site.isServing():
                continue

            if site.peers:
                # Cap peer exchange at 10s so one slow site can't stall the loop
                with gevent.Timeout(10, exception=False):
                    site.announcer.announcePex()

            # Last check modification failed
            if site.content_updated is False:
                site.update()
            elif site.bad_files:
                site.retryBadFiles()

            if time.time() - site.settings.get("modified", 0) < 60 * 60 * 24 * 7:
                # Keep active connections if site has been modified within 7 days
                connected_num = site.needConnections(check_site_on_reconnect=True)

                if connected_num < config.connected_limit:  # This site has small amount of peers, protect them from closing
                    peers_protected.update([peer.key for peer in site.getConnectedPeers()])

            time.sleep(1)  # Prevent too quick request

        site = None  # Drop the loop reference before collecting
        gc.collect()  # Implicit garbage collection
        startup = False
        time.sleep(60 * 20)
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2018-10-30 04:43:58 +01:00
|
|
|
def announceSite(self, site):
    """One periodic announce round for a single site."""
    site.announce(mode="update", pex=False)
    modified_ago = time.time() - site.settings.get("modified", 0)
    recently_modified = modified_ago < 24 * 60 * 60
    if site.settings["own"] or recently_modified:
        # Check connections more frequently on own and active sites to speed-up first connections
        site.needConnections(check_site_on_reconnect=True)
    site.sendMyHashfield(3)
    site.updateHashfield(3)
|
|
|
|
|
2017-02-27 00:07:29 +01:00
|
|
|
# Announce sites every 20 min
def announceSites(self):
    """Endless loop announcing every served site to its trackers.

    Spreads one full pass over the whole tracker list across ~20 minutes.
    Runs forever; meant to be spawned as a greenlet.
    """
    time.sleep(5 * 60)  # Sites already announced on startup
    while 1:
        config.loadTrackersFile()
        s = time.time()
        for address, site in list(self.sites.items()):
            if not site.isServing():
                continue
            gevent.spawn(self.announceSite, site).join(timeout=10)
            time.sleep(1)  # Prevent too quick requests
        taken = time.time() - s

        # Query all trackers one-by-one in 20 minutes evenly distributed.
        # Guard against an empty tracker list (would raise ZeroDivisionError).
        sleep = max(0, 60 * 20 / max(1, len(config.trackers)) - taken)

        self.log.debug("Site announce tracker done in %.3fs, sleeping for %.3fs..." % (taken, sleep))
        time.sleep(sleep)
|
2015-09-10 23:25:09 +02:00
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
# Detects if computer back from wakeup
def wakeupWatcher(self):
    """Poll every 30s; if the clock jumped > 3 minutes past both the last
    request and the last poll, assume the machine slept and re-run the
    startup site/port checks. Runs forever; meant to be spawned as a greenlet.
    """
    previous_tick = time.time()
    while 1:
        time.sleep(30)
        if time.time() - max(self.last_request, previous_tick) > 60 * 3:
            # If taken more than 3 minute then the computer was in sleep mode
            self.log.info(
                "Wakeup detected: time warp from %s to %s (%s sleep seconds), acting like startup..." %
                (previous_tick, time.time(), time.time() - previous_tick)
            )
            self.checkSites(check_files=False, force_port_check=True)
        previous_tick = time.time()
|
|
|
|
|
|
|
|
# Bind and start serving sites
def start(self, check_sites=True):
    """Bind the server, spawn the maintenance greenlets and serve until stopped.

    :param check_sites: also open the port, update sites and verify files
    :return: False on startup failure, otherwise blocks in listen()
    """
    if self.stopping:
        return False

    ConnectionServer.start(self)

    try:
        self.stream_server.start()
    except Exception as err:
        # Cannot listen: shut the UI server down too (if running) and bail out
        self.log.error("Error listening on: %s:%s: %s" % (self.ip, self.port, err))
        if "ui_server" in dir(sys.modules["main"]):
            self.log.debug("Stopping UI Server.")
            sys.modules["main"].ui_server.stop()
        return False

    self.sites = self.site_manager.list()
    if config.debug:
        # Auto reload FileRequest on change
        from Debug import DebugReloader
        DebugReloader.watcher.addCallback(self.reload)

    if check_sites:  # Open port, Update sites, Check files integrity
        gevent.spawn(self.checkSites)

    # Background maintenance greenlets
    thread_announce_sites = gevent.spawn(self.announceSites)
    thread_cleanup_sites = gevent.spawn(self.cleanupSites)
    thread_wakeup_watcher = gevent.spawn(self.wakeupWatcher)

    # Blocks until the server is stopped
    ConnectionServer.listen(self)

    self.log.debug("Stopped.")
|
2016-10-02 14:22:01 +02:00
|
|
|
|
|
|
|
def stop(self):
    """Stop serving; first release the UPnP port mapping if we created one."""
    opened_via_upnp = self.running and self.portchecker.upnp_port_opened
    if opened_via_upnp:
        self.log.debug('Closing port %d' % self.port)
        try:
            self.portchecker.portClose(self.port)
        except Exception as err:
            self.log.info("Failed at attempt to use upnp to close port: %s" % err)
        else:
            self.log.info('Closed port via upnp.')

    return ConnectionServer.stop(self)
|