2015-07-12 20:36:46 +02:00
|
|
|
import cgi
import html
import json
import mimetypes
import os
import re
import time
import urllib.parse

import gevent

from Config import config
from Crypt import CryptHash
from Plugin import PluginManager
from Site import SiteManager
from Ui.UiWebsocket import UiWebsocket
from User import UserManager
from util import helper
|
2015-01-12 02:03:45 +01:00
|
|
|
|
|
|
|
# HTTP status-line lookup used by sendHeader (status code -> full status text)
status_texts = {
    200: "200 OK",
    206: "206 Partial Content",
    400: "400 Bad Request",
    403: "403 Forbidden",
    404: "404 Not Found",
    500: "500 Internal Server Error",
}
|
|
|
|
|
|
|
|
|
2017-10-04 12:37:22 +02:00
|
|
|
class SecurityError(Exception):
    """Raised when a request path fails sanitization (e.g. contains "..")."""
    pass
|
|
|
|
|
|
|
|
|
version 0.2.7, plugin system, multiuser plugin for zeroproxies, reworked imports, cookie parse, stats moved to plugin, usermanager class, dont generate site auth on listing, multiline notifications, allow server side prompt from user, update script keep plugins disabled status
2015-03-24 01:33:09 +01:00
|
|
|
@PluginManager.acceptPlugins
|
|
|
|
class UiRequest(object):
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
    def __init__(self, server, get, env, start_response):
        """Per-request handler state; one instance is created for every http request.

        server: the UiServer instance (guarded below, so it may be falsy in tests)
        get: dict of parsed GET parameters
        env: WSGI environment dict
        start_response: WSGI start_response callable
        """
        if server:
            self.server = server
            self.log = server.log
        self.get = get  # Get parameters
        self.env = env  # Environment settings
        # ['CONTENT_LENGTH', 'CONTENT_TYPE', 'GATEWAY_INTERFACE', 'HTTP_ACCEPT', 'HTTP_ACCEPT_ENCODING', 'HTTP_ACCEPT_LANGUAGE',
        # 'HTTP_COOKIE', 'HTTP_CACHE_CONTROL', 'HTTP_HOST', 'HTTP_HTTPS', 'HTTP_ORIGIN', 'HTTP_PROXY_CONNECTION', 'HTTP_REFERER',
        # 'HTTP_USER_AGENT', 'PATH_INFO', 'QUERY_STRING', 'REMOTE_ADDR', 'REMOTE_PORT', 'REQUEST_METHOD', 'SCRIPT_NAME',
        # 'SERVER_NAME', 'SERVER_PORT', 'SERVER_PROTOCOL', 'SERVER_SOFTWARE', 'werkzeug.request', 'wsgi.errors',
        # 'wsgi.input', 'wsgi.multiprocess', 'wsgi.multithread', 'wsgi.run_once', 'wsgi.url_scheme', 'wsgi.version']

        self.start_response = start_response  # Start response function
        self.user = None  # Lazily loaded by getCurrentUser()
        self.script_nonce = None  # Nonce for script tags in wrapper html
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2017-06-13 14:19:23 +02:00
|
|
|
def isHostAllowed(self, host):
|
|
|
|
if host in self.server.allowed_hosts:
|
|
|
|
return True
|
|
|
|
|
|
|
|
if self.isProxyRequest(): # Support for chrome extension proxy
|
|
|
|
if self.server.site_manager.isDomain(host):
|
|
|
|
return True
|
|
|
|
else:
|
|
|
|
return False
|
|
|
|
|
2017-06-15 19:48:01 +02:00
|
|
|
if self.server.learn_allowed_host:
|
2017-06-13 14:19:23 +02:00
|
|
|
# Learn the first request's host as allowed one
|
|
|
|
self.server.learn_allowed_host = False
|
|
|
|
self.server.allowed_hosts.add(host)
|
|
|
|
self.server.log.info("Added %s as allowed host" % host)
|
|
|
|
return True
|
|
|
|
|
|
|
|
return False
|
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
    # Call the request handler function based on path
    def route(self, path):
        """Dispatch an incoming request path to the matching action* handler.

        Returns the response body (string/iterable) produced by the handler,
        or an error403/error404 response for rejected/unknown paths.
        """
        # Restrict Ui access by ip
        if config.ui_restrict and self.env['REMOTE_ADDR'] not in config.ui_restrict:
            return self.error403(details=False)

        # Check if host allowed to do request
        if not self.isHostAllowed(self.env.get("HTTP_HOST")):
            return self.error403("Invalid host: %s" % self.env.get("HTTP_HOST"), details=False)

        # Prepend .bit host for transparent proxy
        if self.server.site_manager.isDomain(self.env.get("HTTP_HOST")):
            path = re.sub("^/", "/" + self.env.get("HTTP_HOST") + "/", path)
        path = re.sub("^http://zero[/]+", "/", path)  # Remove beginning http://zero/ for chrome extension
        path = re.sub("^http://", "/", path)  # Remove beginning http for chrome extension .bit access

        # Sanitize request url: reject any form of directory traversal
        path = path.replace("\\", "/")
        if "../" in path or "./" in path:
            return self.error403("Invalid path: %s" % path)

        # CORS pre-flight: answer with headers only, empty body
        if self.env["REQUEST_METHOD"] == "OPTIONS":
            if "/" not in path.strip("/"):
                content_type = self.getContentType("index.html")
            else:
                content_type = self.getContentType(path)

            extra_headers = {"Access-Control-Allow-Origin": "null"}

            self.sendHeader(content_type=content_type, extra_headers=extra_headers)
            return ""

        if path == "/":
            return self.actionIndex()
        elif path == "/favicon.ico":
            return self.actionFile("src/Ui/media/img/favicon.ico")
        # Internal functions
        elif "/ZeroNet-Internal/" in path:
            path = re.sub(".*?/ZeroNet-Internal/", "/", path)
            func = getattr(self, "action" + path.strip("/"), None)  # Check if we have action+request_path function
            if func:
                return func()
            else:
                return self.error404(path)
        # Media
        elif path.startswith("/uimedia/"):
            return self.actionUiMedia(path)
        elif "/uimedia/" in path:
            # uimedia within site dir (for chrome extension)
            path = re.sub(".*?/uimedia/", "/uimedia/", path)
            return self.actionUiMedia(path)
        # Websocket
        elif path == "/Websocket":
            return self.actionWebsocket()
        # Debug
        elif path == "/Debug" and config.debug:
            return self.actionDebug()
        elif path == "/Console" and config.debug:
            return self.actionConsole()
        # Wrapper-less static files
        elif path.startswith("/raw/"):
            return self.actionSiteMedia(path.replace("/raw", "/media", 1), header_noscript=True)

        elif path.startswith("/add/"):
            return self.actionSiteAdd()
        # Site media wrapper
        else:
            if self.get.get("wrapper_nonce"):
                # A valid wrapper nonce allows serving the html file directly (nonce is single-use)
                if self.get["wrapper_nonce"] in self.server.wrapper_nonces:
                    self.server.wrapper_nonces.remove(self.get["wrapper_nonce"])
                    return self.actionSiteMedia("/media" + path)  # Only serve html files with frame
                else:
                    self.server.log.warning("Invalid wrapper nonce: %s" % self.get["wrapper_nonce"])
                    body = self.actionWrapper(path)
            else:
                body = self.actionWrapper(path)
            if body:
                return body
            else:
                func = getattr(self, "action" + path.strip("/"), None)  # Check if we have action+request_path function
                if func:
                    return func()
                else:
                    return self.error404(path)
|
|
|
|
|
2018-06-02 14:17:10 +02:00
|
|
|
# The request is proxied by chrome extension or a transparent proxy
|
2015-07-12 20:36:46 +02:00
|
|
|
def isProxyRequest(self):
|
2018-06-02 14:17:10 +02:00
|
|
|
return self.env["PATH_INFO"].startswith("http://") or (self.server.allow_trans_proxy and self.server.site_manager.isDomain(self.env.get("HTTP_HOST")))
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2017-01-08 17:26:59 +01:00
|
|
|
def isWebSocketRequest(self):
|
2017-01-08 07:56:42 +01:00
|
|
|
return self.env.get("HTTP_UPGRADE") == "websocket"
|
2017-01-08 17:26:59 +01:00
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
def isAjaxRequest(self):
|
|
|
|
return self.env.get("HTTP_X_REQUESTED_WITH") == "XMLHttpRequest"
|
|
|
|
|
|
|
|
# Get mime by filename
|
|
|
|
def getContentType(self, file_name):
|
|
|
|
content_type = mimetypes.guess_type(file_name)[0]
|
2017-01-05 02:33:33 +01:00
|
|
|
|
2018-09-30 21:54:50 +02:00
|
|
|
if content_type:
|
|
|
|
content_type = content_type.lower()
|
|
|
|
|
2017-01-12 06:22:26 +01:00
|
|
|
if file_name.endswith(".css"): # Force correct css content type
|
2017-01-05 02:33:33 +01:00
|
|
|
content_type = "text/css"
|
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
if not content_type:
|
2017-01-12 06:22:26 +01:00
|
|
|
if file_name.endswith(".json"): # Correct json header
|
2015-07-12 20:36:46 +02:00
|
|
|
content_type = "application/json"
|
|
|
|
else:
|
|
|
|
content_type = "application/octet-stream"
|
2018-10-20 02:34:26 +02:00
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
return content_type
|
|
|
|
|
2015-07-17 00:28:43 +02:00
|
|
|
# Return: <dict> Posted variables
|
|
|
|
def getPosted(self):
|
|
|
|
if self.env['REQUEST_METHOD'] == "POST":
|
|
|
|
return dict(cgi.parse_qsl(
|
|
|
|
self.env['wsgi.input'].readline().decode()
|
|
|
|
))
|
|
|
|
else:
|
|
|
|
return {}
|
|
|
|
|
|
|
|
# Return: <dict> Cookies based on self.env
|
2015-07-12 20:36:46 +02:00
|
|
|
def getCookies(self):
|
|
|
|
raw_cookies = self.env.get('HTTP_COOKIE')
|
|
|
|
if raw_cookies:
|
|
|
|
cookies = cgi.parse_qsl(raw_cookies)
|
|
|
|
return {key.strip(): val for key, val in cookies}
|
|
|
|
else:
|
|
|
|
return {}
|
|
|
|
|
|
|
|
def getCurrentUser(self):
|
|
|
|
if self.user:
|
|
|
|
return self.user # Cache
|
|
|
|
self.user = UserManager.user_manager.get() # Get user
|
|
|
|
if not self.user:
|
|
|
|
self.user = UserManager.user_manager.create()
|
|
|
|
return self.user
|
|
|
|
|
2017-07-09 14:10:01 +02:00
|
|
|
def getRequestUrl(self):
|
|
|
|
if self.isProxyRequest():
|
2017-07-14 11:08:22 +02:00
|
|
|
if self.env["PATH_INFO"].startswith("http://zero/"):
|
2017-07-09 14:10:01 +02:00
|
|
|
return self.env["PATH_INFO"]
|
|
|
|
else: # Add http://zero to direct domain access
|
|
|
|
return self.env["PATH_INFO"].replace("http://", "http://zero/", 1)
|
|
|
|
else:
|
|
|
|
return self.env["wsgi.url_scheme"] + "://" + self.env["HTTP_HOST"] + self.env["PATH_INFO"]
|
|
|
|
|
|
|
|
def getReferer(self):
|
|
|
|
referer = self.env.get("HTTP_REFERER")
|
2017-07-14 11:08:22 +02:00
|
|
|
if referer and self.isProxyRequest() and not referer.startswith("http://zero/"):
|
2017-07-09 14:10:01 +02:00
|
|
|
return referer.replace("http://", "http://zero/", 1)
|
|
|
|
else:
|
|
|
|
return referer
|
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
# Send response headers
|
2018-11-26 00:02:35 +01:00
|
|
|
def sendHeader(self, status=200, content_type="text/html", noscript=False, allow_ajax=False, script_nonce=None, extra_headers=[]):
|
2018-03-06 11:58:56 +01:00
|
|
|
headers = {}
|
|
|
|
headers["Version"] = "HTTP/1.1"
|
|
|
|
headers["Connection"] = "Keep-Alive"
|
|
|
|
headers["Keep-Alive"] = "max=25, timeout=30"
|
|
|
|
headers["X-Frame-Options"] = "SAMEORIGIN"
|
2017-07-09 14:11:44 +02:00
|
|
|
if content_type != "text/html" and self.env.get("HTTP_REFERER") and self.isSameOrigin(self.getReferer(), self.getRequestUrl()):
|
2018-03-06 11:58:56 +01:00
|
|
|
headers["Access-Control-Allow-Origin"] = "*" # Allow load font files from css
|
2017-07-11 23:00:33 +02:00
|
|
|
|
|
|
|
if noscript:
|
2018-03-06 11:58:56 +01:00
|
|
|
headers["Content-Security-Policy"] = "default-src 'none'; sandbox allow-top-navigation allow-forms; img-src 'self'; font-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline';"
|
2018-11-26 00:02:35 +01:00
|
|
|
elif script_nonce:
|
|
|
|
headers["Content-Security-Policy"] = "script-src 'nonce-%s'" % script_nonce
|
2017-07-11 23:00:33 +02:00
|
|
|
|
2018-03-09 15:01:45 +01:00
|
|
|
if allow_ajax:
|
|
|
|
headers["Access-Control-Allow-Origin"] = "null"
|
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
if self.env["REQUEST_METHOD"] == "OPTIONS":
|
|
|
|
# Allow json access
|
2018-03-06 11:58:56 +01:00
|
|
|
headers["Access-Control-Allow-Headers"] = "Origin, X-Requested-With, Content-Type, Accept, Cookie, Range"
|
|
|
|
headers["Access-Control-Allow-Credentials"] = "true"
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2016-01-12 21:52:24 +01:00
|
|
|
if content_type == "text/html":
|
|
|
|
content_type = "text/html; charset=utf-8"
|
2017-02-24 14:39:20 +01:00
|
|
|
if content_type == "text/plain":
|
|
|
|
content_type = "text/plain; charset=utf-8"
|
|
|
|
|
2017-07-06 00:09:05 +02:00
|
|
|
# Download instead of display file types that can be dangerous
|
|
|
|
if re.findall("/svg|/xml|/x-shockwave-flash|/pdf", content_type):
|
2018-03-06 11:58:56 +01:00
|
|
|
headers["Content-Disposition"] = "attachment"
|
2017-07-06 00:09:05 +02:00
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
cacheable_type = (
|
Rev571, Optional file sizes to sidebar, Download all optional files option in sidebar, Optional file number in peer stats, Delete removed or changed optional files, Auto download optional files if autodownloadoptional checked, SiteReload command, Peer use global file server if no site defined, Allow browser cache video files, Allow more keepalive connections, Gevent 1.1 ranged request bugfix, Dont sent optional files details on websocket, Remove files from workermanager tasks if no longer in bad_files, Notify local client about changes on external siteSign
2015-11-09 00:44:03 +01:00
|
|
|
content_type == "text/css" or content_type.startswith("image") or content_type.startswith("video") or
|
2015-07-12 20:36:46 +02:00
|
|
|
self.env["REQUEST_METHOD"] == "OPTIONS" or content_type == "application/javascript"
|
|
|
|
)
|
|
|
|
|
Rev571, Optional file sizes to sidebar, Download all optional files option in sidebar, Optional file number in peer stats, Delete removed or changed optional files, Auto download optional files if autodownloadoptional checked, SiteReload command, Peer use global file server if no site defined, Allow browser cache video files, Allow more keepalive connections, Gevent 1.1 ranged request bugfix, Dont sent optional files details on websocket, Remove files from workermanager tasks if no longer in bad_files, Notify local client about changes on external siteSign
2015-11-09 00:44:03 +01:00
|
|
|
if status in (200, 206) and cacheable_type: # Cache Css, Js, Image files for 10min
|
2018-03-06 11:58:56 +01:00
|
|
|
headers["Cache-Control"] = "public, max-age=600" # Cache 10 min
|
Rev426, Fix for nonce error on bigsites asking, Dont display error details on 404 error, Dont log Websocket close errors, Add travis pip caching and osx test, Add build status to readme, Test for site files after cloning, Test for json to db mapping, Test site deleteFiles command, Test user certificate and auth address generation, Exclude debug lines from coverage, Dont run webtests every time
2015-09-20 22:35:45 +02:00
|
|
|
else:
|
2018-03-06 11:58:56 +01:00
|
|
|
headers["Cache-Control"] = "no-cache, no-store, private, must-revalidate, max-age=0" # No caching at all
|
|
|
|
headers["Content-Type"] = content_type
|
|
|
|
headers.update(extra_headers)
|
|
|
|
return self.start_response(status_texts[status], headers.items())
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# Renders a template
|
|
|
|
def render(self, template_path, *args, **kwargs):
|
2018-03-06 12:00:30 +01:00
|
|
|
template = open(template_path).read()
|
|
|
|
for key, val in kwargs.items():
|
|
|
|
template = template.replace("{%s}" % key, "%s" % val)
|
2018-03-07 10:23:13 +01:00
|
|
|
return template.encode("utf8")
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# - Actions -
|
|
|
|
|
|
|
|
# Redirect to an url
|
|
|
|
def actionRedirect(self, url):
|
2017-05-11 17:56:01 +02:00
|
|
|
self.start_response('301 Redirect', [('Location', str(url))])
|
2015-07-12 20:36:46 +02:00
|
|
|
yield "Location changed: %s" % url
|
|
|
|
|
|
|
|
    def actionIndex(self):
        # Bare "/" request: redirect to the configured homepage site
        return self.actionRedirect("/" + config.homepage)
|
|
|
|
|
|
|
|
    # Render a file from media with iframe site wrapper
    def actionWrapper(self, path, extra_headers=None):
        """Serve an html file inside the ZeroNet iframe wrapper.

        Non-html files are delegated to actionSiteMedia; ajax/websocket/prefetch
        requests are rejected with 403. Returns False for unparseable urls or
        when the site cannot be loaded.
        """
        if not extra_headers:
            extra_headers = {}
        script_nonce = self.getScriptNonce()

        match = re.match("/(?P<address>[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path)
        just_added = False
        if match:
            address = match.group("address")
            inner_path = match.group("inner_path").lstrip("/")

            if not inner_path or path.endswith("/"):  # It's a directory
                content_type = self.getContentType("index.html")
            else:  # It's a file
                content_type = self.getContentType(inner_path)

            is_html_file = "html" in content_type or "xhtml" in content_type

            if not is_html_file:
                return self.actionSiteMedia("/media" + path)  # Serve non-html files without wrapper

            if self.isAjaxRequest():
                return self.error403("Ajax request not allowed to load wrapper")  # No ajax allowed on wrapper

            if self.isWebSocketRequest():
                return self.error403("WebSocket request not allowed to load wrapper")  # No websocket

            # Reject clients that are not actually navigating to an html page
            if "text/html" not in self.env.get("HTTP_ACCEPT", ""):
                return self.error403("Invalid Accept header to load wrapper")
            if "prefetch" in self.env.get("HTTP_X_MOZ", "") or "prefetch" in self.env.get("HTTP_PURPOSE", ""):
                return self.error403("Prefetch not allowed to load wrapper")

            site = SiteManager.site_manager.get(address)

            if (
                site and site.content_manager.contents.get("content.json") and
                (not site.getReachableBadFiles() or site.settings["own"])
            ):  # Its downloaded or own
                title = site.content_manager.contents["content.json"]["title"]
            else:
                title = "Loading %s..." % address
                site = SiteManager.site_manager.get(address)
                if site:  # Already added, but not downloaded
                    if time.time() - site.announcer.time_last_announce > 5:
                        site.log.debug("Reannouncing site...")
                        gevent.spawn(site.update, announce=True)
                else:  # If not added yet
                    site = SiteManager.site_manager.need(address)
                    just_added = True

                if not site:
                    return False

            self.sendHeader(extra_headers=extra_headers, script_nonce=script_nonce)

            # Background-refresh sites that have not been announced for over an hour
            min_last_announce = (time.time() - site.announcer.time_last_announce) / 60
            if min_last_announce > 60 and site.settings["serving"] and not just_added:
                site.log.debug("Site requested, but not announced recently (last %.0fmin ago). Updating..." % min_last_announce)
                gevent.spawn(site.update, announce=True)

            # Make response be sent at once (see https://github.com/HelloZeroNet/ZeroNet/issues/1092)
            return iter([self.renderWrapper(site, path, inner_path, title, extra_headers, script_nonce=script_nonce)])

        else:  # Bad url
            return False
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2017-05-11 17:57:29 +02:00
|
|
|
def getSiteUrl(self, address):
|
|
|
|
if self.isProxyRequest():
|
|
|
|
return "http://zero/" + address
|
|
|
|
else:
|
|
|
|
return "/" + address
|
|
|
|
|
2018-07-17 02:09:14 +02:00
|
|
|
def processQueryString(self, site, query_string):
|
|
|
|
match = re.search("zeronet_peers=(.*?)(&|$)", query_string)
|
|
|
|
if match:
|
|
|
|
query_string = query_string.replace(match.group(0), "")
|
|
|
|
num_added = 0
|
|
|
|
for peer in match.group(1).split(","):
|
|
|
|
if not re.match(".*?:[0-9]+$", peer):
|
|
|
|
continue
|
|
|
|
ip, port = peer.split(":")
|
|
|
|
if site.addPeer(ip, int(port), source="query_string"):
|
|
|
|
num_added += 1
|
|
|
|
site.log.debug("%s peers added by query string" % num_added)
|
|
|
|
|
|
|
|
return query_string
|
|
|
|
|
2018-11-26 00:02:35 +01:00
|
|
|
def renderWrapper(self, site, path, inner_path, title, extra_headers, show_loadingscreen=None, script_nonce=None):
|
2015-07-17 00:28:43 +02:00
|
|
|
file_inner_path = inner_path
|
|
|
|
if not file_inner_path:
|
|
|
|
file_inner_path = "index.html" # If inner path defaults to index.html
|
|
|
|
|
Rev903, FeedQuery command only available for ADMIN sites, Show bad files in sidebar, Log unknown messages, Add and check inner_path and site address on sign/verify, Better peer cleanup limit, Log site load times, Testcase for address and inner_path verification, Re-sign testsite with new fields, Fix unnecessary loading screen display when browsing sub-folder with index.html, Fix safari notification width
2016-02-18 11:22:21 +01:00
|
|
|
if file_inner_path.endswith("/"):
|
|
|
|
file_inner_path = file_inner_path + "index.html"
|
|
|
|
|
2015-07-17 00:28:43 +02:00
|
|
|
address = re.sub("/.*", "", path.lstrip("/"))
|
|
|
|
if self.isProxyRequest() and (not path or "/" in path[1:]):
|
2017-02-05 22:55:24 +01:00
|
|
|
if self.env["HTTP_HOST"] == "zero":
|
|
|
|
root_url = "/" + address + "/"
|
2017-05-11 18:18:12 +02:00
|
|
|
file_url = "/" + address + "/" + inner_path
|
2017-02-05 22:55:24 +01:00
|
|
|
else:
|
2017-05-11 18:18:12 +02:00
|
|
|
file_url = "/" + inner_path
|
2017-02-05 22:55:24 +01:00
|
|
|
root_url = "/"
|
|
|
|
|
2015-07-17 00:28:43 +02:00
|
|
|
else:
|
|
|
|
file_url = "/" + address + "/" + inner_path
|
2016-11-10 23:14:30 +01:00
|
|
|
root_url = "/" + address + "/"
|
2015-07-17 00:28:43 +02:00
|
|
|
|
|
|
|
# Wrapper variable inits
|
|
|
|
body_style = ""
|
|
|
|
meta_tags = ""
|
Rev900, Sidebar filestats bar width round fix, Sidebar WebGL not supported error, Sidebar optimalizations, Trayicon gray shadow, Trim end of line whitespace from json files, Fix testweb testcase, Implement experimental postMessage nonce security, Return None when testing external ip, Window opener security check and message, Increase timeout for large files
2016-02-10 02:30:04 +01:00
|
|
|
postmessage_nonce_security = "false"
|
2015-07-17 00:28:43 +02:00
|
|
|
|
2015-09-10 23:25:09 +02:00
|
|
|
wrapper_nonce = self.getWrapperNonce()
|
2018-07-17 02:09:14 +02:00
|
|
|
inner_query_string = self.processQueryString(site, self.env.get("QUERY_STRING", ""))
|
2015-09-10 23:25:09 +02:00
|
|
|
|
2018-07-17 02:09:14 +02:00
|
|
|
if inner_query_string:
|
|
|
|
inner_query_string = "?%s&wrapper_nonce=%s" % (inner_query_string, wrapper_nonce)
|
2017-05-11 17:59:12 +02:00
|
|
|
elif "?" in inner_path:
|
2018-07-17 02:09:14 +02:00
|
|
|
inner_query_string = "&wrapper_nonce=%s" % wrapper_nonce
|
2015-07-17 00:28:43 +02:00
|
|
|
else:
|
2018-07-17 02:09:14 +02:00
|
|
|
inner_query_string = "?wrapper_nonce=%s" % wrapper_nonce
|
|
|
|
|
2015-07-17 00:28:43 +02:00
|
|
|
if self.isProxyRequest(): # Its a remote proxy request
|
|
|
|
if self.env["REMOTE_ADDR"] == "127.0.0.1": # Local client, the server address also should be 127.0.0.1
|
|
|
|
server_url = "http://127.0.0.1:%s" % self.env["SERVER_PORT"]
|
|
|
|
else: # Remote client, use SERVER_NAME as server's real address
|
|
|
|
server_url = "http://%s:%s" % (self.env["SERVER_NAME"], self.env["SERVER_PORT"])
|
|
|
|
homepage = "http://zero/" + config.homepage
|
|
|
|
else: # Use relative path
|
|
|
|
server_url = ""
|
|
|
|
homepage = "/" + config.homepage
|
|
|
|
|
2018-10-20 13:34:49 +02:00
|
|
|
user = self.getCurrentUser()
|
|
|
|
if user:
|
|
|
|
theme = user.settings.get("theme", "light")
|
|
|
|
else:
|
|
|
|
theme = "light"
|
|
|
|
|
|
|
|
themeclass = "theme-%-6s" % re.sub("[^a-z]", "", theme)
|
|
|
|
|
2015-07-17 00:28:43 +02:00
|
|
|
if site.content_manager.contents.get("content.json"): # Got content.json
|
|
|
|
content = site.content_manager.contents["content.json"]
|
|
|
|
if content.get("background-color"):
|
2018-10-20 02:35:48 +02:00
|
|
|
background_color = content.get("background-color-%s" % theme, content["background-color"])
|
|
|
|
body_style += "background-color: %s;" % cgi.escape(background_color, True)
|
2015-07-17 00:28:43 +02:00
|
|
|
if content.get("viewport"):
|
|
|
|
meta_tags += '<meta name="viewport" id="viewport" content="%s">' % cgi.escape(content["viewport"], True)
|
2016-11-10 23:14:30 +01:00
|
|
|
if content.get("favicon"):
|
|
|
|
meta_tags += '<link rel="icon" href="%s%s">' % (root_url, cgi.escape(content["favicon"], True))
|
Rev900, Sidebar filestats bar width round fix, Sidebar WebGL not supported error, Sidebar optimalizations, Trayicon gray shadow, Trim end of line whitespace from json files, Fix testweb testcase, Implement experimental postMessage nonce security, Return None when testing external ip, Window opener security check and message, Increase timeout for large files
2016-02-10 02:30:04 +01:00
|
|
|
if content.get("postmessage_nonce_security"):
|
|
|
|
postmessage_nonce_security = "true"
|
|
|
|
|
2017-10-30 15:15:40 +01:00
|
|
|
sandbox_permissions = ""
|
2015-09-17 02:20:43 +02:00
|
|
|
|
2018-02-21 03:12:51 +01:00
|
|
|
if "NOSANDBOX" in site.settings["permissions"]:
|
|
|
|
sandbox_permissions += " allow-same-origin"
|
|
|
|
|
2017-05-11 18:01:16 +02:00
|
|
|
if show_loadingscreen is None:
|
|
|
|
show_loadingscreen = not site.storage.isFile(file_inner_path)
|
|
|
|
|
2015-07-31 23:34:53 +02:00
|
|
|
return self.render(
|
2015-07-17 00:28:43 +02:00
|
|
|
"src/Ui/template/wrapper.html",
|
|
|
|
server_url=server_url,
|
|
|
|
inner_path=inner_path,
|
2016-02-20 11:19:28 +01:00
|
|
|
file_url=re.escape(file_url),
|
|
|
|
file_inner_path=re.escape(file_inner_path),
|
2015-07-17 00:28:43 +02:00
|
|
|
address=site.address,
|
2016-02-18 19:44:52 +01:00
|
|
|
title=cgi.escape(title, True),
|
2015-07-17 00:28:43 +02:00
|
|
|
body_style=body_style,
|
|
|
|
meta_tags=meta_tags,
|
2018-07-17 02:09:14 +02:00
|
|
|
query_string=re.escape(inner_query_string),
|
2015-07-17 00:28:43 +02:00
|
|
|
wrapper_key=site.settings["wrapper_key"],
|
2017-08-15 02:40:38 +02:00
|
|
|
ajax_key=site.settings["ajax_key"],
|
Rev900, Sidebar filestats bar width round fix, Sidebar WebGL not supported error, Sidebar optimalizations, Trayicon gray shadow, Trim end of line whitespace from json files, Fix testweb testcase, Implement experimental postMessage nonce security, Return None when testing external ip, Window opener security check and message, Increase timeout for large files
2016-02-10 02:30:04 +01:00
|
|
|
wrapper_nonce=wrapper_nonce,
|
|
|
|
postmessage_nonce_security=postmessage_nonce_security,
|
2015-07-17 00:28:43 +02:00
|
|
|
permissions=json.dumps(site.settings["permissions"]),
|
2017-05-11 18:01:16 +02:00
|
|
|
show_loadingscreen=json.dumps(show_loadingscreen),
|
2015-09-17 02:20:43 +02:00
|
|
|
sandbox_permissions=sandbox_permissions,
|
2015-07-17 00:28:43 +02:00
|
|
|
rev=config.rev,
|
2016-11-18 20:07:58 +01:00
|
|
|
lang=config.language,
|
2018-10-20 02:35:48 +02:00
|
|
|
homepage=homepage,
|
2018-11-26 00:02:35 +01:00
|
|
|
themeclass=themeclass,
|
|
|
|
script_nonce=script_nonce
|
2015-07-17 00:28:43 +02:00
|
|
|
)
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2015-09-10 23:25:09 +02:00
|
|
|
    # Create a new wrapper nonce that allows to get one html file without the wrapper
    def getWrapperNonce(self):
        wrapper_nonce = CryptHash.random()
        # Registered on the server so route() can validate and consume it (single use)
        self.server.wrapper_nonces.append(wrapper_nonce)
        return wrapper_nonce
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2018-11-26 00:02:35 +01:00
|
|
|
def getScriptNonce(self):
|
|
|
|
if not self.script_nonce:
|
|
|
|
self.script_nonce = CryptHash.random(encoding="base64")
|
|
|
|
|
|
|
|
return self.script_nonce
|
|
|
|
|
2017-07-15 01:32:15 +02:00
|
|
|
    # Create a new wrapper nonce that allows to get one site
    def getAddNonce(self):
        # Single-use token authorizing one /add/ site request
        add_nonce = CryptHash.random()
        self.server.add_nonces.append(add_nonce)
        return add_nonce
|
|
|
|
|
2017-07-09 14:11:06 +02:00
|
|
|
def isSameOrigin(self, url_a, url_b):
|
|
|
|
if not url_a or not url_b:
|
2015-08-06 00:51:25 +02:00
|
|
|
return False
|
2017-07-18 20:57:33 +02:00
|
|
|
origin_a = re.sub("http[s]{0,1}://(.*?/.*?/).*", "\\1", url_a)
|
|
|
|
origin_b = re.sub("http[s]{0,1}://(.*?/.*?/).*", "\\1", url_b)
|
2017-07-09 14:11:06 +02:00
|
|
|
return origin_a == origin_b
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2016-08-10 12:24:09 +02:00
|
|
|
# Return {address: 1Site.., inner_path: /data/users.json} from url path
|
2016-03-16 00:33:05 +01:00
|
|
|
def parsePath(self, path):
|
2018-06-25 14:23:38 +02:00
|
|
|
path = path.replace("\\", "/")
|
2015-07-12 20:36:46 +02:00
|
|
|
path = path.replace("/index.html/", "/") # Base Backward compatibility fix
|
|
|
|
if path.endswith("/"):
|
|
|
|
path = path + "index.html"
|
|
|
|
|
2017-10-04 12:37:22 +02:00
|
|
|
if ".." in path or "./" in path:
|
|
|
|
raise SecurityError("Invalid path")
|
2017-02-19 00:51:47 +01:00
|
|
|
|
2017-10-04 12:38:44 +02:00
|
|
|
match = re.match("/media/(?P<address>[A-Za-z0-9]+[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path)
|
2016-03-16 00:33:05 +01:00
|
|
|
if match:
|
2016-08-10 12:24:09 +02:00
|
|
|
path_parts = match.groupdict()
|
|
|
|
path_parts["request_address"] = path_parts["address"] # Original request address (for Merger sites)
|
2017-07-10 02:42:28 +02:00
|
|
|
path_parts["inner_path"] = path_parts["inner_path"].lstrip("/")
|
2018-01-30 14:29:38 +01:00
|
|
|
if not path_parts["inner_path"]:
|
|
|
|
path_parts["inner_path"] = "index.html"
|
2016-08-10 12:24:09 +02:00
|
|
|
return path_parts
|
2016-03-16 00:33:05 +01:00
|
|
|
else:
|
|
|
|
return None
|
|
|
|
|
|
|
|
# Serve a media for site
def actionSiteMedia(self, path, header_length=True, header_noscript=False):
    """Serve (or start downloading) a file that belongs to a site.

    path is expected in /media/<address>/<inner_path> form. Unknown sites
    trigger the "add site" prompt; locally missing files are fetched from
    peers before being streamed.
    """
    try:
        path_parts = self.parsePath(path)
    except SecurityError as err:
        # Directory traversal attempt rejected by parsePath
        return self.error403(err)

    if not path_parts:
        return self.error404(path)

    address = path_parts["address"]
    file_path = "%s/%s/%s" % (config.data_dir, address, path_parts["inner_path"])

    if config.debug and file_path.split("/")[-1].startswith("all."):
        # If debugging merge *.css to all.css and *.js to all.js
        site = self.server.sites.get(address)
        if site and site.settings["own"]:
            from Debug import DebugMedia
            DebugMedia.merge(file_path)

    if not address or address == ".":
        return self.error403(path_parts["inner_path"])

    # Cross-origin ajax access is only granted when the site's secret ajax_key matches
    header_allow_ajax = False
    if self.get.get("ajax_key"):
        site = SiteManager.site_manager.get(path_parts["request_address"])
        # NOTE(review): site may be None for an unknown request_address, which
        # would raise AttributeError here instead of a clean 403 — confirm callers
        if self.get["ajax_key"] == site.settings["ajax_key"]:
            header_allow_ajax = True
        else:
            return self.error403("Invalid ajax_key")

    file_size = helper.getFilesize(file_path)

    # File exists locally: stream it right away
    if file_size is not None:
        return self.actionFile(file_path, header_length=header_length, header_noscript=header_noscript, header_allow_ajax=header_allow_ajax, file_size=file_size, path_parts=path_parts)

    elif os.path.isdir(file_path):  # If this is actually a folder, add "/" and redirect
        if path_parts["inner_path"]:
            return self.actionRedirect("./%s/" % path_parts["inner_path"].split("/")[-1])
        else:
            return self.actionRedirect("./%s/" % path_parts["address"])

    else:  # File not exists, try to download
        if address not in SiteManager.site_manager.sites:  # Only in case if site already started downloading
            return self.actionSiteAddPrompt(path)

        site = SiteManager.site_manager.need(address)

        if path_parts["inner_path"].endswith("favicon.ico"):  # Default favicon for all sites
            return self.actionFile("src/Ui/media/img/favicon.ico")

        result = site.needFile(path_parts["inner_path"], priority=15)  # Wait until file downloads
        if result:
            file_size = helper.getFilesize(file_path)
            return self.actionFile(file_path, header_length=header_length, header_noscript=header_noscript, header_allow_ajax=header_allow_ajax, file_size=file_size, path_parts=path_parts)
        else:
            self.log.debug("File not found: %s" % path_parts["inner_path"])
            return self.error404(path_parts["inner_path"])
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# Serve a media for ui
def actionUiMedia(self, path):
    """Serve a static UI asset from src/Ui/media."""
    match = re.match("/uimedia/(?P<inner_path>.*)", path)
    if not match:  # Bad url
        return self.error400()

    inner_path = match.group("inner_path")
    file_path = "src/Ui/media/%s" % inner_path
    allowed_dir = os.path.abspath("src/Ui/media")  # Only files within the ui media dir allowed
    # Reject traversal attempts and anything resolving outside the allowed dir
    if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir):
        return self.error403()

    if config.debug and inner_path.startswith("all."):
        # If debugging merge *.css to all.css and *.js to all.js
        from Debug import DebugMedia
        DebugMedia.merge(file_path)
    return self.actionFile(file_path, header_length=False)  # Dont's send site to allow plugins append content
|
|
|
|
|
2017-07-15 01:32:15 +02:00
|
|
|
def actionSiteAdd(self):
    """Handle the "add new site" form post: validate the one-time nonce, then
    start downloading the site and redirect back to the requested url.
    """
    post = dict(cgi.parse_qsl(self.env["wsgi.input"].read()))
    # Use .get: a post without add_nonce previously raised KeyError (http 500)
    # instead of the intended 403 response
    if post.get("add_nonce") not in self.server.add_nonces:
        return self.error403("Add nonce error.")
    # Nonce is single-use: consume it before acting on the request
    self.server.add_nonces.remove(post["add_nonce"])
    SiteManager.site_manager.need(post["address"])
    return self.actionRedirect(post["url"])
|
|
|
|
|
|
|
|
def actionSiteAddPrompt(self, path):
    """Render the confirmation page that asks the user to add an unknown site."""
    path_parts = self.parsePath(path)
    # Not a site media url or not a valid site address: nothing to prompt for
    if not path_parts or not self.server.site_manager.isAddress(path_parts["address"]):
        return self.error404(path)

    self.sendHeader(200, "text/html", noscript=True)
    # Template placeholders and their values; the url is escaped against html injection
    replacements = [
        ("{url}", cgi.escape(self.env["PATH_INFO"], True)),
        ("{address}", path_parts["address"]),
        ("{add_nonce}", self.getAddNonce()),
    ]
    template = open("src/Ui/template/site_add.html").read()
    for placeholder, value in replacements:
        template = template.replace(placeholder, value)
    return template
|
|
|
|
|
2018-10-30 04:49:11 +01:00
|
|
|
def replaceHtmlVariables(self, block, path_parts):
    """Substitute {themeclass} and {site_modified} template variables in a html chunk.

    Arguments:
        block: Raw html bytes being streamed to the client
        path_parts: Parsed site path dict (or None for non-site html)
    Returns the block with known placeholders replaced.
    """
    user = self.getCurrentUser()
    # Restrict the theme name to lowercase letters so it cannot break out of the class attribute
    themeclass = "theme-%-6s" % re.sub("[^a-z]", "", user.settings.get("theme", "light"))
    block = block.replace("{themeclass}", themeclass.encode("utf8"))

    if path_parts:
        site = self.server.sites.get(path_parts.get("address"))
        # Guard: .get may return None for an unknown address; previously this
        # raised AttributeError on site.settings. Skip the substitution instead.
        if site:
            if site.settings["own"]:
                # Own site: treat as freshly modified (cache-busts while editing)
                modified = int(time.time())
            else:
                modified = int(site.content_manager.contents["content.json"]["modified"])
            block = block.replace("{site_modified}", str(modified))

    return block
|
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
# Stream a file to client
def actionFile(self, file_path, block_size=64 * 1024, send_header=True, header_length=True, header_noscript=False, header_allow_ajax=False, file_size=None, file_obj=None, path_parts=None):
    """Generator: stream a local file to the client, honoring http Range requests.

    Arguments:
        file_path: Path of the file on disk
        block_size: Chunk size used while streaming
        send_header: Whether to emit the response header before the body
        header_length: Send Content-Length (forced off for html, see below)
        header_noscript / header_allow_ajax: Forwarded to sendHeader
        file_size: Known size of the file (looked up when None)
        file_obj: Already-open file object to stream instead of opening file_path
        path_parts: Parsed site path, needed for html template variable replacement
    Yields body chunks; yields an error404 page if the file does not exist.
    """
    if file_size is None:
        file_size = helper.getFilesize(file_path)

    if file_size is not None:
        # Try to figure out content type by extension
        content_type = self.getContentType(file_path)

        range = self.env.get("HTTP_RANGE")  # NOTE: shadows the builtin `range` in this scope
        range_start = None

        # Html gets template variables substituted while streaming, so its final
        # size is unknown up front: never send Content-Length for it
        is_html_file = file_path.endswith(".html")
        if is_html_file:
            header_length = False

        if send_header:
            extra_headers = {}
            extra_headers["Accept-Ranges"] = "bytes"
            if header_length:
                extra_headers["Content-Length"] = str(file_size)
            if range:
                # First number in the Range header is the start offset
                range_start = int(re.match(".*?([0-9]+)", range).group(1))
                # A number following "-" is the inclusive end offset
                if re.match(".*?-([0-9]+)", range):
                    range_end = int(re.match(".*?-([0-9]+)", range).group(1)) + 1
                else:
                    range_end = file_size
                extra_headers["Content-Length"] = str(range_end - range_start)
                extra_headers["Content-Range"] = "bytes %s-%s/%s" % (range_start, range_end - 1, file_size)
            if range:
                status = 206  # Partial Content
            else:
                status = 200
            self.sendHeader(status, content_type=content_type, noscript=header_noscript, allow_ajax=header_allow_ajax, extra_headers=extra_headers)
        if self.env["REQUEST_METHOD"] != "OPTIONS":  # OPTIONS preflight: headers only, no body
            if not file_obj:
                file_obj = open(file_path, "rb")

            if range_start:
                file_obj.seek(range_start)
            # NOTE(review): range_end is not enforced below — the whole tail of
            # the file is streamed even for a bounded range request; confirm
            # whether clients rely on the declared Content-Length to stop reading
            while 1:
                try:
                    block = file_obj.read(block_size)
                    if is_html_file:
                        block = self.replaceHtmlVariables(block, path_parts)
                    if block:
                        yield block
                    else:
                        # EOF: reuse the StopIteration handler for cleanup
                        raise StopIteration
                except StopIteration:
                    file_obj.close()
                    break
    else:  # File not exists
        yield self.error404(file_path)
|
|
|
|
|
|
|
|
# On websocket connection
def actionWebsocket(self):
    """Upgrade the request to a websocket and run the UiWebsocket message loop.

    Authenticates the connecting page by its site wrapper_key; returns a short
    plain-text body after the socket closes (wsgi requires a return value).
    """
    ws = self.env.get("wsgi.websocket")
    if ws:
        wrapper_key = self.get["wrapper_key"]
        # Find site by wrapper_key
        site = None
        for site_check in self.server.sites.values():
            if site_check.settings["wrapper_key"] == wrapper_key:
                site = site_check

        if site:  # Correct wrapper key
            try:
                user = self.getCurrentUser()
            except Exception, err:
                # Corrupt users.json prevents authentication entirely
                self.log.error("Error in data/user.json: %s" % err)
                return self.error500()
            if not user:
                self.log.error("No user found")
                return self.error403()
            ui_websocket = UiWebsocket(ws, site, self.server, user, self)
            site.websockets.append(ui_websocket)  # Add to site websockets to allow notify on events
            self.server.websockets.append(ui_websocket)
            ui_websocket.start()  # Blocks until the websocket disconnects
            self.server.websockets.remove(ui_websocket)
            for site_check in self.server.sites.values():
                # Remove websocket from every site (admin sites allowed to join other sites event channels)
                if ui_websocket in site_check.websockets:
                    site_check.websockets.remove(ui_websocket)
            return "Bye."
        else:  # No site found by wrapper key
            self.log.error("Wrapper key not found: %s" % wrapper_key)
            return self.error403()
    else:
        # Reached without a websocket upgrade (plain http request)
        self.start_response("400 Bad Request", [])
        return "Not a websocket!"
|
|
|
|
|
|
|
|
# Debug last error
def actionDebug(self):
    """Debugging aid: re-raise the last error captured by DebugHook."""
    # Raise last error from DebugHook
    import sys
    last_error = sys.modules["main"].DebugHook.last_error
    if last_error:
        # Python 2 three-argument raise: re-raise with the original traceback
        raise last_error[0], last_error[1], last_error[2]
    else:
        self.sendHeader()
        return "No error! :)"
|
|
|
|
|
|
|
|
# Just raise an error to get console
def actionConsole(self):
    """Deliberately raise an exception so the debugger console opens with useful locals."""
    import sys
    # Bound as locals so they are directly inspectable from the console
    sites = self.server.sites
    main = sys.modules["main"]

    # Helper available from the console: time `code` over `times` evaluations.
    # NOTE: eval of operator-typed strings is intentional here (debug console only).
    def bench(code, times=100, init=None):
        sites = self.server.sites
        main = sys.modules["main"]
        s = time.time()
        if init:
            # Run optional setup statements once before the timed loop
            eval(compile(init, '<string>', 'exec'), globals(), locals())
        for _ in range(times):
            back = eval(code, globals(), locals())
        return ["%s run: %.3fs" % (times, time.time() - s), back]
    raise Exception("Here is your console")
|
|
|
|
|
|
|
|
# - Tests -

def actionTestStream(self):
    """Manual test action: stream a chunked response with a delay to check browser buffering."""
    self.sendHeader()
    yield " " * 1080  # Overflow browser's buffer
    yield "He"
    time.sleep(1)  # Let the first chunks render before the rest arrives
    yield "llo!"
    # yield "Running websockets: %s" % len(self.server.websockets)
    # self.server.sendMessage("Hello!")
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# - Errors -
|
|
|
|
|
|
|
|
# Send bad request error
def error400(self, message=""):
    """Respond with http 400 and a formatted error page."""
    self.sendHeader(400)
    body = self.formatError("Bad Request", message)
    return body
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# You are not allowed to access this
def error403(self, message="", details=True):
    """Respond with http 403; logs the reason and returns a formatted error page."""
    self.sendHeader(403)
    self.log.error("Error 403: %s" % message)
    page = self.formatError("Forbidden", message, details=details)
    return page
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# Send file not found error
def error404(self, path=""):
    """Respond with http 404 for the given path (html-escaped, no env details)."""
    self.sendHeader(404)
    escaped_path = cgi.escape(path.encode("utf8"))
    return self.formatError("Not Found", escaped_path, details=False)
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# Internal server error
def error500(self, message=":("):
    """Respond with http 500 and an (escaped) error message page."""
    self.sendHeader(500)
    escaped_message = cgi.escape(message)
    return self.formatError("Server error", escaped_message)
|
|
|
|
|
2015-10-25 23:08:25 +01:00
|
|
|
def formatError(self, title, message, details=True):
    """Render a simple html error page body.

    Arguments:
        title: Page heading, e.g. "Not Found"
        message: Error description (escaped here only in the no-details branch;
                 other callers escape before passing)
        details: If True, append request environment and version info to help bug reports
    """
    import sys
    import gevent

    if details:
        # Include string-valued env entries only, and never cookies (may hold session data)
        details = {key: val for key, val in self.env.items() if hasattr(val, "endswith") and "COOKIE" not in key}
        details["version_zeronet"] = "%s r%s" % (config.version, config.rev)
        details["version_python"] = sys.version
        details["version_gevent"] = gevent.__version__
        details["plugins"] = PluginManager.plugin_manager.plugin_names
        # Startup arguments help debugging, but strip anything password-like
        arguments = {key: val for key, val in vars(config.arguments).items() if "password" not in key}
        details["arguments"] = arguments
        # Fixed: heading was closed with a mismatched </h3> tag
        return """
            <style>
            * { font-family: Consolas, Monospace; color: #333 }
            pre { padding: 10px; background-color: #EEE }
            </style>
            <h1>%s</h1>
            <h2>%s</h2>
            <h3>Please <a href="https://github.com/HelloZeroNet/ZeroNet/issues" target="_blank">report it</a> if you think this an error.</h3>
            <h4>Details:</h4>
            <pre>%s</pre>
        """ % (title, message, json.dumps(details, indent=4, sort_keys=True))
    else:
        # Fixed: heading was closed with a mismatched </h3> tag
        return """
            <h1>%s</h1>
            <h2>%s</h2>
        """ % (title, cgi.escape(message))
|