2015-07-12 20:36:46 +02:00
|
|
|
import time
|
|
|
|
import re
|
|
|
|
import os
|
|
|
|
import mimetypes
|
|
|
|
import json
|
|
|
|
import cgi
|
|
|
|
|
2015-01-12 02:03:45 +01:00
|
|
|
from Config import config
|
|
|
|
from Site import SiteManager
|
version 0.2.0, new lib for bitcoin ecc, dont display or track notify errors, dont reload again within 1 sec, null peer ip fix, signingmoved to ContentManager, content.json include support, content.json multisig ready, content.json proper bitcoincore compatible signing, content.json include permissions, multithreaded publish, publish timeout 60s, no exception on invalid bitcoin address, testcase for new lib, bip32 based persite privatekey generation, multiuser ready, simple json database query command, websocket api fileGet, wrapper loading title stuck bugfix
2015-02-09 02:09:02 +01:00
|
|
|
from User import UserManager
|
version 0.2.7, plugin system, multiuser plugin for zeroproxies, reworked imports, cookie parse, stats moved to plugin, usermanager class, dont generate site auth on listing, multiline notifications, allow server side prompt from user, update script keep plugins disabled status
2015-03-24 01:33:09 +01:00
|
|
|
from Plugin import PluginManager
|
2015-01-12 02:03:45 +01:00
|
|
|
from Ui.UiWebsocket import UiWebsocket
|
2015-09-13 23:17:13 +02:00
|
|
|
from Crypt import CryptHash
|
2015-01-12 02:03:45 +01:00
|
|
|
|
|
|
|
# HTTP status code -> full status line; used by sendHeader() when calling
# the WSGI start_response callable.
status_texts = {
    200: "200 OK",
    206: "206 Partial Content",
    400: "400 Bad Request",
    403: "403 Forbidden",
    404: "404 Not Found",
    500: "500 Internal Server Error",
}
|
|
|
|
|
|
|
|
|
version 0.2.7, plugin system, multiuser plugin for zeroproxies, reworked imports, cookie parse, stats moved to plugin, usermanager class, dont generate site auth on listing, multiline notifications, allow server side prompt from user, update script keep plugins disabled status
2015-03-24 01:33:09 +01:00
|
|
|
# NOTE(review): acceptPlugins presumably lets registered plugins extend or
# override methods of this class — confirm against PluginManager.
@PluginManager.acceptPlugins
class UiRequest(object):
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
def __init__(self, server, get, env, start_response):
    """Bind one WSGI request to this handler.

    server: UiServer instance (may be falsy in some plugin/test setups,
            in which case self.server/self.log are left unset)
    get: parsed query-string parameters (dict-like)
    env: WSGI environment dict
    start_response: WSGI start_response callable
    """
    if server:
        self.server = server
        self.log = server.log
    self.get = get  # Get parameters
    self.env = env  # Enviroment settings
    # Typical keys present in env:
    # ['CONTENT_LENGTH', 'CONTENT_TYPE', 'GATEWAY_INTERFACE', 'HTTP_ACCEPT', 'HTTP_ACCEPT_ENCODING', 'HTTP_ACCEPT_LANGUAGE',
    # 'HTTP_COOKIE', 'HTTP_CACHE_CONTROL', 'HTTP_HOST', 'HTTP_HTTPS', 'HTTP_ORIGIN', 'HTTP_PROXY_CONNECTION', 'HTTP_REFERER',
    # 'HTTP_USER_AGENT', 'PATH_INFO', 'QUERY_STRING', 'REMOTE_ADDR', 'REMOTE_PORT', 'REQUEST_METHOD', 'SCRIPT_NAME',
    # 'SERVER_NAME', 'SERVER_PORT', 'SERVER_PROTOCOL', 'SERVER_SOFTWARE', 'werkzeug.request', 'wsgi.errors',
    # 'wsgi.input', 'wsgi.multiprocess', 'wsgi.multithread', 'wsgi.run_once', 'wsgi.url_scheme', 'wsgi.version']
    self.start_response = start_response  # Start response function
    self.user = None  # Lazily resolved by getCurrentUser()
|
|
|
|
|
|
|
|
# Call the request handler function base on path
def route(self, path):
    """Dispatch the request path to the matching action* handler.

    Order of checks matters: access restriction first, then chrome-extension
    URL normalization, then OPTIONS preflight, then specific prefixes, and
    finally the site-wrapper fallback for everything else.
    """
    if config.ui_restrict and self.env['REMOTE_ADDR'] not in config.ui_restrict:  # Restict Ui access by ip
        return self.error403(details=False)

    path = re.sub("^http://zero[/]+", "/", path)  # Remove begining http://zero/ for chrome extension
    path = re.sub("^http://", "/", path)  # Remove begining http for chrome extension .bit access

    # CORS preflight: reply with headers only, empty body
    if self.env["REQUEST_METHOD"] == "OPTIONS":
        self.sendHeader()
        return ""

    if path == "/":
        return self.actionIndex()
    elif path.endswith("favicon.ico"):
        return self.actionFile("src/Ui/media/img/favicon.ico")
    # Media
    elif path.startswith("/uimedia/"):
        return self.actionUiMedia(path)
    elif "/uimedia/" in path:
        # uimedia within site dir (for chrome extension)
        path = re.sub(".*?/uimedia/", "/uimedia/", path)
        return self.actionUiMedia(path)
    elif path.startswith("/media"):
        return self.actionSiteMedia(path)
    # Websocket
    elif path == "/Websocket":
        return self.actionWebsocket()
    # Debug
    elif path == "/Debug" and config.debug:
        return self.actionDebug()
    elif path == "/Console" and config.debug:
        return self.actionConsole()
    # Site media wrapper
    else:
        if self.get.get("wrapper_nonce"):
            # A valid nonce lets the html file be served without the wrapper frame
            return self.actionSiteMedia("/media" + path)  # Only serve html files with frame
        else:
            body = self.actionWrapper(path)
            if body:
                return body
            else:
                # Wrapper refused (bad url): fall back to a dynamic action method
                func = getattr(self, "action" + path.lstrip("/"), None)  # Check if we have action+request_path function
                if func:
                    return func()
                else:
                    return self.error404(path)
|
|
|
|
|
|
|
|
# The request is proxied by chrome extension
|
|
|
|
def isProxyRequest(self):
|
|
|
|
return self.env["PATH_INFO"].startswith("http://")
|
|
|
|
|
|
|
|
def isAjaxRequest(self):
|
|
|
|
return self.env.get("HTTP_X_REQUESTED_WITH") == "XMLHttpRequest"
|
|
|
|
|
|
|
|
# Get mime by filename
def getContentType(self, file_name):
    """Guess the Content-Type of file_name from its extension.

    Falls back to application/json for *json names mimetypes misses,
    and to application/octet-stream for everything else unknown.
    """
    guessed = mimetypes.guess_type(file_name)[0]
    if guessed:
        return guessed
    if file_name.endswith("json"):  # Correct json header
        return "application/json"
    return "application/octet-stream"
|
|
|
|
|
2015-07-17 00:28:43 +02:00
|
|
|
# Return: <dict> Posted variables
def getPosted(self):
    """Parse one line of the POST body as form-urlencoded variables.

    Returns an empty dict for non-POST requests.
    NOTE(review): only the first line of wsgi.input is read — presumably
    sufficient for the simple form posts this server receives.
    """
    if self.env['REQUEST_METHOD'] != "POST":
        return {}
    body = self.env['wsgi.input'].readline().decode()
    return dict(cgi.parse_qsl(body))
|
|
|
|
|
|
|
|
# Return: <dict> Cookies based on self.env
def getCookies(self):
    """Parse the Cookie header into a {name: value} dict (empty if absent).

    cgi.parse_qsl splits on ';' as well as '&', so "a=1; b=2" parses; the
    strip() removes the space left after each ';' separator.
    """
    raw_cookies = self.env.get('HTTP_COOKIE')
    if not raw_cookies:
        return {}
    pairs = cgi.parse_qsl(raw_cookies)
    return {name.strip(): value for name, value in pairs}
|
|
|
|
|
|
|
|
def getCurrentUser(self):
    """Return the current user, loading (or creating) it on first call.

    The result is cached on self.user for the lifetime of the request.
    """
    if not self.user:  # Not cached yet
        self.user = UserManager.user_manager.get()  # Get user
        if not self.user:
            self.user = UserManager.user_manager.create()
    return self.user
|
|
|
|
|
|
|
|
# Send response headers
def sendHeader(self, status=200, content_type="text/html", extra_headers=None):
    """Send the response status line and headers via start_response.

    status: numeric HTTP status (must be a key of status_texts)
    content_type: response Content-Type; text/html gets charset=utf-8 added
    extra_headers: optional iterable of (name, value) pairs appended last

    Fixed: the default for extra_headers was a mutable list literal ([]),
    which is shared across calls in Python; replaced with the None sentinel.
    """
    if extra_headers is None:
        extra_headers = []
    if content_type == "text/html":
        content_type = "text/html; charset=utf-8"
    headers = []
    headers.append(("Version", "HTTP/1.1"))
    headers.append(("Connection", "Keep-Alive"))
    headers.append(("Keep-Alive", "max=25, timeout=30"))
    headers.append(("Access-Control-Allow-Origin", "*"))  # Allow json access
    # headers.append(("Content-Security-Policy", "default-src 'self' data: 'unsafe-inline' ws://127.0.0.1:* http://127.0.0.1:* wss://tracker.webtorrent.io; sandbox allow-same-origin allow-top-navigation allow-scripts")) # Only local connections
    if self.env["REQUEST_METHOD"] == "OPTIONS":
        # Allow json access
        headers.append(("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept, Cookie"))
        headers.append(("Access-Control-Allow-Credentials", "true"))

    cacheable_type = (
        content_type == "text/css" or content_type.startswith("image") or content_type.startswith("video") or
        self.env["REQUEST_METHOD"] == "OPTIONS" or content_type == "application/javascript"
    )

    if status in (200, 206) and cacheable_type:  # Cache Css, Js, Image files for 10min
        headers.append(("Cache-Control", "public, max-age=600"))  # Cache 10 min
    else:
        headers.append(("Cache-Control", "no-cache, no-store, private, must-revalidate, max-age=0"))  # No caching at all
    headers.append(("Content-Type", content_type))
    for extra_header in extra_headers:
        headers.append(extra_header)
    return self.start_response(status_texts[status], headers)
|
|
|
|
|
|
|
|
# Renders a template
def render(self, template_path, *args, **kwargs):
    """Render a template file with str.format substitution.

    Reads the template as utf8 and returns the formatted result utf8-encoded.
    *args is accepted but unused (kept for backward compatibility with
    existing call sites).

    Fixed: the file handle from open() was never closed (leaked until GC);
    a with-block now closes it deterministically.
    """
    with open(template_path) as template_file:
        template = template_file.read().decode("utf8")
    return template.format(**kwargs).encode("utf8")
|
|
|
|
|
|
|
|
# - Actions -
|
|
|
|
|
|
|
|
# Redirect to an url
|
|
|
|
def actionRedirect(self, url):
|
|
|
|
self.start_response('301 Redirect', [('Location', url)])
|
|
|
|
yield "Location changed: %s" % url
|
|
|
|
|
|
|
|
def actionIndex(self):
    """Redirect the root url to the configured homepage site."""
    homepage_path = "/" + config.homepage
    return self.actionRedirect(homepage_path)
|
|
|
|
|
|
|
|
# Render a file from media with iframe site wrapper
def actionWrapper(self, path, extra_headers=None):
    """Serve a site html file wrapped in the ZeroNet iframe wrapper.

    Returns the wrapper body (iterable), a direct media response for
    non-html paths, an error403 for ajax requests, or False for bad urls
    (route() then falls back to action* dispatch).
    """
    if not extra_headers:
        extra_headers = []

    match = re.match("/(?P<address>[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path)
    if match:
        address = match.group("address")
        inner_path = match.group("inner_path").lstrip("/")
        if "." in inner_path and not inner_path.endswith(".html"):
            return self.actionSiteMedia("/media" + path)  # Only serve html files with frame
        if self.env.get("HTTP_X_REQUESTED_WITH"):
            return self.error403("Ajax request not allowed to load wrapper")  # No ajax allowed on wrapper

        site = SiteManager.site_manager.get(address)

        if (
            site and site.content_manager.contents.get("content.json") and
            (not site.getReachableBadFiles() or site.settings["own"])
        ):  # Its downloaded or own
            title = site.content_manager.contents["content.json"]["title"]
        else:
            # Site not (fully) available yet: show placeholder title and start download
            title = "Loading %s..." % address
            site = SiteManager.site_manager.need(address)  # Start download site

            if not site:
                return False

        # Copy extra_headers so renderWrapper still sees the caller's original list
        self.sendHeader(extra_headers=extra_headers[:])
        return iter([self.renderWrapper(site, path, inner_path, title, extra_headers)])
        # Dont know why wrapping with iter necessary, but without it around 100x slower

    else:  # Bad url
        return False
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2015-07-17 00:28:43 +02:00
|
|
|
def renderWrapper(self, site, path, inner_path, title, extra_headers):
    """Build the wrapper html for a site page from wrapper.html template.

    Computes the inner-iframe file url, a one-time wrapper nonce appended
    to the query string, proxy-aware server/homepage urls, and optional
    body style / meta tags taken from the site's content.json.
    Returns the rendered page as a utf8-encoded string.
    """
    file_inner_path = inner_path
    if not file_inner_path:
        file_inner_path = "index.html"  # If inner path defaults to index.html

    address = re.sub("/.*", "", path.lstrip("/"))
    if self.isProxyRequest() and (not path or "/" in path[1:]):
        # Proxied request: iframe src is relative to the proxied site url
        file_url = re.sub(".*/", "", inner_path)
    else:
        file_url = "/" + address + "/" + inner_path

    # Wrapper variable inits
    query_string = ""
    body_style = ""
    meta_tags = ""

    # One-time nonce that allows actionSiteMedia to serve this html file once
    wrapper_nonce = self.getWrapperNonce()

    if self.env.get("QUERY_STRING"):
        query_string = "?%s&wrapper_nonce=%s" % (self.env["QUERY_STRING"], wrapper_nonce)
    else:
        query_string = "?wrapper_nonce=%s" % wrapper_nonce

    if self.isProxyRequest():  # Its a remote proxy request
        if self.env["REMOTE_ADDR"] == "127.0.0.1":  # Local client, the server address also should be 127.0.0.1
            server_url = "http://127.0.0.1:%s" % self.env["SERVER_PORT"]
        else:  # Remote client, use SERVER_NAME as server's real address
            server_url = "http://%s:%s" % (self.env["SERVER_NAME"], self.env["SERVER_PORT"])
        homepage = "http://zero/" + config.homepage
    else:  # Use relative path
        server_url = ""
        homepage = "/" + config.homepage

    if site.content_manager.contents.get("content.json"):  # Got content.json
        content = site.content_manager.contents["content.json"]
        if content.get("background-color"):
            # cgi.escape(…, True) also escapes quotes: value is embedded in html
            body_style += "background-color: %s;" % \
                cgi.escape(site.content_manager.contents["content.json"]["background-color"], True)
        if content.get("viewport"):
            meta_tags += '<meta name="viewport" id="viewport" content="%s">' % cgi.escape(content["viewport"], True)

    if site.settings.get("own"):
        sandbox_permissions = "allow-modals"  # For coffeescript compile errors
    else:
        sandbox_permissions = ""

    return self.render(
        "src/Ui/template/wrapper.html",
        server_url=server_url,
        inner_path=inner_path,
        file_url=file_url,
        file_inner_path=file_inner_path,
        address=site.address,
        title=title,
        body_style=body_style,
        meta_tags=meta_tags,
        query_string=query_string,
        wrapper_key=site.settings["wrapper_key"],
        permissions=json.dumps(site.settings["permissions"]),
        show_loadingscreen=json.dumps(not site.storage.isFile(file_inner_path)),
        sandbox_permissions=sandbox_permissions,
        rev=config.rev,
        homepage=homepage
    )
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2015-09-10 23:25:09 +02:00
|
|
|
# Create a new wrapper nonce that allows to get one html file without the wrapper
def getWrapperNonce(self):
    """Generate a random nonce, register it on the server and return it.

    actionSiteMedia consumes (removes) the nonce when the html file is served.
    """
    nonce = CryptHash.random()
    self.server.wrapper_nonces.append(nonce)
    return nonce
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# Returns if media request allowed from that referer
|
|
|
|
def isMediaRequestAllowed(self, site_address, referer):
|
2015-08-06 00:51:25 +02:00
|
|
|
if not re.sub("^http[s]{0,1}://", "", referer).startswith(self.env["HTTP_HOST"]):
|
|
|
|
return False
|
2015-07-12 20:36:46 +02:00
|
|
|
referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address
|
|
|
|
return referer_path.startswith("/" + site_address)
|
|
|
|
|
|
|
|
# Serve a media for site
def actionSiteMedia(self, path):
    """Serve a file from a site's data directory (/media/<address>/<path>).

    Security gates, in order: html files require a valid one-time wrapper
    nonce; the referer must belong to the same site; the resolved file path
    must stay inside data/<address> (no traversal, no data/ root access).
    Missing files are downloaded on demand before serving.
    """
    path = path.replace("/index.html/", "/")  # Base Backward compatibility fix
    if path.endswith("/"):
        path = path + "index.html"

    match = re.match("/media/(?P<address>[A-Za-z0-9\._-]+)/(?P<inner_path>.*)", path)

    # Check wrapper nonce
    content_type = self.getContentType(path)
    if "htm" in content_type:  # Valid nonce must present to render html files
        wrapper_nonce = self.get.get("wrapper_nonce")
        if wrapper_nonce not in self.server.wrapper_nonces:
            return self.error403("Wrapper nonce error. Please reload the page.")
        # Nonce is single-use: consume it
        self.server.wrapper_nonces.remove(self.get["wrapper_nonce"])

    referer = self.env.get("HTTP_REFERER")
    if referer and match:  # Only allow same site to receive media
        if not self.isMediaRequestAllowed(match.group("address"), referer):
            return self.error403("Media referrer error")  # Referrer not starts same address as requested path

    if match:  # Looks like a valid path
        address = match.group("address")
        file_path = "%s/%s/%s" % (config.data_dir, address, match.group("inner_path"))
        allowed_dir = os.path.abspath("%s/%s" % (config.data_dir, address))  # Only files within data/sitehash allowed
        data_dir = os.path.abspath("data")  # No files from data/ allowed
        if (
            ".." in file_path
            or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir)
            or allowed_dir == data_dir
        ):  # File not in allowed path
            return self.error403()
        else:
            if config.debug and file_path.split("/")[-1].startswith("all."):
                # If debugging merge *.css to all.css and *.js to all.js
                site = self.server.sites.get(address)
                if site.settings["own"]:
                    from Debug import DebugMedia
                    DebugMedia.merge(file_path)
            if os.path.isfile(file_path):  # File exits
                return self.actionFile(file_path)
            else:  # File not exits, try to download
                site = SiteManager.site_manager.need(address, all_file=False)
                result = site.needFile(match.group("inner_path"), priority=5)  # Wait until file downloads
                if result:
                    return self.actionFile(file_path)
                else:
                    self.log.debug("File not found: %s" % match.group("inner_path"))
                    # Site larger than allowed, re-add wrapper nonce to allow reload
                    if site.settings.get("size", 0) > site.getSizeLimit() * 1024 * 1024:
                        self.server.wrapper_nonces.append(self.get.get("wrapper_nonce"))
                    return self.error404(match.group("inner_path"))

    else:  # Bad url
        return self.error404(path)
|
|
|
|
|
|
|
|
# Serve a media for ui
def actionUiMedia(self, path):
    """Serve a static UI asset from src/Ui/media (/uimedia/<inner_path>).

    Rejects paths that escape the media directory; in debug mode the
    all.css/all.js bundles are re-merged before serving.
    """
    match = re.match("/uimedia/(?P<inner_path>.*)", path)
    if not match:  # Bad url
        return self.error400()

    file_path = "src/Ui/media/%s" % match.group("inner_path")
    allowed_dir = os.path.abspath("src/Ui/media")  # Only files within data/sitehash allowed
    if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir):
        # File not in allowed path
        return self.error403()

    if config.debug and match.group("inner_path").startswith("all."):
        # If debugging merge *.css to all.css and *.js to all.js
        from Debug import DebugMedia
        DebugMedia.merge(file_path)
    return self.actionFile(file_path)
|
|
|
|
|
|
|
|
# Stream a file to client
|
2015-07-31 23:34:53 +02:00
|
|
|
def actionFile(self, file_path, block_size=64 * 1024, send_header=True):
|
2015-07-12 20:36:46 +02:00
|
|
|
if os.path.isfile(file_path):
|
|
|
|
# Try to figure out content type by extension
|
|
|
|
content_type = self.getContentType(file_path)
|
|
|
|
|
|
|
|
# TODO: Dont allow external access: extra_headers=
|
|
|
|
# [("Content-Security-Policy", "default-src 'unsafe-inline' data: http://localhost:43110 ws://localhost:43110")]
|
Rev536, Fix stats page, Support ranged http requests for better video browser compatibility, setHashfield command, One by one send hashfield to connected peers if changed, Keep count hashfield changetime, PeerHashfield optimalizations, Wait for peers on checkmodification, Give more time to query trackers, Do not count udp trackers as error if udp disabled, Test hashfield push
2015-10-30 02:08:02 +01:00
|
|
|
range = self.env.get("HTTP_RANGE")
|
|
|
|
range_start = None
|
2015-07-31 23:34:53 +02:00
|
|
|
if send_header:
|
Rev536, Fix stats page, Support ranged http requests for better video browser compatibility, setHashfield command, One by one send hashfield to connected peers if changed, Keep count hashfield changetime, PeerHashfield optimalizations, Wait for peers on checkmodification, Give more time to query trackers, Do not count udp trackers as error if udp disabled, Test hashfield push
2015-10-30 02:08:02 +01:00
|
|
|
extra_headers = {}
|
|
|
|
file_size = os.path.getsize(file_path)
|
|
|
|
extra_headers["Accept-Ranges"] = "bytes"
|
|
|
|
if range:
|
|
|
|
range_start = int(re.match(".*?([0-9]+)", range).group(1))
|
|
|
|
if re.match(".*?-([0-9]+)", range):
|
|
|
|
range_end = int(re.match(".*?-([0-9]+)", range).group(1))+1
|
|
|
|
else:
|
|
|
|
range_end = file_size
|
Rev571, Optional file sizes to sidebar, Download all optional files option in sidebar, Optional file number in peer stats, Delete removed or changed optional files, Auto download optional files if autodownloadoptional checked, SiteReload command, Peer use global file server if no site defined, Allow browser cache video files, Allow more keepalive connections, Gevent 1.1 ranged request bugfix, Dont sent optional files details on websocket, Remove files from workermanager tasks if no longer in bad_files, Notify local client about changes on external siteSign
2015-11-09 00:44:03 +01:00
|
|
|
extra_headers["Content-Length"] = str(range_end - range_start)
|
Rev536, Fix stats page, Support ranged http requests for better video browser compatibility, setHashfield command, One by one send hashfield to connected peers if changed, Keep count hashfield changetime, PeerHashfield optimalizations, Wait for peers on checkmodification, Give more time to query trackers, Do not count udp trackers as error if udp disabled, Test hashfield push
2015-10-30 02:08:02 +01:00
|
|
|
extra_headers["Content-Range"] = "bytes %s-%s/%s" % (range_start, range_end-1, file_size)
|
|
|
|
if range:
|
|
|
|
status = 206
|
|
|
|
else:
|
|
|
|
status = 200
|
|
|
|
self.sendHeader(status, content_type=content_type, extra_headers=extra_headers.items())
|
2015-07-12 20:36:46 +02:00
|
|
|
if self.env["REQUEST_METHOD"] != "OPTIONS":
|
|
|
|
file = open(file_path, "rb")
|
Rev536, Fix stats page, Support ranged http requests for better video browser compatibility, setHashfield command, One by one send hashfield to connected peers if changed, Keep count hashfield changetime, PeerHashfield optimalizations, Wait for peers on checkmodification, Give more time to query trackers, Do not count udp trackers as error if udp disabled, Test hashfield push
2015-10-30 02:08:02 +01:00
|
|
|
if range_start:
|
|
|
|
file.seek(range_start)
|
2015-07-12 20:36:46 +02:00
|
|
|
while 1:
|
|
|
|
try:
|
|
|
|
block = file.read(block_size)
|
|
|
|
if block:
|
|
|
|
yield block
|
|
|
|
else:
|
|
|
|
raise StopIteration
|
|
|
|
except StopIteration:
|
|
|
|
file.close()
|
|
|
|
break
|
|
|
|
else: # File not exits
|
|
|
|
yield self.error404(file_path)
|
|
|
|
|
|
|
|
# On websocket connection
def actionWebsocket(self):
    """Upgrade to a websocket and run the UiWebsocket message loop.

    The site is identified by the wrapper_key query parameter; the loop
    blocks until the connection closes, then the socket is detached from
    every site's websocket list.
    """
    ws = self.env.get("wsgi.websocket")
    if ws:
        wrapper_key = self.get["wrapper_key"]
        # Find site by wrapper_key
        site = None
        for site_check in self.server.sites.values():
            if site_check.settings["wrapper_key"] == wrapper_key:
                site = site_check

        if site:  # Correct wrapper key
            user = self.getCurrentUser()
            if not user:
                self.log.error("No user found")
                return self.error403()
            ui_websocket = UiWebsocket(ws, site, self.server, user, self)
            site.websockets.append(ui_websocket)  # Add to site websockets to allow notify on events
            ui_websocket.start()  # Blocks until the websocket disconnects
            for site_check in self.server.sites.values():
                # Remove websocket from every site (admin sites allowed to join other sites event channels)
                if ui_websocket in site_check.websockets:
                    site_check.websockets.remove(ui_websocket)
            return "Bye."
        else:  # No site found by wrapper key
            self.log.error("Wrapper key not found: %s" % wrapper_key)
            return self.error403()
    else:
        self.start_response("400 Bad Request", [])
        return "Not a websocket!"
|
|
|
|
|
|
|
|
# Debug last error
def actionDebug(self):
    """Re-raise the last error captured by DebugHook (debug mode only).

    Uses the Python 2 three-argument raise to preserve the original
    traceback object.
    """
    # Raise last error from DebugHook
    import sys
    last_error = sys.modules["main"].DebugHook.last_error
    if last_error:
        raise last_error[0], last_error[1], last_error[2]
    else:
        self.sendHeader()
        return "No error! :)"
|
|
|
|
|
|
|
|
# Just raise an error to get console
def actionConsole(self):
    """Deliberately raise to drop into the debugger console (debug mode).

    The seemingly-unused locals (sites, main) are intentional: they are
    made available in the debugger's frame for interactive inspection.
    """
    import sys
    sites = self.server.sites
    main = sys.modules["main"]
    raise Exception("Here is your console")
|
|
|
|
|
|
|
|
# - Tests -

def actionTestStream(self):
    """Manual test endpoint: stream a response in pieces with a 1s pause."""
    self.sendHeader()
    padding = " " * 1080  # Overflow browser's buffer
    yield padding
    yield "He"
    time.sleep(1)
    yield "llo!"
    # yield "Running websockets: %s" % len(self.server.websockets)
    # self.server.sendMessage("Hello!")
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# - Errors -
|
|
|
|
|
|
|
|
# Send bad request error
def error400(self, message=""):
    """Send a 400 response; returns the formatted error page body."""
    self.sendHeader(400)
    return self.formatError("Bad Request", message)
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# You are not allowed to access this
def error403(self, message="", details=True):
    """Send a 403 response; details=False hides the environment dump."""
    self.sendHeader(403)
    return self.formatError("Forbidden", message, details=details)
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# Send file not found error
def error404(self, path=""):
    """Send a 404 response; details are never shown for missing paths."""
    self.sendHeader(404)
    return self.formatError("Not Found", path.encode("utf8"), details=False)
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# Internal server error
def error500(self, message=":("):
    """Send a 500 response; the message is html-escaped before display."""
    self.sendHeader(500)
    return self.formatError("Server error", cgi.escape(message))
|
|
|
|
|
2015-10-25 23:08:25 +01:00
|
|
|
def formatError(self, title, message, details=True):
    """Build an html error page body.

    With details=True the page includes a json dump of the (cookie-free)
    request environment plus version/plugin/argument info; password-like
    config arguments are filtered out.  With details=False only the title
    and message are shown.
    """
    import sys
    import gevent

    if details:
        # Only string-valued env entries (hasattr endswith), never cookies
        details = {key: val for key, val in self.env.items() if hasattr(val, "endswith") and "COOKIE" not in key}
        details["version_zeronet"] = "%s r%s" % (config.version, config.rev)
        details["version_python"] = sys.version
        details["version_gevent"] = gevent.__version__
        details["plugins"] = PluginManager.plugin_manager.plugin_names
        arguments = {key: val for key, val in vars(config.arguments).items() if "password" not in key}
        details["arguments"] = arguments
        return """
            <style>
            * { font-family: Consolas, Monospace; color: #333 }
            pre { padding: 10px; background-color: #EEE }
            </style>
            <h1>%s</h1>
            <h2>%s</h3>
            <h3>Please <a href="https://github.com/HelloZeroNet/ZeroNet/issues" target="_blank">report it</a> if you think this an error.</h3>
            <h4>Details:</h4>
            <pre>%s</pre>
        """ % (title, message, json.dumps(details, indent=4, sort_keys=True))
    else:
        return """
            <h1>%s</h1>
            <h2>%s</h3>
        """ % (title, message)
|
version 0.2.8, Namecoin domains using internal resolver site, --disable_zeromq option to skip backward compatiblity layer and save some memory, connectionserver firstchar error fixes, missing unpacker crash fix, sitemanager class to allow extensions, add loaded plugin list to websocket api, faster content publishing, mark updating file as bad, remove coppersurfer tracker add eddie4, internal server error with error displaying, allow site domains in UiRequest, better progress bar, wait for siteinfo before before using localstorage, csslater hide only if opacity is 0
2015-03-30 23:44:29 +02:00
|
|
|
|
|
|
|
|
2015-04-12 23:59:22 +02:00
|
|
|
# - Reload for eaiser developing -
|
2015-07-12 20:36:46 +02:00
|
|
|
# def reload():
|
|
|
|
# import imp, sys
|
|
|
|
# global UiWebsocket
|
|
|
|
# UiWebsocket = imp.load_source("UiWebsocket", "src/Ui/UiWebsocket.py").UiWebsocket
|
|
|
|
# reload(sys.modules["User.UserManager"])
|
|
|
|
# UserManager.reloadModule()
|
|
|
|
# self.user = UserManager.user_manager.getCurrent()
|