2015-07-12 20:36:46 +02:00
|
|
|
import time
|
|
|
|
import re
|
|
|
|
import os
|
|
|
|
import mimetypes
|
|
|
|
import json
|
|
|
|
import cgi
|
|
|
|
|
2015-01-12 02:03:45 +01:00
|
|
|
from Config import config
|
|
|
|
from Site import SiteManager
|
version 0.2.0, new lib for bitcoin ecc, dont display or track notify errors, dont reload again within 1 sec, null peer ip fix, signingmoved to ContentManager, content.json include support, content.json multisig ready, content.json proper bitcoincore compatible signing, content.json include permissions, multithreaded publish, publish timeout 60s, no exception on invalid bitcoin address, testcase for new lib, bip32 based persite privatekey generation, multiuser ready, simple json database query command, websocket api fileGet, wrapper loading title stuck bugfix
2015-02-09 02:09:02 +01:00
|
|
|
from User import UserManager
|
version 0.2.7, plugin system, multiuser plugin for zeroproxies, reworked imports, cookie parse, stats moved to plugin, usermanager class, dont generate site auth on listing, multiline notifications, allow server side prompt from user, update script keep plugins disabled status
2015-03-24 01:33:09 +01:00
|
|
|
from Plugin import PluginManager
|
2015-01-12 02:03:45 +01:00
|
|
|
from Ui.UiWebsocket import UiWebsocket
|
2015-09-13 23:17:13 +02:00
|
|
|
from Crypt import CryptHash
|
2015-01-12 02:03:45 +01:00
|
|
|
|
|
|
|
# Status line lookup used by sendHeader(): http code -> "code reason" string
status_texts = dict(
    (code, "%s %s" % (code, reason))
    for code, reason in (
        (200, "OK"),
        (206, "Partial Content"),
        (400, "Bad Request"),
        (403, "Forbidden"),
        (404, "Not Found"),
        (500, "Internal Server Error"),
    )
)
|
|
|
|
|
|
|
|
|
version 0.2.7, plugin system, multiuser plugin for zeroproxies, reworked imports, cookie parse, stats moved to plugin, usermanager class, dont generate site auth on listing, multiline notifications, allow server side prompt from user, update script keep plugins disabled status
2015-03-24 01:33:09 +01:00
|
|
|
@PluginManager.acceptPlugins
|
|
|
|
class UiRequest(object):
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
def __init__(self, server, get, env, start_response):
|
|
|
|
if server:
|
|
|
|
self.server = server
|
|
|
|
self.log = server.log
|
|
|
|
self.get = get # Get parameters
|
|
|
|
self.env = env # Enviroment settings
|
2015-07-17 00:28:43 +02:00
|
|
|
# ['CONTENT_LENGTH', 'CONTENT_TYPE', 'GATEWAY_INTERFACE', 'HTTP_ACCEPT', 'HTTP_ACCEPT_ENCODING', 'HTTP_ACCEPT_LANGUAGE',
|
|
|
|
# 'HTTP_COOKIE', 'HTTP_CACHE_CONTROL', 'HTTP_HOST', 'HTTP_HTTPS', 'HTTP_ORIGIN', 'HTTP_PROXY_CONNECTION', 'HTTP_REFERER',
|
|
|
|
# 'HTTP_USER_AGENT', 'PATH_INFO', 'QUERY_STRING', 'REMOTE_ADDR', 'REMOTE_PORT', 'REQUEST_METHOD', 'SCRIPT_NAME',
|
|
|
|
# 'SERVER_NAME', 'SERVER_PORT', 'SERVER_PROTOCOL', 'SERVER_SOFTWARE', 'werkzeug.request', 'wsgi.errors',
|
|
|
|
# 'wsgi.input', 'wsgi.multiprocess', 'wsgi.multithread', 'wsgi.run_once', 'wsgi.url_scheme', 'wsgi.version']
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
self.start_response = start_response # Start response function
|
|
|
|
self.user = None
|
|
|
|
|
2017-06-13 14:19:23 +02:00
|
|
|
def isHostAllowed(self, host):
|
|
|
|
if host in self.server.allowed_hosts:
|
|
|
|
return True
|
|
|
|
|
|
|
|
if self.isProxyRequest(): # Support for chrome extension proxy
|
|
|
|
if self.server.site_manager.isDomain(host):
|
|
|
|
return True
|
|
|
|
else:
|
|
|
|
return False
|
|
|
|
|
2017-06-15 19:48:01 +02:00
|
|
|
if self.server.learn_allowed_host:
|
2017-06-13 14:19:23 +02:00
|
|
|
# Learn the first request's host as allowed one
|
|
|
|
self.server.learn_allowed_host = False
|
|
|
|
self.server.allowed_hosts.add(host)
|
|
|
|
self.server.log.info("Added %s as allowed host" % host)
|
|
|
|
return True
|
|
|
|
|
|
|
|
return False
|
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
    # Call the request handler function base on path
    def route(self, path):
        """Dispatch an incoming request path to the matching action* handler.

        Returns the response body (string or iterator) produced by the handler,
        or an error page for restricted or unknown paths.
        """
        # Restrict Ui access by ip
        if config.ui_restrict and self.env['REMOTE_ADDR'] not in config.ui_restrict:
            return self.error403(details=False)

        # Check if host allowed to do request (DNS-rebinding protection)
        if not self.isHostAllowed(self.env.get("HTTP_HOST")):
            return self.error403("Invalid host: %s" % self.env.get("HTTP_HOST"), details=False)

        path = re.sub("^http://zero[/]+", "/", path)  # Remove begining http://zero/ for chrome extension
        path = re.sub("^http://", "/", path)  # Remove begining http for chrome extension .bit access

        # CORS preflight: answer with the target's content type and no body
        if self.env["REQUEST_METHOD"] == "OPTIONS":
            if "/" not in path.strip("/"):
                content_type = self.getContentType("index.html")
            else:
                content_type = self.getContentType(path)
            self.sendHeader(content_type=content_type)
            return ""

        if path == "/":
            return self.actionIndex()
        elif path == "/favicon.ico":
            return self.actionFile("src/Ui/media/img/favicon.ico")
        # Media
        elif path.startswith("/uimedia/"):
            return self.actionUiMedia(path)
        elif "/uimedia/" in path:
            # uimedia within site dir (for chrome extension)
            path = re.sub(".*?/uimedia/", "/uimedia/", path)
            return self.actionUiMedia(path)
        # Websocket
        elif path == "/Websocket":
            return self.actionWebsocket()
        # Debug
        elif path == "/Debug" and config.debug:
            return self.actionDebug()
        elif path == "/Console" and config.debug:
            return self.actionConsole()
        # Site media wrapper
        else:
            if self.get.get("wrapper_nonce"):
                return self.actionSiteMedia("/media" + path)  # Only serve html files with frame
            else:
                body = self.actionWrapper(path)
            if body:
                return body
            else:
                # Fall back: look for an action<Name> method matching the path
                func = getattr(self, "action" + path.lstrip("/"), None)  # Check if we have action+request_path function
                if func:
                    return func()
                else:
                    return self.error404(path)
|
|
|
|
|
|
|
|
# The request is proxied by chrome extension
|
|
|
|
def isProxyRequest(self):
|
|
|
|
return self.env["PATH_INFO"].startswith("http://")
|
|
|
|
|
2017-01-08 17:26:59 +01:00
|
|
|
def isWebSocketRequest(self):
|
2017-01-08 07:56:42 +01:00
|
|
|
return self.env.get("HTTP_UPGRADE") == "websocket"
|
2017-01-08 17:26:59 +01:00
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
def isAjaxRequest(self):
|
|
|
|
return self.env.get("HTTP_X_REQUESTED_WITH") == "XMLHttpRequest"
|
|
|
|
|
|
|
|
# Get mime by filename
|
|
|
|
def getContentType(self, file_name):
|
|
|
|
content_type = mimetypes.guess_type(file_name)[0]
|
2017-01-05 02:33:33 +01:00
|
|
|
|
2017-01-12 06:22:26 +01:00
|
|
|
if file_name.endswith(".css"): # Force correct css content type
|
2017-01-05 02:33:33 +01:00
|
|
|
content_type = "text/css"
|
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
if not content_type:
|
2017-01-12 06:22:26 +01:00
|
|
|
if file_name.endswith(".json"): # Correct json header
|
2015-07-12 20:36:46 +02:00
|
|
|
content_type = "application/json"
|
|
|
|
else:
|
|
|
|
content_type = "application/octet-stream"
|
|
|
|
return content_type
|
|
|
|
|
2015-07-17 00:28:43 +02:00
|
|
|
# Return: <dict> Posted variables
|
|
|
|
def getPosted(self):
|
|
|
|
if self.env['REQUEST_METHOD'] == "POST":
|
|
|
|
return dict(cgi.parse_qsl(
|
|
|
|
self.env['wsgi.input'].readline().decode()
|
|
|
|
))
|
|
|
|
else:
|
|
|
|
return {}
|
|
|
|
|
|
|
|
# Return: <dict> Cookies based on self.env
|
2015-07-12 20:36:46 +02:00
|
|
|
def getCookies(self):
|
|
|
|
raw_cookies = self.env.get('HTTP_COOKIE')
|
|
|
|
if raw_cookies:
|
|
|
|
cookies = cgi.parse_qsl(raw_cookies)
|
|
|
|
return {key.strip(): val for key, val in cookies}
|
|
|
|
else:
|
|
|
|
return {}
|
|
|
|
|
|
|
|
def getCurrentUser(self):
|
|
|
|
if self.user:
|
|
|
|
return self.user # Cache
|
|
|
|
self.user = UserManager.user_manager.get() # Get user
|
|
|
|
if not self.user:
|
|
|
|
self.user = UserManager.user_manager.create()
|
|
|
|
return self.user
|
|
|
|
|
|
|
|
# Send response headers
|
|
|
|
def sendHeader(self, status=200, content_type="text/html", extra_headers=[]):
|
|
|
|
headers = []
|
|
|
|
headers.append(("Version", "HTTP/1.1"))
|
2015-09-08 03:07:44 +02:00
|
|
|
headers.append(("Connection", "Keep-Alive"))
|
Rev571, Optional file sizes to sidebar, Download all optional files option in sidebar, Optional file number in peer stats, Delete removed or changed optional files, Auto download optional files if autodownloadoptional checked, SiteReload command, Peer use global file server if no site defined, Allow browser cache video files, Allow more keepalive connections, Gevent 1.1 ranged request bugfix, Dont sent optional files details on websocket, Remove files from workermanager tasks if no longer in bad_files, Notify local client about changes on external siteSign
2015-11-09 00:44:03 +01:00
|
|
|
headers.append(("Keep-Alive", "max=25, timeout=30"))
|
2017-01-06 02:44:22 +01:00
|
|
|
headers.append(("X-Frame-Options", "SAMEORIGIN"))
|
Version 0.3.5, Rev830, Full Tor mode support with hidden services, Onion stats in Sidebar, GeoDB download fix using Tor, Gray out disabled sites in Stats page, Tor hidden service status in stat page, Benchmark sha256, Skyts tracker out expodie in, 2 new tracker using ZeroNet protocol, Keep SSL cert option between restarts, SSL Certificate pinning support for connections, Site lock support for connections, Certificate pinned connections using implicit SSL, Flood protection whitelist support, Foreign keys support for DB layer, Not support for SQL query helper, 0 length file get bugfix, Pex onion address support, Faster port testing, Faster uPnP port opening, Need connections more often on owned sites, Delay ZeroHello startup message if port check or Tor manager not ready yet, Use lockfiles to avoid double start, Save original socket on proxy monkey patching to get ability to connect localhost directly, Handle atomic write errors, Broken gevent https workaround helper, Rsa crypt functions, Plugin to Bootstrap using ZeroNet protocol
2016-01-05 00:20:52 +01:00
|
|
|
# headers.append(("Content-Security-Policy", "default-src 'self' data: 'unsafe-inline' ws://127.0.0.1:* http://127.0.0.1:* wss://tracker.webtorrent.io; sandbox allow-same-origin allow-top-navigation allow-scripts")) # Only local connections
|
2015-07-12 20:36:46 +02:00
|
|
|
if self.env["REQUEST_METHOD"] == "OPTIONS":
|
|
|
|
# Allow json access
|
2015-07-17 00:28:43 +02:00
|
|
|
headers.append(("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept, Cookie"))
|
|
|
|
headers.append(("Access-Control-Allow-Credentials", "true"))
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2016-01-12 21:52:24 +01:00
|
|
|
if content_type == "text/html":
|
|
|
|
content_type = "text/html; charset=utf-8"
|
2017-02-24 14:39:20 +01:00
|
|
|
if content_type == "text/plain":
|
|
|
|
content_type = "text/plain; charset=utf-8"
|
|
|
|
|
2017-07-06 00:09:05 +02:00
|
|
|
# Download instead of display file types that can be dangerous
|
|
|
|
if re.findall("/svg|/xml|/x-shockwave-flash|/pdf", content_type):
|
|
|
|
headers.append(("Content-Disposition", "attachment"))
|
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
cacheable_type = (
|
Rev571, Optional file sizes to sidebar, Download all optional files option in sidebar, Optional file number in peer stats, Delete removed or changed optional files, Auto download optional files if autodownloadoptional checked, SiteReload command, Peer use global file server if no site defined, Allow browser cache video files, Allow more keepalive connections, Gevent 1.1 ranged request bugfix, Dont sent optional files details on websocket, Remove files from workermanager tasks if no longer in bad_files, Notify local client about changes on external siteSign
2015-11-09 00:44:03 +01:00
|
|
|
content_type == "text/css" or content_type.startswith("image") or content_type.startswith("video") or
|
2015-07-12 20:36:46 +02:00
|
|
|
self.env["REQUEST_METHOD"] == "OPTIONS" or content_type == "application/javascript"
|
|
|
|
)
|
|
|
|
|
Rev571, Optional file sizes to sidebar, Download all optional files option in sidebar, Optional file number in peer stats, Delete removed or changed optional files, Auto download optional files if autodownloadoptional checked, SiteReload command, Peer use global file server if no site defined, Allow browser cache video files, Allow more keepalive connections, Gevent 1.1 ranged request bugfix, Dont sent optional files details on websocket, Remove files from workermanager tasks if no longer in bad_files, Notify local client about changes on external siteSign
2015-11-09 00:44:03 +01:00
|
|
|
if status in (200, 206) and cacheable_type: # Cache Css, Js, Image files for 10min
|
2015-07-12 20:36:46 +02:00
|
|
|
headers.append(("Cache-Control", "public, max-age=600")) # Cache 10 min
|
Rev426, Fix for nonce error on bigsites asking, Dont display error details on 404 error, Dont log Websocket close errors, Add travis pip caching and osx test, Add build status to readme, Test for site files after cloning, Test for json to db mapping, Test site deleteFiles command, Test user certificate and auth address generation, Exclude debug lines from coverage, Dont run webtests every time
2015-09-20 22:35:45 +02:00
|
|
|
else:
|
2015-07-12 20:36:46 +02:00
|
|
|
headers.append(("Cache-Control", "no-cache, no-store, private, must-revalidate, max-age=0")) # No caching at all
|
|
|
|
headers.append(("Content-Type", content_type))
|
|
|
|
for extra_header in extra_headers:
|
|
|
|
headers.append(extra_header)
|
|
|
|
return self.start_response(status_texts[status], headers)
|
|
|
|
|
|
|
|
# Renders a template
|
|
|
|
def render(self, template_path, *args, **kwargs):
|
|
|
|
template = open(template_path).read().decode("utf8")
|
|
|
|
return template.format(**kwargs).encode("utf8")
|
|
|
|
|
|
|
|
# - Actions -
|
|
|
|
|
|
|
|
# Redirect to an url
|
|
|
|
def actionRedirect(self, url):
|
2017-05-11 17:56:01 +02:00
|
|
|
self.start_response('301 Redirect', [('Location', str(url))])
|
2015-07-12 20:36:46 +02:00
|
|
|
yield "Location changed: %s" % url
|
|
|
|
|
|
|
|
def actionIndex(self):
|
|
|
|
return self.actionRedirect("/" + config.homepage)
|
|
|
|
|
|
|
|
    # Render a file from media with iframe site wrapper
    def actionWrapper(self, path, extra_headers=None):
        """Serve a site html page wrapped in the ZeroNet iframe wrapper.

        Non-html files are delegated to actionSiteMedia. Ajax, websocket and
        prefetch requests are refused so the wrapper (and its nonce) cannot leak.
        Returns an iterator over the wrapper html, a delegate handler's
        response, or False for an unparsable url / unknown site.
        """
        if not extra_headers:
            extra_headers = []

        match = re.match("/(?P<address>[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path)
        if match:
            address = match.group("address")
            inner_path = match.group("inner_path").lstrip("/")
            if "." in inner_path and not inner_path.endswith(".html"):
                return self.actionSiteMedia("/media" + path)  # Only serve html files with frame
            if self.isAjaxRequest():
                return self.error403("Ajax request not allowed to load wrapper")  # No ajax allowed on wrapper

            if self.isWebSocketRequest():
                return self.error403("WebSocket request not allowed to load wrapper")  # No websocket

            # Browsers loading a page always accept text/html; anything else is suspicious
            if "text/html" not in self.env.get("HTTP_ACCEPT", ""):
                return self.error403("Invalid Accept header to load wrapper")
            # Don't burn a wrapper nonce on browser prefetching
            if "prefetch" in self.env.get("HTTP_X_MOZ", "") or "prefetch" in self.env.get("HTTP_PURPOSE", ""):
                return self.error403("Prefetch not allowed to load wrapper")

            site = SiteManager.site_manager.get(address)

            if (
                site and site.content_manager.contents.get("content.json") and
                (not site.getReachableBadFiles() or site.settings["own"])
            ):  # Its downloaded or own
                title = site.content_manager.contents["content.json"]["title"]
            else:
                title = "Loading %s..." % address
                site = SiteManager.site_manager.need(address)  # Start download site

                if not site:
                    return False

            # Pass a copy so renderWrapper can't mutate the caller's list
            self.sendHeader(extra_headers=extra_headers[:])
            return iter([self.renderWrapper(site, path, inner_path, title, extra_headers)])
            # Dont know why wrapping with iter necessary, but without it around 100x slower

        else:  # Bad url
            return False
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2017-05-11 17:57:29 +02:00
|
|
|
def getSiteUrl(self, address):
|
|
|
|
if self.isProxyRequest():
|
|
|
|
return "http://zero/" + address
|
|
|
|
else:
|
|
|
|
return "/" + address
|
|
|
|
|
2017-05-11 18:01:16 +02:00
|
|
|
    def renderWrapper(self, site, path, inner_path, title, extra_headers, show_loadingscreen=None):
        """Render src/Ui/template/wrapper.html for a site page.

        site: Site object being displayed
        path: full request path (/<address>/<inner_path>)
        inner_path: file path within the site ("" defaults to index.html)
        title: window title to display
        extra_headers: accepted for interface compatibility; not used in this method
        show_loadingscreen: force the loading screen on/off; None = show it when
            the requested file is not yet downloaded locally
        Returns the rendered wrapper html (utf8-encoded).
        """
        file_inner_path = inner_path
        if not file_inner_path:
            file_inner_path = "index.html"  # If inner path defaults to index.html

        if file_inner_path.endswith("/"):
            file_inner_path = file_inner_path + "index.html"

        # First path segment is the site address
        address = re.sub("/.*", "", path.lstrip("/"))
        if self.isProxyRequest() and (not path or "/" in path[1:]):
            if self.env["HTTP_HOST"] == "zero":
                root_url = "/" + address + "/"
                file_url = "/" + address + "/" + inner_path
            else:
                # Proxied via a site domain: the address is implied by the host
                file_url = "/" + inner_path
                root_url = "/"

        else:
            file_url = "/" + address + "/" + inner_path
            root_url = "/" + address + "/"

        # Wrapper variable inits
        query_string = ""
        body_style = ""
        meta_tags = ""
        postmessage_nonce_security = "false"

        # Single-use nonce that lets the iframe fetch the html without the wrapper
        wrapper_nonce = self.getWrapperNonce()

        if self.env.get("QUERY_STRING"):
            query_string = "?%s&wrapper_nonce=%s" % (self.env["QUERY_STRING"], wrapper_nonce)
        elif "?" in inner_path:
            query_string = "&wrapper_nonce=%s" % wrapper_nonce
        else:
            query_string = "?wrapper_nonce=%s" % wrapper_nonce

        if self.isProxyRequest():  # Its a remote proxy request
            if self.env["REMOTE_ADDR"] == "127.0.0.1":  # Local client, the server address also should be 127.0.0.1
                server_url = "http://127.0.0.1:%s" % self.env["SERVER_PORT"]
            else:  # Remote client, use SERVER_NAME as server's real address
                server_url = "http://%s:%s" % (self.env["SERVER_NAME"], self.env["SERVER_PORT"])
            homepage = "http://zero/" + config.homepage
        else:  # Use relative path
            server_url = ""
            homepage = "/" + config.homepage

        if site.content_manager.contents.get("content.json"):  # Got content.json
            content = site.content_manager.contents["content.json"]
            # All user-controlled values are html-escaped before templating
            if content.get("background-color"):
                body_style += "background-color: %s;" % \
                    cgi.escape(site.content_manager.contents["content.json"]["background-color"], True)
            if content.get("viewport"):
                meta_tags += '<meta name="viewport" id="viewport" content="%s">' % cgi.escape(content["viewport"], True)
            if content.get("favicon"):
                meta_tags += '<link rel="icon" href="%s%s">' % (root_url, cgi.escape(content["favicon"], True))
            if content.get("postmessage_nonce_security"):
                postmessage_nonce_security = "true"

        if site.settings.get("own"):
            sandbox_permissions = "allow-modals"  # For coffeescript compile errors
        else:
            sandbox_permissions = ""

        if show_loadingscreen is None:
            show_loadingscreen = not site.storage.isFile(file_inner_path)

        return self.render(
            "src/Ui/template/wrapper.html",
            server_url=server_url,
            inner_path=inner_path,
            file_url=re.escape(file_url),
            file_inner_path=re.escape(file_inner_path),
            address=site.address,
            title=cgi.escape(title, True),
            body_style=body_style,
            meta_tags=meta_tags,
            query_string=re.escape(query_string),
            wrapper_key=site.settings["wrapper_key"],
            wrapper_nonce=wrapper_nonce,
            postmessage_nonce_security=postmessage_nonce_security,
            permissions=json.dumps(site.settings["permissions"]),
            show_loadingscreen=json.dumps(show_loadingscreen),
            sandbox_permissions=sandbox_permissions,
            rev=config.rev,
            lang=config.language,
            homepage=homepage
        )
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2015-09-10 23:25:09 +02:00
|
|
|
# Create a new wrapper nonce that allows to get one html file without the wrapper
|
|
|
|
def getWrapperNonce(self):
|
2015-09-13 23:17:13 +02:00
|
|
|
wrapper_nonce = CryptHash.random()
|
2015-09-10 23:25:09 +02:00
|
|
|
self.server.wrapper_nonces.append(wrapper_nonce)
|
|
|
|
return wrapper_nonce
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# Returns if media request allowed from that referer
|
|
|
|
def isMediaRequestAllowed(self, site_address, referer):
|
2015-08-06 00:51:25 +02:00
|
|
|
if not re.sub("^http[s]{0,1}://", "", referer).startswith(self.env["HTTP_HOST"]):
|
|
|
|
return False
|
2015-07-12 20:36:46 +02:00
|
|
|
referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address
|
|
|
|
return referer_path.startswith("/" + site_address)
|
|
|
|
|
2016-08-10 12:24:09 +02:00
|
|
|
# Return {address: 1Site.., inner_path: /data/users.json} from url path
|
2016-03-16 00:33:05 +01:00
|
|
|
def parsePath(self, path):
|
2015-07-12 20:36:46 +02:00
|
|
|
path = path.replace("/index.html/", "/") # Base Backward compatibility fix
|
|
|
|
if path.endswith("/"):
|
|
|
|
path = path + "index.html"
|
|
|
|
|
2017-02-19 00:51:47 +01:00
|
|
|
if ".." in path:
|
|
|
|
raise Exception("Invalid path")
|
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
match = re.match("/media/(?P<address>[A-Za-z0-9\._-]+)/(?P<inner_path>.*)", path)
|
2016-03-16 00:33:05 +01:00
|
|
|
if match:
|
2016-08-10 12:24:09 +02:00
|
|
|
path_parts = match.groupdict()
|
|
|
|
path_parts["request_address"] = path_parts["address"] # Original request address (for Merger sites)
|
|
|
|
return path_parts
|
2016-03-16 00:33:05 +01:00
|
|
|
else:
|
|
|
|
return None
|
|
|
|
|
|
|
|
# Serve a media for site
|
2016-11-16 11:19:35 +01:00
|
|
|
def actionSiteMedia(self, path, header_length=True):
|
2017-02-27 11:21:22 +01:00
|
|
|
if ".." in path: # File not in allowed path
|
|
|
|
return self.error403("Invalid file path")
|
|
|
|
|
2016-03-16 00:33:05 +01:00
|
|
|
path_parts = self.parsePath(path)
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2015-09-10 23:25:09 +02:00
|
|
|
# Check wrapper nonce
|
2017-02-02 12:51:41 +01:00
|
|
|
content_type = self.getContentType(path_parts["inner_path"])
|
2015-09-10 23:25:09 +02:00
|
|
|
if "htm" in content_type: # Valid nonce must present to render html files
|
2015-09-20 00:27:54 +02:00
|
|
|
wrapper_nonce = self.get.get("wrapper_nonce")
|
2015-09-10 23:25:09 +02:00
|
|
|
if wrapper_nonce not in self.server.wrapper_nonces:
|
2015-09-11 14:03:48 +02:00
|
|
|
return self.error403("Wrapper nonce error. Please reload the page.")
|
2015-09-10 23:25:09 +02:00
|
|
|
self.server.wrapper_nonces.remove(self.get["wrapper_nonce"])
|
2017-05-11 18:00:57 +02:00
|
|
|
else:
|
|
|
|
referer = self.env.get("HTTP_REFERER")
|
|
|
|
if referer and path_parts: # Only allow same site to receive media
|
|
|
|
if not self.isMediaRequestAllowed(path_parts["request_address"], referer):
|
|
|
|
self.log.error("Media referrer error: %s not allowed from %s" % (path_parts["address"], referer))
|
|
|
|
return self.error403("Media referrer error") # Referrer not starts same address as requested path
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2016-03-16 00:33:05 +01:00
|
|
|
if path_parts: # Looks like a valid path
|
|
|
|
address = path_parts["address"]
|
|
|
|
file_path = "%s/%s/%s" % (config.data_dir, address, path_parts["inner_path"])
|
2017-02-27 11:21:22 +01:00
|
|
|
if config.debug and file_path.split("/")[-1].startswith("all."):
|
|
|
|
# If debugging merge *.css to all.css and *.js to all.js
|
|
|
|
site = self.server.sites.get(address)
|
|
|
|
if site.settings["own"]:
|
|
|
|
from Debug import DebugMedia
|
|
|
|
DebugMedia.merge(file_path)
|
2017-04-09 16:54:28 +02:00
|
|
|
if not address or address == ".":
|
|
|
|
return self.error403(path_parts["inner_path"])
|
2017-02-27 11:21:22 +01:00
|
|
|
if os.path.isfile(file_path): # File exists
|
|
|
|
return self.actionFile(file_path, header_length=header_length)
|
|
|
|
elif os.path.isdir(file_path): # If this is actually a folder, add "/" and redirect
|
|
|
|
return self.actionRedirect("./{0}/".format(path_parts["inner_path"].split("/")[-1]))
|
|
|
|
else: # File not exists, try to download
|
|
|
|
if address not in SiteManager.site_manager.sites: # Only in case if site already started downloading
|
|
|
|
return self.error404(path_parts["inner_path"])
|
|
|
|
|
|
|
|
site = SiteManager.site_manager.need(address)
|
|
|
|
|
|
|
|
if path_parts["inner_path"].endswith("favicon.ico"): # Default favicon for all sites
|
|
|
|
return self.actionFile("src/Ui/media/img/favicon.ico")
|
|
|
|
|
2017-05-07 21:21:26 +02:00
|
|
|
result = site.needFile(path_parts["inner_path"], priority=15) # Wait until file downloads
|
2017-02-27 11:21:22 +01:00
|
|
|
if result:
|
2016-11-16 11:19:35 +01:00
|
|
|
return self.actionFile(file_path, header_length=header_length)
|
2017-02-27 11:21:22 +01:00
|
|
|
else:
|
|
|
|
self.log.debug("File not found: %s" % path_parts["inner_path"])
|
|
|
|
# Site larger than allowed, re-add wrapper nonce to allow reload
|
|
|
|
if site.settings.get("size", 0) > site.getSizeLimit() * 1024 * 1024:
|
|
|
|
self.server.wrapper_nonces.append(self.get.get("wrapper_nonce"))
|
|
|
|
return self.error404(path_parts["inner_path"])
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
else: # Bad url
|
|
|
|
return self.error404(path)
|
|
|
|
|
|
|
|
    # Serve a media for ui
    def actionUiMedia(self, path):
        """Serve a static UI asset from src/Ui/media/.

        path: request path in the form /uimedia/<inner_path>
        Returns the file response, 403 for files outside the media dir,
        or 400 for a malformed url.
        """
        match = re.match("/uimedia/(?P<inner_path>.*)", path)
        if match:  # Looks like a valid path
            file_path = "src/Ui/media/%s" % match.group("inner_path")
            allowed_dir = os.path.abspath("src/Ui/media")  # Only files within src/Ui/media allowed
            if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir):
                # File not in allowed path (traversal attempt)
                return self.error403()
            else:
                if config.debug and match.group("inner_path").startswith("all."):
                    # If debugging merge *.css to all.css and *.js to all.js
                    from Debug import DebugMedia
                    DebugMedia.merge(file_path)
                # Skip the Content-Length header so plugins may append content
                return self.actionFile(file_path, header_length=False)  # Dont's send site to allow plugins append content
        else:  # Bad url
            return self.error400()
|
|
|
|
|
|
|
|
# Stream a file to client
|
2016-11-07 22:49:13 +01:00
|
|
|
def actionFile(self, file_path, block_size=64 * 1024, send_header=True, header_length=True):
|
2017-05-11 17:59:46 +02:00
|
|
|
if ".." in file_path:
|
|
|
|
raise Exception("Invalid path")
|
2015-07-12 20:36:46 +02:00
|
|
|
if os.path.isfile(file_path):
|
|
|
|
# Try to figure out content type by extension
|
|
|
|
content_type = self.getContentType(file_path)
|
|
|
|
|
|
|
|
# TODO: Dont allow external access: extra_headers=
|
|
|
|
# [("Content-Security-Policy", "default-src 'unsafe-inline' data: http://localhost:43110 ws://localhost:43110")]
|
Rev536, Fix stats page, Support ranged http requests for better video browser compatibility, setHashfield command, One by one send hashfield to connected peers if changed, Keep count hashfield changetime, PeerHashfield optimalizations, Wait for peers on checkmodification, Give more time to query trackers, Do not count udp trackers as error if udp disabled, Test hashfield push
2015-10-30 02:08:02 +01:00
|
|
|
range = self.env.get("HTTP_RANGE")
|
|
|
|
range_start = None
|
2015-07-31 23:34:53 +02:00
|
|
|
if send_header:
|
Rev536, Fix stats page, Support ranged http requests for better video browser compatibility, setHashfield command, One by one send hashfield to connected peers if changed, Keep count hashfield changetime, PeerHashfield optimalizations, Wait for peers on checkmodification, Give more time to query trackers, Do not count udp trackers as error if udp disabled, Test hashfield push
2015-10-30 02:08:02 +01:00
|
|
|
extra_headers = {}
|
|
|
|
file_size = os.path.getsize(file_path)
|
|
|
|
extra_headers["Accept-Ranges"] = "bytes"
|
2016-11-07 22:49:13 +01:00
|
|
|
if header_length:
|
|
|
|
extra_headers["Content-Length"] = str(file_size)
|
Rev536, Fix stats page, Support ranged http requests for better video browser compatibility, setHashfield command, One by one send hashfield to connected peers if changed, Keep count hashfield changetime, PeerHashfield optimalizations, Wait for peers on checkmodification, Give more time to query trackers, Do not count udp trackers as error if udp disabled, Test hashfield push
2015-10-30 02:08:02 +01:00
|
|
|
if range:
|
|
|
|
range_start = int(re.match(".*?([0-9]+)", range).group(1))
|
|
|
|
if re.match(".*?-([0-9]+)", range):
|
2016-02-02 00:49:43 +01:00
|
|
|
range_end = int(re.match(".*?-([0-9]+)", range).group(1)) + 1
|
Rev536, Fix stats page, Support ranged http requests for better video browser compatibility, setHashfield command, One by one send hashfield to connected peers if changed, Keep count hashfield changetime, PeerHashfield optimalizations, Wait for peers on checkmodification, Give more time to query trackers, Do not count udp trackers as error if udp disabled, Test hashfield push
2015-10-30 02:08:02 +01:00
|
|
|
else:
|
|
|
|
range_end = file_size
|
Rev571, Optional file sizes to sidebar, Download all optional files option in sidebar, Optional file number in peer stats, Delete removed or changed optional files, Auto download optional files if autodownloadoptional checked, SiteReload command, Peer use global file server if no site defined, Allow browser cache video files, Allow more keepalive connections, Gevent 1.1 ranged request bugfix, Dont sent optional files details on websocket, Remove files from workermanager tasks if no longer in bad_files, Notify local client about changes on external siteSign
2015-11-09 00:44:03 +01:00
|
|
|
extra_headers["Content-Length"] = str(range_end - range_start)
|
2016-02-02 00:49:43 +01:00
|
|
|
extra_headers["Content-Range"] = "bytes %s-%s/%s" % (range_start, range_end - 1, file_size)
|
Rev536, Fix stats page, Support ranged http requests for better video browser compatibility, setHashfield command, One by one send hashfield to connected peers if changed, Keep count hashfield changetime, PeerHashfield optimalizations, Wait for peers on checkmodification, Give more time to query trackers, Do not count udp trackers as error if udp disabled, Test hashfield push
2015-10-30 02:08:02 +01:00
|
|
|
if range:
|
|
|
|
status = 206
|
|
|
|
else:
|
|
|
|
status = 200
|
|
|
|
self.sendHeader(status, content_type=content_type, extra_headers=extra_headers.items())
|
2015-07-12 20:36:46 +02:00
|
|
|
if self.env["REQUEST_METHOD"] != "OPTIONS":
|
|
|
|
file = open(file_path, "rb")
|
Rev536, Fix stats page, Support ranged http requests for better video browser compatibility, setHashfield command, One by one send hashfield to connected peers if changed, Keep count hashfield changetime, PeerHashfield optimalizations, Wait for peers on checkmodification, Give more time to query trackers, Do not count udp trackers as error if udp disabled, Test hashfield push
2015-10-30 02:08:02 +01:00
|
|
|
if range_start:
|
|
|
|
file.seek(range_start)
|
2015-07-12 20:36:46 +02:00
|
|
|
while 1:
|
|
|
|
try:
|
|
|
|
block = file.read(block_size)
|
|
|
|
if block:
|
|
|
|
yield block
|
|
|
|
else:
|
|
|
|
raise StopIteration
|
|
|
|
except StopIteration:
|
|
|
|
file.close()
|
|
|
|
break
|
2016-10-02 14:24:48 +02:00
|
|
|
else: # File not exists
|
2015-07-12 20:36:46 +02:00
|
|
|
yield self.error404(file_path)
|
|
|
|
|
|
|
|
# On websocket connection
|
|
|
|
def actionWebsocket(self):
|
|
|
|
ws = self.env.get("wsgi.websocket")
|
|
|
|
if ws:
|
|
|
|
wrapper_key = self.get["wrapper_key"]
|
|
|
|
# Find site by wrapper_key
|
|
|
|
site = None
|
|
|
|
for site_check in self.server.sites.values():
|
|
|
|
if site_check.settings["wrapper_key"] == wrapper_key:
|
|
|
|
site = site_check
|
|
|
|
|
|
|
|
if site: # Correct wrapper key
|
|
|
|
user = self.getCurrentUser()
|
|
|
|
if not user:
|
|
|
|
self.log.error("No user found")
|
|
|
|
return self.error403()
|
2015-07-17 00:28:43 +02:00
|
|
|
ui_websocket = UiWebsocket(ws, site, self.server, user, self)
|
2015-07-12 20:36:46 +02:00
|
|
|
site.websockets.append(ui_websocket) # Add to site websockets to allow notify on events
|
|
|
|
ui_websocket.start()
|
|
|
|
for site_check in self.server.sites.values():
|
|
|
|
# Remove websocket from every site (admin sites allowed to join other sites event channels)
|
|
|
|
if ui_websocket in site_check.websockets:
|
|
|
|
site_check.websockets.remove(ui_websocket)
|
|
|
|
return "Bye."
|
|
|
|
else: # No site found by wrapper key
|
|
|
|
self.log.error("Wrapper key not found: %s" % wrapper_key)
|
|
|
|
return self.error403()
|
|
|
|
else:
|
|
|
|
self.start_response("400 Bad Request", [])
|
|
|
|
return "Not a websocket!"
|
|
|
|
|
|
|
|
# Debug last error
|
|
|
|
    # Debug last error
    def actionDebug(self):
        # Raise last error from DebugHook so the debugger/console can inspect it
        import sys
        last_error = sys.modules["main"].DebugHook.last_error
        if last_error:
            # Python 2 three-argument raise: re-raises the stored exception
            # type/value with its original traceback
            raise last_error[0], last_error[1], last_error[2]
        else:
            self.sendHeader()
            return "No error! :)"
|
|
|
|
|
|
|
|
# Just raise an error to get console
|
|
|
|
    # Just raise an error to get console
    def actionConsole(self):
        import sys
        # Bound here so they appear in the post-mortem console's locals
        sites = self.server.sites
        main = sys.modules["main"]

        def bench(code, times=100):
            # Console helper: eval `code` `times` times, return [timing, last result].
            # NOTE: the local names (sites, main, s, times, back) are intentionally
            # part of the eval() environment via locals() - do not rename them.
            # SECURITY: eval() of arbitrary code; debug/console use only.
            sites = self.server.sites
            main = sys.modules["main"]
            s = time.time()
            for _ in range(times):
                back = eval(code, globals(), locals())
            return ["%s run: %.3fs" % (times, time.time() - s), back]
        raise Exception("Here is your console")
|
|
|
|
|
|
|
|
# - Tests -
|
|
|
|
|
|
|
|
def actionTestStream(self):
|
|
|
|
self.sendHeader()
|
|
|
|
yield " " * 1080 # Overflow browser's buffer
|
|
|
|
yield "He"
|
|
|
|
time.sleep(1)
|
|
|
|
yield "llo!"
|
2015-09-08 03:07:44 +02:00
|
|
|
# yield "Running websockets: %s" % len(self.server.websockets)
|
|
|
|
# self.server.sendMessage("Hello!")
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# - Errors -
|
|
|
|
|
|
|
|
# Send bad request error
|
2015-09-10 23:25:09 +02:00
|
|
|
def error400(self, message=""):
|
2015-07-12 20:36:46 +02:00
|
|
|
self.sendHeader(400)
|
2015-09-10 23:25:09 +02:00
|
|
|
return self.formatError("Bad Request", message)
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# You are not allowed to access this
|
2015-10-25 23:08:25 +01:00
|
|
|
def error403(self, message="", details=True):
|
2015-07-12 20:36:46 +02:00
|
|
|
self.sendHeader(403)
|
Version 0.3.6, Rev879, Fix sidebar error on description missing, New trayicon, New favicon, Disable some functions on MultiUser proxies, New homepage, Replace only the last ? in SQL queries, Alwaays grant ADMIN permission to homepage site, Announce before publish if no peers, configSet, serverShutdown, ADMIN WebsocketAPI command, Stop Tor client before updating, Ignore peer ip packing error, Ignore db files from git, Fix safari ajax error when UiPassword enabled
2016-02-02 11:40:45 +01:00
|
|
|
self.log.debug("Error 403: %s" % message)
|
2015-10-25 23:08:25 +01:00
|
|
|
return self.formatError("Forbidden", message, details=details)
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# Send file not found error
|
2015-09-10 23:25:09 +02:00
|
|
|
def error404(self, path=""):
|
2015-07-12 20:36:46 +02:00
|
|
|
self.sendHeader(404)
|
2016-03-01 23:16:31 +01:00
|
|
|
return self.formatError("Not Found", cgi.escape(path.encode("utf8")), details=False)
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# Internal server error
|
|
|
|
def error500(self, message=":("):
|
|
|
|
self.sendHeader(500)
|
2015-09-10 23:25:09 +02:00
|
|
|
return self.formatError("Server error", cgi.escape(message))
|
|
|
|
|
2015-10-25 23:08:25 +01:00
|
|
|
def formatError(self, title, message, details=True):
|
2015-09-11 14:03:48 +02:00
|
|
|
import sys
|
|
|
|
import gevent
|
|
|
|
|
Rev426, Fix for nonce error on bigsites asking, Dont display error details on 404 error, Dont log Websocket close errors, Add travis pip caching and osx test, Add build status to readme, Test for site files after cloning, Test for json to db mapping, Test site deleteFiles command, Test user certificate and auth address generation, Exclude debug lines from coverage, Dont run webtests every time
2015-09-20 22:35:45 +02:00
|
|
|
if details:
|
|
|
|
details = {key: val for key, val in self.env.items() if hasattr(val, "endswith") and "COOKIE" not in key}
|
|
|
|
details["version_zeronet"] = "%s r%s" % (config.version, config.rev)
|
|
|
|
details["version_python"] = sys.version
|
|
|
|
details["version_gevent"] = gevent.__version__
|
|
|
|
details["plugins"] = PluginManager.plugin_manager.plugin_names
|
|
|
|
arguments = {key: val for key, val in vars(config.arguments).items() if "password" not in key}
|
|
|
|
details["arguments"] = arguments
|
|
|
|
return """
|
|
|
|
<style>
|
|
|
|
* { font-family: Consolas, Monospace; color: #333 }
|
|
|
|
pre { padding: 10px; background-color: #EEE }
|
|
|
|
</style>
|
|
|
|
<h1>%s</h1>
|
|
|
|
<h2>%s</h3>
|
|
|
|
<h3>Please <a href="https://github.com/HelloZeroNet/ZeroNet/issues" target="_blank">report it</a> if you think this an error.</h3>
|
|
|
|
<h4>Details:</h4>
|
|
|
|
<pre>%s</pre>
|
|
|
|
""" % (title, message, json.dumps(details, indent=4, sort_keys=True))
|
|
|
|
else:
|
|
|
|
return """
|
|
|
|
<h1>%s</h1>
|
|
|
|
<h2>%s</h3>
|
2017-06-15 19:49:51 +02:00
|
|
|
""" % (title, cgi.escape(message))
|
version 0.2.8, Namecoin domains using internal resolver site, --disable_zeromq option to skip backward compatiblity layer and save some memory, connectionserver firstchar error fixes, missing unpacker crash fix, sitemanager class to allow extensions, add loaded plugin list to websocket api, faster content publishing, mark updating file as bad, remove coppersurfer tracker add eddie4, internal server error with error displaying, allow site domains in UiRequest, better progress bar, wait for siteinfo before before using localstorage, csslater hide only if opacity is 0
2015-03-30 23:44:29 +02:00
|
|
|
|
|
|
|
|
2015-04-12 23:59:22 +02:00
|
|
|
# - Reload for eaiser developing -
|
2015-07-12 20:36:46 +02:00
|
|
|
# def reload():
|
|
|
|
# import imp, sys
|
|
|
|
# global UiWebsocket
|
|
|
|
# UiWebsocket = imp.load_source("UiWebsocket", "src/Ui/UiWebsocket.py").UiWebsocket
|
|
|
|
# reload(sys.modules["User.UserManager"])
|
|
|
|
# UserManager.reloadModule()
|
|
|
|
# self.user = UserManager.user_manager.getCurrent()
|