# Config.py — ZeroNet configuration handling (zeronet-enhanced fork).
# Provenance: "Add files via upload", retrieved via web.archive.org from
# https://github.com/zeronet-enhanced/ZeroNet/
import argparse
import sys
import os
import locale
import re
import configparser
import logging
import logging.handlers
import stat
import time
class Config(object):
|
||||||
|
|
||||||
|
def __init__(self, argv):
|
||||||
|
self.version = "0.7.2"
|
||||||
|
self.rev = 4555
|
||||||
|
self.argv = argv
|
||||||
|
self.action = None
|
||||||
|
self.test_parser = None
|
||||||
|
self.pending_changes = {}
|
||||||
|
self.need_restart = False
|
||||||
|
self.keys_api_change_allowed = set([
|
||||||
|
"tor", "fileserver_port", "language", "tor_use_bridges", "trackers_proxy", "trackers",
|
||||||
|
"trackers_file", "open_browser", "log_level", "fileserver_ip_type", "ip_external", "offline",
|
||||||
|
"threads_fs_read", "threads_fs_write", "threads_crypt", "threads_db"
|
||||||
|
])
|
||||||
|
self.keys_restart_need = set([
|
||||||
|
"tor", "fileserver_port", "fileserver_ip_type", "threads_fs_read", "threads_fs_write", "threads_crypt", "threads_db"
|
||||||
|
])
|
||||||
|
self.start_dir = self.getStartDir()
|
||||||
|
|
||||||
|
self.config_file = self.start_dir + "/zeronet.conf"
|
||||||
|
self.data_dir = self.start_dir + "/data"
|
||||||
|
self.log_dir = self.start_dir + "/log"
|
||||||
|
self.openssl_lib_file = None
|
||||||
|
self.openssl_bin_file = None
|
||||||
|
|
||||||
|
self.trackers_file = False
|
||||||
|
self.createParser()
|
||||||
|
self.createArguments()
|
||||||
|
|
||||||
|
def createParser(self):
|
||||||
|
# Create parser
|
||||||
|
self.parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
|
||||||
|
self.parser.register('type', 'bool', self.strToBool)
|
||||||
|
self.subparsers = self.parser.add_subparsers(title="Action to perform", dest="action")
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return str(self.arguments).replace("Namespace", "Config") # Using argparse str output
|
||||||
|
|
||||||
|
# Convert string to bool
|
||||||
|
def strToBool(self, v):
|
||||||
|
return v.lower() in ("yes", "true", "t", "1")
|
||||||
|
|
||||||
|
def getStartDir(self):
|
||||||
|
this_file = os.path.abspath(__file__).replace("\\", "/").rstrip("cd")
|
||||||
|
|
||||||
|
if "--start_dir" in self.argv:
|
||||||
|
start_dir = self.argv[self.argv.index("--start_dir") + 1]
|
||||||
|
elif this_file.endswith("/Contents/Resources/core/src/Config.py"):
|
||||||
|
# Running as ZeroNet.app
|
||||||
|
if this_file.startswith("/Application") or this_file.startswith("/private") or this_file.startswith(os.path.expanduser("~/Library")):
|
||||||
|
# Runnig from non-writeable directory, put data to Application Support
|
||||||
|
start_dir = os.path.expanduser("~/Library/Application Support/ZeroNet")
|
||||||
|
else:
|
||||||
|
# Running from writeable directory put data next to .app
|
||||||
|
start_dir = re.sub("/[^/]+/Contents/Resources/core/src/Config.py", "", this_file)
|
||||||
|
elif this_file.endswith("/core/src/Config.py"):
|
||||||
|
# Running as exe or source is at Application Support directory, put var files to outside of core dir
|
||||||
|
start_dir = this_file.replace("/core/src/Config.py", "")
|
||||||
|
elif this_file.endswith("usr/share/zeronet/src/Config.py"):
|
||||||
|
# Running from non-writeable location, e.g., AppImage
|
||||||
|
start_dir = os.path.expanduser("~/ZeroNet")
|
||||||
|
else:
|
||||||
|
start_dir = "."
|
||||||
|
|
||||||
|
return start_dir
|
||||||
|
|
||||||
|
# Create command line arguments
|
||||||
|
def createArguments(self):
|
||||||
|
trackers = [
|
||||||
|
# by zeroseed at http://127.0.0.1:43110/19HKdTAeBh5nRiKn791czY7TwRB1QNrf1Q/?:users/1HvNGwHKqhj3ZMEM53tz6jbdqe4LRpanEu:zn:dc17f896-bf3f-4962-bdd4-0a470040c9c5
|
||||||
|
"zero://k5w77dozo3hy5zualyhni6vrh73iwfkaofa64abbilwyhhd3wgenbjqd.onion:15441",
|
||||||
|
"zero://2kcb2fqesyaevc4lntogupa4mkdssth2ypfwczd2ov5a3zo6ytwwbayd.onion:15441",
|
||||||
|
"zero://my562dxpjropcd5hy3nd5pemsc4aavbiptci5amwxzbelmzgkkuxpvid.onion:15441",
|
||||||
|
"zero://pn4q2zzt2pw4nk7yidxvsxmydko7dfibuzxdswi6gu6ninjpofvqs2id.onion:15441",
|
||||||
|
"zero://6i54dd5th73oelv636ivix6sjnwfgk2qsltnyvswagwphub375t3xcad.onion:15441",
|
||||||
|
"zero://tl74auz4tyqv4bieeclmyoe4uwtoc2dj7fdqv4nc4gl5j2bwg2r26bqd.onion:15441",
|
||||||
|
"zero://wlxav3szbrdhest4j7dib2vgbrd7uj7u7rnuzg22cxbih7yxyg2hsmid.onion:15441",
|
||||||
|
"zero://zy7wttvjtsijt5uwmlar4yguvjc2gppzbdj4v6bujng6xwjmkdg7uvqd.onion:15441",
|
||||||
|
|
||||||
|
# ZeroNet 0.7.2 defaults:
|
||||||
|
"zero://boot3rdez4rzn36x.onion:15441",
|
||||||
|
"zero://zero.booth.moe#f36ca555bee6ba216b14d10f38c16f7769ff064e0e37d887603548cc2e64191d:443", # US/NY
|
||||||
|
"udp://tracker.coppersurfer.tk:6969", # DE
|
||||||
|
"udp://104.238.198.186:8000", # US/LA
|
||||||
|
"udp://retracker.akado-ural.ru:80", # RU
|
||||||
|
"http://h4.trakx.nibba.trade:80/announce", # US/VA
|
||||||
|
"http://open.acgnxtracker.com:80/announce", # DE
|
||||||
|
"http://tracker.bt4g.com:2095/announce", # Cloudflare
|
||||||
|
"zero://2602:ffc5::c5b2:5360:26312" # US/ATL
|
||||||
|
]
|
||||||
|
# Platform specific
|
||||||
|
if sys.platform.startswith("win"):
|
||||||
|
coffeescript = "type %s | tools\\coffee\\coffee.cmd"
|
||||||
|
else:
|
||||||
|
coffeescript = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
language, enc = locale.getdefaultlocale()
|
||||||
|
language = language.lower().replace("_", "-")
|
||||||
|
if language not in ["pt-br", "zh-tw"]:
|
||||||
|
language = language.split("-")[0]
|
||||||
|
except Exception:
|
||||||
|
language = "en"
|
||||||
|
|
||||||
|
use_openssl = True
|
||||||
|
|
||||||
|
if repr(1483108852.565) != "1483108852.565": # Fix for weird Android issue
|
||||||
|
fix_float_decimals = True
|
||||||
|
else:
|
||||||
|
fix_float_decimals = False
|
||||||
|
|
||||||
|
config_file = self.start_dir + "/zeronet.conf"
|
||||||
|
data_dir = self.start_dir + "/data"
|
||||||
|
log_dir = self.start_dir + "/log"
|
||||||
|
|
||||||
|
ip_local = ["127.0.0.1", "::1"]
|
||||||
|
|
||||||
|
# Main
|
||||||
|
action = self.subparsers.add_parser("main", help='Start UiServer and FileServer (default)')
|
||||||
|
|
||||||
|
# SiteCreate
|
||||||
|
action = self.subparsers.add_parser("siteCreate", help='Create a new site')
|
||||||
|
action.register('type', 'bool', self.strToBool)
|
||||||
|
action.add_argument('--use_master_seed', help="Allow created site's private key to be recovered using the master seed in users.json (default: True)", type="bool", choices=[True, False], default=True)
|
||||||
|
|
||||||
|
# SiteNeedFile
|
||||||
|
action = self.subparsers.add_parser("siteNeedFile", help='Get a file from site')
|
||||||
|
action.add_argument('address', help='Site address')
|
||||||
|
action.add_argument('inner_path', help='File inner path')
|
||||||
|
|
||||||
|
# SiteDownload
|
||||||
|
action = self.subparsers.add_parser("siteDownload", help='Download a new site')
|
||||||
|
action.add_argument('address', help='Site address')
|
||||||
|
|
||||||
|
# SiteSign
|
||||||
|
action = self.subparsers.add_parser("siteSign", help='Update and sign content.json: address [privatekey]')
|
||||||
|
action.add_argument('address', help='Site to sign')
|
||||||
|
action.add_argument('privatekey', help='Private key (default: ask on execute)', nargs='?')
|
||||||
|
action.add_argument('--inner_path', help='File you want to sign (default: content.json)',
|
||||||
|
default="content.json", metavar="inner_path")
|
||||||
|
action.add_argument('--remove_missing_optional', help='Remove optional files that is not present in the directory', action='store_true')
|
||||||
|
action.add_argument('--publish', help='Publish site after the signing', action='store_true')
|
||||||
|
|
||||||
|
# SitePublish
|
||||||
|
action = self.subparsers.add_parser("sitePublish", help='Publish site to other peers: address')
|
||||||
|
action.add_argument('address', help='Site to publish')
|
||||||
|
action.add_argument('peer_ip', help='Peer ip to publish (default: random peers ip from tracker)',
|
||||||
|
default=None, nargs='?')
|
||||||
|
action.add_argument('peer_port', help='Peer port to publish (default: random peer port from tracker)',
|
||||||
|
default=15441, nargs='?')
|
||||||
|
action.add_argument('--inner_path', help='Content.json you want to publish (default: content.json)',
|
||||||
|
default="content.json", metavar="inner_path")
|
||||||
|
|
||||||
|
# SiteVerify
|
||||||
|
action = self.subparsers.add_parser("siteVerify", help='Verify site files using sha512: address')
|
||||||
|
action.add_argument('address', help='Site to verify')
|
||||||
|
|
||||||
|
# SiteCmd
|
||||||
|
action = self.subparsers.add_parser("siteCmd", help='Execute a ZeroFrame API command on a site')
|
||||||
|
action.add_argument('address', help='Site address')
|
||||||
|
action.add_argument('cmd', help='API command name')
|
||||||
|
action.add_argument('parameters', help='Parameters of the command', nargs='?')
|
||||||
|
|
||||||
|
# dbRebuild
|
||||||
|
action = self.subparsers.add_parser("dbRebuild", help='Rebuild site database cache')
|
||||||
|
action.add_argument('address', help='Site to rebuild')
|
||||||
|
|
||||||
|
# dbQuery
|
||||||
|
action = self.subparsers.add_parser("dbQuery", help='Query site sql cache')
|
||||||
|
action.add_argument('address', help='Site to query')
|
||||||
|
action.add_argument('query', help='Sql query')
|
||||||
|
|
||||||
|
# PeerPing
|
||||||
|
action = self.subparsers.add_parser("peerPing", help='Send Ping command to peer')
|
||||||
|
action.add_argument('peer_ip', help='Peer ip')
|
||||||
|
action.add_argument('peer_port', help='Peer port', nargs='?')
|
||||||
|
|
||||||
|
# PeerGetFile
|
||||||
|
action = self.subparsers.add_parser("peerGetFile", help='Request and print a file content from peer')
|
||||||
|
action.add_argument('peer_ip', help='Peer ip')
|
||||||
|
action.add_argument('peer_port', help='Peer port')
|
||||||
|
action.add_argument('site', help='Site address')
|
||||||
|
action.add_argument('filename', help='File name to request')
|
||||||
|
action.add_argument('--benchmark', help='Request file 10x then displays the total time', action='store_true')
|
||||||
|
|
||||||
|
# PeerCmd
|
||||||
|
action = self.subparsers.add_parser("peerCmd", help='Request and print a file content from peer')
|
||||||
|
action.add_argument('peer_ip', help='Peer ip')
|
||||||
|
action.add_argument('peer_port', help='Peer port')
|
||||||
|
action.add_argument('cmd', help='Command to execute')
|
||||||
|
action.add_argument('parameters', help='Parameters to command', nargs='?')
|
||||||
|
|
||||||
|
# CryptSign
|
||||||
|
action = self.subparsers.add_parser("cryptSign", help='Sign message using Bitcoin private key')
|
||||||
|
action.add_argument('message', help='Message to sign')
|
||||||
|
action.add_argument('privatekey', help='Private key')
|
||||||
|
|
||||||
|
# Crypt Verify
|
||||||
|
action = self.subparsers.add_parser("cryptVerify", help='Verify message using Bitcoin public address')
|
||||||
|
action.add_argument('message', help='Message to verify')
|
||||||
|
action.add_argument('sign', help='Signiture for message')
|
||||||
|
action.add_argument('address', help='Signer\'s address')
|
||||||
|
|
||||||
|
# Crypt GetPrivatekey
|
||||||
|
action = self.subparsers.add_parser("cryptGetPrivatekey", help='Generate a privatekey from master seed')
|
||||||
|
action.add_argument('master_seed', help='Source master seed')
|
||||||
|
action.add_argument('site_address_index', help='Site address index', type=int)
|
||||||
|
|
||||||
|
action = self.subparsers.add_parser("getConfig", help='Return json-encoded info')
|
||||||
|
action = self.subparsers.add_parser("testConnection", help='Testing')
|
||||||
|
action = self.subparsers.add_parser("testAnnounce", help='Testing')
|
||||||
|
|
||||||
|
self.test_parser = self.subparsers.add_parser("test", help='Run a test')
|
||||||
|
self.test_parser.add_argument('test_name', help='Test name', nargs="?")
|
||||||
|
# self.test_parser.add_argument('--benchmark', help='Run the tests multiple times to measure the performance', action='store_true')
|
||||||
|
|
||||||
|
# Config parameters
|
||||||
|
self.parser.add_argument('--verbose', help='More detailed logging', action='store_true')
|
||||||
|
self.parser.add_argument('--debug', help='Debug mode', action='store_true')
|
||||||
|
self.parser.add_argument('--silent', help='Only log errors to terminal output', action='store_true')
|
||||||
|
self.parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true')
|
||||||
|
self.parser.add_argument('--merge_media', help='Merge all.js and all.css', action='store_true')
|
||||||
|
|
||||||
|
self.parser.add_argument('--batch', help="Batch mode (No interactive input for commands)", action='store_true')
|
||||||
|
|
||||||
|
self.parser.add_argument('--start_dir', help='Path of working dir for variable content (data, log, .conf)', default=self.start_dir, metavar="path")
|
||||||
|
self.parser.add_argument('--config_file', help='Path of config file', default=config_file, metavar="path")
|
||||||
|
self.parser.add_argument('--data_dir', help='Path of data directory', default=data_dir, metavar="path")
|
||||||
|
|
||||||
|
self.parser.add_argument('--console_log_level', help='Level of logging to console', default="default", choices=["default", "DEBUG", "INFO", "ERROR", "off"])
|
||||||
|
|
||||||
|
self.parser.add_argument('--log_dir', help='Path of logging directory', default=log_dir, metavar="path")
|
||||||
|
self.parser.add_argument('--log_level', help='Level of logging to file', default="DEBUG", choices=["DEBUG", "INFO", "ERROR", "off"])
|
||||||
|
self.parser.add_argument('--log_rotate', help='Log rotate interval', default="daily", choices=["hourly", "daily", "weekly", "off"])
|
||||||
|
self.parser.add_argument('--log_rotate_backup_count', help='Log rotate backup count', default=5, type=int)
|
||||||
|
|
||||||
|
self.parser.add_argument('--language', help='Web interface language', default=language, metavar='language')
|
||||||
|
self.parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='ip')
|
||||||
|
self.parser.add_argument('--ui_port', help='Web interface bind port', default=43110, type=int, metavar='port')
|
||||||
|
self.parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip', nargs='*')
|
||||||
|
self.parser.add_argument('--ui_host', help='Allow access using this hosts', metavar='host', nargs='*')
|
||||||
|
self.parser.add_argument('--ui_trans_proxy', help='Allow access using a transparent proxy', action='store_true')
|
||||||
|
|
||||||
|
self.parser.add_argument('--open_browser', help='Open homepage in web browser automatically',
|
||||||
|
nargs='?', const="default_browser", metavar='browser_name')
|
||||||
|
self.parser.add_argument('--homepage', help='Web interface Homepage', default='1HeLLoPVbqF3UEj8aWXErwTxrwkyjwGtZN',
|
||||||
|
metavar='address')
|
||||||
|
self.parser.add_argument('--updatesite', help='Source code update site', default='1uPDaT3uSyWAPdCv1WkMb5hBQjWSNNACf',
|
||||||
|
metavar='address')
|
||||||
|
self.parser.add_argument('--dist_type', help='Type of installed distribution', default='source')
|
||||||
|
|
||||||
|
self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, type=int, metavar='limit')
|
||||||
|
self.parser.add_argument('--file_size_limit', help='Maximum per file size limit in MB', default=10, type=int, metavar='limit')
|
||||||
|
self.parser.add_argument('--connected_limit', help='Max number of connected peers per site. Soft limit.', default=10, type=int, metavar='connected_limit')
|
||||||
|
self.parser.add_argument('--global_connected_limit', help='Max number of connections. Soft limit.', default=512, type=int, metavar='global_connected_limit')
|
||||||
|
self.parser.add_argument('--workers', help='Download workers per site', default=5, type=int, metavar='workers')
|
||||||
|
|
||||||
|
self.parser.add_argument('--site_announce_interval_min', help='Site announce interval for the most active sites, in minutes.', default=4, type=int, metavar='site_announce_interval_min')
|
||||||
|
self.parser.add_argument('--site_announce_interval_max', help='Site announce interval for inactive sites, in minutes.', default=30, type=int, metavar='site_announce_interval_max')
|
||||||
|
|
||||||
|
self.parser.add_argument('--site_peer_check_interval_min', help='Connectable peers check interval for the most active sites, in minutes.', default=5, type=int, metavar='site_peer_check_interval_min')
|
||||||
|
self.parser.add_argument('--site_peer_check_interval_max', help='Connectable peers check interval for inactive sites, in minutes.', default=20, type=int, metavar='site_peer_check_interval_max')
|
||||||
|
|
||||||
|
self.parser.add_argument('--site_update_check_interval_min', help='Site update check interval for the most active sites, in minutes.', default=5, type=int, metavar='site_update_check_interval_min')
|
||||||
|
self.parser.add_argument('--site_update_check_interval_max', help='Site update check interval for inactive sites, in minutes.', default=45, type=int, metavar='site_update_check_interval_max')
|
||||||
|
|
||||||
|
self.parser.add_argument('--site_connectable_peer_count_max', help='Search for as many connectable peers for the most active sites', default=10, type=int, metavar='site_connectable_peer_count_max')
|
||||||
|
self.parser.add_argument('--site_connectable_peer_count_min', help='Search for as many connectable peers for inactive sites', default=2, type=int, metavar='site_connectable_peer_count_min')
|
||||||
|
|
||||||
|
self.parser.add_argument('--send_back_lru_size', help='Size of the send back LRU cache', default=5000, type=int, metavar='send_back_lru_size')
|
||||||
|
self.parser.add_argument('--send_back_limit', help='Send no more than so many files at once back to peer, when we discovered that the peer held older file versions', default=3, type=int, metavar='send_back_limit')
|
||||||
|
|
||||||
|
self.parser.add_argument('--expose_no_ownership', help='By default, ZeroNet tries checking updates for own sites more frequently. This can be used by a third party for revealing the network addresses of a site owner. If this option is enabled, ZeroNet performs the checks in the same way for any sites.', type='bool', choices=[True, False], default=False)
|
||||||
|
|
||||||
|
self.parser.add_argument('--simultaneous_connection_throttle_threshold', help='Throttle opening new connections when the number of outgoing connections in not fully established state exceeds the threshold.', default=15, type=int, metavar='simultaneous_connection_throttle_threshold')
|
||||||
|
|
||||||
|
self.parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip')
|
||||||
|
self.parser.add_argument('--fileserver_port', help='FileServer bind port (0: randomize)', default=0, type=int, metavar='port')
|
||||||
|
self.parser.add_argument('--fileserver_port_range', help='FileServer randomization range', default="10000-40000", metavar='port')
|
||||||
|
self.parser.add_argument('--fileserver_ip_type', help='FileServer ip type', default="dual", choices=["ipv4", "ipv6", "dual"])
|
||||||
|
self.parser.add_argument('--ip_local', help='My local ips', default=ip_local, type=int, metavar='ip', nargs='*')
|
||||||
|
self.parser.add_argument('--ip_external', help='Set reported external ip (tested on start if None)', metavar='ip', nargs='*')
|
||||||
|
self.parser.add_argument('--offline', help='Disable network communication', action='store_true')
|
||||||
|
|
||||||
|
self.parser.add_argument('--disable_udp', help='Disable UDP connections', action='store_true')
|
||||||
|
self.parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port')
|
||||||
|
self.parser.add_argument('--bind', help='Bind outgoing sockets to this address', metavar='ip')
|
||||||
|
self.parser.add_argument('--trackers', help='Bootstraping torrent trackers', default=trackers, metavar='protocol://address', nargs='*')
|
||||||
|
self.parser.add_argument('--trackers_file', help='Load torrent trackers dynamically from a file', metavar='path', nargs='*')
|
||||||
|
self.parser.add_argument('--trackers_proxy', help='Force use proxy to connect to trackers (disable, tor, ip:port)', default="disable")
|
||||||
|
self.parser.add_argument('--use_libsecp256k1', help='Use Libsecp256k1 liblary for speedup', type='bool', choices=[True, False], default=True)
|
||||||
|
self.parser.add_argument('--use_openssl', help='Use OpenSSL liblary for speedup', type='bool', choices=[True, False], default=True)
|
||||||
|
self.parser.add_argument('--openssl_lib_file', help='Path for OpenSSL library file (default: detect)', default=argparse.SUPPRESS, metavar="path")
|
||||||
|
self.parser.add_argument('--openssl_bin_file', help='Path for OpenSSL binary file (default: detect)', default=argparse.SUPPRESS, metavar="path")
|
||||||
|
self.parser.add_argument('--disable_db', help='Disable database updating', action='store_true')
|
||||||
|
self.parser.add_argument('--disable_encryption', help='Disable connection encryption', action='store_true')
|
||||||
|
self.parser.add_argument('--force_encryption', help="Enforce encryption to all peer connections", action='store_true')
|
||||||
|
self.parser.add_argument('--disable_sslcompression', help='Disable SSL compression to save memory',
|
||||||
|
type='bool', choices=[True, False], default=True)
|
||||||
|
self.parser.add_argument('--keep_ssl_cert', help='Disable new SSL cert generation on startup', action='store_true')
|
||||||
|
self.parser.add_argument('--max_files_opened', help='Change maximum opened files allowed by OS to this value on startup',
|
||||||
|
default=2048, type=int, metavar='limit')
|
||||||
|
self.parser.add_argument('--stack_size', help='Change thread stack size', default=None, type=int, metavar='thread_stack_size')
|
||||||
|
self.parser.add_argument('--use_tempfiles', help='Use temporary files when downloading (experimental)',
|
||||||
|
type='bool', choices=[True, False], default=False)
|
||||||
|
self.parser.add_argument('--stream_downloads', help='Stream download directly to files (experimental)',
|
||||||
|
type='bool', choices=[True, False], default=False)
|
||||||
|
self.parser.add_argument("--msgpack_purepython", help='Use less memory, but a bit more CPU power',
|
||||||
|
type='bool', choices=[True, False], default=False)
|
||||||
|
self.parser.add_argument("--fix_float_decimals", help='Fix content.json modification date float precision on verification',
|
||||||
|
type='bool', choices=[True, False], default=fix_float_decimals)
|
||||||
|
self.parser.add_argument("--db_mode", choices=["speed", "security"], default="speed")
|
||||||
|
|
||||||
|
self.parser.add_argument('--threads_fs_read', help='Number of threads for file read operations', default=1, type=int)
|
||||||
|
self.parser.add_argument('--threads_fs_write', help='Number of threads for file write operations', default=1, type=int)
|
||||||
|
self.parser.add_argument('--threads_crypt', help='Number of threads for cryptographic operations', default=2, type=int)
|
||||||
|
self.parser.add_argument('--threads_db', help='Number of threads for database operations', default=1, type=int)
|
||||||
|
|
||||||
|
self.parser.add_argument("--download_optional", choices=["manual", "auto"], default="manual")
|
||||||
|
|
||||||
|
self.parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for developing', default=coffeescript,
|
||||||
|
metavar='executable_path')
|
||||||
|
|
||||||
|
self.parser.add_argument('--tor', help='enable: Use only for Tor peers, always: Use Tor for every connection', choices=["disable", "enable", "always"], default='enable')
|
||||||
|
self.parser.add_argument('--tor_controller', help='Tor controller address', metavar='ip:port', default='127.0.0.1:9051')
|
||||||
|
self.parser.add_argument('--tor_proxy', help='Tor proxy address', metavar='ip:port', default='127.0.0.1:9050')
|
||||||
|
self.parser.add_argument('--tor_password', help='Tor controller password', metavar='password')
|
||||||
|
self.parser.add_argument('--tor_use_bridges', help='Use obfuscated bridge relays to avoid Tor block', action='store_true')
|
||||||
|
self.parser.add_argument('--tor_hs_limit', help='Maximum number of hidden services in Tor always mode', metavar='limit', type=int, default=10)
|
||||||
|
self.parser.add_argument('--tor_hs_port', help='Hidden service port in Tor always mode', metavar='limit', type=int, default=15441)
|
||||||
|
|
||||||
|
self.parser.add_argument('--version', action='version', version='ZeroNet %s r%s' % (self.version, self.rev))
|
||||||
|
self.parser.add_argument('--end', help='Stop multi value argument parsing', action='store_true')
|
||||||
|
|
||||||
|
return self.parser
|
||||||
|
|
||||||
|
def loadTrackersFile(self):
|
||||||
|
if not self.trackers_file:
|
||||||
|
return None
|
||||||
|
|
||||||
|
self.trackers = self.arguments.trackers[:]
|
||||||
|
|
||||||
|
for trackers_file in self.trackers_file:
|
||||||
|
try:
|
||||||
|
if trackers_file.startswith("/"): # Absolute
|
||||||
|
trackers_file_path = trackers_file
|
||||||
|
elif trackers_file.startswith("{data_dir}"): # Relative to data_dir
|
||||||
|
trackers_file_path = trackers_file.replace("{data_dir}", self.data_dir)
|
||||||
|
else: # Relative to zeronet.py
|
||||||
|
trackers_file_path = self.start_dir + "/" + trackers_file
|
||||||
|
|
||||||
|
for line in open(trackers_file_path):
|
||||||
|
tracker = line.strip()
|
||||||
|
if "://" in tracker and tracker not in self.trackers:
|
||||||
|
self.trackers.append(tracker)
|
||||||
|
except Exception as err:
|
||||||
|
print("Error loading trackers file: %s" % err)
|
||||||
|
|
||||||
|
# Find arguments specified for current action
|
||||||
|
def getActionArguments(self):
|
||||||
|
back = {}
|
||||||
|
arguments = self.parser._subparsers._group_actions[0].choices[self.action]._actions[1:] # First is --version
|
||||||
|
for argument in arguments:
|
||||||
|
back[argument.dest] = getattr(self, argument.dest)
|
||||||
|
return back
|
||||||
|
|
||||||
|
# Try to find action from argv
|
||||||
|
def getAction(self, argv):
|
||||||
|
actions = [list(action.choices.keys()) for action in self.parser._actions if action.dest == "action"][0] # Valid actions
|
||||||
|
found_action = False
|
||||||
|
for action in actions: # See if any in argv
|
||||||
|
if action in argv:
|
||||||
|
found_action = action
|
||||||
|
break
|
||||||
|
return found_action
|
||||||
|
|
||||||
|
# Move plugin parameters to end of argument list
|
||||||
|
def moveUnknownToEnd(self, argv, default_action):
|
||||||
|
valid_actions = sum([action.option_strings for action in self.parser._actions], [])
|
||||||
|
valid_parameters = []
|
||||||
|
plugin_parameters = []
|
||||||
|
plugin = False
|
||||||
|
for arg in argv:
|
||||||
|
if arg.startswith("--"):
|
||||||
|
if arg not in valid_actions:
|
||||||
|
plugin = True
|
||||||
|
else:
|
||||||
|
plugin = False
|
||||||
|
elif arg == default_action:
|
||||||
|
plugin = False
|
||||||
|
|
||||||
|
if plugin:
|
||||||
|
plugin_parameters.append(arg)
|
||||||
|
else:
|
||||||
|
valid_parameters.append(arg)
|
||||||
|
return valid_parameters + plugin_parameters
|
||||||
|
|
||||||
|
def getParser(self, argv):
|
||||||
|
action = self.getAction(argv)
|
||||||
|
if not action:
|
||||||
|
return self.parser
|
||||||
|
else:
|
||||||
|
return self.subparsers.choices[action]
|
||||||
|
|
||||||
|
# Parse arguments from config file and command line
|
||||||
|
def parse(self, silent=False, parse_config=True):
|
||||||
|
argv = self.argv[:] # Copy command line arguments
|
||||||
|
current_parser = self.getParser(argv)
|
||||||
|
if silent: # Don't display messages or quit on unknown parameter
|
||||||
|
original_print_message = self.parser._print_message
|
||||||
|
original_exit = self.parser.exit
|
||||||
|
|
||||||
|
def silencer(parser, function_name):
|
||||||
|
parser.exited = True
|
||||||
|
return None
|
||||||
|
current_parser.exited = False
|
||||||
|
current_parser._print_message = lambda *args, **kwargs: silencer(current_parser, "_print_message")
|
||||||
|
current_parser.exit = lambda *args, **kwargs: silencer(current_parser, "exit")
|
||||||
|
|
||||||
|
self.parseCommandline(argv, silent) # Parse argv
|
||||||
|
self.setAttributes()
|
||||||
|
if parse_config:
|
||||||
|
argv = self.parseConfig(argv) # Add arguments from config file
|
||||||
|
|
||||||
|
self.parseCommandline(argv, silent) # Parse argv
|
||||||
|
self.setAttributes()
|
||||||
|
|
||||||
|
if not silent:
|
||||||
|
if self.fileserver_ip != "*" and self.fileserver_ip not in self.ip_local:
|
||||||
|
self.ip_local.append(self.fileserver_ip)
|
||||||
|
|
||||||
|
if silent: # Restore original functions
|
||||||
|
if current_parser.exited and self.action == "main": # Argument parsing halted, don't start ZeroNet with main action
|
||||||
|
self.action = None
|
||||||
|
current_parser._print_message = original_print_message
|
||||||
|
current_parser.exit = original_exit
|
||||||
|
|
||||||
|
self.loadTrackersFile()
|
||||||
|
|
||||||
|
# Parse command line arguments
|
||||||
|
def parseCommandline(self, argv, silent=False):
|
||||||
|
# Find out if action is specificed on start
|
||||||
|
action = self.getAction(argv)
|
||||||
|
if not action:
|
||||||
|
argv.append("--end")
|
||||||
|
argv.append("main")
|
||||||
|
action = "main"
|
||||||
|
argv = self.moveUnknownToEnd(argv, action)
|
||||||
|
if silent:
|
||||||
|
res = self.parser.parse_known_args(argv[1:])
|
||||||
|
if res:
|
||||||
|
self.arguments = res[0]
|
||||||
|
else:
|
||||||
|
self.arguments = {}
|
||||||
|
else:
|
||||||
|
self.arguments = self.parser.parse_args(argv[1:])
|
||||||
|
|
||||||
|
# Parse config file
|
||||||
|
def parseConfig(self, argv):
|
||||||
|
# Find config file path from parameters
|
||||||
|
if "--config_file" in argv:
|
||||||
|
self.config_file = argv[argv.index("--config_file") + 1]
|
||||||
|
# Load config file
|
||||||
|
if os.path.isfile(self.config_file):
|
||||||
|
config = configparser.RawConfigParser(allow_no_value=True, strict=False)
|
||||||
|
config.read(self.config_file)
|
||||||
|
for section in config.sections():
|
||||||
|
for key, val in config.items(section):
|
||||||
|
if val == "True":
|
||||||
|
val = None
|
||||||
|
if section != "global": # If not global prefix key with section
|
||||||
|
key = section + "_" + key
|
||||||
|
|
||||||
|
if key == "open_browser": # Prefer config file value over cli argument
|
||||||
|
while "--%s" % key in argv:
|
||||||
|
pos = argv.index("--open_browser")
|
||||||
|
del argv[pos:pos + 2]
|
||||||
|
|
||||||
|
argv_extend = ["--%s" % key]
|
||||||
|
if val:
|
||||||
|
for line in val.strip().split("\n"): # Allow multi-line values
|
||||||
|
argv_extend.append(line)
|
||||||
|
if "\n" in val:
|
||||||
|
argv_extend.append("--end")
|
||||||
|
|
||||||
|
argv = argv[:1] + argv_extend + argv[1:]
|
||||||
|
return argv
|
||||||
|
|
||||||
|
# Return command line value of given argument
|
||||||
|
def getCmdlineValue(self, key):
|
||||||
|
if key not in self.argv:
|
||||||
|
return None
|
||||||
|
argv_index = self.argv.index(key)
|
||||||
|
if argv_index == len(self.argv) - 1: # last arg, test not specified
|
||||||
|
return None
|
||||||
|
|
||||||
|
return self.argv[argv_index + 1]
|
||||||
|
|
||||||
|
# Expose arguments as class attributes
|
||||||
|
def setAttributes(self):
|
||||||
|
# Set attributes from arguments
|
||||||
|
if self.arguments:
|
||||||
|
args = vars(self.arguments)
|
||||||
|
for key, val in args.items():
|
||||||
|
if type(val) is list:
|
||||||
|
val = val[:]
|
||||||
|
if key in ("data_dir", "log_dir", "start_dir", "openssl_bin_file", "openssl_lib_file"):
|
||||||
|
if val:
|
||||||
|
val = val.replace("\\", "/")
|
||||||
|
setattr(self, key, val)
|
||||||
|
|
||||||
|
def loadPlugins(self):
    """Let plugins extend the argument parser with their own options.

    Wraps this Config's parser members in a plugin-acceptable class so
    plugin code can hook createArguments() and register extra arguments.
    """
    from Plugin import PluginManager

    @PluginManager.acceptPlugins
    class ConfigPlugin(object):
        def __init__(self, config):
            # Mirror the parser-related members so plugins can extend them
            for member in ("argv", "parser", "subparsers", "test_parser", "getCmdlineValue"):
                setattr(self, member, getattr(config, member))
            self.createArguments()

        def createArguments(self):
            # Base hook: plugins override/extend this to add arguments
            pass

    ConfigPlugin(self)
|
||||||
|
|
||||||
|
def saveValue(self, key, value):
    """Persist a single key into the [global] section of the config file.

    value=None deletes the key's line; a list value is stored as a
    multi-line entry (continuation lines indented one space, configparser
    style); any other value is stored as ``key = str(value)``.

    Fixes over the original: ``key_line_i``/``global_line_i`` are compared
    with ``is not None`` (a match on line 0 was previously ignored), the
    multi-line deletion loop is bounds-checked (it could IndexError at
    EOF), and file handles are closed via ``with``.
    """
    if os.path.isfile(self.config_file):
        with open(self.config_file) as f:
            content = f.read()
    else:
        content = ""
    lines = content.splitlines()

    # Locate the [global] section header and the key's existing line
    global_line_i = None
    key_line_i = None
    for i, line in enumerate(lines):
        if line.strip() == "[global]":
            global_line_i = i
        if line.startswith(key + " =") or line == key:
            key_line_i = i

    if key_line_i is not None:
        # Delete previous multi-line values; guard against running off EOF
        while key_line_i + 1 < len(lines):
            next_line = lines[key_line_i + 1]
            is_value_line = next_line.startswith(" ") or next_line.startswith("\t")
            if not is_value_line:
                break
            del lines[key_line_i + 1]

    if value is None:  # Delete line
        if key_line_i is not None:
            del lines[key_line_i]

    else:  # Add / update
        if type(value) is list:
            # Leading empty element puts the first item on its own indented line
            value_lines = [""] + [str(line).replace("\n", "").replace("\r", "") for line in value]
        else:
            value_lines = [str(value).replace("\n", "").replace("\r", "")]
        new_line = "%s = %s" % (key, "\n ".join(value_lines))
        if key_line_i is not None:  # Already in the config, change the line
            lines[key_line_i] = new_line
        elif global_line_i is None:  # No global section yet, append to end of file
            lines.append("[global]")
            lines.append(new_line)
        else:  # Has global section, append the line after it
            lines.insert(global_line_i + 1, new_line)

    with open(self.config_file, "w") as f:
        f.write("\n".join(lines))
|
||||||
|
|
||||||
|
def getServerInfo(self):
    """Collect runtime information about this node for the UI/API."""
    from Plugin import PluginManager
    import main

    info = {
        "platform": sys.platform,
        "fileserver_ip": self.fileserver_ip,
        "fileserver_port": self.fileserver_port,
        "ui_ip": self.ui_ip,
        "ui_port": self.ui_port,
        "version": self.version,
        "rev": self.rev,
        "language": self.language,
        "debug": self.debug,
        "plugins": PluginManager.plugin_manager.plugin_names,
        "log_dir": os.path.abspath(self.log_dir),
        "data_dir": os.path.abspath(self.data_dir),
        "src_dir": os.path.dirname(os.path.abspath(__file__)),
    }

    # File server / Tor manager may not be up yet: report them best-effort
    try:
        file_server = main.file_server
        info["ip_external"] = file_server.port_opened
        info["tor_enabled"] = file_server.tor_manager.enabled
        info["tor_status"] = file_server.tor_manager.status
    except Exception:
        pass

    return info
|
||||||
|
|
||||||
|
def initConsoleLogger(self):
    """Attach a console (stream) handler to the root logger.

    The format includes a timestamp only for the long-running "main"
    action; the level comes from console_log_level, or is derived from
    the silent/debug flags when it is "default".
    """
    if self.action == "main":
        log_format = '[%(asctime)s] %(name)s %(message)s'
    else:
        log_format = '%(name)s %(message)s'

    if self.console_log_level != "default":
        level = logging.getLevelName(self.console_log_level)
    elif self.silent:
        level = logging.ERROR
    elif self.debug:
        level = logging.DEBUG
    else:
        level = logging.INFO

    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(log_format, "%H:%M:%S"))
    handler.setLevel(level)
    logging.getLogger('').addHandler(handler)
|
||||||
|
|
||||||
|
def initFileLogger(self):
    """Attach a file handler (time-rotated unless log_rotate == "off").

    Logs go to debug.log for the "main" action, cmd.log otherwise. Both
    the handler and the root logger get log_level applied.

    Fix over the original: ``doRollover()`` was called unconditionally
    when the log file existed, but plain ``logging.FileHandler`` has no
    such method, so ``log_rotate == "off"`` raised AttributeError. The
    "w" open mode already truncates, so the rollover is only needed (and
    only valid) for the rotating handler.
    """
    if self.action == "main":
        log_file_path = "%s/debug.log" % self.log_dir
    else:
        log_file_path = "%s/cmd.log" % self.log_dir

    if self.log_rotate == "off":
        # "w" mode truncates, which already gives an empty log file
        file_logger = logging.FileHandler(log_file_path, "w", "utf-8")
    else:
        when_names = {"weekly": "w", "daily": "d", "hourly": "h"}
        file_logger = logging.handlers.TimedRotatingFileHandler(
            log_file_path, when=when_names[self.log_rotate], interval=1,
            backupCount=self.log_rotate_backup_count, encoding="utf8"
        )
        if os.path.isfile(log_file_path):
            file_logger.doRollover()  # Always start with empty log file

    file_logger.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)-8s %(name)s %(message)s'))
    file_logger.setLevel(logging.getLevelName(self.log_level))
    logging.getLogger('').setLevel(logging.getLevelName(self.log_level))
    logging.getLogger('').addHandler(file_logger)
|
||||||
|
|
||||||
|
def initLogging(self, console_logging=None, file_logging=None):
    """Set up root logging: in-memory error collector plus optional
    console and file handlers.

    Args:
        console_logging: None means "decide from config" (enabled unless
            console_log_level == "off").
        file_logging: None means "decide from config" (enabled unless
            log_level == "off").

    Fix over the original: ``== None`` replaced with the idiomatic
    identity test ``is None``.
    """
    if console_logging is None:
        console_logging = self.console_log_level != "off"

    if file_logging is None:
        file_logging = self.log_level != "off"

    # Create necessary files and dirs
    if not os.path.isdir(self.log_dir):
        os.mkdir(self.log_dir)
        try:
            # Restrict the log dir to the owner (may fail e.g. on Windows)
            os.chmod(self.log_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
        except Exception as err:
            print("Can't change permission of %s: %s" % (self.log_dir, err))

    # HACK: demote WARNING below INFO so warnings stay out of the console
    # when not debugging; addLevelName keeps the label readable in output.
    logging.WARNING = 15  # Don't display warnings if not in debug mode
    logging.addLevelName(15, "WARNING")

    logging.getLogger('').name = "-"  # Remove root prefix

    # Always collect ERROR records in memory, regardless of other handlers
    self.error_logger = ErrorLogHandler()
    self.error_logger.setLevel(logging.getLevelName("ERROR"))
    logging.getLogger('').addHandler(self.error_logger)

    if console_logging:
        self.initConsoleLogger()
    if file_logging:
        self.initFileLogger()
|
||||||
|
|
||||||
|
|
||||||
|
class ErrorLogHandler(logging.StreamHandler):
    """Stream handler that keeps every emitted record in memory.

    Each entry of ``lines`` is ``[timestamp, levelname, formatted message]``
    so callers can read back recent errors later.
    """

    def __init__(self):
        self.lines = []  # Collected records: [time, level name, message]
        super().__init__()

    def emit(self, record):
        # Store instead of writing to the stream
        self.lines.append([time.time(), record.levelname, self.format(record)])

    def onNewRecord(self, record):
        # Hook for subclasses/plugins; intentionally a no-op here
        pass
|
||||||
|
|
||||||
|
|
||||||
|
# Shared singleton: the rest of the codebase imports this instance
config = Config(sys.argv)
|
|
@ -0,0 +1,135 @@
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import inspect
|
||||||
|
import re
|
||||||
|
import html
|
||||||
|
import string
|
||||||
|
|
||||||
|
from Config import config
|
||||||
|
|
||||||
|
# Registry of live Translate instances, appended to by Translate.__init__
translates = []
|
||||||
|
|
||||||
|
|
||||||
|
class EscapeProxy(dict):
    # Automatically escape the accessed string values.
    # Nested dicts and lists are wrapped recursively, so any string reached
    # through this proxy comes back HTML-escaped (used for safe template kwargs).
    def __getitem__(self, key):
        val = dict.__getitem__(self, key)
        # Original tested `type(val) in (str, str)` -- a Python 2
        # (str, unicode) leftover; a single identity check is equivalent.
        if type(val) is str:
            return html.escape(val)
        elif type(val) is dict:
            return EscapeProxy(val)
        elif type(val) is list:
            return EscapeProxy(enumerate(val))  # Convert lists to dict keyed by index
        else:
            return val
|
||||||
|
|
||||||
|
|
||||||
|
class Translate(dict):
    """Mapping of source string -> translated string for one language.

    Also acts as a callable template formatter: ``translate("...", kwargs)``
    formats the string, binding ``_`` to this dict so templates can look up
    translations; ``__missing__`` returns the key itself, so untranslated
    strings pass through unchanged.

    Fixes over the original: ``setLanguage`` builds the file path from the
    sanitized language code (the raw value may arrive over the API via the
    "language" config key, so unfiltered input allowed path traversal), and
    ``load`` closes its file handle.
    """

    def __init__(self, lang_dir=None, lang=None):
        if not lang_dir:
            lang_dir = os.path.dirname(__file__) + "/languages/"
        if not lang:
            lang = config.language
        self.lang = lang
        self.lang_dir = lang_dir
        self.setLanguage(lang)
        self.formatter = string.Formatter()

        if config.debug:
            # Auto reload translation file on change
            from Debug import DebugReloader
            DebugReloader.watcher.addCallback(self.load)

        translates.append(self)

    def setLanguage(self, lang):
        """Switch to *lang* (sanitized to [a-z-]) and reload translations."""
        self.lang = re.sub("[^a-z-]", "", lang)
        # Build the path from the sanitized code, not the raw input,
        # to prevent path traversal via a malicious language value.
        self.lang_file = self.lang_dir + "%s.json" % self.lang
        self.load()

    def __repr__(self):
        return "<translate %s>" % self.lang

    def load(self):
        """(Re)load translations from self.lang_file into this dict."""
        if self.lang == "en":
            # English is the source language: no translation table needed
            data = {}
            dict.__init__(self, data)
            self.clear()
        elif os.path.isfile(self.lang_file):
            try:
                with open(self.lang_file, encoding="utf8") as f:
                    data = json.load(f)
                logging.debug("Loaded translate file: %s (%s entries)" % (self.lang_file, len(data)))
            except Exception as err:
                logging.error("Error loading translate file %s: %s" % (self.lang_file, err))
                data = {}
            # NOTE(review): entries from a previously loaded language are not
            # cleared here, so switching languages may leave stale keys behind.
            dict.__init__(self, data)
        else:
            data = {}
            dict.__init__(self, data)
            self.clear()
            logging.debug("Translate file not exists: %s" % self.lang_file)

    def format(self, s, kwargs, nested=False):
        """Format *s* with *kwargs*; ``_`` is bound to this Translate."""
        kwargs["_"] = self
        if nested:
            # Two passes so values substituted by the first pass are formatted too
            back = self.formatter.vformat(s, [], kwargs)  # PY3 TODO: Change to format_map
            return self.formatter.vformat(back, [], kwargs)
        else:
            return self.formatter.vformat(s, [], kwargs)

    def formatLocals(self, s, nested=False):
        # Use the caller's local variables as format kwargs
        kwargs = inspect.currentframe().f_back.f_locals
        return self.format(s, kwargs, nested=nested)

    def __call__(self, s, kwargs=None, nested=False, escape=True):
        """Translate/format *s*; defaults to the caller's locals, HTML-escaped."""
        if not kwargs:
            kwargs = inspect.currentframe().f_back.f_locals
        if escape:
            kwargs = EscapeProxy(kwargs)
        return self.format(s, kwargs, nested=nested)

    def __missing__(self, key):
        # Unknown source strings translate to themselves
        return key

    def pluralize(self, value, single, multi):
        """Pick the single/multi template by *value* and format it."""
        if value > 1:
            return self[multi].format(value)
        else:
            return self[single].format(value)

    def translateData(self, data, translate_table=None, mode="js"):
        """Replace translatable quoted strings inside a js/html blob.

        Keys of the form ``_(...)`` are "strict": their value is stored
        with a ``|`` prefix and only substituted when the quoted match in
        *data* is immediately preceded by ``_(``.
        """
        if not translate_table:
            translate_table = self

        patterns = []
        for key, val in list(translate_table.items()):
            if key.startswith("_("):  # Problematic string: only match if called between _(" ") function
                key = key.replace("_(", "").replace(")", "").replace(", ", '", "')
                translate_table[key] = "|" + val
            patterns.append(re.escape(key))

        def replacer(match):
            target = translate_table[match.group(1)]
            if mode == "js":
                if target and target[0] == "|":  # Strict string match
                    if match.string[match.start() - 2] == "_":  # Only if the match if called between _(" ") function
                        return '"' + target[1:] + '"'
                    else:
                        return '"' + match.group(1) + '"'
                return '"' + target + '"'
            else:
                # html mode: keep the original surrounding delimiter characters
                return match.group(0)[0] + target + match.group(0)[-1]

        if mode == "html":
            pattern = '[">](' + "|".join(patterns) + ')["<]'
        else:
            pattern = '"(' + "|".join(patterns) + ')"'
        data = re.sub(pattern, replacer, data)

        if mode == "html":
            data = data.replace("lang={lang}", "lang=%s" % self.lang)  # lang get parameter to .js file to avoid cache

        return data
|
||||||
|
|
||||||
|
# Module-level singleton translator, initialized from config.language
translate = Translate()
|
|
@ -0,0 +1 @@
|
||||||
|
from .Translate import *
|
|
@ -0,0 +1,51 @@
|
||||||
|
{
|
||||||
|
"Congratulations, your port <b>{0}</b> is opened.<br>You are a full member of the ZeroNet network!": "Tillykke, din port (<b>{0}</b>) er åben.<br>Du er nu fuld klient på ZeroNet!",
|
||||||
|
"Tor mode active, every connection using Onion route.": "TOR er aktiv, alle forbindelser anvender Onions.",
|
||||||
|
"Successfully started Tor onion hidden services.": "OK. Startede TOR skjult onion service.",
|
||||||
|
"Unable to start hidden services, please check your config.": "Fejl. Kunne ikke starte TOR skjult onion service. Tjek din opsætning!",
|
||||||
|
"For faster connections open <b>{0}</b> port on your router.": "Åben port <b>{0}</b> på din router for hurtigere forbindelse.",
|
||||||
|
"Your connection is restricted. Please, open <b>{0}</b> port on your router": "Begrænset forbindelse. Åben venligst port <b>{0}</b> på din router",
|
||||||
|
"or configure Tor to become a full member of the ZeroNet network.": "eller opsæt TOR for fuld adgang til ZeroNet!",
|
||||||
|
|
||||||
|
"Select account you want to use in this site:": "Vælg bruger til brug på denne side:",
|
||||||
|
"currently selected": "nuværende bruger",
|
||||||
|
"Unique to site": "Unik på siden",
|
||||||
|
|
||||||
|
"Content signing failed": "Signering af indhold fejlede",
|
||||||
|
"Content publish queued for {0:.0f} seconds.": "Indhold i kø for offentliggørelse i {0:.0f} sekunder.",
|
||||||
|
"Content published to {0} peers.": "Indhold offentliggjort til {0} klienter.",
|
||||||
|
"No peers found, but your content is ready to access.": "Ingen klienter fundet, men dit indhold er klar til hentning.",
|
||||||
|
"Your network connection is restricted. Please, open <b>{0}</b> port": "Din forbindelse er begrænset. Åben venligst port <b>{0}</b>",
|
||||||
|
"on your router to make your site accessible for everyone.": "på din router for at dele din side med alle.",
|
||||||
|
"Content publish failed.": "Offentliggørelse af indhold fejlede.",
|
||||||
|
"This file still in sync, if you write it now, then the previous content may be lost.": "Denne fil er endnu ikke delt færdig. Tidligere indhold kan gå tabt hvis du skriver til filen nu.",
|
||||||
|
"Write content anyway": "Del indhold alligevel",
|
||||||
|
"New certificate added:": "Nyt certifikat oprettet:",
|
||||||
|
"You current certificate:": "Dit nuværende certifikat: ",
|
||||||
|
"Change it to {auth_type}/{auth_user_name}@{domain}": "Skift certificat til {auth_type}/{auth_user_name}@{domain}",
|
||||||
|
"Certificate changed to: <b>{auth_type}/{auth_user_name}@{domain}</b>.": "Certifikat ændret til <b>{auth_type}/{auth_user_name}@{domain}</b>.",
|
||||||
|
"Site cloned": "Side klonet",
|
||||||
|
|
||||||
|
"You have successfully changed the web interface's language!": "OK. Du har nu skiftet sprog på web brugergrænsefladen!",
|
||||||
|
"Due to the browser's caching, the full transformation could take some minute.": "Pga. browser cache kan skift af sprog tage nogle minutter.",
|
||||||
|
|
||||||
|
"Connection with <b>UiServer Websocket</b> was lost. Reconnecting...": "Forbindelse til <b>UiServer Websocket</b> blev tabt. Genopretter forbindelse...",
|
||||||
|
"Connection with <b>UiServer Websocket</b> recovered.": "Forbindelse til <b>UiServer Websocket</b> genoprettet.",
|
||||||
|
"UiServer Websocket error, please reload the page.": "UiServer Websocket fejl. Genindlæs venligst siden (F5)!",
|
||||||
|
" Connecting...": " Opretter forbindelse...",
|
||||||
|
"Site size: <b>": "Side størrelse: <b>",
|
||||||
|
"MB</b> is larger than default allowed ": "MB</b> er større end den tilladte default ",
|
||||||
|
"Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Åben side og sæt max side størrelse til \" + site_info.next_size_limit + \"MB",
|
||||||
|
" files needs to be downloaded": " filer skal downloades",
|
||||||
|
" downloaded": " downloadet",
|
||||||
|
" download failed": " download fejlede",
|
||||||
|
"Peers found: ": "Klienter fundet: ",
|
||||||
|
"No peers found": "Ingen klienter fundet",
|
||||||
|
"Running out of size limit (": "Siden fylder snart for meget (",
|
||||||
|
"Set limit to \" + site_info.next_size_limit + \"MB": "Ret max side størrelse til \" + site_info.next_size_limit + \"MB",
|
||||||
|
"Site size limit changed to {0}MB": "Max side størrelse ændret til {0}MB",
|
||||||
|
" New version of this page has just released.<br>Reload to see the modified content.": " Ny version af denne side er blevet offentliggjort.<br>Genindlæs venligst siden (F5) for at se nyt indhold!",
|
||||||
|
"This site requests permission:": "Denne side betyder om tilladdelse:",
|
||||||
|
"_(Accept)": "Tillad"
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,51 @@
|
||||||
|
{
|
||||||
|
"Congratulations, your port <b>{0}</b> is opened.<br>You are a full member of the ZeroNet network!": "Gratulation, dein Port <b>{0}</b> ist offen.<br>Du bist ein volles Mitglied des ZeroNet Netzwerks!",
|
||||||
|
"Tor mode active, every connection using Onion route.": "Tor Modus aktiv, jede Verbindung nutzt die Onion Route.",
|
||||||
|
"Successfully started Tor onion hidden services.": "Tor versteckte Dienste erfolgreich gestartet.",
|
||||||
|
"Unable to start hidden services, please check your config.": "Nicht möglich versteckte Dienste zu starten.",
|
||||||
|
"For faster connections open <b>{0}</b> port on your router.": "Für schnellere Verbindungen, öffne Port <b>{0}</b> auf deinem Router.",
|
||||||
|
"Your connection is restricted. Please, open <b>{0}</b> port on your router": "Deine Verbindung ist eingeschränkt. Bitte öffne Port <b>{0}</b> auf deinem Router",
|
||||||
|
"or configure Tor to become a full member of the ZeroNet network.": "oder konfiguriere Tor um ein volles Mitglied des ZeroNet Netzwerks zu werden.",
|
||||||
|
|
||||||
|
"Select account you want to use in this site:": "Wähle das Konto, das du auf dieser Seite benutzen willst:",
|
||||||
|
"currently selected": "aktuell ausgewählt",
|
||||||
|
"Unique to site": "Eindeutig zur Seite",
|
||||||
|
|
||||||
|
"Content signing failed": "Signierung des Inhalts fehlgeschlagen",
|
||||||
|
"Content publish queued for {0:.0f} seconds.": "Veröffentlichung des Inhalts um {0:.0f} Sekunden verzögert.",
|
||||||
|
"Content published to {0} peers.": "Inhalt zu {0} Peers veröffentlicht.",
|
||||||
|
"No peers found, but your content is ready to access.": "Keine Peers gefunden, aber dein Inhalt ist bereit zum Zugriff.",
|
||||||
|
"Your network connection is restricted. Please, open <b>{0}</b> port": "Deine Netzwerkverbindung ist beschränkt. Bitte öffne Port <b>{0}</b>",
|
||||||
|
"on your router to make your site accessible for everyone.": "auf deinem Router um deine Seite für Jeden zugänglich zu machen.",
|
||||||
|
"Content publish failed.": "Inhalt konnte nicht veröffentlicht werden.",
|
||||||
|
"This file still in sync, if you write it now, then the previous content may be lost.": "Diese Datei wird noch synchronisiert. Wenn jetzt geschrieben wird geht der vorherige Inhalt verloren.",
|
||||||
|
"Write content anyway": "Inhalt trotzdem schreiben",
|
||||||
|
"New certificate added:": "Neues Zertifikat hinzugefügt:",
|
||||||
|
"You current certificate:": "Dein aktuelles Zertifikat:",
|
||||||
|
"Change it to {auth_type}/{auth_user_name}@{domain}": "Ändere es zu {auth_type}/{auth_user_name}@{domain}",
|
||||||
|
"Certificate changed to: <b>{auth_type}/{auth_user_name}@{domain}</b>.": "Zertifikat geändert zu: <b>{auth_type}/{auth_user_name}@{domain}</b>.",
|
||||||
|
"Site cloned": "Seite geklont",
|
||||||
|
|
||||||
|
"You have successfully changed the web interface's language!": "Du hast die Sprache des Webinterface erfolgreich geändert!",
|
||||||
|
"Due to the browser's caching, the full transformation could take some minute.": "Aufgrund des Browsercaches kann die volle Transformation Minuten dauern.",
|
||||||
|
|
||||||
|
"Connection with <b>UiServer Websocket</b> was lost. Reconnecting...": "Die Verbindung mit <b>UiServer Websocket</b>ist abgebrochen. Neu verbinden...",
|
||||||
|
"Connection with <b>UiServer Websocket</b> recovered.": "Die Verbindung mit <b>UiServer Websocket</b> wurde wiederhergestellt.",
|
||||||
|
"UiServer Websocket error, please reload the page.": "UiServer Websocket Fehler, bitte Seite neu laden.",
|
||||||
|
" Connecting...": " Verbinden...",
|
||||||
|
"Site size: <b>": "Seitengröße: <b>",
|
||||||
|
"MB</b> is larger than default allowed ": "MB</b> ist größer als der erlaubte Standart",
|
||||||
|
"Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Öffne Seite und setze das Limit auf \" + site_info.next_size_limit + \"MB",
|
||||||
|
" files needs to be downloaded": " Dateien müssen noch heruntergeladen werden",
|
||||||
|
" downloaded": " heruntergeladen",
|
||||||
|
" download failed": " Herunterladen fehlgeschlagen",
|
||||||
|
"Peers found: ": "Peers gefunden: ",
|
||||||
|
"No peers found": "Keine Peers gefunden",
|
||||||
|
"Running out of size limit (": "Das Speicherlimit ist bald ausgeschöpft (",
|
||||||
|
"Set limit to \" + site_info.next_size_limit + \"MB": "Limit auf \" + site_info.next_size_limit + \"MB ändern",
|
||||||
|
"Site size limit changed to {0}MB": "Speicherlimit für diese Seite auf {0}MB geändert",
|
||||||
|
" New version of this page has just released.<br>Reload to see the modified content.": " Neue version dieser Seite wurde gerade veröffentlicht.<br>Lade die Seite neu um den geänderten Inhalt zu sehen.",
|
||||||
|
"This site requests permission:": "Diese Seite fordert rechte:",
|
||||||
|
"_(Accept)": "Genehmigen"
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,51 @@
|
||||||
|
{
|
||||||
|
"Congratulations, your port <b>{0}</b> is opened.<br>You are a full member of the ZeroNet network!": "¡Felicidades! tu puerto <b>{0}</b> está abierto.<br>¡Eres un miembro completo de la red Zeronet!",
|
||||||
|
"Tor mode active, every connection using Onion route.": "Modo Tor activado, cada conexión usa una ruta Onion.",
|
||||||
|
"Successfully started Tor onion hidden services.": "Tor ha iniciado satisfactoriamente la ocultación de los servicios onion.",
|
||||||
|
"Unable to start hidden services, please check your config.": "No se puedo iniciar los servicios ocultos, por favor comprueba tu configuración.",
|
||||||
|
"For faster connections open <b>{0}</b> port on your router.": "Para conexiones más rápidas abre el puerto <b>{0}</b> en tu router.",
|
||||||
|
"Your connection is restricted. Please, open <b>{0}</b> port on your router": "Tu conexión está limitada. Por favor, abre el puerto <b>{0}</b> en tu router",
|
||||||
|
"or configure Tor to become a full member of the ZeroNet network.": "o configura Tor para convertirte en un miembro completo de la red ZeroNet.",
|
||||||
|
|
||||||
|
"Select account you want to use in this site:": "Selecciona la cuenta que quieres utilizar en este sitio:",
|
||||||
|
"currently selected": "actualmente seleccionada",
|
||||||
|
"Unique to site": "Única para el sitio",
|
||||||
|
|
||||||
|
"Content signing failed": "Firma del contenido fallida",
|
||||||
|
"Content publish queued for {0:.0f} seconds.": "Publicación de contenido en cola durante {0:.0f} segundos.",
|
||||||
|
"Content published to {0} peers.": "Contenido publicado para {0} pares.",
|
||||||
|
"No peers found, but your content is ready to access.": "No se ha encontrado pares, pero tu contenido está listo para ser accedido.",
|
||||||
|
"Your network connection is restricted. Please, open <b>{0}</b> port": "Tu conexión de red está restringida. Por favor, abre el puerto<b>{0}</b>",
|
||||||
|
"on your router to make your site accessible for everyone.": "en tu router para hacer tu sitio accesible a todo el mundo.",
|
||||||
|
"Content publish failed.": "Publicación de contenido fallida.",
|
||||||
|
"This file still in sync, if you write it now, then the previous content may be lost.": "Este archivo está aún sincronizado, si le escribes ahora el contenido previo podría perderse.",
|
||||||
|
"Write content anyway": "Escribir el contenido de todas formas",
|
||||||
|
"New certificate added:": "Nuevo certificado añadido:",
|
||||||
|
"You current certificate:": "Tu certificado actual:",
|
||||||
|
"Change it to {auth_type}/{auth_user_name}@{domain}": "Cambia esto a {auth_type}/{auth_user_name}@{domain}",
|
||||||
|
"Certificate changed to: <b>{auth_type}/{auth_user_name}@{domain}</b>.": "Certificado cambiado a: <b>{auth_type}/{auth_user_name}@{domain}</b>.",
|
||||||
|
"Site cloned": "Sitio clonado",
|
||||||
|
|
||||||
|
"You have successfully changed the web interface's language!": "¡Has cambiado con éxito el idioma de la interfaz web!",
|
||||||
|
"Due to the browser's caching, the full transformation could take some minute.": "Debido a la caché del navegador, la transformación completa podría llevar unos minutos.",
|
||||||
|
|
||||||
|
"Connection with <b>UiServer Websocket</b> was lost. Reconnecting...": "Se perdió la conexión con <b>UiServer Websocket</b>. Reconectando...",
|
||||||
|
"Connection with <b>UiServer Websocket</b> recovered.": "Conexión con <b>UiServer Websocket</b> recuperada.",
|
||||||
|
"UiServer Websocket error, please reload the page.": "Error de UiServer Websocket, por favor recarga la página.",
|
||||||
|
" Connecting...": " Conectando...",
|
||||||
|
"Site size: <b>": "Tamaño del sitio: <b>",
|
||||||
|
"MB</b> is larger than default allowed ": "MB</b> es más grande de lo permitido por defecto",
|
||||||
|
"Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Abre tu sitio and establece el límite de tamaño a \" + site_info.next_size_limit + \"MBs",
|
||||||
|
" files needs to be downloaded": " Los archivos necesitan ser descargados",
|
||||||
|
" downloaded": " descargados",
|
||||||
|
" download failed": " descarga fallida",
|
||||||
|
"Peers found: ": "Pares encontrados: ",
|
||||||
|
"No peers found": "No se han encontrado pares",
|
||||||
|
"Running out of size limit (": "Superando el tamaño límite (",
|
||||||
|
"Set limit to \" + site_info.next_size_limit + \"MB": "Establece ellímite a \" + site_info.next_size_limit + \"MB ändern",
|
||||||
|
"Site size limit changed to {0}MB": "Límite de tamaño del sitio cambiado a {0}MBs",
|
||||||
|
" New version of this page has just released.<br>Reload to see the modified content.": " Se ha publicado una nueva versión de esta página .<br>Recarga para ver el contenido modificado.",
|
||||||
|
"This site requests permission:": "Este sitio solicita permiso:",
|
||||||
|
"_(Accept)": "Conceder"
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,50 @@
|
||||||
|
{
|
||||||
|
"Congratulations, your port <b>{0}</b> is opened.<br>You are a full member of the ZeroNet network!": "تبریک، درگاه <b>{0}</b> شما باز شده است.<br>شما یک عضو تمام شبکه ZeroNet هستید!",
|
||||||
|
"Tor mode active, every connection using Onion route.": "حالت Tor فعال است، هر ارتباط از مسیریابی پیاز (Onion) استفاده میکند.",
|
||||||
|
"Successfully started Tor onion hidden services.": "خدمات پنهان پیاز (Onion) Tor با موفقیت راهاندازی شد.",
|
||||||
|
"Unable to start hidden services, please check your config.": "قادر به راهاندازی خدمات پنهان نیستیم، لطفا تنظیمات خود را بررسی نمایید.",
|
||||||
|
"For faster connections open <b>{0}</b> port on your router.": "برای ارتباطات سریعتر درگاه <b>{0}</b> را بر روی مسیریاب (روتر) خود باز نمایید.",
|
||||||
|
"Your connection is restricted. Please, open <b>{0}</b> port on your router": "ارتباط شما محدودشده است. لطفا درگاه <b>{0}</b> را در مسیریاب (روتر) خود باز نمایید",
|
||||||
|
"or configure Tor to become a full member of the ZeroNet network.": "یا پیکربندی Tor را انجام دهید تا به یک عضو تمام شبکه ZeroNet تبدیل شوید.",
|
||||||
|
|
||||||
|
"Select account you want to use in this site:": "حسابی را که میخواهید در این سایت استفاده کنید، انتخاب کنید:",
|
||||||
|
"currently selected": "در حال حاضر انتخابشده",
|
||||||
|
"Unique to site": "مختص به سایت",
|
||||||
|
|
||||||
|
"Content signing failed": "امضای محتوا با شکست مواجه شد",
|
||||||
|
"Content publish queued for {0:.0f} seconds.": "محتوا در صف انتشار با {0:.0f} ثانیه تاخیر قرار گرفت.",
|
||||||
|
"Content published to {0} peers.": "محتوا برای {0} تعداد همتا انتشار یافت.",
|
||||||
|
"No peers found, but your content is ready to access.": "همتایی یافت نشد، اما محتوای شما آماده دسترسی است.",
|
||||||
|
"Your network connection is restricted. Please, open <b>{0}</b> port": "ارتباط شبکه شما محدودشده است. لطفا درگاه <b>{0}</b> را",
|
||||||
|
"on your router to make your site accessible for everyone.": "در مسیریاب (روتر) خود باز کنید تا سایت خود را برای همه در دسترس قرار دهید.",
|
||||||
|
"Content publish failed.": "انتشار محتوا موفق نبود.",
|
||||||
|
"This file still in sync, if you write it now, then the previous content may be lost.": "این فایل همچنان همگام است، اگز شما آن را بنویسید، ممکن است محتوای قبلی ازبین رود.",
|
||||||
|
"Write content anyway": "در هر صورت محتوا را بنویس",
|
||||||
|
"New certificate added:": "گواهی جدیدی افزوده شد:",
|
||||||
|
"You current certificate:": "گواهی فعلی شما:",
|
||||||
|
"Change it to {auth_type}/{auth_user_name}@{domain}": "تغییرش بده به {auth_type}/{auth_user_name}@{domain}",
|
||||||
|
"Certificate changed to: <b>{auth_type}/{auth_user_name}@{domain}</b>.": "گواهینامه به: <b>{auth_type}/{auth_user_name}@{domain}</b> تغییر پیدا کرد.",
|
||||||
|
"Site cloned": "سایت همسانسازی شد",
|
||||||
|
|
||||||
|
"You have successfully changed the web interface's language!": "شما با موفقیت زبان رابط وب را تغییر دادید!",
|
||||||
|
"Due to the browser's caching, the full transformation could take some minute.": "به دلیل ذخیرهسازی در مرورگر، امکان دارد تغییر شکل کامل چند دقیقه طول بکشد.",
|
||||||
|
|
||||||
|
"Connection with <b>UiServer Websocket</b> was lost. Reconnecting...": "اتصال با <b>UiServer Websocket</b> قطع شد. اتصال دوباره...",
|
||||||
|
"Connection with <b>UiServer Websocket</b> recovered.": "ارتباط با <b>UiServer Websocket</b> دوباره برقرار شد.",
|
||||||
|
"UiServer Websocket error, please reload the page.": "خطای UiServer Websocket, لطفا صفحه را دوباره بارگیری کنید.",
|
||||||
|
" Connecting...": " برقراری ارتباط...",
|
||||||
|
"Site size: <b>": "حجم سایت: <b>",
|
||||||
|
"MB</b> is larger than default allowed ": "MB</b> بیشتر از پیشفرض مجاز است ",
|
||||||
|
"Open site and set size limit to \" + site_info.next_size_limit + \"MB": "سایت را باز کرده و محدوده حجم را به \" + site_info.next_size_limit + \"MB تنظیم کن",
|
||||||
|
" files needs to be downloaded": " فایلهایی که نیاز است، دانلود شوند",
|
||||||
|
" downloaded": " دانلود شد",
|
||||||
|
" download failed": " دانلود موفق نبود",
|
||||||
|
"Peers found: ": "چند همتا یافت شد: ",
|
||||||
|
"No peers found": "همتایی یافت نشد",
|
||||||
|
"Running out of size limit (": "عبور کرده از محدوده حجم (",
|
||||||
|
"Set limit to \" + site_info.next_size_limit + \"MB": "محدوده را به \" + site_info.next_size_limit + \"MB تنظیم کن",
|
||||||
|
"Site size limit changed to {0}MB": "محدوده حجم سایت به {0}MB تغییر کرد",
|
||||||
|
" New version of this page has just released.<br>Reload to see the modified content.": " نسخه جدیدی از این صفحه منتشر شده است.<br> برای مشاهده محتوای تغییریافته دوباره بارگیری نمایید.",
|
||||||
|
"This site requests permission:": "این سایت درخواست مجوز میکند:",
|
||||||
|
"_(Accept)": "_(پذیرفتن)"
|
||||||
|
}
|
|
@ -0,0 +1,51 @@
|
||||||
|
{
|
||||||
|
"Congratulations, your port <b>{0}</b> is opened.<br>You are a full member of the ZeroNet network!": "Félicitations, le port (<b>{0}</b>) est ouvert.<br>Vous êtes maintenant membre de ZeroNet!!",
|
||||||
|
"Tor mode active, every connection using Onion route.": "Tor actif, toutes les connexions utilisent un routage Onion.",
|
||||||
|
"Successfully started Tor onion hidden services.": "Tor activé avec succès.",
|
||||||
|
"Unable to start hidden services, please check your config.": "Impossible d'activer Tor, veuillez vérifier votre configuration.",
|
||||||
|
"For faster connections open <b>{0}</b> port on your router.": "Pour une meilleure connectivité, ouvrez le port <b>{0}</b> sur votre routeur.",
|
||||||
|
"Your connection is restricted. Please, open <b>{0}</b> port on your router": "Connectivité limitée. Veuillez ouvrir le port <b>{0}</b> sur votre routeur",
|
||||||
|
"or configure Tor to become a full member of the ZeroNet network.": "ou configurez Tor afin d'avoir accès aux pairs ZeroNet Onion.",
|
||||||
|
|
||||||
|
"Select account you want to use in this site:": "Sélectionnez le compte que vous voulez utiliser pour ce site:",
|
||||||
|
"currently selected": "présentement sélectionné",
|
||||||
|
"Unique to site": "Unique au site",
|
||||||
|
|
||||||
|
"Content signing failed": "Échec à la signature du contenu",
|
||||||
|
"Content publish queued for {0:.0f} seconds.": "Publication du contenu différée {0:.0f} secondes.",
|
||||||
|
"Content published to {0} peers.": "Contenu publié à {0} pairs.",
|
||||||
|
"No peers found, but your content is ready to access.": "Aucun pair trouvé, mais votre contenu est accessible.",
|
||||||
|
"Your network connection is restricted. Please, open <b>{0}</b> port": "Connectivité limitée. Veuillez ouvrir le port <b>{0}</b>",
|
||||||
|
"on your router to make your site accessible for everyone.": "sur votre routeur pour que votre site soit accessible à tous.",
|
||||||
|
"Content publish failed.": "Échec de la publication du contenu.",
|
||||||
|
"This file still in sync, if you write it now, then the previous content may be lost.": "Ce fichier n'est pas à jour, si vous le modifiez maintenant une version antérieure pourrait être perdue.",
|
||||||
|
"Write content anyway": "Enregistrer quand même",
|
||||||
|
"New certificate added:": "Nouveau certificat ajouté :",
|
||||||
|
"You current certificate:": "Votre certificat actuel :",
|
||||||
|
"Change it to {auth_type}/{auth_user_name}@{domain}": "Changer pour {auth_type}/{auth_user_name}@{domain}",
|
||||||
|
"Certificate changed to: <b>{auth_type}/{auth_user_name}@{domain}</b>.": "Certificat changé pour : <b>{auth_type}/{auth_user_name}@{domain}</b>.",
|
||||||
|
"Site cloned": "Site cloné",
|
||||||
|
|
||||||
|
"You have successfully changed the web interface's language!": "Vous avez modifié la langue d'affichage avec succès!",
|
||||||
|
"Due to the browser's caching, the full transformation could take some minute.": "En fonction du cache du navigateur, la modification pourrait prendre quelques minutes.",
|
||||||
|
|
||||||
|
"Connection with <b>UiServer Websocket</b> was lost. Reconnecting...": "Connexion avec <b>UiServer Websocket</b> rompue. Reconnexion...",
|
||||||
|
"Connection with <b>UiServer Websocket</b> recovered.": "Connexion avec <b>UiServer Websocket</b> rétablie.",
|
||||||
|
"UiServer Websocket error, please reload the page.": "Erreur du UiServer Websocket, veuillez recharger la page.",
|
||||||
|
" Connecting...": " Connexion...",
|
||||||
|
"Site size: <b>": "Taille du site : <b>",
|
||||||
|
"MB</b> is larger than default allowed ": "MB</b> est plus large que la taille permise par défaut ",
|
||||||
|
"Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Ouvrez le site et augmentez la taille maximale à \" + site_info.next_size_limit + \"MB",
|
||||||
|
" files needs to be downloaded": " fichiers doivent être téléchargés",
|
||||||
|
" downloaded": " téléchargés",
|
||||||
|
" download failed": " échec de téléchargement",
|
||||||
|
"Peers found: ": "Pairs trouvés: ",
|
||||||
|
"No peers found": "Aucun pair trouvé",
|
||||||
|
"Running out of size limit (": "Vous approchez la taille maximale (",
|
||||||
|
"Set limit to \" + site_info.next_size_limit + \"MB": "Augmentez la taille maximale à \" + site_info.next_size_limit + \"MB",
|
||||||
|
"Site size limit changed to {0}MB": "Taille maximale du site changée à {0}MB",
|
||||||
|
" New version of this page has just released.<br>Reload to see the modified content.": " Une nouvelle version de cette page vient d'être publiée.<br> Rechargez pour voir les modifications.",
|
||||||
|
"This site requests permission:": "Ce site requiert une permission :",
|
||||||
|
"_(Accept)": "Autoriser"
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,51 @@
|
||||||
|
{
|
||||||
|
"Congratulations, your port <b>{0}</b> is opened.<br>You are a full member of the ZeroNet network!": "Gratulálunk, a portod (<b>{0}</b>) nyitva van.<br>Teljes értékű tagja vagy a hálózatnak!",
|
||||||
|
"Tor mode active, every connection using Onion route.": "Tor mód aktív, minden kapcsolat az Onion hálózaton keresztül történik.",
|
||||||
|
"Successfully started Tor onion hidden services.": "Sikeresen elindultak a Tor onion titkos szolgáltatások.",
|
||||||
|
"Unable to start hidden services, please check your config.": "Nem sikerült elindítani a Tor onion szolgáltatásokat. Kérjük, ellenőrizd a beállításokat!",
|
||||||
|
"For faster connections open <b>{0}</b> port on your router.": "A gyorsabb kapcsolatok érdekében nyisd ki a <b>{0}</b> portot a routereden.",
|
||||||
|
"Your connection is restricted. Please, open <b>{0}</b> port on your router": "A kapcsolatod korlátozott. Kérjük, nyisd ki a <b>{0}</b> portot a routereden",
|
||||||
|
"or configure Tor to become a full member of the ZeroNet network.": "vagy állítsd be a Tor kliensed, hogy teljes értékű tagja legyél a hálózatnak!",
|
||||||
|
|
||||||
|
"Select account you want to use in this site:": "Válaszd ki az oldalhoz használt felhasználónevet:",
|
||||||
|
"currently selected": "jelenleg kijelölt",
|
||||||
|
"Unique to site": "Egyedi az oldalon",
|
||||||
|
|
||||||
|
"Content signing failed": "Tartalom aláírása sikeretelen",
|
||||||
|
"Content publish queued for {0:.0f} seconds.": "Tartalom publikálása elhalasztva {0:.0f} másodperccel.",
|
||||||
|
"Content published to {0} peers.": "Tartalom publikálva {0} fél részére.",
|
||||||
|
"No peers found, but your content is ready to access.": "Aktív csatlakozási pont nem található, de a tartalmad készen áll a kiszolgálásra.",
|
||||||
|
"Your network connection is restricted. Please, open <b>{0}</b> port": "A kapcsolatod korlátozott. Kérjük, nyisd ki a <b>{0}</b> portot",
|
||||||
|
"on your router to make your site accessible for everyone.": "a routereden, hogy az oldalad mindenki számára elérhető legyen.",
|
||||||
|
"Content publish failed.": "Sikertelen tartalom publikálás.",
|
||||||
|
"This file still in sync, if you write it now, then the previous content may be lost.": "Ez a fájl még letöltés alatt van, ha most felülírod a korábbi tartalma elveszhet.",
|
||||||
|
"Write content anyway": "Felülírás",
|
||||||
|
"New certificate added:": "Új tanúsítvány hozzáadva:",
|
||||||
|
"You current certificate:": "A jelenlegi tanúsítványod: ",
|
||||||
|
"Change it to {auth_type}/{auth_user_name}@{domain}": "Változtatás {auth_type}/{auth_user_name}@{domain}-ra",
|
||||||
|
"Certificate changed to: <b>{auth_type}/{auth_user_name}@{domain}</b>.": "A tanúsítvány megváltozott <b>{auth_type}/{auth_user_name}@{domain}</b>-ra.",
|
||||||
|
"Site cloned": "Az oldal klónozva",
|
||||||
|
|
||||||
|
"You have successfully changed the web interface's language!": "Sikeresen átállítottad a web felület nyelvét!",
|
||||||
|
"Due to the browser's caching, the full transformation could take some minute.": "A böngésző cache-elése miatt egy pár percig eltarthat a teljes átállás.",
|
||||||
|
|
||||||
|
"Connection with <b>UiServer Websocket</b> was lost. Reconnecting...": "Az <b>UiServer Websocket</b> kapcsolat megszakadt. Újracsatlakozás...",
|
||||||
|
"Connection with <b>UiServer Websocket</b> recovered.": "Az <b>UiServer Websocket</b> kapcsolat visszaállt.",
|
||||||
|
"UiServer Websocket error, please reload the page.": "UiServer Websocket hiba, töltsd újra az oldalt!",
|
||||||
|
" Connecting...": " Csatlakozás...",
|
||||||
|
"Site size: <b>": "Oldal mérete: <b>",
|
||||||
|
"MB</b> is larger than default allowed ": "MB</b> nagyobb, mint az engedélyezett ",
|
||||||
|
"Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Az oldal megnyitása és a korlát módosítása \" + site_info.next_size_limit + \"MB-ra",
|
||||||
|
" files needs to be downloaded": " fájlt kell letölteni",
|
||||||
|
" downloaded": " letöltve",
|
||||||
|
" download failed": " letöltés sikertelen",
|
||||||
|
"Peers found: ": "Talált csatlakozási pontok: ",
|
||||||
|
"No peers found": "Nincs csatlakozási pont",
|
||||||
|
"Running out of size limit (": "Az oldal hamarosan eléri a méretkorlátot (",
|
||||||
|
"Set limit to \" + site_info.next_size_limit + \"MB": "A korlát módosítása \" + site_info.next_size_limit + \"MB-ra",
|
||||||
|
"Site size limit changed to {0}MB": "A méretkorlát módosítva {0}MB-ra",
|
||||||
|
" New version of this page has just released.<br>Reload to see the modified content.": "Az oldal épp most módosult<br>A megváltozott tartalomért töltsd újra!",
|
||||||
|
"This site requests permission:": "Az oldal megtekintéséhez szükséges jog:",
|
||||||
|
"_(Accept)": "Engedélyezés"
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,51 @@
|
||||||
|
{
|
||||||
|
"Congratulations, your port <b>{0}</b> is opened.<br>You are a full member of the ZeroNet network!": "Congratulazioni, la tua porta <b>{0}</b> è aperta.<br>Ora sei un membro effettivo della rete ZeroNet!",
|
||||||
|
"Tor mode active, every connection using Onion route.": "Modalità Tor attiva, ogni connessione sta usando la rete Onion.",
|
||||||
|
"Successfully started Tor onion hidden services.": "Servizi Tor onion nascosti avviati con successo.",
|
||||||
|
"Unable to start hidden services, please check your config.": "Impossibile avviare i servizi nascosti. Si prega di controllare la propria configurazione!",
|
||||||
|
"For faster connections open <b>{0}</b> port on your router.": "Per avere connessioni più veloci aprire la porta <b>{0}</b> sul router.",
|
||||||
|
"Your connection is restricted. Please, open <b>{0}</b> port on your router": "La tua connessione è limitata. Aprire la porta <b>{0}</b> sul router",
|
||||||
|
"or configure Tor to become a full member of the ZeroNet network.": "o configurare Tor per diventare membro effettivo della rete ZeroNet!",
|
||||||
|
|
||||||
|
"Select account you want to use in this site:": "Seleziona l'account che vuoi utilizzare per questo sito:",
|
||||||
|
"currently selected": "attualmente selezionato",
|
||||||
|
"Unique to site": "Unico sul sito",
|
||||||
|
|
||||||
|
"Content signing failed": "Firma contenuti fallita",
|
||||||
|
"Content publish queued for {0:.0f} seconds.": "Pubblicazione contenuti in coda per {0:.0f} secondi.",
|
||||||
|
"Content published to {0} peers.": "Contenuti pubblicati su {0} peer.",
|
||||||
|
"No peers found, but your content is ready to access.": "Nessun peer trovato, ma i tuoi contenuti sono pronti per l'accesso.",
|
||||||
|
"Your network connection is restricted. Please, open <b>{0}</b> port": "La tua connessione di rete è limitata. Aprire la porta <b>{0}</b> ",
|
||||||
|
"on your router to make your site accessible for everyone.": "sul router, per rendere il sito accessibile a chiunque.",
|
||||||
|
"Content publish failed.": "Pubblicazione contenuti fallita.",
|
||||||
|
"This file still in sync, if you write it now, then the previous content may be lost.": "Questo file è ancora in sincronizzazione, se viene modificato i contenuti precedenti andranno persi.",
|
||||||
|
"Write content anyway": "Scrivere comunque i contenuti",
|
||||||
|
"New certificate added:": "Aggiunto nuovo certificato:",
|
||||||
|
"You current certificate:": "Il tuo attuale certificato:",
|
||||||
|
"Change it to {auth_type}/{auth_user_name}@{domain}": "Cambiarlo in {auth_type}/{auth_user_name}@{domain}",
|
||||||
|
"Certificate changed to: <b>{auth_type}/{auth_user_name}@{domain}</b>.": "Certificato cambiato in: <b>{auth_type}/{auth_user_name}@{domain}</b>.",
|
||||||
|
"Site cloned": "Sito clonato",
|
||||||
|
|
||||||
|
"You have successfully changed the web interface's language!": "Hai cambiato con successo la lingua dell'interfaccia web!",
|
||||||
|
"Due to the browser's caching, the full transformation could take some minute.": "La trasformazione completa potrebbe richiedre alcuni minuti a causa della cache del browser.",
|
||||||
|
|
||||||
|
"Connection with <b>UiServer Websocket</b> was lost. Reconnecting...": "La connessione con <b>UiServer Websocket</b> è andata persa. Riconnessione...",
|
||||||
|
"Connection with <b>UiServer Websocket</b> recovered.": "Connessione con <b>UiServer Websocket</b> recuperata.",
|
||||||
|
"UiServer Websocket error, please reload the page.": "Errore UiServer Websocket, ricaricare la pagina!",
|
||||||
|
" Connecting...": " Connessione...",
|
||||||
|
"Site size: <b>": "Dimensione del sito: <b>",
|
||||||
|
"MB</b> is larger than default allowed ": "MB</b> è più grande del valore predefinito consentito ",
|
||||||
|
"Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Aprire il sito e impostare la dimensione limite a \" + site_info.next_size_limit + \"MB",
|
||||||
|
" files needs to be downloaded": " i file devono essere scaricati",
|
||||||
|
" downloaded": " scaricati",
|
||||||
|
" download failed": " scaricamento fallito",
|
||||||
|
"Peers found: ": "Peer trovati: ",
|
||||||
|
"No peers found": "Nessun peer trovato",
|
||||||
|
"Running out of size limit (": "Superato il limite di spazio (",
|
||||||
|
"Set limit to \" + site_info.next_size_limit + \"MB": "Imposta il limite a \" + site_info.next_size_limit + \"MB",
|
||||||
|
"Site size limit changed to {0}MB": "Limite di spazio cambiato a {0}MB",
|
||||||
|
" New version of this page has just released.<br>Reload to see the modified content.": "E' stata rilasciata una nuova versione di questa pagina<br>Ricaricare per vedere il contenuto modificato!",
|
||||||
|
"This site requests permission:": "Questo sito richiede permessi:",
|
||||||
|
"_(Accept)": "Concedere"
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,66 @@
|
||||||
|
{
|
||||||
|
"Congratulations, your port <b>{0}</b> is opened.<br>You are a full member of the ZeroNet network!": "おめでとうございます。ポート <b>{0}</b> が開きました。これでZeroNetネットワークのメンバーです。",
|
||||||
|
"Tor mode active, every connection using Onion route.": "Torモードがアクティブです、全ての接続はOnionルートを使用します。",
|
||||||
|
"Successfully started Tor onion hidden services.": "Tor onionサービスを正常に開始しました。",
|
||||||
|
"Unable to start hidden services, please check your config.": "非表示のサービスを開始できません。設定を確認してください。",
|
||||||
|
"For faster connections open <b>{0}</b> port on your router.": "接続を高速化するにはルーターのポート <b>{0}</b> を開けてください。",
|
||||||
|
"Your connection is restricted. Please, open <b>{0}</b> port on your router": "接続が制限されています。ルーターのポート <b>{0}</b> を開けてください。",
|
||||||
|
"or configure Tor to become a full member of the ZeroNet network.": "または、TorをZeroNetネットワークのメンバーになるように設定してください。",
|
||||||
|
|
||||||
|
"Select account you want to use in this site:": "このサイトで使用するアカウントを選択:",
|
||||||
|
"No certificate": "証明書がありません",
|
||||||
|
"currently selected": "現在選択中",
|
||||||
|
"Unique to site": "サイト固有",
|
||||||
|
|
||||||
|
"Content signing failed": "コンテンツの署名に失敗",
|
||||||
|
"Content publish queued for {0:.0f} seconds.": "コンテンツの公開は{0:.0f}秒のキューに入れられました。",
|
||||||
|
"Content published to {0}/{1} peers.": "サイトの更新を通知済 {0}/{1} ピア",
|
||||||
|
"Content published to {0} peers.": "{0}ピアに公開されたコンテンツ。",
|
||||||
|
"No peers found, but your content is ready to access.": "ピアは見つかりませんでしたが、コンテンツにアクセスする準備ができました。",
|
||||||
|
"Your network connection is restricted. Please, open <b>{0}</b> port": "ネットワーク接続が制限されています。ポート <b>{0}</b> を開いて、",
|
||||||
|
"on your router to make your site accessible for everyone.": "誰でもサイトにアクセスできるようにしてください。",
|
||||||
|
"Content publish failed.": "コンテンツの公開に失敗しました。",
|
||||||
|
"This file still in sync, if you write it now, then the previous content may be lost.": "このファイルはまだ同期しています。今すぐ書き込むと、前のコンテンツが失われる可能性があります。",
|
||||||
|
"Write content anyway": "とにかくコンテンツを書く",
|
||||||
|
"New certificate added:": "新しい証明書が追加されました:",
|
||||||
|
"You current certificate:": "現在の証明書:",
|
||||||
|
"Change it to {auth_type}/{auth_user_name}@{domain}": "{auth_type}/{auth_user_name}@{domain} に変更",
|
||||||
|
"Certificate changed to: <b>{auth_type}/{auth_user_name}@{domain}</b>.": "変更後の証明書: <b>{auth_type}/{auth_user_name}@{domain}</b>",
|
||||||
|
"Site cloned": "複製されたサイト",
|
||||||
|
|
||||||
|
"You have successfully changed the web interface's language!": "Webインターフェースの言語が正常に変更されました!",
|
||||||
|
"Due to the browser's caching, the full transformation could take some minute.": "ブラウザのキャッシュにより、完全な変換には数分かかる場合があります。",
|
||||||
|
|
||||||
|
"Connection with <b>UiServer Websocket</b> was lost. Reconnecting...": "<b>UiServer Websocket</b>との接続が失われました。再接続しています...",
|
||||||
|
"Connection with <b>UiServer Websocket</b> recovered.": "<b>UiServer Websocket</b>との接続が回復しました。",
|
||||||
|
"UiServer Websocket error, please reload the page.": "UiServer Websocketエラー、ページをリロードしてください。",
|
||||||
|
" Connecting...": " 接続しています...",
|
||||||
|
"Site size: <b>": "サイトサイズ: <b>",
|
||||||
|
"MB</b> is larger than default allowed ": "MB</b>はデフォルトの許容値よりも大きいです。 ",
|
||||||
|
"Open site and set size limit to \" + site_info.next_size_limit + \"MB": "サイトを開き、サイズ制限を \" + site_info.next_size_limit + \"MB に設定",
|
||||||
|
" files needs to be downloaded": " ファイルをダウンロードする必要があります",
|
||||||
|
" downloaded": " ダウンロード",
|
||||||
|
" download failed": " ダウンロード失敗",
|
||||||
|
"Peers found: ": "ピアが見つかりました: ",
|
||||||
|
"No peers found": "ピアが見つかりません",
|
||||||
|
"Running out of size limit (": "サイズ制限を使い果たしました (",
|
||||||
|
"Set limit to \" + site_info.next_size_limit + \"MB": "制限を \" + site_info.next_size_limit + \"MB に設定",
|
||||||
|
"Cloning site...": "サイトを複製中…",
|
||||||
|
"Site size limit changed to {0}MB": "サイトのサイズ制限が {0}MB に変更されました",
|
||||||
|
" New version of this page has just released.<br>Reload to see the modified content.": " このページの新しいバージョンが公開されました。<br>変更されたコンテンツを見るには再読み込みしてください。",
|
||||||
|
"This site requests permission:": "このサイトは権限を要求しています:",
|
||||||
|
"_(Accept)": "_(許可)",
|
||||||
|
|
||||||
|
"Save": "保存",
|
||||||
|
"Trackers announcing": "トラッカーをお知らせ",
|
||||||
|
"Error": "エラー",
|
||||||
|
"Done": "完了",
|
||||||
|
"Tracker connection error detected.": "トラッカー接続エラーが検出されました。",
|
||||||
|
|
||||||
|
"Update <b>ZeroNet client</b> to latest version?": "<b>ZeroNetクライアント</b>を最新版に更新しますか?",
|
||||||
|
"Update": "更新",
|
||||||
|
"Restart <b>ZeroNet client</b>?": "ZeroNetクライアントを再起動しますか?",
|
||||||
|
"Restart": "再起動",
|
||||||
|
"Shut down <b>ZeroNet client</b>?": "<b>ZeroNetクライアント</b>を終了しますか?",
|
||||||
|
"Shut down": "終了"
|
||||||
|
}
|
|
@ -0,0 +1,51 @@
|
||||||
|
{
|
||||||
|
"Congratulations, your port <b>{0}</b> is opened.<br>You are a full member of the ZeroNet network!": "Gefeliciteerd, je poort <b>{0}</b> is geopend.<br>Je bent een volledig lid van het ZeroNet netwerk!",
|
||||||
|
"Tor mode active, every connection using Onion route.": "Tor modus actief, elke verbinding gebruikt een Onion route.",
|
||||||
|
"Successfully started Tor onion hidden services.": "Tor onion verborgen diensten zijn met succes gestart.",
|
||||||
|
"Unable to start hidden services, please check your config.": "Het was niet mogelijk om verborgen diensten te starten, controleer je configuratie.",
|
||||||
|
"For faster connections open <b>{0}</b> port on your router.": "Voor snellere verbindingen open je de poort <b>{0}</b> op je router.",
|
||||||
|
"Your connection is restricted. Please, open <b>{0}</b> port on your router": "Je verbinding is beperkt. Open alsjeblieft poort <b>{0}</b> op je router",
|
||||||
|
"or configure Tor to become a full member of the ZeroNet network.": "of configureer Tor om een volledig lid van het ZeroNet netwerk te worden.",
|
||||||
|
|
||||||
|
"Select account you want to use in this site:": "Selecteer het account die je wilt gebruiken binnen deze site:",
|
||||||
|
"currently selected": "huidige selectie",
|
||||||
|
"Unique to site": "Uniek voor deze site",
|
||||||
|
|
||||||
|
"Content signing failed": "Inhoud ondertekenen mislukt",
|
||||||
|
"Content publish queued for {0:.0f} seconds.": "Publiceren van inhoud staat in de wachtrij voor {0:.0f} seconden.",
|
||||||
|
"Content published to {0} peers.": "Inhoud is gepubliceerd naar {0} peers",
|
||||||
|
"No peers found, but your content is ready to access.": "Geen peers gevonden, maar je inhoud is klaar voor toegang.",
|
||||||
|
"Your network connection is restricted. Please, open <b>{0}</b> port": "Je netwerkverbinding is beperkt. Open alsjeblieft poort <b>{0}</b>",
|
||||||
|
"on your router to make your site accessible for everyone.": "op je router om je site toegankelijk te maken voor iedereen.",
|
||||||
|
"Content publish failed.": "Inhoud publicatie mislukt.",
|
||||||
|
"This file still in sync, if you write it now, then the previous content may be lost.": "Dit bestand is nog in sync, als je het nu overschrijft, dan is mogelijk de vorige inhoud verloren.",
|
||||||
|
"Write content anyway": "Inhoud toch schrijven",
|
||||||
|
"New certificate added:": "Nieuw certificaat toegevoegd:",
|
||||||
|
"You current certificate:": "Je huidige certificaat:",
|
||||||
|
"Change it to {auth_type}/{auth_user_name}@{domain}": "Verander het naar {auth_type}/{auth_user_name}@{domain}",
|
||||||
|
"Certificate changed to: <b>{auth_type}/{auth_user_name}@{domain}</b>.": "Certificaat veranderd naar: <b>{auth_type}/{auth_user_name}@{domain}</b>.",
|
||||||
|
"Site cloned": "Site gecloned",
|
||||||
|
|
||||||
|
"You have successfully changed the web interface's language!": "Je hebt met succes de taal van de web interface aangepast!",
|
||||||
|
"Due to the browser's caching, the full transformation could take some minute.": "Door caching van je browser kan de volledige transformatie enkele minuten duren.",
|
||||||
|
|
||||||
|
"Connection with <b>UiServer Websocket</b> was lost. Reconnecting...": "Verbinding met <b>UiServer Websocket</b> verbroken. Opnieuw verbinden...",
|
||||||
|
"Connection with <b>UiServer Websocket</b> recovered.": "Verbinding met <b>UiServer Websocket</b> hersteld.",
|
||||||
|
"UiServer Websocket error, please reload the page.": "UiServer Websocket fout, herlaad alsjeblieft de pagina.",
|
||||||
|
" Connecting...": " Verbinden...",
|
||||||
|
"Site size: <b>": "Site grootte <b>",
|
||||||
|
"MB</b> is larger than default allowed ": "MB</b> is groter dan de standaard toegestaan ",
|
||||||
|
"Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Open de site en stel de limiet op de grootte in op \" + site_info.next_size_limit + \"MB",
|
||||||
|
" files needs to be downloaded": " bestanden moeten worden gedownload",
|
||||||
|
" downloaded": " gedownload",
|
||||||
|
" download failed": " download mislukt",
|
||||||
|
"Peers found: ": "Peers gevonden: ",
|
||||||
|
"No peers found": "Geen peers gevonden",
|
||||||
|
"Running out of size limit (": "Limiet op grootte bereikt (",
|
||||||
|
"Set limit to \" + site_info.next_size_limit + \"MB": "Stel limiet in op \" + site_info.next_size_limit + \"MB",
|
||||||
|
"Site size limit changed to {0}MB": "Site limiet op grootte is veranderd naar {0}MB",
|
||||||
|
" New version of this page has just released.<br>Reload to see the modified content.": " Een nieuwe versie van deze pagina is zojuist uitgekomen.<br>Herlaad de pagina om de bijgewerkte inhoud te zien.",
|
||||||
|
"This site requests permission:": "Deze site vraagt om permissie:",
|
||||||
|
"_(Accept)": "Toekennen"
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,54 @@
|
||||||
|
{
|
||||||
|
"Congratulations, your port <b>{0}</b> is opened.<br>You are a full member of the ZeroNet network!": "Gratulacje, twój port <b>{0}</b> jest otwarty.<br>Jesteś pełnoprawnym użytkownikiem sieci ZeroNet!",
|
||||||
|
"Tor mode active, every connection using Onion route.": "Tryb Tor aktywny, każde połączenie przy użyciu trasy Cebulowej.",
|
||||||
|
"Successfully started Tor onion hidden services.": "Pomyślnie zainicjowano ukryte usługi cebulowe Tor.",
|
||||||
|
"Unable to start hidden services, please check your config.": "Niezdolny do uruchomienia ukrytych usług, proszę sprawdź swoją konfigurację.",
|
||||||
|
"For faster connections open <b>{0}</b> port on your router.": "Dla szybszego połączenia otwórz <b>{0}</b> port w swoim routerze.",
|
||||||
|
"Your connection is restricted. Please, open <b>{0}</b> port on your router": "Połączenie jest ograniczone. Proszę, otwórz port <b>{0}</b> w swoim routerze",
|
||||||
|
"or configure Tor to become a full member of the ZeroNet network.": "bądź skonfiguruj Tora by stać się pełnoprawnym użytkownikiem sieci ZeroNet.",
|
||||||
|
|
||||||
|
"Select account you want to use in this site:": "Wybierz konto którego chcesz użyć na tej stronie:",
|
||||||
|
"currently selected": "aktualnie wybrany",
|
||||||
|
"Unique to site": "Unikatowy dla strony",
|
||||||
|
|
||||||
|
"Content signing failed": "Podpisanie treści zawiodło",
|
||||||
|
"Content publish queued for {0:.0f} seconds.": "Publikacja treści wstrzymana na {0:.0f} sekund(y).",
|
||||||
|
"Content published to {0} peers.": "Treść opublikowana do {0} użytkowników.",
|
||||||
|
"No peers found, but your content is ready to access.": "Nie odnaleziono użytkowników, ale twoja treść jest dostępna.",
|
||||||
|
"Your network connection is restricted. Please, open <b>{0}</b> port": "Twoje połączenie sieciowe jest ograniczone. Proszę, otwórz port <b>{0}</b>",
|
||||||
|
"on your router to make your site accessible for everyone.": "w swoim routerze, by twoja strona mogła być dostępna dla wszystkich.",
|
||||||
|
"Content publish failed.": "Publikacja treści zawiodła.",
|
||||||
|
"This file still in sync, if you write it now, then the previous content may be lost.": "Ten plik wciąż się synchronizuje, jeśli zapiszesz go teraz, poprzednia treść może zostać utracona.",
|
||||||
|
"Write content anyway": "Zapisz treść mimo wszystko",
|
||||||
|
"New certificate added:": "Nowy certyfikat dodany:",
|
||||||
|
"You current certificate:": "Twój aktualny certyfikat: ",
|
||||||
|
"Change it to {auth_type}/{auth_user_name}@{domain}": "Zmień na {auth_type}/{auth_user_name}@{domain}",
|
||||||
|
"Certificate changed to: <b>{auth_type}/{auth_user_name}@{domain}</b>.": "Certyfikat zmieniony na <b>{auth_type}/{auth_user_name}@{domain}</b>.",
|
||||||
|
"Site cloned": "Strona sklonowana",
|
||||||
|
|
||||||
|
"You have successfully changed the web interface's language!": "Pomyślnie zmieniono język interfejsu stron!",
|
||||||
|
"Due to the browser's caching, the full transformation could take some minute.": "Ze względu na buforowanie przeglądarki, pełna zmiana może zająć parę minut.",
|
||||||
|
|
||||||
|
"Connection with <b>UiServer Websocket</b> was lost. Reconnecting...": "Połączenie z <b>UiServer Websocket</b> zostało przerwane. Ponowne łączenie...",
|
||||||
|
"Connection with <b>UiServer Websocket</b> recovered.": "Połączenie z <b>UiServer Websocket</b> przywrócone.",
|
||||||
|
"UiServer Websocket error, please reload the page.": "Błąd UiServer Websocket, prosze odświeżyć stronę.",
|
||||||
|
" Connecting...": " Łączenie...",
|
||||||
|
"Site size: <b>": "Rozmiar strony: <b>",
|
||||||
|
"MB</b> is larger than default allowed ": "MB</b> jest większy niż domyślnie dozwolony ",
|
||||||
|
"Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Otwórz stronę i ustaw limit na \" + site_info.next_size_limit + \"MBów",
|
||||||
|
" files needs to be downloaded": " pliki muszą zostać ściągnięte",
|
||||||
|
" downloaded": " ściągnięte",
|
||||||
|
" download failed": " ściąganie nie powiodło się",
|
||||||
|
"Peers found: ": "Odnaleziono użytkowników: ",
|
||||||
|
"No peers found": "Nie odnaleziono użytkowników",
|
||||||
|
"Running out of size limit (": "Limit rozmiaru na wyczerpaniu (",
|
||||||
|
"Set limit to \" + site_info.next_size_limit + \"MB": "Ustaw limit na \" + site_info.next_size_limit + \"MBów",
|
||||||
|
"Site size limit changed to {0}MB": "Rozmiar limitu strony zmieniony na {0}MBów",
|
||||||
|
" New version of this page has just released.<br>Reload to see the modified content.": "Nowa wersja tej strony właśnie została wydana.<br>Odśwież by zobaczyć nową, zmodyfikowaną treść strony.",
|
||||||
|
"This site requests permission:": "Ta strona wymaga uprawnień:",
|
||||||
|
"_(Accept)": "Przyznaj uprawnienia",
|
||||||
|
|
||||||
|
"Sign and publish": "Podpisz i opublikuj",
|
||||||
|
"Restart <b>ZeroNet client</b>?": "Uruchomić ponownie klienta ZeroNet?",
|
||||||
|
"Restart": "Uruchom ponownie"
|
||||||
|
}
|
|
@ -0,0 +1,57 @@
|
||||||
|
{
|
||||||
|
"Congratulations, your port <b>{0}</b> is opened.<br>You are a full member of the ZeroNet network!": "Parabéns, a porta<b>{0}</b> está aberta.<br>Você é um membro completo da rede ZeroNet!",
|
||||||
|
"Tor mode active, every connection using Onion route.": "Modo Tor ativado, todas as conexões usam a rota Onion.",
|
||||||
|
"Successfully started Tor onion hidden services.": "Os serviços ocultos Tor onion foram inciados com sucesso.",
|
||||||
|
"Unable to start hidden services, please check your config.": "Não foi possível iniciar os serviços ocultos, por favor verifique suas configurações.",
|
||||||
|
"For faster connections open <b>{0}</b> port on your router.": "Para conexões mais rápidas, abra a porta <b>{0}</b> em seu roteador.",
|
||||||
|
"Your connection is restricted. Please, open <b>{0}</b> port on your router": "Sua conexão está restrita. Por favor, abra a porta <b>{0}</b> em seu roteador",
|
||||||
|
"or configure Tor to become a full member of the ZeroNet network.": "ou configure o Tor para se tornar um membro completo da rede ZeroNet.",
|
||||||
|
|
||||||
|
"Select account you want to use in this site:": "Selecione a conta que deseja usar nesse site:",
|
||||||
|
"currently selected": "atualmente selecionada",
|
||||||
|
"Unique to site": "Única para o site",
|
||||||
|
|
||||||
|
"Content signing failed": "Assinatura de conteúdo falhou",
|
||||||
|
"Content publish queued for {0:.0f} seconds.": "Publicação de conteúdo na fila por {0:.0f} segundos.",
|
||||||
|
"Content published to {0} peers.": "Conteúdo publicado para {0} peers.",
|
||||||
|
"No peers found, but your content is ready to access.": "Nenhum peer encontrado, mas seu conteúdo está pronto para ser acessado.",
|
||||||
|
"Your network connection is restricted. Please, open <b>{0}</b> port": "Sua conexão de rede está restrita. Por favor, abra a porta <b>{0}</b>",
|
||||||
|
"on your router to make your site accessible for everyone.": "em seu roteador para tornar seu site acessível para todos.",
|
||||||
|
"Content publish failed.": "Publicação de conteúdo falhou.",
|
||||||
|
"This file still in sync, if you write it now, then the previous content may be lost.": "Esse arquivo ainda está sincronizado, se escreve-lo agora o conteúdo anterior poderá ser perdido.",
|
||||||
|
"Write content anyway": "Escrever o conteúdo mesmo assim",
|
||||||
|
"New certificate added:": "Novo certificado adicionado:",
|
||||||
|
"You current certificate:": "Seu certificado atual:",
|
||||||
|
"Change it to {auth_type}/{auth_user_name}@{domain}": "Alterar para {auth_type}/{auth_user_name}@{domain}",
|
||||||
|
"Certificate changed to: <b>{auth_type}/{auth_user_name}@{domain}</b>.": "Certificado alterado para: <b>{auth_type}/{auth_user_name}@{domain}</b>.",
|
||||||
|
"Site cloned": "Site clonado",
|
||||||
|
|
||||||
|
"You have successfully changed the web interface's language!": "Você alterou o idioma da interface web com sucesso!",
|
||||||
|
"Due to the browser's caching, the full transformation could take some minute.": "Devido ao cache do navegador, a transformação completa pode levar alguns minutos.",
|
||||||
|
|
||||||
|
"Connection with <b>UiServer Websocket</b> was lost. Reconnecting...": "A conexão com <b>UiServer Websocket</b> foi perdida. Reconectando...",
|
||||||
|
"Connection with <b>UiServer Websocket</b> recovered.": "Conexão com <b>UiServer Websocket</b> recuperada.",
|
||||||
|
"UiServer Websocket error, please reload the page.": "Erro de UiServer Websocket, por favor atualize a página.",
|
||||||
|
" Connecting...": " Conectando...",
|
||||||
|
"Site size: <b>": "Tamanho do site: <b>",
|
||||||
|
"MB</b> is larger than default allowed ": "MB</b> é maior do que o tamanho permitido por padrão",
|
||||||
|
"Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Abrir site e definir limite de tamanho para \" + site_info.next_size_limit + \"MBs",
|
||||||
|
" files needs to be downloaded": " os arquivos precisam ser baixados",
|
||||||
|
" downloaded": " baixados",
|
||||||
|
" download failed": " falha no download",
|
||||||
|
"Peers found: ": "Peers encontrados: ",
|
||||||
|
"No peers found": "Nenhum peer encontrado",
|
||||||
|
"Running out of size limit (": "Passando do tamanho limite (",
|
||||||
|
"Set limit to \" + site_info.next_size_limit + \"MB": "Definir limite para \" + site_info.next_size_limit + \"MB",
|
||||||
|
"Site size limit changed to {0}MB": "Limite de tamanho do site alterado para {0}MBs",
|
||||||
|
" New version of this page has just released.<br>Reload to see the modified content.": " Uma nova versão desse site acaba de ser publicada.<br>Atualize para ver o conteúdo modificado.",
|
||||||
|
"This site requests permission:": "Esse site solicita permissão:",
|
||||||
|
"_(Accept)": "Conceder",
|
||||||
|
|
||||||
|
"Save": "Salvar",
|
||||||
|
"Trackers announcing": "Trackers anunciando",
|
||||||
|
"Error": "Erro",
|
||||||
|
"Done": "Concluído",
|
||||||
|
"Tracker connection error detected.": "Erro de conexão com tracker foi detectado."
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,51 @@
|
||||||
|
{
|
||||||
|
"Congratulations, your port <b>{0}</b> is opened.<br>You are a full member of the ZeroNet network!": "Поздравляем, ваш порт <b>{0}</b> открыт.<br>Вы полноценный участник сети ZeroNet!",
|
||||||
|
"Tor mode active, every connection using Onion route.": "Режим Tor включен, все соединения осуществляются через Tor.",
|
||||||
|
"Successfully started Tor onion hidden services.": "Скрытый сервис Tor запущен успешно.",
|
||||||
|
"Unable to start hidden services, please check your config.": "Ошибка при запуске скрытого сервиса, пожалуйста проверьте настройки",
|
||||||
|
"For faster connections open <b>{0}</b> port on your router.": "Для более быстрой работы сети откройте <b>{0}</b> порт на вашем роутере.",
|
||||||
|
"Your connection is restricted. Please, open <b>{0}</b> port on your router": "Подключение ограничено. Пожалуйста откройте <b>{0}</b> порт на вашем роутере",
|
||||||
|
"or configure Tor to become a full member of the ZeroNet network.": "или настройте Tor что бы стать полноценным участником сети ZeroNet.",
|
||||||
|
|
||||||
|
"Select account you want to use in this site:": "Выберите аккаунт для использования на этом сайте:",
|
||||||
|
"currently selected": "сейчас выбран",
|
||||||
|
"Unique to site": "Уникальный для этого сайта",
|
||||||
|
|
||||||
|
"Content signing failed": "Подпись контента не удалась",
|
||||||
|
"Content publish queued for {0:.0f} seconds.": "Публикация контента поставлена в очередь {0:.0f} секунд.",
|
||||||
|
"Content published to {0} peers.": "Контент опубликован на {0} пирах.",
|
||||||
|
"No peers found, but your content is ready to access.": "Пиры не найдены, но ваш контент доступен.",
|
||||||
|
"Your network connection is restricted. Please, open <b>{0}</b> port": "Ваше подключение ограничено. Пожалуйста откройте <b>{0}</b> порт. ",
|
||||||
|
"on your router to make your site accessible for everyone.": "на вашем роутере, чтобы ваш сайт стал доступен посетителям.",
|
||||||
|
"Content publish failed.": "Ошибка при публикации контента.",
|
||||||
|
"This file still in sync, if you write it now, then the previous content may be lost.": "Этот файл всё еще синхронизируется, если продолжить его изменение, предыдущий контент может быть потерян.",
|
||||||
|
"Write content anyway": "Записать контент в любом случае",
|
||||||
|
"New certificate added:": "Добавлен новый сертификат:",
|
||||||
|
"You current certificate:": "Ваш текущий сертификат: ",
|
||||||
|
"Change it to {auth_type}/{auth_user_name}@{domain}": "Изменить его на {auth_type}/{auth_user_name}@{domain}",
|
||||||
|
"Certificate changed to: <b>{auth_type}/{auth_user_name}@{domain}</b>.": "Сертификат изменен на: <b>{auth_type}/{auth_user_name}@{domain}</b>.",
|
||||||
|
"Site cloned": "Сайт склонирован",
|
||||||
|
|
||||||
|
"You have successfully changed the web interface's language!": "Язык интерфейса успешно изменен!",
|
||||||
|
"Due to the browser's caching, the full transformation could take some minute.": "В зависимости от работы вашего браузера полное преобразование может занять пару минут.",
|
||||||
|
|
||||||
|
"Connection with <b>UiServer Websocket</b> was lost. Reconnecting...": "Подключение к <b>UiServer Websocket</b> прервано. Переподключаюсь...",
|
||||||
|
"Connection with <b>UiServer Websocket</b> recovered.": "Подключение к <b>UiServer Websocket</b> восстановлено.",
|
||||||
|
"UiServer Websocket error, please reload the page.": "Ошибка <b>UiServer Websocket</b>, перезагрузите страницу!",
|
||||||
|
" Connecting...": " Подключение...",
|
||||||
|
"Site size: <b>": "Размер сайта: <b>",
|
||||||
|
"MB</b> is larger than default allowed ": "MB</b> больше чем разрешено по умолчанию ",
|
||||||
|
"Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Открыть сайт и установить лимит занимаемого места на \" + site_info.next_size_limit + \"MB",
|
||||||
|
" files needs to be downloaded": " файлы должны быть загружены",
|
||||||
|
" downloaded": " загружено",
|
||||||
|
" download failed": " ошибка загрузки",
|
||||||
|
"Peers found: ": "Пиров найдено: ",
|
||||||
|
"No peers found": "Пиры не найдены",
|
||||||
|
"Running out of size limit (": "Доступное место закончилось (",
|
||||||
|
"Set limit to \" + site_info.next_size_limit + \"MB": "Установить лимит на \" + site_info.next_size_limit + \"MB",
|
||||||
|
"Site size limit changed to {0}MB": "Лимит памяти на диске изменен на {0}MB",
|
||||||
|
" New version of this page has just released.<br>Reload to see the modified content.": "Доступна новая версия данной страницы<br>Обновите страницу, что бы увидеть изменения!",
|
||||||
|
"This site requests permission:": "Данный сайт запрашивает разрешения:",
|
||||||
|
"_(Accept)": "Предоставить"
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,57 @@
|
||||||
|
{
|
||||||
|
"Congratulations, your port <b>{0}</b> is opened.<br>You are a full member of the ZeroNet network!": "Blahoželáme, váš port <b>{0}</b> je otvorený.<br>Ste úplným členom siete ZeroNet!",
|
||||||
|
"Tor mode active, every connection using Onion route.": "Tor mód aktívny, všetky spojenia teraz používajú Onion sieť.",
|
||||||
|
"Successfully started Tor onion hidden services.": "Tor úspešne spustený.",
|
||||||
|
"Unable to start hidden services, please check your config.": "Nebolo možné spustiť Tor, prosím skontrolujte nastavenia.",
|
||||||
|
"For faster connections open <b>{0}</b> port on your router.": "Pre rýchlejšie spojenie otvorte na vašom routery port <b>{0}</b>",
|
||||||
|
"Your connection is restricted. Please, open <b>{0}</b> port on your router": "Vaše pripojenie je obmedzené. Prosím otvorte port <b>{0}</b> na vašom routery.",
|
||||||
|
"or configure Tor to become a full member of the ZeroNet network.": "alebo nastavte Tor aby ste sa stali plným členom siete ZeroNet.",
|
||||||
|
|
||||||
|
"Select account you want to use in this site:": "Zvoľte účet ktorý chcete používať na tejto stránke:",
|
||||||
|
"currently selected": "aktuálne zvolené",
|
||||||
|
"Unique to site": "Unikátny pre stránku",
|
||||||
|
|
||||||
|
"Content signing failed": "Podpísanie obsahu zlyhalo",
|
||||||
|
"Content publish queued for {0:.0f} seconds.": "Podpísanie obsahu bude na rade za {0:.0f} sekúnd",
|
||||||
|
"Content published to {0} peers.": "Obsah publikovaný {0} peer-erom",
|
||||||
|
"No peers found, but your content is ready to access.": "Neboli nájdený žiadny peer-ery, ale váš obsah je pripravený pre prístup.",
|
||||||
|
"Your network connection is restricted. Please, open <b>{0}</b> port": "Vaše pripojenie k sieti je obmedzené. Prosím otvorte port <b>{0}</b> na vašom routery.",
|
||||||
|
"on your router to make your site accessible for everyone.": "na vašom routery aby bola vaša stránka prístupná pre všetkých.",
|
||||||
|
"Content publish failed.": "Publikovanie obsahu zlyhalo.",
|
||||||
|
"This file still in sync, if you write it now, then the previous content may be lost.": "Tento súbor sa stále synchronizuje, ak v ňom spravíte zmeny, predchádzajúci obsah sa môže stratiť.",
|
||||||
|
"Write content anyway": "Aj tak spraviť zmeny",
|
||||||
|
"New certificate added:": "Pridaný nový certifikát:",
|
||||||
|
"You current certificate:": "Váš aktuálny certifikát:",
|
||||||
|
"Change it to {auth_type}/{auth_user_name}@{domain}": "Zvoľte to na {auth_type}/{auth_user_name}@{domain}",
|
||||||
|
"Certificate changed to: <b>{auth_type}/{auth_user_name}@{domain}</b>.": "Certifikát zmenený na: <b>{auth_type}/{auth_user_name}@{domain}</b>.",
|
||||||
|
"Site cloned": "Stránka naklonovaná",
|
||||||
|
|
||||||
|
"You have successfully changed the web interface's language!": "Úspešne ste zmenili jazyk webového rozhrania!",
|
||||||
|
"Due to the browser's caching, the full transformation could take some minute.": "Kôli cachu webového prehliadavača, ceľková transformácia môže chvíĺu trvať.",
|
||||||
|
|
||||||
|
"Connection with <b>UiServer Websocket</b> was lost. Reconnecting...": "Spojenie s <b>UiServer Websocket</b> bolo stratené. Znovu pripájame...",
|
||||||
|
"Connection with <b>UiServer Websocket</b> recovered.": "Spojenie s <b>UiServer Websocket</b> obnovené.",
|
||||||
|
"UiServer Websocket error, please reload the page.": "Chyba UiServer Websocket-u, prosím znovu načítajte stránku.",
|
||||||
|
" Connecting...": " Pripájanie...",
|
||||||
|
"Site size: <b>": "Veľkosť stránky: <b>",
|
||||||
|
"MB</b> is larger than default allowed ": "MB</b> je viac ako povolená hodnota",
|
||||||
|
"Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Otvoriť stránku a nastaviť limit veľkosti na \" + site_info.next_size_limit + \"MB",
|
||||||
|
" files needs to be downloaded": " súbory je potrebné stiahnuť",
|
||||||
|
" downloaded": " stiahnuté",
|
||||||
|
" download failed": " sťahovanie zlyhalo",
|
||||||
|
"Peers found: ": "Peer-erov nájdených: ",
|
||||||
|
"No peers found": "Neboli nájdený žiadny peer-ery",
|
||||||
|
"Running out of size limit (": "Presahuje povolený limit veľkosti pamäte (",
|
||||||
|
"Set limit to \" + site_info.next_size_limit + \"MB": "Nastaviť limit na \" + site_info.next_size_limit + \"MB",
|
||||||
|
"Site size limit changed to {0}MB": "Limit veľkosti pamäte nastavený na {0}MB",
|
||||||
|
" New version of this page has just released.<br>Reload to see the modified content.": " Bola vydaná nová verzia tejto stránky.<br>Znovu načítajte túto stránku aby bolo vidieť zmeny.",
|
||||||
|
"This site requests permission:": "Táto stránka vyžaduje povolenie:",
|
||||||
|
"_(Accept)": "Udeliť",
|
||||||
|
|
||||||
|
"on": "",
|
||||||
|
"Oct": "Okt",
|
||||||
|
"May": "Máj",
|
||||||
|
"Jun": "Jún",
|
||||||
|
"Jul": "Júl"
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,51 @@
|
||||||
|
{
|
||||||
|
"Congratulations, your port <b>{0}</b> is opened.<br>You are a full member of the ZeroNet network!": "Čestitke, vaša vrata <b>{0}</b> so odprta.<br>Postali ste polnopravni član ZeroNet omrežja!",
|
||||||
|
"Tor mode active, every connection using Onion route.": "Način Tor aktiven.",
|
||||||
|
"Successfully started Tor onion hidden services.": "Storitve Tor uspešno zagnane.",
|
||||||
|
"Unable to start hidden services, please check your config.": "Ni bilo mogoče zagnati Tor storitev. Preverite nastavitve.",
|
||||||
|
"For faster connections open <b>{0}</b> port on your router.": "Za hitrejše povezave na svojem usmerjevalniku odprite vrata <b>{0}</b>.",
|
||||||
|
"Your connection is restricted. Please, open <b>{0}</b> port on your router": "Vaša povezava je omejena. Na svojem usmerjevalniku odprite vrata <b>{0}</b>",
|
||||||
|
"or configure Tor to become a full member of the ZeroNet network.": "ali nastavite Tor, da postanete polnopravni član ZeroNet omrežja.",
|
||||||
|
|
||||||
|
"Select account you want to use in this site:": "Izberite račun, ki ga želite uporabiti na tem spletnem mestu:",
|
||||||
|
"currently selected": "trenutno izbrano",
|
||||||
|
"Unique to site": "Edinstven za spletno mesto",
|
||||||
|
|
||||||
|
"Content signing failed": "Podpisovanje vsebine ni uspelo",
|
||||||
|
"Content publish queued for {0:.0f} seconds.": "Objava vsebine na čakanju za {0:.0f} sekund.",
|
||||||
|
"Content published to {0} peers.": "Vsebina objavljena na {0} povezavah.",
|
||||||
|
"No peers found, but your content is ready to access.": "Ni nobenih povezav, vendar je vaša vsebina pripravljena za dostop.",
|
||||||
|
"Your network connection is restricted. Please, open <b>{0}</b> port": "Vaša povezava je omejena. Prosimo, odprite vrata <b>{0}</b>",
|
||||||
|
"on your router to make your site accessible for everyone.": "na vašem usmerjevalniku, da bo vaše spletno mesto dostopno za vse.",
|
||||||
|
"Content publish failed.": "Objavljanje vsebine ni uspelo.",
|
||||||
|
"This file still in sync, if you write it now, then the previous content may be lost.": "Ta datoteka se še vedno sinhronizira. Če jo uredite zdaj, se lahko zgodi, da bo prejšnja vsebina izgubljena.",
|
||||||
|
"Write content anyway": "Vseeno uredi vsebino",
|
||||||
|
"New certificate added:": "Dodano novo potrdilo:",
|
||||||
|
"You current certificate:": "Trenutno potrdilo:",
|
||||||
|
"Change it to {auth_type}/{auth_user_name}@{domain}": "Spremenite ga na {auth_type}/{auth_user_name}@{domain}",
|
||||||
|
"Certificate changed to: <b>{auth_type}/{auth_user_name}@{domain}</b>.": "Potrdilo spremenjeno na: <b>{auth_type}/{auth_user_name}@{domain}</b>.",
|
||||||
|
"Site cloned": "Stran klonirana",
|
||||||
|
|
||||||
|
"You have successfully changed the web interface's language!": "Uspešno ste spremenili jezik spletnega vmesnika!",
|
||||||
|
"Due to the browser's caching, the full transformation could take some minute.": "Zaradi predpomnjenja brskalnika lahko popolna preobrazba traja nekaj minut.",
|
||||||
|
|
||||||
|
"Connection with <b>UiServer Websocket</b> was lost. Reconnecting...": "Povezava z <b>UiServer Websocket</b> je bila izgubljena. Ponovno povezovanje ...",
|
||||||
|
"Connection with <b>UiServer Websocket</b> recovered.": "Povezava z <b>UiServer Websocket</b> je vzpostavljena.",
|
||||||
|
"UiServer Websocket error, please reload the page.": "Napaka UiServer Websocket. Prosimo osvežite stran.",
|
||||||
|
" Connecting...": " Povezovanje ...",
|
||||||
|
"Site size: <b>": "Velikost strani: <b>",
|
||||||
|
"MB</b> is larger than default allowed ": "MB</b> je večja od dovoljenih",
|
||||||
|
"Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Odpri to stran in nastavi omejitev na \" + site_info.next_size_limit + \"MB",
|
||||||
|
" files needs to be downloaded": " datotek mora biti prenešenih",
|
||||||
|
" downloaded": " preneseno",
|
||||||
|
" download failed": " prenos ni uspel",
|
||||||
|
"Peers found: ": "Najdene povezave: ",
|
||||||
|
"No peers found": "Ni najdenih povezav",
|
||||||
|
"Running out of size limit (": "Zmanjkuje dovoljenega prostora (",
|
||||||
|
"Set limit to \" + site_info.next_size_limit + \"MB": "Nastavi omejitev na \" + site_info.next_size_limit + \"MB",
|
||||||
|
"Site size limit changed to {0}MB": "Omejitev strani nastavljena na{0} MB",
|
||||||
|
" New version of this page has just released.<br>Reload to see the modified content.": " Ravnokar je bila objavljena nova različica te strani.<br>Osvežite jo, da boste videli novo vsebino.",
|
||||||
|
"This site requests permission:": "Ta stran zahteva dovoljenja:",
|
||||||
|
"_(Accept)": "Dovoli"
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,51 @@
|
||||||
|
{
|
||||||
|
"Congratulations, your port <b>{0}</b> is opened.<br>You are a full member of the ZeroNet network!": "Tebrikler, portunuz (<b>{0}</b>) açık.<br>Artık ZeroNet ağına katıldınız!",
|
||||||
|
"Tor mode active, every connection using Onion route.": "Tor aktif, tüm bağlantılar Onion yönlendircisini kullanıyor.",
|
||||||
|
"Successfully started Tor onion hidden services.": "Gizli Tor hizmetleri başlatıldı.",
|
||||||
|
"Unable to start hidden services, please check your config.": "Gizli hizmetler başlatılamadı, lütfen ayarlarınızı kontrol ediniz.",
|
||||||
|
"For faster connections open <b>{0}</b> port on your router.": "Daha hızlı bağlantı için <b>{0}</b> nolu portu bilgisayarınıza yönlendirin.",
|
||||||
|
"Your connection is restricted. Please, open <b>{0}</b> port on your router": "Sınırlı bağlantı. Lütfen, <b>{0}</b> nolu portu bilgisayarınıza yönlendirin",
|
||||||
|
"or configure Tor to become a full member of the ZeroNet network.": "ya da ZeroNet ağına tam olarak katılabilmek için Tor'u kullanın.",
|
||||||
|
|
||||||
|
"Select account you want to use in this site:": "Bu sitede kullanmak için bir hesap seçiniz:",
|
||||||
|
"currently selected": "kullanılan",
|
||||||
|
"Unique to site": "Bu site için benzersiz",
|
||||||
|
|
||||||
|
"Content signing failed": "İçerik imzalama başarısız oldu",
|
||||||
|
"Content publish queued for {0:.0f} seconds.": "İçerik yayımlanmak üzere {0:.0f} saniyedir kuyrukta.",
|
||||||
|
"Content published to {0} peers.": "İçerik {0} eşe dağıtıldı.",
|
||||||
|
"No peers found, but your content is ready to access.": "Eş bulunamadı, ama içeriğiniz erişime hazır.",
|
||||||
|
"Your network connection is restricted. Please, open <b>{0}</b> port": "Sınırlı bağlantı. Lütfen, <b>{0}</b> nolu portu bilgisayarınıza yönlendirin",
|
||||||
|
"on your router to make your site accessible for everyone.": "böylece sitenizi herkes için erişilebilir yapabilirsiniz",
|
||||||
|
"Content publish failed.": "İçerik yayımlama başarısız oldu.",
|
||||||
|
"This file still in sync, if you write it now, then the previous content may be lost.": "Bu dosya hala güncelleniyor, eğer şimdi kaydederseniz, önceki içerik kaybolabilir.",
|
||||||
|
"Write content anyway": "Yine de kaydet",
|
||||||
|
"New certificate added:": "Yeni sertifika eklendi:",
|
||||||
|
"You current certificate:": "Kullanılan sertifikanız:",
|
||||||
|
"Change it to {auth_type}/{auth_user_name}@{domain}": "{auth_type}/{auth_user_name}@{domain} olarak değiştir.",
|
||||||
|
"Certificate changed to: <b>{auth_type}/{auth_user_name}@{domain}</b>.": "<b>{auth_type}/{auth_user_name}@{domain}</b> olarak değiştirildi",
|
||||||
|
"Site cloned": "Site klonlandı",
|
||||||
|
|
||||||
|
"You have successfully changed the web interface's language!": "WEB ara yüzü için dil başarıyla değiştirildi!",
|
||||||
|
"Due to the browser's caching, the full transformation could take some minute.": "Tam dönüşümün sağlanması, tarayıcı önbelleklemesi yüzünden zaman alabilir.",
|
||||||
|
|
||||||
|
"Connection with <b>UiServer Websocket</b> was lost. Reconnecting...": "<b>UiServer Websocket</b> ile bağlantı kesildi. Yeniden bağlanılıyor...",
|
||||||
|
"Connection with <b>UiServer Websocket</b> recovered.": "<b>UiServer Websocket</b> ile bağlantı yeniden kuruldu.",
|
||||||
|
"UiServer Websocket error, please reload the page.": "UiServer Websocket hatası, lütfen sayfayı yenileyin.",
|
||||||
|
" Connecting...": " Bağlanıyor...",
|
||||||
|
"Site size: <b>": "Site boyutu: <b>",
|
||||||
|
"MB</b> is larger than default allowed ": "MB</b> izin verilenden fazla ",
|
||||||
|
"Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Siteyi açın ve boyut sınırını \" + site_info.next_size_limit + \"MB'ye yükseltin",
|
||||||
|
" files needs to be downloaded": " indirilmesi gereken dosyalar",
|
||||||
|
" downloaded": " indirildi",
|
||||||
|
" download failed": " indirme başarısız",
|
||||||
|
"Peers found: ": "Bulunan eşler: ",
|
||||||
|
"No peers found": "Eş bulunamadı",
|
||||||
|
"Running out of size limit (": "Boyut sınırlamasını aştı (",
|
||||||
|
"Set limit to \" + site_info.next_size_limit + \"MB": "Sınırlamayı \" + site_info.next_size_limit + \"MB'ye yükselt",
|
||||||
|
"Site size limit changed to {0}MB": "Site boyut sınırlaması {0}MB olarak ayarlandı",
|
||||||
|
" New version of this page has just released.<br>Reload to see the modified content.": " Bu sayfanın yeni versiyonu yayımlandı.<br>Değişen içeriği görmek için yeniden yükleyiniz.",
|
||||||
|
"This site requests permission:": "Bu site bir izin istiyor:",
|
||||||
|
"_(Accept)": "İzin ver"
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,54 @@
|
||||||
|
{
|
||||||
|
"Congratulations, your port <b>{0}</b> is opened.<br>You are a full member of the ZeroNet network!": "祝賀,你的埠 (<b>{0}</b>) 已經打開。<br>你已經是 ZeroNet 網路的正式成員了!",
|
||||||
|
"Tor mode active, every connection using Onion route.": "Tor 模式啟用,每個連接正在使用洋蔥路由。",
|
||||||
|
"Successfully started Tor onion hidden services.": "成功啟動 Tor 洋蔥隱藏服務。",
|
||||||
|
"Unable to start hidden services, please check your config.": "無法打開隱藏服務,請檢查你的配置。",
|
||||||
|
"For faster connections open <b>{0}</b> port on your router.": "為了更快的連接請在路由器上打開 <b>{0}</b> 埠。",
|
||||||
|
"Your connection is restricted. Please, open <b>{0}</b> port on your router": "你的連接受限制。請在你的路由器上打開 <b>{0}</b> 埠",
|
||||||
|
"or configure Tor to become a full member of the ZeroNet network.": "或者配置你的 Tor 來成為 ZeroNet 的正式成員。",
|
||||||
|
|
||||||
|
"Select account you want to use in this site:": "選擇你要在這個網站使用的帳戶:",
|
||||||
|
"currently selected": "當前選擇",
|
||||||
|
"Unique to site": "網站獨有身份",
|
||||||
|
|
||||||
|
"Content signing failed": "內容簽署失敗",
|
||||||
|
"Content publish queued for {0:.0f} seconds.": "內容已加入 {0:.0f} 秒後的發佈隊列。",
|
||||||
|
"Content published to {0}/{1} peers.": "內容已發佈到 {0}/{1} 個節點。",
|
||||||
|
"Content published to {0} peers.": "內容已發佈到 {0} 個節點。",
|
||||||
|
"No peers found, but your content is ready to access.": "找不到節點,但是你的內容已經準備好被訪問。",
|
||||||
|
"Your network connection is restricted. Please, open <b>{0}</b> port": "你的網路連接受限制。請在你的路由器上打開 <b>{0}</b> 埠",
|
||||||
|
"on your router to make your site accessible for everyone.": "確保你的網站能被每一個人訪問。",
|
||||||
|
"Content publish failed.": "內容發佈失敗。",
|
||||||
|
"This file still in sync, if you write it now, then the previous content may be lost.": "這個檔仍然在同步中,如果你現在寫入它,之前的內容可能會被丟失。",
|
||||||
|
"Write content anyway": "強制寫入內容",
|
||||||
|
"New certificate added:": "新證書:",
|
||||||
|
"You current certificate:": "你當前的證書:",
|
||||||
|
"Change it to {auth_type}/{auth_user_name}@{domain}": "改變至 {auth_type}/{auth_user_name}@{domain}",
|
||||||
|
"Certificate changed to: <b>{auth_type}/{auth_user_name}@{domain}</b>.": "證書更改至:<b>{auth_type}/{auth_user_name}@{domain}</b>。",
|
||||||
|
"Site cloned": "網站已克隆",
|
||||||
|
|
||||||
|
"You have successfully changed the web interface's language!": "你已經成功改變了 Web 界面的語言!",
|
||||||
|
"Due to the browser's caching, the full transformation could take some minute.": "由於你的瀏覽器緩存,完整的翻譯可能需要花幾分鐘。",
|
||||||
|
|
||||||
|
"Connection with <b>UiServer Websocket</b> was lost. Reconnecting...": "<b>UiServer Websocket</b> 的連線已丟失。重新連線中...",
|
||||||
|
"Connection with <b>UiServer Websocket</b> recovered.": "<b>UiServer Websocket</b> 的連線已恢復。",
|
||||||
|
"UiServer Websocket error, please reload the page.": "UiServer Websocket 錯誤,請重新載入頁面。",
|
||||||
|
" Connecting...": " 連線中...",
|
||||||
|
"Site size: <b>": "網站大小:<b>",
|
||||||
|
"MB</b> is larger than default allowed ": "MB</b> 比預設允許的值更大 ",
|
||||||
|
"Open site and set size limit to \" + site_info.next_size_limit + \"MB": "打開網站並設定大小限制到 \" + site_info.next_size_limit + \"MB",
|
||||||
|
" files needs to be downloaded": " 個檔需要下載",
|
||||||
|
" downloaded": " 已下載",
|
||||||
|
" download failed": " 下載失敗",
|
||||||
|
"Peers found: ": "已找到節點:",
|
||||||
|
"No peers found": "找不到節點",
|
||||||
|
"Running out of size limit (": "超出大小限制",
|
||||||
|
"Set limit to \" + site_info.next_size_limit + \"MB": "設定限制到 \" + site_info.next_size_limit + \"MB",
|
||||||
|
"Cloning site...": "複製網站中...",
|
||||||
|
"Site cloned": "網站已複製",
|
||||||
|
"Site size limit changed to {0}MB": "網站大小限制已改變到 {0}MB",
|
||||||
|
" New version of this page has just released.<br>Reload to see the modified content.": " 本頁面的新版本已經發佈。<br>重新載入來查看更改後的內容。",
|
||||||
|
"This site requests permission:": "這個網站的請求許可權:",
|
||||||
|
"_(Accept)": "授權"
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,55 @@
|
||||||
|
{
|
||||||
|
"Congratulations, your port <b>{0}</b> is opened.<br>You are a full member of the ZeroNet network!": "祝贺,您的端口 (<b>{0}</b>) 已经打开。<br>您已经是 ZeroNet 网络的正式成员了!",
|
||||||
|
"Tor mode active, every connection using Onion route.": "Tor 模式启用,每个连接正在使用洋葱路由。",
|
||||||
|
"Successfully started Tor onion hidden services.": "成功启动 Tor 洋葱隐藏服务。",
|
||||||
|
"Unable to start hidden services, please check your config.": "无法打开隐藏服务,请检查您的配置。",
|
||||||
|
"For faster connections open <b>{0}</b> port on your router.": "为了更快的连接请在路由器上打开 <b>{0}</b> 端口。",
|
||||||
|
"Your connection is restricted. Please, open <b>{0}</b> port on your router": "您的连接受限制。请在您的路由器上打开 <b>{0}</b> 端口",
|
||||||
|
"or configure Tor to become a full member of the ZeroNet network.": "或者配置您的 Tor 来成为 ZeroNet 的正式成员。",
|
||||||
|
|
||||||
|
"Select account you want to use in this site:": "选择您要在这个网站使用的帐户:",
|
||||||
|
"No certificate": "没有证书",
|
||||||
|
"currently selected": "当前选择",
|
||||||
|
"Unique to site": "网站独有身份",
|
||||||
|
|
||||||
|
"Content signing failed": "内容签名失败",
|
||||||
|
"Content publish queued for {0:.0f} seconds.": "内容已加入 {0:.0f} 秒后的发布队列。",
|
||||||
|
"Content published to {0}/{1} peers.": "内容已发布到 {0}/{1} 个节点。",
|
||||||
|
"Content published to {0} peers.": "内容已发布到 {0} 个节点。",
|
||||||
|
"No peers found, but your content is ready to access.": "找不到节点,但是您的内容已经准备好被访问。",
|
||||||
|
"Your network connection is restricted. Please, open <b>{0}</b> port": "您的网络连接受限制。请在您的路由器上打开 <b>{0}</b> 端口",
|
||||||
|
"on your router to make your site accessible for everyone.": "确保您的站点能被每一个人访问。",
|
||||||
|
"Content publish failed.": "内容发布失败。",
|
||||||
|
"This file still in sync, if you write it now, then the previous content may be lost.": "这个文件仍然在同步中,如果您现在写入它,之前的内容可能会被丢失。",
|
||||||
|
"Write content anyway": "强制写入内容",
|
||||||
|
"New certificate added:": "新证书:",
|
||||||
|
"You current certificate:": "您当前的证书:",
|
||||||
|
"Change it to {auth_type}/{auth_user_name}@{domain}": "更改至 {auth_type}/{auth_user_name}@{domain}",
|
||||||
|
"Certificate changed to: <b>{auth_type}/{auth_user_name}@{domain}</b>.": "证书更改至:<b>{auth_type}/{auth_user_name}@{domain}</b>。",
|
||||||
|
"Site cloned": "站点已克隆",
|
||||||
|
|
||||||
|
"You have successfully changed the web interface's language!": "您已经成功更改了 web 界面的语言!",
|
||||||
|
"Due to the browser's caching, the full transformation could take some minute.": "由于您的浏览器缓存,完整的翻译可能需要花几分钟。",
|
||||||
|
|
||||||
|
"Connection with <b>UiServer Websocket</b> was lost. Reconnecting...": "<b>UiServer Websocket</b> 的连接已丢失。重新连接中...",
|
||||||
|
"Connection with <b>UiServer Websocket</b> recovered.": "<b>UiServer Websocket</b> 的连接已恢复。",
|
||||||
|
"UiServer Websocket error, please reload the page.": "UiServer Websocket 错误,请重新加载页面。",
|
||||||
|
" Connecting...": " 连接中...",
|
||||||
|
"Site size: <b>": "站点大小:<b>",
|
||||||
|
"MB</b> is larger than default allowed ": "MB</b> 比默认允许的值更大 ",
|
||||||
|
"Open site and set size limit to \" + site_info.next_size_limit + \"MB": "打开站点并设置大小限制到 \" + site_info.next_size_limit + \"MB",
|
||||||
|
" files needs to be downloaded": " 个文件需要下载",
|
||||||
|
" downloaded": " 已下载",
|
||||||
|
" download failed": " 下载失败",
|
||||||
|
"Peers found: ": "已找到节点:",
|
||||||
|
"No peers found": "找不到节点",
|
||||||
|
"Running out of size limit (": "超出大小限制",
|
||||||
|
"Set limit to \" + site_info.next_size_limit + \"MB": "设置限制到 \" + site_info.next_size_limit + \"MB",
|
||||||
|
"Cloning site...": "克隆站点中...",
|
||||||
|
"Site cloned": "站点已克隆",
|
||||||
|
"Site size limit changed to {0}MB": "站点大小限制已更改到 {0}MB",
|
||||||
|
" New version of this page has just released.<br>Reload to see the modified content.": " 本页面的新版本已经发布。<br>重新加载来查看更改后的内容。",
|
||||||
|
"This site requests permission:": "这个站点的请求权限:",
|
||||||
|
"_(Accept)": "授权"
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,949 @@
|
||||||
|
import time
|
||||||
|
import re
|
||||||
|
import os
|
||||||
|
import mimetypes
|
||||||
|
import json
|
||||||
|
import html
|
||||||
|
import urllib
|
||||||
|
import socket
|
||||||
|
|
||||||
|
import gevent
|
||||||
|
|
||||||
|
from Config import config
|
||||||
|
from Site import SiteManager
|
||||||
|
from User import UserManager
|
||||||
|
from Plugin import PluginManager
|
||||||
|
from Ui.UiWebsocket import UiWebsocket
|
||||||
|
from Crypt import CryptHash
|
||||||
|
from util import helper
|
||||||
|
|
||||||
|
# HTTP status codes this server can emit, mapped to full WSGI status lines
# (used as the first argument of start_response in sendHeader)
status_texts = {
    200: "200 OK",
    206: "206 Partial Content",
    400: "400 Bad Request",
    403: "403 Forbidden",
    404: "404 Not Found",
    500: "500 Internal Server Error",
}
|
||||||
|
|
||||||
|
# Extension -> MIME type overrides consulted before mimetypes.guess_type
# (see getContentType); covers types mimetypes gets wrong or misses
content_types = {
    "asc": "application/pgp-keys",
    "css": "text/css",
    "gpg": "application/pgp-encrypted",
    "html": "text/html",
    "js": "application/javascript",
    "json": "application/json",
    "oga": "audio/ogg",
    "ogg": "application/ogg",
    "ogv": "video/ogg",
    "sig": "application/pgp-signature",
    "txt": "text/plain",
    "webmanifest": "application/manifest+json",
    "wasm": "application/wasm",
    "webp": "image/webp"
}
|
||||||
|
|
||||||
|
|
||||||
|
class SecurityError(Exception):
    """Raised when a request path fails sanitization (e.g. "../" traversal in parsePath)."""
    pass
|
||||||
|
|
||||||
|
|
||||||
|
@PluginManager.acceptPlugins
|
||||||
|
class UiRequest(object):
|
||||||
|
|
||||||
|
    def __init__(self, server, get, env, start_response):
        # server: UiServer-like object providing log, sites, nonce lists, etc.
        #         (server attrs are only bound when a truthy server is given)
        # get: dict of parsed query-string parameters
        # env: WSGI environment dict
        # start_response: WSGI start_response callable
        if server:
            self.server = server
            self.log = server.log
        self.get = get  # Get parameters
        self.env = env  # Environment settings (WSGI env dict)
        # ['CONTENT_LENGTH', 'CONTENT_TYPE', 'GATEWAY_INTERFACE', 'HTTP_ACCEPT', 'HTTP_ACCEPT_ENCODING', 'HTTP_ACCEPT_LANGUAGE',
        # 'HTTP_COOKIE', 'HTTP_CACHE_CONTROL', 'HTTP_HOST', 'HTTP_HTTPS', 'HTTP_ORIGIN', 'HTTP_PROXY_CONNECTION', 'HTTP_REFERER',
        # 'HTTP_USER_AGENT', 'PATH_INFO', 'QUERY_STRING', 'REMOTE_ADDR', 'REMOTE_PORT', 'REQUEST_METHOD', 'SCRIPT_NAME',
        # 'SERVER_NAME', 'SERVER_PORT', 'SERVER_PROTOCOL', 'SERVER_SOFTWARE', 'werkzeug.request', 'wsgi.errors',
        # 'wsgi.input', 'wsgi.multiprocess', 'wsgi.multithread', 'wsgi.run_once', 'wsgi.url_scheme', 'wsgi.version']

        self.start_response = start_response  # Start response function
        self.user = None  # Lazily-loaded user object, cached by getCurrentUser
        self.script_nonce = None  # Nonce for script tags in wrapper html
|
||||||
|
|
||||||
|
def learnHost(self, host):
|
||||||
|
self.server.allowed_hosts.add(host)
|
||||||
|
self.server.log.info("Added %s as allowed host" % host)
|
||||||
|
|
||||||
|
def isHostAllowed(self, host):
|
||||||
|
if host in self.server.allowed_hosts:
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Allow any IP address as they are not affected by DNS rebinding
|
||||||
|
# attacks
|
||||||
|
if helper.isIp(host):
|
||||||
|
self.learnHost(host)
|
||||||
|
return True
|
||||||
|
|
||||||
|
if ":" in host and helper.isIp(host.rsplit(":", 1)[0]): # Test without port
|
||||||
|
self.learnHost(host)
|
||||||
|
return True
|
||||||
|
|
||||||
|
if self.isProxyRequest(): # Support for chrome extension proxy
|
||||||
|
if self.isDomain(host):
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
def isDomain(self, address):
|
||||||
|
return self.server.site_manager.isDomainCached(address)
|
||||||
|
|
||||||
|
def resolveDomain(self, domain):
|
||||||
|
return self.server.site_manager.resolveDomainCached(domain)
|
||||||
|
|
||||||
|
    # Call the request handler function base on path
    def route(self, path):
        # Central dispatcher: validates the request (ip restriction, allowed
        # host, path sanitization), then routes to the matching action*
        # method. Returns a WSGI body (bytes/str/iterable) or an error page.

        # Restict Ui access by ip
        if config.ui_restrict and self.env['REMOTE_ADDR'] not in config.ui_restrict:
            return self.error403(details=False)

        # Check if host allowed to do request
        if not self.isHostAllowed(self.env.get("HTTP_HOST")):
            # error403 is a generator; next() pulls its rendered body so we
            # can append our extra hint below
            ret_error = next(self.error403("Invalid host: %s" % self.env.get("HTTP_HOST"), details=False))

            http_get = self.env["PATH_INFO"]
            if self.env["QUERY_STRING"]:
                http_get += "?{0}".format(self.env["QUERY_STRING"])
            # Offer the same url with the host replaced by its resolved ip
            self_host = self.env["HTTP_HOST"].split(":")[0]
            self_ip = self.env["HTTP_HOST"].replace(self_host, socket.gethostbyname(self_host))
            link = "http://{0}{1}".format(self_ip, http_get)
            ret_body = """
                <h4>Start the client with <code>--ui_host "{host}"</code> argument</h4>
                <h4>or access via ip: <a href="{link}">{link}</a></h4>
            """.format(
                host=html.escape(self.env["HTTP_HOST"]),
                link=html.escape(link)
            ).encode("utf8")
            return iter([ret_error, ret_body])

        # Prepend .bit host for transparent proxy
        if self.isDomain(self.env.get("HTTP_HOST")):
            path = re.sub("^/", "/" + self.env.get("HTTP_HOST") + "/", path)
        path = re.sub("^http://zero[/]+", "/", path)  # Remove begining http://zero/ for chrome extension
        path = re.sub("^http://", "/", path)  # Remove begining http for chrome extension .bit access

        # Sanitize request url
        path = path.replace("\\", "/")
        if "../" in path or "./" in path:
            return self.error403("Invalid path: %s" % path)

        if self.env["REQUEST_METHOD"] == "OPTIONS":
            # CORS preflight: send headers only, empty body
            if "/" not in path.strip("/"):
                content_type = self.getContentType("index.html")
            else:
                content_type = self.getContentType(path)

            extra_headers = {"Access-Control-Allow-Origin": "null"}

            self.sendHeader(content_type=content_type, extra_headers=extra_headers, noscript=True)
            return ""

        if path == "/":
            return self.actionIndex()
        elif path in ("/favicon.ico", "/apple-touch-icon.png"):
            return self.actionFile("src/Ui/media/img/%s" % path)
        # Internal functions
        elif "/ZeroNet-Internal/" in path:
            path = re.sub(".*?/ZeroNet-Internal/", "/", path)
            func = getattr(self, "action" + path.strip("/"), None)  # Check if we have action+request_path function
            if func:
                return func()
            else:
                return self.error404(path)
        # Media
        elif path.startswith("/uimedia/"):
            return self.actionUiMedia(path)
        elif "/uimedia/" in path:
            # uimedia within site dir (for chrome extension)
            path = re.sub(".*?/uimedia/", "/uimedia/", path)
            return self.actionUiMedia(path)
        # Websocket
        elif path == "/Websocket":
            return self.actionWebsocket()
        # Debug
        elif path == "/Debug" and config.debug:
            return self.actionDebug()
        elif path == "/Console" and config.debug:
            return self.actionConsole()
        # Wrapper-less static files
        elif path.startswith("/raw/"):
            return self.actionSiteMedia(path.replace("/raw", "/media", 1), header_noscript=True)

        elif path.startswith("/add/"):
            return self.actionSiteAdd()
        # Site media wrapper
        else:
            # A valid single-use wrapper_nonce lets one html file be served
            # without the iframe wrapper
            if self.get.get("wrapper_nonce"):
                if self.get["wrapper_nonce"] in self.server.wrapper_nonces:
                    self.server.wrapper_nonces.remove(self.get["wrapper_nonce"])
                    return self.actionSiteMedia("/media" + path)  # Only serve html files with frame
                else:
                    self.server.log.warning("Invalid wrapper nonce: %s" % self.get["wrapper_nonce"])
                    body = self.actionWrapper(path)
            else:
                body = self.actionWrapper(path)
            if body:
                return body
            else:
                # Not a site url: fall back to an action* method of this class
                func = getattr(self, "action" + path.strip("/"), None)  # Check if we have action+request_path function
                if func:
                    return func()
                else:
                    ret = self.error404(path)
                    return ret
|
||||||
|
|
||||||
|
# The request is proxied by chrome extension or a transparent proxy
|
||||||
|
def isProxyRequest(self):
|
||||||
|
return self.env["PATH_INFO"].startswith("http://") or (self.server.allow_trans_proxy and self.isDomain(self.env.get("HTTP_HOST")))
|
||||||
|
|
||||||
|
def isWebSocketRequest(self):
|
||||||
|
return self.env.get("HTTP_UPGRADE") == "websocket"
|
||||||
|
|
||||||
|
def isAjaxRequest(self):
|
||||||
|
return self.env.get("HTTP_X_REQUESTED_WITH") == "XMLHttpRequest"
|
||||||
|
|
||||||
|
# Get mime by filename
|
||||||
|
def getContentType(self, file_name):
|
||||||
|
file_name = file_name.lower()
|
||||||
|
ext = file_name.rsplit(".", 1)[-1]
|
||||||
|
|
||||||
|
if ext in content_types:
|
||||||
|
content_type = content_types[ext]
|
||||||
|
elif ext in ("ttf", "woff", "otf", "woff2", "eot", "sfnt", "collection"):
|
||||||
|
content_type = "font/%s" % ext
|
||||||
|
else:
|
||||||
|
content_type = mimetypes.guess_type(file_name)[0]
|
||||||
|
|
||||||
|
if not content_type:
|
||||||
|
content_type = "application/octet-stream"
|
||||||
|
|
||||||
|
return content_type.lower()
|
||||||
|
|
||||||
|
# Return: <dict> Posted variables
|
||||||
|
def getPosted(self):
|
||||||
|
if self.env['REQUEST_METHOD'] == "POST":
|
||||||
|
return dict(urllib.parse.parse_qsl(
|
||||||
|
self.env['wsgi.input'].readline().decode()
|
||||||
|
))
|
||||||
|
else:
|
||||||
|
return {}
|
||||||
|
|
||||||
|
# Return: <dict> Cookies based on self.env
|
||||||
|
def getCookies(self):
|
||||||
|
raw_cookies = self.env.get('HTTP_COOKIE')
|
||||||
|
if raw_cookies:
|
||||||
|
cookies = urllib.parse.parse_qsl(raw_cookies)
|
||||||
|
return {key.strip(): val for key, val in cookies}
|
||||||
|
else:
|
||||||
|
return {}
|
||||||
|
|
||||||
|
def getCurrentUser(self):
|
||||||
|
if self.user:
|
||||||
|
return self.user # Cache
|
||||||
|
self.user = UserManager.user_manager.get() # Get user
|
||||||
|
if not self.user:
|
||||||
|
self.user = UserManager.user_manager.create()
|
||||||
|
return self.user
|
||||||
|
|
||||||
|
def getRequestUrl(self):
|
||||||
|
if self.isProxyRequest():
|
||||||
|
if self.env["PATH_INFO"].startswith("http://zero/"):
|
||||||
|
return self.env["PATH_INFO"]
|
||||||
|
else: # Add http://zero to direct domain access
|
||||||
|
return self.env["PATH_INFO"].replace("http://", "http://zero/", 1)
|
||||||
|
else:
|
||||||
|
return self.env["wsgi.url_scheme"] + "://" + self.env["HTTP_HOST"] + self.env["PATH_INFO"]
|
||||||
|
|
||||||
|
def getReferer(self):
|
||||||
|
referer = self.env.get("HTTP_REFERER")
|
||||||
|
if referer and self.isProxyRequest() and not referer.startswith("http://zero/"):
|
||||||
|
return referer.replace("http://", "http://zero/", 1)
|
||||||
|
else:
|
||||||
|
return referer
|
||||||
|
|
||||||
|
def isScriptNonceSupported(self):
|
||||||
|
user_agent = self.env.get("HTTP_USER_AGENT")
|
||||||
|
if "Edge/" in user_agent:
|
||||||
|
is_script_nonce_supported = False
|
||||||
|
elif "Safari/" in user_agent and "Chrome/" not in user_agent:
|
||||||
|
is_script_nonce_supported = False
|
||||||
|
else:
|
||||||
|
is_script_nonce_supported = True
|
||||||
|
return is_script_nonce_supported
|
||||||
|
|
||||||
|
# Send response headers
|
||||||
|
def sendHeader(self, status=200, content_type="text/html", noscript=False, allow_ajax=False, script_nonce=None, extra_headers=[]):
|
||||||
|
headers = {}
|
||||||
|
headers["Version"] = "HTTP/1.1"
|
||||||
|
headers["Connection"] = "Keep-Alive"
|
||||||
|
headers["Keep-Alive"] = "max=25, timeout=30"
|
||||||
|
headers["X-Frame-Options"] = "SAMEORIGIN"
|
||||||
|
if content_type != "text/html" and self.env.get("HTTP_REFERER") and self.isSameOrigin(self.getReferer(), self.getRequestUrl()):
|
||||||
|
headers["Access-Control-Allow-Origin"] = "*" # Allow load font files from css
|
||||||
|
|
||||||
|
if noscript:
|
||||||
|
headers["Content-Security-Policy"] = "default-src 'none'; sandbox allow-top-navigation allow-forms; img-src *; font-src * data:; media-src *; style-src * 'unsafe-inline';"
|
||||||
|
elif script_nonce and self.isScriptNonceSupported():
|
||||||
|
headers["Content-Security-Policy"] = "default-src 'none'; script-src 'nonce-{0}'; img-src 'self' blob: data:; style-src 'self' blob: 'unsafe-inline'; connect-src *; frame-src 'self' blob:".format(script_nonce)
|
||||||
|
|
||||||
|
if allow_ajax:
|
||||||
|
headers["Access-Control-Allow-Origin"] = "null"
|
||||||
|
|
||||||
|
if self.env["REQUEST_METHOD"] == "OPTIONS":
|
||||||
|
# Allow json access
|
||||||
|
headers["Access-Control-Allow-Headers"] = "Origin, X-Requested-With, Content-Type, Accept, Cookie, Range"
|
||||||
|
headers["Access-Control-Allow-Credentials"] = "true"
|
||||||
|
|
||||||
|
# Download instead of display file types that can be dangerous
|
||||||
|
if re.findall("/svg|/xml|/x-shockwave-flash|/pdf", content_type):
|
||||||
|
headers["Content-Disposition"] = "attachment"
|
||||||
|
|
||||||
|
cacheable_type = (
|
||||||
|
self.env["REQUEST_METHOD"] == "OPTIONS" or
|
||||||
|
content_type.split("/", 1)[0] in ("image", "video", "font") or
|
||||||
|
content_type in ("application/javascript", "text/css")
|
||||||
|
)
|
||||||
|
|
||||||
|
if content_type in ("text/plain", "text/html", "text/css", "application/javascript", "application/json", "application/manifest+json"):
|
||||||
|
content_type += "; charset=utf-8"
|
||||||
|
|
||||||
|
if status in (200, 206) and cacheable_type: # Cache Css, Js, Image files for 10min
|
||||||
|
headers["Cache-Control"] = "public, max-age=600" # Cache 10 min
|
||||||
|
else:
|
||||||
|
headers["Cache-Control"] = "no-cache, no-store, private, must-revalidate, max-age=0" # No caching at all
|
||||||
|
headers["Content-Type"] = content_type
|
||||||
|
headers.update(extra_headers)
|
||||||
|
return self.start_response(status_texts[status], list(headers.items()))
|
||||||
|
|
||||||
|
# Renders a template
|
||||||
|
def render(self, template_path, *args, **kwargs):
|
||||||
|
template = open(template_path, encoding="utf8").read()
|
||||||
|
|
||||||
|
def renderReplacer(m):
|
||||||
|
if m.group(1) in kwargs:
|
||||||
|
return "%s" % kwargs.get(m.group(1), "")
|
||||||
|
else:
|
||||||
|
return m.group(0)
|
||||||
|
|
||||||
|
template_rendered = re.sub("{(.*?)}", renderReplacer, template)
|
||||||
|
|
||||||
|
return template_rendered.encode("utf8")
|
||||||
|
|
||||||
|
def isWrapperNecessary(self, path):
|
||||||
|
match = re.match(r"/(?P<address>[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path)
|
||||||
|
|
||||||
|
if not match:
|
||||||
|
return True
|
||||||
|
|
||||||
|
inner_path = match.group("inner_path").lstrip("/")
|
||||||
|
if not inner_path or path.endswith("/"): # It's a directory
|
||||||
|
content_type = self.getContentType("index.html")
|
||||||
|
else: # It's a file
|
||||||
|
content_type = self.getContentType(inner_path)
|
||||||
|
|
||||||
|
is_html_file = "html" in content_type or "xhtml" in content_type
|
||||||
|
|
||||||
|
return is_html_file
|
||||||
|
|
||||||
|
    @helper.encodeResponse
    def formatRedirect(self, url):
        # Minimal HTML page that sends the browser's top frame to the given
        # url; the url is html-escaped, so it cannot break out of the markup
        return """
            <html>
            <body>
            Redirecting to <a href="{0}" target="_top">{0}</a>
            <script>
            window.top.location = "{0}"
            </script>
            </body>
            </html>
        """.format(html.escape(url))
|
||||||
|
|
||||||
|
# - Actions -
|
||||||
|
|
||||||
|
    # Redirect to an url
    def actionRedirect(self, url):
        # 301 permanent redirect; the yield makes this a generator, which is
        # returned directly as the WSGI response body
        self.start_response('301 Redirect', [('Location', str(url))])
        yield self.formatRedirect(url)
|
||||||
|
|
||||||
|
    def actionIndex(self):
        # Root url: redirect to the configured homepage site
        return self.actionRedirect("/" + config.homepage + "/")
|
||||||
|
|
||||||
|
    # Render a file from media with iframe site wrapper
    def actionWrapper(self, path, extra_headers=None):
        # Serves /{address}/{inner_path} urls: html requests get the iframe
        # wrapper page; non-html is delegated to actionSiteMedia. Returns
        # False on a bad url (route() then falls back to action* lookup).
        if not extra_headers:
            extra_headers = {}
        script_nonce = self.getScriptNonce()

        match = re.match(r"/(?P<address>[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path)
        just_added = False
        if match:
            address = match.group("address")
            inner_path = match.group("inner_path").lstrip("/")

            if not self.isWrapperNecessary(path):
                return self.actionSiteMedia("/media" + path)  # Serve non-html files without wrapper

            # The wrapper must only ever load as a top-level html navigation
            if self.isAjaxRequest():
                return self.error403("Ajax request not allowed to load wrapper")  # No ajax allowed on wrapper

            if self.isWebSocketRequest():
                return self.error403("WebSocket request not allowed to load wrapper")  # No websocket

            if "text/html" not in self.env.get("HTTP_ACCEPT", ""):
                return self.error403("Invalid Accept header to load wrapper: %s" % self.env.get("HTTP_ACCEPT", ""))
            if "prefetch" in self.env.get("HTTP_X_MOZ", "") or "prefetch" in self.env.get("HTTP_PURPOSE", ""):
                return self.error403("Prefetch not allowed to load wrapper")

            site = SiteManager.site_manager.get(address)

            if site and site.content_manager.contents.get("content.json"):
                title = site.content_manager.contents["content.json"]["title"]
            else:
                # No content.json yet: show a loading title and kick off the
                # download / re-announce as needed
                title = "Loading %s..." % address
                site = SiteManager.site_manager.get(address)
                if site:  # Already added, but not downloaded
                    if time.time() - site.announcer.time_last_announce > 5:
                        site.log.debug("Reannouncing site...")
                        gevent.spawn(site.update, announce=True)
                else:  # If not added yet
                    site = SiteManager.site_manager.need(address)
                    just_added = True

                if not site:
                    return False

            self.sendHeader(extra_headers=extra_headers, script_nonce=script_nonce)

            min_last_announce = (time.time() - site.announcer.time_last_announce) / 60
            if min_last_announce > 60 and site.isServing() and not just_added:
                site.log.debug("Site requested, but not announced recently (last %.0fmin ago). Updating..." % min_last_announce)
                gevent.spawn(site.update, announce=True)

            return iter([self.renderWrapper(site, path, inner_path, title, extra_headers, script_nonce=script_nonce)])
            # Make response be sent at once (see https://github.com/HelloZeroNet/ZeroNet/issues/1092)

        else:  # Bad url
            return False
|
||||||
|
|
||||||
|
def getSiteUrl(self, address):
|
||||||
|
if self.isProxyRequest():
|
||||||
|
return "http://zero/" + address
|
||||||
|
else:
|
||||||
|
return "/" + address
|
||||||
|
|
||||||
|
def getWsServerUrl(self):
|
||||||
|
if self.isProxyRequest():
|
||||||
|
if self.env["REMOTE_ADDR"] == "127.0.0.1": # Local client, the server address also should be 127.0.0.1
|
||||||
|
server_url = "http://127.0.0.1:%s" % self.env["SERVER_PORT"]
|
||||||
|
else: # Remote client, use SERVER_NAME as server's real address
|
||||||
|
server_url = "http://%s:%s" % (self.env["SERVER_NAME"], self.env["SERVER_PORT"])
|
||||||
|
else:
|
||||||
|
server_url = ""
|
||||||
|
return server_url
|
||||||
|
|
||||||
|
def processQueryString(self, site, query_string):
|
||||||
|
match = re.search("zeronet_peers=(.*?)(&|$)", query_string)
|
||||||
|
if match:
|
||||||
|
query_string = query_string.replace(match.group(0), "")
|
||||||
|
num_added = 0
|
||||||
|
for peer in match.group(1).split(","):
|
||||||
|
if not re.match(".*?:[0-9]+$", peer):
|
||||||
|
continue
|
||||||
|
ip, port = peer.rsplit(":", 1)
|
||||||
|
if site.addPeer(ip, int(port), source="query_string"):
|
||||||
|
num_added += 1
|
||||||
|
site.log.debug("%s peers added by query string" % num_added)
|
||||||
|
|
||||||
|
return query_string
|
||||||
|
|
||||||
|
    def renderWrapper(self, site, path, inner_path, title, extra_headers, show_loadingscreen=None, script_nonce=None):
        # Assemble all template variables and render the iframe wrapper page
        # (src/Ui/template/wrapper.html) for the given site/path as bytes.
        file_inner_path = inner_path
        if not file_inner_path:
            file_inner_path = "index.html"  # If inner path defaults to index.html

        if file_inner_path.endswith("/"):
            file_inner_path = file_inner_path + "index.html"

        address = re.sub("/.*", "", path.lstrip("/"))
        # Proxy requests address the site either via the "zero" pseudo-host
        # or directly via domain, which changes the relative url roots
        if self.isProxyRequest() and (not path or "/" in path[1:]):
            if self.env["HTTP_HOST"] == "zero":
                root_url = "/" + address + "/"
                file_url = "/" + address + "/" + inner_path
            else:
                file_url = "/" + inner_path
                root_url = "/"

        else:
            file_url = "/" + address + "/" + inner_path
            root_url = "/" + address + "/"

        if self.isProxyRequest():
            self.server.allowed_ws_origins.add(self.env["HTTP_HOST"])

        # Wrapper variable inits
        body_style = ""
        meta_tags = ""
        postmessage_nonce_security = "false"

        wrapper_nonce = self.getWrapperNonce()
        inner_query_string = self.processQueryString(site, self.env.get("QUERY_STRING", ""))

        if "?" in inner_path:
            sep = "&"
        else:
            sep = "?"

        # The single-use wrapper_nonce is appended so the inner frame may load
        if inner_query_string:
            inner_query_string = "%s%s&wrapper_nonce=%s" % (sep, inner_query_string, wrapper_nonce)
        else:
            inner_query_string = "%swrapper_nonce=%s" % (sep, wrapper_nonce)

        if self.isProxyRequest():  # Its a remote proxy request
            homepage = "http://zero/" + config.homepage
        else:  # Use relative path
            homepage = "/" + config.homepage

        server_url = self.getWsServerUrl()  # Real server url for WS connections

        user = self.getCurrentUser()
        if user:
            theme = user.settings.get("theme", "light")
        else:
            theme = "light"

        themeclass = "theme-%-6s" % re.sub("[^a-z]", "", theme)

        # Optional site-provided presentation settings from content.json
        if site.content_manager.contents.get("content.json"):  # Got content.json
            content = site.content_manager.contents["content.json"]
            if content.get("background-color"):
                background_color = content.get("background-color-%s" % theme, content["background-color"])
                body_style += "background-color: %s;" % html.escape(background_color)
            if content.get("viewport"):
                meta_tags += '<meta name="viewport" id="viewport" content="%s">' % html.escape(content["viewport"])
            if content.get("favicon"):
                meta_tags += '<link rel="icon" href="%s%s">' % (root_url, html.escape(content["favicon"]))
            if content.get("postmessage_nonce_security"):
                postmessage_nonce_security = "true"

        sandbox_permissions = ""

        if "NOSANDBOX" in site.settings["permissions"]:
            sandbox_permissions += " allow-same-origin"

        if show_loadingscreen is None:
            show_loadingscreen = not site.storage.isFile(file_inner_path)

        # re.escape on urls/query: they are embedded into a js regex in the
        # wrapper template
        return self.render(
            "src/Ui/template/wrapper.html",
            server_url=server_url,
            inner_path=inner_path,
            file_url=re.escape(file_url),
            file_inner_path=re.escape(file_inner_path),
            address=site.address,
            title=html.escape(title),
            body_style=body_style,
            meta_tags=meta_tags,
            query_string=re.escape(inner_query_string),
            wrapper_key=site.settings["wrapper_key"],
            ajax_key=site.settings["ajax_key"],
            wrapper_nonce=wrapper_nonce,
            postmessage_nonce_security=postmessage_nonce_security,
            permissions=json.dumps(site.settings["permissions"]),
            show_loadingscreen=json.dumps(show_loadingscreen),
            sandbox_permissions=sandbox_permissions,
            rev=config.rev,
            lang=config.language,
            homepage=homepage,
            themeclass=themeclass,
            script_nonce=script_nonce
        )
|
||||||
|
|
||||||
|
# Create a new wrapper nonce that allows to get one html file without the wrapper
|
||||||
|
def getWrapperNonce(self):
|
||||||
|
wrapper_nonce = CryptHash.random()
|
||||||
|
self.server.wrapper_nonces.append(wrapper_nonce)
|
||||||
|
return wrapper_nonce
|
||||||
|
|
||||||
|
def getScriptNonce(self):
|
||||||
|
if not self.script_nonce:
|
||||||
|
self.script_nonce = CryptHash.random(encoding="base64")
|
||||||
|
|
||||||
|
return self.script_nonce
|
||||||
|
|
||||||
|
# Create a new wrapper nonce that allows to get one site
|
||||||
|
def getAddNonce(self):
|
||||||
|
add_nonce = CryptHash.random()
|
||||||
|
self.server.add_nonces.append(add_nonce)
|
||||||
|
return add_nonce
|
||||||
|
|
||||||
|
def isSameOrigin(self, url_a, url_b):
|
||||||
|
if not url_a or not url_b:
|
||||||
|
return False
|
||||||
|
|
||||||
|
url_a = url_a.replace("/raw/", "/")
|
||||||
|
url_b = url_b.replace("/raw/", "/")
|
||||||
|
|
||||||
|
origin_pattern = "http[s]{0,1}://(.*?/.*?/).*"
|
||||||
|
is_origin_full = re.match(origin_pattern, url_a)
|
||||||
|
if not is_origin_full: # Origin looks trimmed to host, require only same host
|
||||||
|
origin_pattern = "http[s]{0,1}://(.*?/).*"
|
||||||
|
|
||||||
|
origin_a = re.sub(origin_pattern, "\\1", url_a)
|
||||||
|
origin_b = re.sub(origin_pattern, "\\1", url_b)
|
||||||
|
|
||||||
|
return origin_a == origin_b
|
||||||
|
|
||||||
|
# Return {address: 1Site.., inner_path: /data/users.json} from url path
|
||||||
|
def parsePath(self, path):
|
||||||
|
path = path.replace("\\", "/")
|
||||||
|
path = path.replace("/index.html/", "/") # Base Backward compatibility fix
|
||||||
|
if path.endswith("/"):
|
||||||
|
path = path + "index.html"
|
||||||
|
|
||||||
|
if "../" in path or "./" in path:
|
||||||
|
raise SecurityError("Invalid path")
|
||||||
|
|
||||||
|
match = re.match(r"/media/(?P<address>[A-Za-z0-9]+[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path)
|
||||||
|
if match:
|
||||||
|
path_parts = match.groupdict()
|
||||||
|
if self.isDomain(path_parts["address"]):
|
||||||
|
path_parts["address"] = self.resolveDomain(path_parts["address"])
|
||||||
|
path_parts["request_address"] = path_parts["address"] # Original request address (for Merger sites)
|
||||||
|
path_parts["inner_path"] = path_parts["inner_path"].lstrip("/")
|
||||||
|
if not path_parts["inner_path"]:
|
||||||
|
path_parts["inner_path"] = "index.html"
|
||||||
|
return path_parts
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
    # Serve a media for site
    def actionSiteMedia(self, path, header_length=True, header_noscript=False):
        # Serve /media/{address}/{inner_path}; downloads the file on demand
        # for known sites, or shows the add-site prompt for unknown ones.
        try:
            path_parts = self.parsePath(path)
        except SecurityError as err:
            return self.error403(err)

        if not path_parts:
            return self.error404(path)

        address = path_parts["address"]

        file_path = "%s/%s/%s" % (config.data_dir, address, path_parts["inner_path"])

        if (config.debug or config.merge_media) and file_path.split("/")[-1].startswith("all."):
            # If debugging merge *.css to all.css and *.js to all.js
            site = self.server.sites.get(address)
            if site and site.settings["own"]:
                from Debug import DebugMedia
                DebugMedia.merge(file_path)

        if not address or address == ".":
            return self.error403(path_parts["inner_path"])

        # A valid ajax_key lets the site be fetched cross-origin via ajax
        header_allow_ajax = False
        if self.get.get("ajax_key"):
            site = SiteManager.site_manager.get(path_parts["request_address"])
            if self.get["ajax_key"] == site.settings["ajax_key"]:
                header_allow_ajax = True
            else:
                return self.error403("Invalid ajax_key")

        file_size = helper.getFilesize(file_path)

        if file_size is not None:
            return self.actionFile(file_path, header_length=header_length, header_noscript=header_noscript, header_allow_ajax=header_allow_ajax, file_size=file_size, path_parts=path_parts)

        elif os.path.isdir(file_path):  # If this is actually a folder, add "/" and redirect
            if path_parts["inner_path"]:
                return self.actionRedirect("./%s/" % path_parts["inner_path"].split("/")[-1])
            else:
                return self.actionRedirect("./%s/" % path_parts["address"])

        else:  # File not exists, try to download
            if address not in SiteManager.site_manager.sites:  # Only in case if site already started downloading
                return self.actionSiteAddPrompt(path)

            site = SiteManager.site_manager.need(address)

            if path_parts["inner_path"].endswith("favicon.ico"):  # Default favicon for all sites
                return self.actionFile("src/Ui/media/img/favicon.ico")

            result = site.needFile(path_parts["inner_path"], priority=15)  # Wait until file downloads
            if result:
                file_size = helper.getFilesize(file_path)
                return self.actionFile(file_path, header_length=header_length, header_noscript=header_noscript, header_allow_ajax=header_allow_ajax, file_size=file_size, path_parts=path_parts)
            else:
                self.log.debug("File not found: %s" % path_parts["inner_path"])
                return self.error404(path)
|
||||||
|
|
||||||
|
    # Serve a media for ui
    def actionUiMedia(self, path):
        # Serve static UI assets from src/Ui/media; the resolved path must
        # stay inside that directory
        match = re.match("/uimedia/(?P<inner_path>.*)", path)
        if match:  # Looks like a valid path
            file_path = "src/Ui/media/%s" % match.group("inner_path")
            allowed_dir = os.path.abspath("src/Ui/media")  # Only files within data/sitehash allowed
            if "../" in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir):
                # File not in allowed path
                return self.error403()
            else:
                if (config.debug or config.merge_media) and match.group("inner_path").startswith("all."):
                    # If debugging merge *.css to all.css and *.js to all.js
                    from Debug import DebugMedia
                    DebugMedia.merge(file_path)
                return self.actionFile(file_path, header_length=False)  # Dont's send site to allow plugins append content

        else:  # Bad url
            return self.error400()
|
||||||
|
|
||||||
|
    def actionSiteAdd(self):
        # Handle the confirmation POST of the add-site prompt: verify the
        # single-use nonce, start downloading the site, then redirect back
        post_data = self.env["wsgi.input"].read().decode()
        post = dict(urllib.parse.parse_qsl(post_data))
        if post["add_nonce"] not in self.server.add_nonces:
            return self.error403("Add nonce error.")
        self.server.add_nonces.remove(post["add_nonce"])
        SiteManager.site_manager.need(post["address"])
        return self.actionRedirect(post["url"])
|
||||||
|
|
||||||
|
@helper.encodeResponse
|
||||||
|
def actionSiteAddPrompt(self, path):
|
||||||
|
path_parts = self.parsePath(path)
|
||||||
|
if not path_parts or not self.server.site_manager.isAddress(path_parts["address"]):
|
||||||
|
return self.error404(path)
|
||||||
|
|
||||||
|
self.sendHeader(200, "text/html", noscript=True)
|
||||||
|
template = open("src/Ui/template/site_add.html").read()
|
||||||
|
template = template.replace("{url}", html.escape(self.env["PATH_INFO"]))
|
||||||
|
template = template.replace("{address}", path_parts["address"])
|
||||||
|
template = template.replace("{add_nonce}", self.getAddNonce())
|
||||||
|
return template
|
||||||
|
|
||||||
|
def replaceHtmlVariables(self, block, path_parts):
    """Substitute {themeclass} and {site_modified} placeholders in a served HTML block.

    block: bytes chunk of the HTML file being streamed.
    path_parts: parsed request path dict (or falsy when no site context).
    Returns the modified bytes.
    """
    user = self.getCurrentUser()
    # Strip every non a-z character from the stored theme name so the value
    # is safe to embed as a CSS class; %-6s left-justifies/pads to 6 chars.
    themeclass = "theme-%-6s" % re.sub("[^a-z]", "", user.settings.get("theme", "light"))
    block = block.replace(b"{themeclass}", themeclass.encode("utf8"))

    if path_parts:
        site = self.server.sites.get(path_parts.get("address"))
        # NOTE(review): assumes the site is already loaded; if the address is
        # unknown, site is None and the next line raises AttributeError — confirm
        # callers guarantee a loaded site here.
        if site.settings["own"]:
            # Own site: always use "now" so the owner sees fresh content.
            modified = int(time.time())
        else:
            modified = int(site.content_manager.contents["content.json"]["modified"])
        block = block.replace(b"{site_modified}", str(modified).encode("utf8"))

    return block
|
# Stream a file to client
def actionFile(self, file_path, block_size=64 * 1024, send_header=True, header_length=True, header_noscript=False, header_allow_ajax=False, extra_headers=None, file_size=None, file_obj=None, path_parts=None):
    """Generator that streams a file to the client in block_size chunks.

    Supports HTTP Range requests, optional Content-Encoding passthrough and
    {placeholder} substitution for .html files. Yields bytes blocks; yields a
    404 page when the file does not exist.
    """
    if extra_headers is None:  # Avoid the shared-mutable-default pitfall
        extra_headers = {}
    file_name = os.path.basename(file_path)

    if file_size is None:
        file_size = helper.getFilesize(file_path)

    if file_size is not None:
        # Try to figure out content type by extension
        content_type = self.getContentType(file_name)

        http_range = self.env.get("HTTP_RANGE")  # renamed: don't shadow builtin range()
        range_start = None

        is_html_file = file_name.endswith(".html")
        if is_html_file:
            # Variable substitution changes the size, so Content-Length would be wrong
            header_length = False

        if send_header:
            extra_headers = extra_headers.copy()  # Don't mutate the caller's dict
            content_encoding = self.get.get("zeronet_content_encoding", "")
            # Only echo back the encoding if every token is a known-safe value
            if all(part.strip() in ("gzip", "compress", "deflate", "identity", "br") for part in content_encoding.split(",")):
                extra_headers["Content-Encoding"] = content_encoding
            extra_headers["Accept-Ranges"] = "bytes"
            if header_length:
                extra_headers["Content-Length"] = str(file_size)
            if http_range:
                # First number in the header is the start offset
                range_start = int(re.match(".*?([0-9]+)", http_range).group(1))
                if re.match(".*?-([0-9]+)", http_range):
                    # Explicit end offset; HTTP ranges are inclusive, hence +1
                    range_end = int(re.match(".*?-([0-9]+)", http_range).group(1)) + 1
                else:
                    range_end = file_size
                extra_headers["Content-Length"] = str(range_end - range_start)
                extra_headers["Content-Range"] = "bytes %s-%s/%s" % (range_start, range_end - 1, file_size)
            status = 206 if http_range else 200  # 206 Partial Content for range requests
            self.sendHeader(status, content_type=content_type, noscript=header_noscript, allow_ajax=header_allow_ajax, extra_headers=extra_headers)
        if self.env["REQUEST_METHOD"] != "OPTIONS":
            if not file_obj:
                file_obj = open(file_path, "rb")

            # try/finally guarantees the handle is closed even when the client
            # disconnects mid-stream (GeneratorExit) or substitution raises —
            # the original raise-StopIteration pattern leaked it in those cases.
            try:
                if range_start:
                    file_obj.seek(range_start)
                while True:
                    block = file_obj.read(block_size)
                    if is_html_file:
                        block = self.replaceHtmlVariables(block, path_parts)
                    if not block:  # EOF
                        break
                    yield block
            finally:
                file_obj.close()
    else:  # File not exists
        for part in self.error404(str(file_path)):
            yield part
||||||
|
# On websocket connection
def actionWebsocket(self):
    """Upgrade the request to a websocket session for the site matching the
    wrapper_key query parameter; blocks until the websocket closes.

    Returns a WSGI body list (or an error page) after the session ends.
    """
    ws = self.env.get("wsgi.websocket")

    if ws:
        # Allow only same-origin websocket requests (or explicitly whitelisted origins)
        origin = self.env.get("HTTP_ORIGIN")
        host = self.env.get("HTTP_HOST")
        if origin:
            # Compare host parts only (strip the scheme from the Origin header)
            origin_host = origin.split("://", 1)[-1]
            if origin_host != host and origin_host not in self.server.allowed_ws_origins:
                error_message = "Invalid origin: %s (host: %s, allowed: %s)" % (origin, host, self.server.allowed_ws_origins)
                ws.send(json.dumps({"error": error_message}))
                return self.error403(error_message)

        # Find site by wrapper_key
        wrapper_key = self.get["wrapper_key"]
        site = None
        # NOTE(review): no break — the LAST matching site wins; harmless if
        # wrapper keys are unique, but worth confirming.
        for site_check in list(self.server.sites.values()):
            if site_check.settings["wrapper_key"] == wrapper_key:
                site = site_check

        if site:  # Correct wrapper key
            try:
                user = self.getCurrentUser()
            except Exception as err:
                # Surface the error on the socket too, so the page can show it
                ws.send(json.dumps({"error": "Error in data/user.json: %s" % err}))
                return self.error500("Error in data/user.json: %s" % err)
            if not user:
                ws.send(json.dumps({"error": "No user found"}))
                return self.error403("No user found")
            ui_websocket = UiWebsocket(ws, site, self.server, user, self)
            site.websockets.append(ui_websocket)  # Add to site websockets to allow notify on events
            self.server.websockets.append(ui_websocket)
            ui_websocket.start()  # Blocks for the lifetime of the websocket
            self.server.websockets.remove(ui_websocket)
            for site_check in list(self.server.sites.values()):
                # Remove websocket from every site (admin sites allowed to join other sites event channels)
                if ui_websocket in site_check.websockets:
                    site_check.websockets.remove(ui_websocket)
            return [b"Bye."]
        else:  # No site found by wrapper key
            ws.send(json.dumps({"error": "Wrapper key not found: %s" % wrapper_key}))
            return self.error403("Wrapper key not found: %s" % wrapper_key)
    else:
        # No websocket upgrade in the WSGI environ
        self.start_response("400 Bad Request", [])
        return [b"Not a websocket request!"]
||||||
|
# Debug last error
def actionDebug(self):
    """Re-raise the most recent exception captured by DebugHook so the
    debugger middleware can display it; report success when there is none."""
    import main
    last_error = main.DebugHook.last_error
    # Guard clause: nothing recorded -> plain success page
    if not last_error:
        self.sendHeader()
        return [b"No error! :)"]
    # Rebuild the exception with its original traceback and raise it
    exc_type, exc_value, exc_tb = last_error
    raise exc_type(exc_value).with_traceback(exc_tb)
|
||||||
|
# Just raise an error to get console
def actionConsole(self):
    """Deliberately raise an exception so the Werkzeug debugger opens an
    interactive console with `sites`, `main` and `bench` in scope.

    Debug-only endpoint; the eval() calls here execute operator-supplied
    code on purpose — never expose this outside a trusted debug setup.
    """
    import sys
    # Bound locally so they are reachable from the debugger console frame
    sites = self.server.sites
    main = sys.modules["main"]

    def bench(code, times=100, init=None):
        # Micro-benchmark helper: eval `code` `times` times, return
        # [elapsed summary, last result]. `init` is exec'd once first.
        sites = self.server.sites
        main = sys.modules["main"]
        s = time.time()
        if init:
            eval(compile(init, '<string>', 'exec'), globals(), locals())
        for _ in range(times):
            back = eval(code, globals(), locals())
        return ["%s run: %.3fs" % (times, time.time() - s), back]
    raise Exception("Here is your console")
||||||
|
# - Tests -
|
||||||
|
|
||||||
|
def actionTestStream(self):
    """Dev endpoint demonstrating chunked/streamed responses."""
    self.sendHeader()
    yield " " * 1080  # Overflow browser's buffer so chunks render immediately
    yield "He"
    time.sleep(1)  # Simulate a slow producer between chunks
    yield "llo!"
    # yield "Running websockets: %s" % len(self.server.websockets)
    # self.server.sendMessage("Hello!")
|
||||||
|
# - Errors -
|
||||||
|
|
||||||
|
# Send bad request error
def error400(self, message=""):
    """Respond with HTTP 400 Bad Request and a formatted error page."""
    self.log.error("Error 400: %s" % message)
    self.sendHeader(400, noscript=True)
    return self.formatError("Bad Request", message)
|
||||||
|
# You are not allowed to access this
def error403(self, message="", details=True):
    """Respond with HTTP 403 Forbidden; `details` controls whether the
    formatted page includes the debug detail table."""
    self.log.warning("Error 403: %s" % message)
    self.sendHeader(403, noscript=True)
    return self.formatError("Forbidden", message, details=details)
|
|
||||||
|
# Send file not found error
def error404(self, path=""):
    """Respond with HTTP 404 Not Found (no debug details on the page)."""
    self.sendHeader(404, noscript=True)
    return self.formatError("Not Found", path, details=False)
|
||||||
|
# Internal server error
def error500(self, message=":("):
    """Respond with HTTP 500 and a formatted error page."""
    self.log.error("Error 500: %s" % message)
    self.sendHeader(500, noscript=True)
    return self.formatError("Server error", message)
|
|
||||||
|
@helper.encodeResponse
def formatError(self, title, message, details=True):
    """Render an HTML error page.

    title: heading text (not escaped — callers pass literals only).
    message: user-visible description; always HTML-escaped.
    details: when truthy and running in debug mode, append a sanitized dump
    of the request environment, versions and (password-stripped) config.
    """
    import sys
    import gevent

    if details and config.debug:
        # Only string-valued env entries, and never cookies (may hold session data)
        details = {key: val for key, val in list(self.env.items()) if hasattr(val, "endswith") and "COOKIE" not in key}
        details["version_zeronet"] = "%s r%s" % (config.version, config.rev)
        details["version_python"] = sys.version
        details["version_gevent"] = gevent.__version__
        details["plugins"] = PluginManager.plugin_manager.plugin_names
        # Strip anything password-like before echoing the config back
        arguments = {key: val for key, val in vars(config.arguments).items() if "password" not in key}
        details["arguments"] = arguments
        # Fix: headings previously closed <h2> with </h3> (invalid HTML)
        return """
            <style>
            * { font-family: Consolas, Monospace; color: #333 }
            pre { padding: 10px; background-color: #EEE }
            </style>
            <h1>%s</h1>
            <h2>%s</h2>
            <h3>Please <a href="https://github.com/HelloZeroNet/ZeroNet/issues" target="_top">report it</a> if you think this an error.</h3>
            <h4>Details:</h4>
            <pre>%s</pre>
        """ % (title, html.escape(message), html.escape(json.dumps(details, indent=4, sort_keys=True)))
    else:
        return """
            <style>
            * { font-family: Consolas, Monospace; color: #333; }
            code { font-family: Consolas, Monospace; background-color: #EEE }
            </style>
            <h1>%s</h1>
            <h2>%s</h2>
        """ % (title, html.escape(message))
|
|
@ -0,0 +1,206 @@
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
import urllib
|
||||||
|
import socket
|
||||||
|
import gevent
|
||||||
|
|
||||||
|
from gevent.pywsgi import WSGIServer
|
||||||
|
from lib.gevent_ws import WebSocketHandler
|
||||||
|
|
||||||
|
from .UiRequest import UiRequest
|
||||||
|
from Site import SiteManager
|
||||||
|
from Config import config
|
||||||
|
from Debug import Debug
|
||||||
|
import importlib
|
||||||
|
|
||||||
|
|
||||||
|
# Skip websocket handler if not necessary
class UiWSGIHandler(WebSocketHandler):
    """Per-connection WSGI handler: upgrades to websocket when requested,
    tracks open sockets on the server for clean shutdown, and routes
    handler errors either to the /Debug hook or to an error-500 page."""

    def __init__(self, *args, **kwargs):
        # args[2] is the WSGIServer instance (positional protocol of the
        # gevent handler constructor) — grab it before super() runs the request
        self.server = args[2]
        super(UiWSGIHandler, self).__init__(*args, **kwargs)
        self.args = args
        self.kwargs = kwargs

    def handleError(self, err):
        """Record or display an unhandled error from the request handler."""
        if config.debug:  # Allow websocket errors to appear on /Debug
            import main
            main.DebugHook.handleError()
        else:
            # Production: render an error-500 page to the client instead
            ui_request = UiRequest(self.server, {}, self.environ, self.start_response)
            block_gen = ui_request.error500("UiWSGIHandler error: %s" % Debug.formatExceptionMessage(err))
            for block in block_gen:
                self.write(block)

    def run_application(self):
        """Run the wrapped WSGI app, downgrading client disconnects to
        warnings and delegating real errors to handleError()."""
        # Label log lines by whether this was a websocket upgrade
        err_name = "UiWSGIHandler websocket" if "HTTP_UPGRADE" in self.environ else "UiWSGIHandler"
        try:
            super(UiWSGIHandler, self).run_application()
        except (ConnectionAbortedError, ConnectionResetError, BrokenPipeError) as err:
            # Client went away mid-response — not our bug, just log it
            logging.warning("%s connection error: %s" % (err_name, err))
        except Exception as err:
            logging.warning("%s error: %s" % (err_name, Debug.formatException(err)))
            self.handleError(err)

    def handle(self):
        # Save socket to be able to close them properly on exit
        self.server.sockets[self.client_address] = self.socket
        super(UiWSGIHandler, self).handle()
        del self.server.sockets[self.client_address]
|
||||||
|
|
||||||
|
class UiServer:
    """The ZeroNet web UI server: binds a gevent WSGIServer, dispatches
    requests to UiRequest, and manages websocket clients and shutdown."""

    def __init__(self):
        self.ip = config.ui_ip
        self.port = config.ui_port
        self.running = False
        if self.ip == "*":
            self.ip = "0.0.0.0"  # Bind all
        # allowed_hosts guards against DNS-rebinding: requests whose Host
        # header is not in this set are rejected elsewhere.
        if config.ui_host:
            self.allowed_hosts = set(config.ui_host)
        elif config.ui_ip == "127.0.0.1":
            # IP Addresses are inherently allowed as they are immune to DNS
            # rebinding attacks.
            self.allowed_hosts = set(["zero", "localhost:%s" % config.ui_port])
            # "URI producers and normalizers should omit the port component and
            # its ':' delimiter if port is empty or if its value would be the
            # same as that of the scheme's default."
            # Source: https://tools.ietf.org/html/rfc3986#section-3.2.3
            # As a result, we need to support portless hosts if port 80 is in
            # use.
            if config.ui_port == 80:
                self.allowed_hosts.update(["localhost"])
        else:
            self.allowed_hosts = set([])
        self.allowed_ws_origins = set()
        self.allow_trans_proxy = config.ui_trans_proxy

        self.wrapper_nonces = []  # Single-use tokens for wrapper page loads
        self.add_nonces = []  # Single-use tokens for "add site" confirmations
        self.websockets = []  # Currently connected UiWebsocket clients
        self.site_manager = SiteManager.site_manager
        self.sites = SiteManager.site_manager.list()
        self.log = logging.getLogger(__name__)
        # Forward error-level log records to connected websockets
        config.error_logger.onNewRecord = self.handleErrorLogRecord

    def handleErrorLogRecord(self, record):
        # Notify UI clients that a new error/warning log entry arrived
        self.updateWebsocket(log_event=record.levelname)

    # After WebUI started
    def afterStarted(self):
        from util import Platform
        Platform.setMaxfilesopened(config.max_files_opened)

    # Handle WSGI request
    def handleRequest(self, env, start_response):
        # Re-decode PATH_INFO as utf8 (WSGI delivers it latin-1-ish)
        path = bytes(env["PATH_INFO"], "raw-unicode-escape").decode("utf8")
        if env.get("QUERY_STRING"):
            get = dict(urllib.parse.parse_qsl(env['QUERY_STRING']))
        else:
            get = {}
        ui_request = UiRequest(self, get, env, start_response)
        if config.debug:  # Let the exception catched by werkezung
            return ui_request.route(path)
        else:  # Catch and display the error
            try:
                return ui_request.route(path)
            except Exception as err:
                logging.debug("UiRequest error: %s" % Debug.formatException(err))
                return ui_request.error500("Err: %s" % Debug.formatException(err))

    # Reload the UiRequest class to prevent restarts in debug mode
    def reload(self):
        global UiRequest
        import imp
        import sys
        importlib.reload(sys.modules["User.UserManager"])
        importlib.reload(sys.modules["Ui.UiWebsocket"])
        # Rebind the module-global UiRequest to the freshly loaded class
        UiRequest = imp.load_source("UiRequest", "src/Ui/UiRequest.py").UiRequest
        # UiRequest.reload()

    # Bind and run the server
    def start(self):
        self.running = True
        handler = self.handleRequest

        if config.debug:
            # Auto reload UiRequest on change
            from Debug import DebugReloader
            DebugReloader.watcher.addCallback(self.reload)

            # Werkzeug Debugger (optional: falls back gracefully if missing)
            try:
                from werkzeug.debug import DebuggedApplication
                handler = DebuggedApplication(self.handleRequest, evalex=True)
            except Exception as err:
                self.log.info("%s: For debugging please download Werkzeug (http://werkzeug.pocoo.org/)" % err)
                from Debug import DebugReloader
        self.log.write = lambda msg: self.log.debug(msg.strip())  # For Wsgi access.log
        self.log.info("--------------------------------------")
        if ":" in config.ui_ip:
            # IPv6 addresses need brackets in the printed URL
            self.log.info("Web interface: http://[%s]:%s/" % (config.ui_ip, config.ui_port))
        else:
            self.log.info("Web interface: http://%s:%s/" % (config.ui_ip, config.ui_port))
        self.log.info("--------------------------------------")

        if config.open_browser and config.open_browser != "False":
            logging.info("Opening browser: %s...", config.open_browser)
            import webbrowser
            try:
                if config.open_browser == "default_browser":
                    browser = webbrowser.get()
                else:
                    browser = webbrowser.get(config.open_browser)
                url = "http://%s:%s/%s" % (config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port, config.homepage)
                # Delay so the server socket is listening before the browser connects
                gevent.spawn_later(0.3, browser.open, url, new=2)
            except Exception as err:
                print("Error starting browser: %s" % err)

        self.server = WSGIServer((self.ip, self.port), handler, handler_class=UiWSGIHandler, log=self.log)
        self.server.sockets = {}  # Open sockets tracked by UiWSGIHandler.handle
        self.afterStarted()
        try:
            self.server.serve_forever()  # Blocks until stop()
        except Exception as err:
            self.log.error("Web interface bind error, must be running already, exiting.... %s" % err)
            import main
            main.file_server.stop()
        self.log.info("Stopped.")

    def stop(self):
        """Shut down: close websocket clients, then tracked HTTP sockets,
        then the listening socket itself."""
        self.log.debug("Stopping...")
        # Close WS sockets
        if "clients" in dir(self.server):
            for client in list(self.server.clients.values()):
                client.ws.close()
        # Close http sockets
        sock_closed = 0
        for sock in list(self.server.sockets.values()):
            try:
                sock.send(b"bye")
                sock.shutdown(socket.SHUT_RDWR)
                # sock._sock.close()
                # sock.close()
                sock_closed += 1
            except Exception as err:
                self.log.debug("Http connection close error: %s" % err)
        self.log.debug("Socket closed: %s" % sock_closed)
        time.sleep(0.1)  # Give peers a moment to notice the shutdown
        if config.debug:
            from Debug import DebugReloader
            DebugReloader.watcher.stop()

        self.server.socket.close()
        self.server.stop()
        self.running = False
        time.sleep(1)

    def updateWebsocket(self, **kwargs):
        """Broadcast a "serverChanged" event to every connected websocket;
        the first keyword argument (if any) becomes the event payload."""
        if kwargs:
            param = {"event": list(kwargs.items())[0]}
        else:
            param = None

        for ws in self.websockets:
            ws.event("serverChanged", param)
|
|
@ -0,0 +1,3 @@
|
||||||
|
from .UiServer import UiServer
|
||||||
|
from .UiRequest import UiRequest
|
||||||
|
from .UiWebsocket import UiWebsocket
|
|
@ -0,0 +1,32 @@
|
||||||
|
# Fixbutton: the floating ZeroNet corner button; animates on hover and
# exposes drag state so the mouseout animation can be suppressed mid-drag.
class Fixbutton
    constructor: ->
        @dragging = false
        # Hover in: enlarge background, reveal burger icon, hide label text
        $(".fixbutton-bg").on "mouseover", ->
            $(".fixbutton-bg").stop().animate({"scale": 0.7}, 800, "easeOutElastic")
            $(".fixbutton-burger").stop().animate({"opacity": 1.5, "left": 0}, 800, "easeOutElastic")
            $(".fixbutton-text").stop().animate({"opacity": 0, "left": 20}, 300, "easeOutCubic")

        # Hover out: reverse the animation, unless a drag is in progress
        $(".fixbutton-bg").on "mouseout", ->
            if $(".fixbutton").hasClass("dragging")
                return true
            $(".fixbutton-bg").stop().animate({"scale": 0.6}, 300, "easeOutCubic")
            $(".fixbutton-burger").stop().animate({"opacity": 0, "left": -20}, 300, "easeOutCubic")
            $(".fixbutton-text").stop().animate({"opacity": 0.9, "left": 0}, 300, "easeOutBack")


        ###$(".fixbutton-bg").on "click", ->
            return false
        ###

        # Press/release handlers kept as no-ops (previous effects commented out)
        $(".fixbutton-bg").on "mousedown", ->
            # $(".fixbutton-burger").stop().animate({"scale": 0.7, "left": 0}, 300, "easeOutCubic")
            #$("#inner-iframe").toggleClass("back")
            #$(".wrapper-iframe").stop().animate({"scale": 0.9}, 600, "easeOutCubic")
            #$("body").addClass("back")

        $(".fixbutton-bg").on "mouseup", ->
            # $(".fixbutton-burger").stop().animate({"scale": 1, "left": 0}, 600, "easeOutElastic")


# Export to the global namespace for the wrapper page
window.Fixbutton = Fixbutton
|
|
@ -0,0 +1,57 @@
|
||||||
|
# Infopanel: small open/closable status panel in the wrapper UI.
# onOpened/onClosed callbacks are assigned externally by the wrapper.
class Infopanel
    constructor: (@elem) ->
        @visible = false

    # Make the panel visible; start it closed or open per the argument
    show: (closed=false) =>
        @elem.parent().addClass("visible")
        if closed
            @close()
        else
            @open()

    # Toggle the expanded (multi-line) state; returns false to eat the click
    unfold: =>
        @elem.toggleClass("unfolded")
        return false

    # Rebind click handlers to match the current open/closed state
    updateEvents: =>
        @elem.off("click")
        @elem.find(".close").off("click")
        @elem.find(".line").off("click")

        @elem.find(".line").on("click", @unfold)

        if @elem.hasClass("closed")
            # Whole panel acts as an "open" button while closed
            @elem.on "click", =>
                @onOpened()
                @open()
        else
            # Only the close icon closes an open panel
            @elem.find(".close").on "click", =>
                @onClosed()
                @close()

    hide: =>
        @elem.parent().removeClass("visible")

    close: =>
        @elem.addClass("closed")
        @updateEvents()
        return false

    open: =>
        @elem.removeClass("closed")
        @updateEvents()
        return false

    # Set the two title rows
    setTitle: (line1, line2) =>
        @elem.find(".line-1").text(line1)
        @elem.find(".line-2").text(line2)

    # Badge shown while the panel is closed
    setClosedNum: (num) =>
        @elem.find(".closed-num").text(num)

    # Configure the action button's label and click handler
    setAction: (title, func) =>
        @elem.find(".button").text(title).off("click").on("click", func)



# Export to the global namespace for the wrapper page
window.Infopanel = Infopanel
|
|
@ -0,0 +1,91 @@
|
||||||
|
# Loading: drives the loading screen and the thin progress bar shown while
# a site is downloading; also renders interactive prompts (size limit,
# Tor bridge) into the loading console.
class Loading
    constructor: (@wrapper) ->
        if window.show_loadingscreen then @showScreen()
        @timer_hide = null  # Pending hide animation timer
        @timer_set = null   # Rate-limited progress update timer

    # Update the progress bar (percent in 0..1); rate-limited to 500ms
    setProgress: (percent) ->
        if @timer_hide
            clearInterval @timer_hide
        @timer_set = RateLimit 500, ->
            $(".progressbar").css("transform": "scaleX(#{parseInt(percent*100)/100})").css("opacity", "1").css("display", "block")

    # Fade out the progress bar (after finishing it visually)
    hideProgress: ->
        @log "hideProgress"
        if @timer_set
            clearInterval @timer_set
        @timer_hide = setTimeout ( =>
            $(".progressbar").css("transform": "scaleX(1)").css("opacity", "0").hideLater(1000)
        ), 300


    # Show the full-page loading screen
    showScreen: ->
        $(".loadingscreen").css("display", "block").addClassLater("ready")
        @screen_visible = true
        @printLine " Connecting..."


    # Prompt when the site is larger than its current size limit
    showTooLarge: (site_info) ->
        @log "Displaying large site confirmation"
        if $(".console .button-setlimit").length == 0 # Not displaying it yet
            line = @printLine("Site size: <b>#{parseInt(site_info.settings.size/1024/1024)}MB</b> is larger than default allowed #{parseInt(site_info.size_limit)}MB", "warning")
            button = $("<a href='#Set+limit' class='button button-setlimit'>" + "Open site and set size limit to #{site_info.next_size_limit}MB" + "</a>")
            button.on "click", =>
                button.addClass("loading")
                return @wrapper.setSizeLimit(site_info.next_size_limit)
            line.after(button)
            setTimeout (=>
                @printLine('Ready.')
            ), 100

    # Prompt to enable Tor meek bridges when trackers are unreachable
    showTrackerTorBridge: (server_info) ->
        if $(".console .button-settrackerbridge").length == 0 and not server_info.tor_use_meek_bridges
            line = @printLine("Tracker connection error detected.", "error")
            button = $("<a href='#Enable+Tor+bridges' class='button button-settrackerbridge'>" + "Use Tor meek bridges for tracker connections" + "</a>")
            button.on "click", =>
                button.addClass("loading")
                @wrapper.ws.cmd "configSet", ["tor_use_bridges", ""]
                @wrapper.ws.cmd "configSet", ["trackers_proxy", "tor"]
                @wrapper.ws.cmd "siteUpdate", {address: @wrapper.site_info.address, announce: true}
                @wrapper.reloadIframe()
                return false
            line.after(button)
            if not server_info.tor_has_meek_bridges
                button.addClass("disabled")
                @printLine("No meek bridge support in your client, please <a href='https://github.com/HelloZeroNet/ZeroNet#how-to-join'>download the latest bundle</a>.", "warning")

    # We dont need loadingscreen anymore
    hideScreen: ->
        @log "hideScreen"
        if not $(".loadingscreen").hasClass("done") # Only if its not animating already
            if @screen_visible # Hide with animate
                $(".loadingscreen").addClass("done").removeLater(2000)
            else # Not visible, just remove
                $(".loadingscreen").remove()
        @screen_visible = false


    # Append text to last line of loadingscreen
    print: (text, type="normal") ->
        if not @screen_visible then return false
        $(".loadingscreen .console .cursor").remove() # Remove previous cursor
        last_line = $(".loadingscreen .console .console-line:last-child")
        if type == "error" then text = "<span class='console-error'>#{text}</span>"
        last_line.html(last_line.html()+text)


    # Add line to loading screen
    printLine: (text, type="normal") ->
        if not @screen_visible then return false
        $(".loadingscreen .console .cursor").remove() # Remove previous cursor
        if type == "error" then text = "<span class='console-error'>#{text}</span>" else text = text+"<span class='cursor'> </span>"

        line = $("<div class='console-line'>#{text}</div>").appendTo(".loadingscreen .console")
        if type == "warning" then line.addClass("console-warning")
        return line

    # Console logging with a [Loading] prefix
    log: (args...) ->
        console.log "[Loading]", args...


# Export to the global namespace for the wrapper page
window.Loading = Loading
|
|
@ -0,0 +1,89 @@
|
||||||
|
# Notifications: toast-style notification area of the wrapper UI.
# Notifications are addressed by id so a newer one replaces an older one.
class Notifications
    constructor: (@elem) ->
        @

    # Dev helper: show a few sample notifications
    test: ->
        setTimeout (=>
            @add("connection", "error", "Connection lost to <b>UiServer</b> on <b>localhost</b>!")
            @add("message-Anyone", "info", "New from <b>Anyone</b>.")
        ), 1000
        setTimeout (=>
            @add("connection", "done", "<b>UiServer</b> connection recovered.", 5000)
        ), 3000


    # Show a notification; timeout=0 keeps it until manually closed.
    # type: error | done | progress | ask | info; body: html string or element
    add: (id, type, body, timeout=0) ->
        id = id.replace /[^A-Za-z0-9-]/g, ""  # Sanitize id for use in a class name
        # Close notifications with same id
        for elem in $(".notification-#{id}")
            @close $(elem)

        # Create element
        elem = $(".notification.template", @elem).clone().removeClass("template")
        elem.addClass("notification-#{type}").addClass("notification-#{id}")
        if type == "progress"
            elem.addClass("notification-done")

        # Pick the icon by notification type
        if type == "error"
            $(".notification-icon", elem).html("!")
        else if type == "done"
            $(".notification-icon", elem).html("<div class='icon-success'></div>")
        else if type == "progress"
            $(".notification-icon", elem).html("<div class='icon-success'></div>")
        else if type == "ask"
            $(".notification-icon", elem).html("?")
        else
            $(".notification-icon", elem).html("i")

        # body may be an html string or a prebuilt jQuery element
        if typeof(body) == "string"
            $(".body", elem).html("<div class='message'><span class='multiline'>"+body+"</span></div>")
        else
            $(".body", elem).html("").append(body)

        elem.appendTo(@elem)

        # Timeout: auto-close after `timeout` ms, no close button needed
        if timeout
            $(".close", elem).remove() # No need of close button
            setTimeout (=>
                @close elem
            ), timeout

        # Animate in; width is capped and widened for the close button
        width = Math.min(elem.outerWidth() + 50, 580)
        if not timeout then width += 20 # Add space for close button
        if elem.outerHeight() > 55 then elem.addClass("long")
        elem.css({"width": "50px", "transform": "scale(0.01)"})
        elem.animate({"scale": 1}, 800, "easeOutElastic")
        elem.animate({"width": width}, 700, "easeInOutCubic")
        $(".body", elem).css("width": (width - 50))
        $(".body", elem).cssLater("box-shadow", "0px 0px 5px rgba(0,0,0,0.1)", 1000)

        # Close button or Confirm button
        $(".close, .button", elem).on "click", =>
            @close elem
            return false

        # Select list
        $(".select", elem).on "click", =>
            @close elem

        # Input enter
        $("input", elem).on "keyup", (e) =>
            if e.keyCode == 13
                @close elem

        return elem


    # Animate a notification away, then remove it from the DOM
    close: (elem) ->
        elem.stop().animate {"width": 0, "opacity": 0}, 700, "easeInOutCubic"
        elem.slideUp 300, (-> elem.remove())


    # Console logging with a [Notifications] prefix
    log: (args...) ->
        console.log "[Notifications]", args...


# Export to the global namespace for the wrapper page
window.Notifications = Notifications
|
|
@ -0,0 +1,714 @@
|
||||||
|
class Wrapper
|
||||||
|
constructor: (ws_url) ->
|
||||||
|
@log "Created!"
|
||||||
|
|
||||||
|
@loading = new Loading(@)
|
||||||
|
@notifications = new Notifications($(".notifications"))
|
||||||
|
@infopanel = new Infopanel($(".infopanel"))
|
||||||
|
@infopanel.onClosed = =>
|
||||||
|
@ws.cmd("siteSetSettingsValue", ["modified_files_notification", false])
|
||||||
|
@infopanel.onOpened = =>
|
||||||
|
@ws.cmd("siteSetSettingsValue", ["modified_files_notification", true])
|
||||||
|
@fixbutton = new Fixbutton()
|
||||||
|
|
||||||
|
window.addEventListener("message", @onMessageInner, false)
|
||||||
|
@inner = document.getElementById("inner-iframe").contentWindow
|
||||||
|
@ws = new ZeroWebsocket(ws_url)
|
||||||
|
@ws.next_message_id = 1000000 # Avoid messageid collision :)
|
||||||
|
@ws.onOpen = @onOpenWebsocket
|
||||||
|
@ws.onClose = @onCloseWebsocket
|
||||||
|
@ws.onMessage = @onMessageWebsocket
|
||||||
|
@ws.connect()
|
||||||
|
@ws_error = null # Ws error message
|
||||||
|
|
||||||
|
@next_cmd_message_id = -1
|
||||||
|
|
||||||
|
@site_info = null # Hold latest site info
|
||||||
|
@server_info = null # Hold latest server info
|
||||||
|
@event_site_info = $.Deferred() # Event when site_info received
|
||||||
|
@inner_loaded = false # If iframe loaded or not
|
||||||
|
@inner_ready = false # Inner frame ready to receive messages
|
||||||
|
@wrapperWsInited = false # Wrapper notified on websocket open
|
||||||
|
@site_error = null # Latest failed file download
|
||||||
|
@address = null
|
||||||
|
@opener_tested = false
|
||||||
|
@announcer_line = null
|
||||||
|
@web_notifications = {}
|
||||||
|
@is_title_changed = false
|
||||||
|
|
||||||
|
@allowed_event_constructors = [window.MouseEvent, window.KeyboardEvent, window.PointerEvent] # Allowed event constructors
|
||||||
|
|
||||||
|
window.onload = @onPageLoad # On iframe loaded
|
||||||
|
window.onhashchange = (e) => # On hash change
|
||||||
|
@log "Hashchange", window.location.hash
|
||||||
|
if window.location.hash
|
||||||
|
src = $("#inner-iframe").attr("src").replace(/#.*/, "")+window.location.hash
|
||||||
|
$("#inner-iframe").attr("src", src)
|
||||||
|
|
||||||
|
window.onpopstate = (e) =>
|
||||||
|
@sendInner {"cmd": "wrapperPopState", "params": {"href": document.location.href, "state": e.state}}
|
||||||
|
|
||||||
|
$("#inner-iframe").focus()
|
||||||
|
|
||||||
|
|
||||||
|
verifyEvent: (allowed_target, e) =>
|
||||||
|
if not e.originalEvent.isTrusted
|
||||||
|
throw "Event not trusted"
|
||||||
|
|
||||||
|
if e.originalEvent.constructor not in @allowed_event_constructors
|
||||||
|
throw "Invalid event constructor: #{e.constructor} not in #{JSON.stringify(@allowed_event_constructors)}"
|
||||||
|
|
||||||
|
if e.originalEvent.currentTarget != allowed_target[0]
|
||||||
|
throw "Invalid event target: #{e.originalEvent.currentTarget} != #{allowed_target[0]}"
|
||||||
|
|
||||||
|
# Incoming message from UiServer websocket: parse and dispatch.
onMessageWebsocket: (e) =>
  @handleMessageWebsocket(JSON.parse(e.data))
|
||||||
|
# Dispatch a parsed message arriving from the UiServer websocket.
# Known commands are handled by the wrapper itself; anything else is
# forwarded to the inner (site) frame.
handleMessageWebsocket: (message) =>
  cmd = message.cmd
  if cmd == "response"
    if @ws.waiting_cb[message.to]? # We are waiting for response
      @ws.waiting_cb[message.to](message.result)
    else
      @sendInner message # Pass message to inner frame
  else if cmd == "notification" # Display notification
    type = message.params[0]
    id = "notification-ws-#{message.id}"
    if "-" in message.params[0] # - in first param: message id defined
      [id, type] = message.params[0].split("-")
    @notifications.add(id, type, message.params[1], message.params[2])
  else if cmd == "progress" # Display progress bar
    @actionProgress(message)
  else if cmd == "prompt" # Prompt input
    @displayPrompt message.params[0], message.params[1], message.params[2], message.params[3], (res) =>
      @ws.response message.id, res
  else if cmd == "confirm" # Confirm action
    @displayConfirm message.params[0], message.params[1], (res) =>
      @ws.response message.id, res
  else if cmd == "setSiteInfo"
    @sendInner message # Pass to inner frame
    if message.params.address == @address # Current page
      @setSiteInfo message.params
    @updateProgress message.params
  else if cmd == "setAnnouncerInfo"
    @sendInner message # Pass to inner frame
    if message.params.address == @address # Current page
      @setAnnouncerInfo message.params
    @updateProgress message.params
  else if cmd == "error"
    @notifications.add("notification-#{message.id}", "error", message.params, 0)
  else if cmd == "updating" # Server is updating: close connection
    @log "Updating: Closing websocket"
    @ws.ws.close()
    @ws.onCloseWebsocket(null, 4000)
  else if cmd == "redirect"
    window.top.location = message.params
  else if cmd == "injectHtml"
    $("body").append(message.params)
  else if cmd == "injectScript"
    # The nonce is required by the page's script-src CSP for the script to run
    script_tag = $("<script>")
    script_tag.attr("nonce", @script_nonce)
    script_tag.html(message.params)
    document.head.appendChild(script_tag[0])
  else
    @sendInner message # Pass message to inner frame
|
|
||||||
|
# Incoming postMessage from the inner (site) frame. Performs the
# opener check / wrapper_nonce check before handing the message on.
onMessageInner: (e) =>
  # No nonce security enabled, test if window opener present
  if not window.postmessage_nonce_security and @opener_tested == false
    if window.opener and window.opener != window
      # An opener could script this window: require explicit user confirmation
      @log "Opener present", window.opener
      @displayOpenerDialog()
      return false
    else
      @opener_tested = true

  message = e.data
  # Invalid message (probably not for us)
  if not message.cmd
    @log "Invalid message:", message
    return false

  # Test nonce security to avoid third-party messages
  if window.postmessage_nonce_security and message.wrapper_nonce != window.wrapper_nonce
    @log "Message nonce error:", message.wrapper_nonce, '!=', window.wrapper_nonce
    return

  @handleMessage message
|
|
||||||
|
# Send a wrapper-originated command through the normal message pipeline.
# Wrapper messages use decreasing ids (separate from the inner frame's),
# and an optional callback is registered for the eventual response.
cmd: (cmd, params={}, cb=null) =>
  message = {cmd: cmd, params: params, id: @next_cmd_message_id}
  @ws.waiting_cb[message.id] = cb if cb
  @next_cmd_message_id -= 1

  @handleMessage(message)
||||||
|
|
||||||
|
# Dispatch a message coming from the inner frame (or from @cmd). Wrapper
# commands are handled here; anything else is forwarded to the websocket.
handleMessage: (message) =>
  cmd = message.cmd
  if cmd == "innerReady"
    @inner_ready = true
    if @ws.ws.readyState == 1 and not @wrapperWsInited # If ws already opened
      @sendInner {"cmd": "wrapperOpenedWebsocket"}
      @wrapperWsInited = true
  else if cmd == "innerLoaded" or cmd == "wrapperInnerLoaded"
    if window.location.hash
      $("#inner-iframe")[0].src += window.location.hash # Hash tag
      @log "Added hash to location", $("#inner-iframe")[0].src
  else if cmd == "wrapperNotification" # Display notification
    @actionNotification(message)
  else if cmd == "wrapperConfirm" # Display confirm message
    @actionConfirm(message)
  else if cmd == "wrapperPrompt" # Prompt input
    @actionPrompt(message)
  else if cmd == "wrapperProgress" # Progress bar
    @actionProgress(message)
  else if cmd == "wrapperSetViewport" # Set the viewport
    @actionSetViewport(message)
  else if cmd == "wrapperSetTitle"
    @log "wrapperSetTitle", message.params
    $("head title").text(message.params)
    # Remember the site changed the title so we don't overwrite it later
    @is_title_changed = true
  else if cmd == "wrapperReload" # Reload current page
    @actionReload(message)
  else if cmd == "wrapperGetLocalStorage"
    @actionGetLocalStorage(message)
  else if cmd == "wrapperSetLocalStorage"
    @actionSetLocalStorage(message)
  else if cmd == "wrapperPushState"
    query = @toRelativeQuery(message.params[2])
    window.history.pushState(message.params[0], message.params[1], query)
  else if cmd == "wrapperReplaceState"
    query = @toRelativeQuery(message.params[2])
    window.history.replaceState(message.params[0], message.params[1], query)
  else if cmd == "wrapperGetState"
    @sendInner {"cmd": "response", "to": message.id, "result": window.history.state}
  else if cmd == "wrapperGetAjaxKey"
    @sendInner {"cmd": "response", "to": message.id, "result": window.ajax_key}
  else if cmd == "wrapperOpenWindow"
    @actionOpenWindow(message.params)
  else if cmd == "wrapperPermissionAdd"
    @actionPermissionAdd(message)
  else if cmd == "wrapperRequestFullscreen"
    @actionRequestFullscreen()
  else if cmd == "wrapperWebNotification"
    @actionWebNotification(message)
  else if cmd == "wrapperCloseWebNotification"
    @actionCloseWebNotification(message)
  else # Send to websocket
    # Inner frame messages must stay below 1000000 to avoid colliding with
    # wrapper-reserved ids
    if message.id < 1000000
      if message.cmd == "fileWrite" and not @modified_panel_updater_timer and site_info?.settings?.own
        # Debounce modified-files panel refresh after writes to own site
        @modified_panel_updater_timer = setTimeout ( => @updateModifiedPanel(); @modified_panel_updater_timer = null ), 1000
      @ws.send(message) # Pass message to websocket
    else
      @log "Invalid inner message id"
||||||
|
|
||||||
|
# Normalize a query string or hash into a relative url based on the
# current pathname (used for pushState/replaceState targets).
toRelativeQuery: (query=null) ->
  query ?= window.location.search
  path = window.location.pathname
  if path.match /^\/[^\/]+$/ # Add / after site address if called without it
    path += "/"
  stripped_query = query.replace("?", "")
  if query.startsWith("#")
    return query # A bare hash replaces the whole relative url
  else if stripped_query
    return path + "?" + stripped_query
  return path
|
||||||
|
|
||||||
|
|
||||||
|
# Overlay asking the user to confirm loading when this page has a
# window.opener (which could otherwise script this window).
displayOpenerDialog: ->
  overlay = $("<div class='opener-overlay'><div class='dialog'>You have opened this page by clicking on a link. Please, confirm if you want to load this site.<a href='?' target='_blank' class='button'>Open site</a></div></div>")
  overlay.find('a').on "click", ->
    # Re-open the page without an opener reference, then close this one
    window.open("?", "_blank")
    window.close()
    return false
  $("body").prepend(overlay)
|
||||||
|
|
||||||
|
# - Actions -

# Open a new window with the opener reference severed, so the opened
# page cannot script this one. params: url string, or [url, name, specs].
actionOpenWindow: (params) ->
  if typeof(params) == "string"
    new_window = window.open()
    target_url = params
  else
    new_window = window.open(null, params[1], params[2])
    target_url = params[0]
  new_window.opener = null
  new_window.location = target_url
|
|
||||||
|
# Put the inner iframe into fullscreen using whichever vendor-prefixed
# Fullscreen API the browser provides.
actionRequestFullscreen: ->
  iframe = document.getElementById("inner-iframe")
  request = iframe.requestFullScreen or iframe.webkitRequestFullscreen or iframe.mozRequestFullScreen or iframe.msRequestFullScreen
  request.call(iframe)
|
||||||
|
|
||||||
|
# Show a browser (Web Notification API) notification on behalf of the
# inner frame, requesting permission from the user first if needed.
actionWebNotification: (message) ->
  $.when(@event_site_info).done =>
    # Check that the wrapper may send notifications
    if Notification.permission == "granted"
      @displayWebNotification message
    else if Notification.permission == "denied"
      res = {"error": "Web notifications are disabled by the user"}
      @sendInner {"cmd": "response", "to": message.id, "result": res}
    else
      # Permission not decided yet: ask, then display only when granted
      Notification.requestPermission().then (permission) =>
        if permission == "granted"
          @displayWebNotification message
|
||||||
|
|
||||||
|
# Close a previously displayed web notification by its id (params[0]).
actionCloseWebNotification: (message) ->
  $.when(@event_site_info).done =>
    @web_notifications[message.params[0]].close()
|
||||||
|
|
||||||
|
# Create a web notification (params: [title, id, options]) and wire its
# lifecycle events back to the inner frame.
displayWebNotification: (message) ->
  title = message.params[0]
  id = message.params[1]
  options = message.params[2]
  notification = new Notification(title, options)
  # Keep a handle so actionCloseWebNotification can close it by id
  @web_notifications[id] = notification
  notification.onshow = () =>
    @sendInner {"cmd": "response", "to": message.id, "result": "ok"}
  notification.onclick = (e) =>
    # Unless the site asked to focus the tab, suppress the default behavior
    if not options.focus_tab
      e.preventDefault()
    @sendInner {"cmd": "webNotificationClick", "params": {"id": id}}
  notification.onclose = () =>
    @sendInner {"cmd": "webNotificationClose", "params": {"id": id}}
    delete @web_notifications[id]
|
||||||
|
|
||||||
|
# Grant a permission to the site after asking the user to confirm.
# No-op when the site already has the permission.
actionPermissionAdd: (message) ->
  permission = message.params
  $.when(@event_site_info).done =>
    if permission in @site_info.settings.permissions
      return false
    # Fetch the human-readable description before asking the user
    @ws.cmd "permissionDetails", permission, (permission_details) =>
      @displayConfirm "This site requests permission:" + " <b>#{@toHtmlSafe(permission)}</b>" + "<br><small style='color: #4F4F4F'>#{permission_details}</small>", "Grant", =>
        @ws.cmd "permissionAdd", permission, (res) =>
          @sendInner {"cmd": "response", "to": message.id, "result": res}
|
||||||
|
|
||||||
|
# Show a notification requested by the inner frame.
# params: [type, body_html, timeout]
actionNotification: (message) ->
  safe_params = @toHtmlSafe(message.params) # Escape html
  message.params = safe_params
  notification_body = $("<span class='message'>" + safe_params[1] + "</span>")
  @notifications.add("notification-#{message.id}", safe_params[0], notification_body, safe_params[2])
|
||||||
|
|
||||||
|
# Show a confirm notification with one button per caption; cb receives
# the 1-based index of the clicked button.
displayConfirm: (body, captions, cb) ->
  body = $("<span class='message-outer'><span class='message'>"+body+"</span></span>")
  buttons = $("<span class='buttons'></span>")
  if captions not instanceof Array then captions = [captions] # Convert to list if necessary
  for caption, i in captions
    button = $("<a></a>", {href: "#" + caption, class: "button button-confirm button-#{caption} button-#{i+1}", "data-value": i + 1}) # Add confirm button
    button.text(caption)
    # IIFE captures this iteration's button for the click handler
    ((button) =>
      button.on "click", (e) =>
        @verifyEvent button, e
        cb(parseInt(e.currentTarget.dataset.value))
        return false
    )(button)
    buttons.append(button)
  body.append(buttons)
  @notifications.add("notification-#{caption}", "ask", body)

  buttons.first().focus()
  $(".notification").scrollLeft(0)
|
||||||
|
|
||||||
|
|
||||||
|
# Show a confirm dialog for the inner frame and relay the chosen
# button index back as a response message.
actionConfirm: (message, cb=false) ->
  message.params = @toHtmlSafe(message.params) # Escape html
  caption = message.params[1] or "ok"
  @displayConfirm message.params[0], caption, (res) =>
    @sendInner {"cmd": "response", "to": message.id, "result": res} # Response to confirm
    return false
|
||||||
|
|
||||||
|
|
||||||
|
# Show an input prompt notification; cb receives the entered value on
# Enter or on clicking the confirm button.
displayPrompt: (message, type, caption, placeholder, cb) ->
  body = $("<span class='message'></span>").html(message)
  placeholder ?= ""

  input = $("<input/>", {type: type, class: "input button-#{type}", placeholder: placeholder}) # Add input
  input.on "keyup", (e) => # Send on enter
    @verifyEvent input, e
    if e.keyCode == 13
      cb input.val() # Response to prompt
  body.append(input)

  button = $("<a></a>", {href: "#" + caption, class: "button button-#{caption}"}).text(caption) # Add confirm button
  button.on "click", (e) => # Response on button click
    @verifyEvent button, e
    cb input.val()
    return false
  body.append(button)

  @notifications.add("notification-#{message.id}", "ask", body)

  input.focus()
  $(".notification").scrollLeft(0)
|
||||||
|
|
||||||
|
|
||||||
|
# Prompt the user for input on behalf of the inner frame.
# params: [message, input_type="text", caption="OK", placeholder=""]
actionPrompt: (message) ->
  message.params = @toHtmlSafe(message.params) # Escape html
  input_type = message.params[1] or "text"
  caption = message.params[2] or "OK"
  placeholder = message.params[3] ? "" # Existence check, not truthiness

  @displayPrompt message.params[0], input_type, caption, placeholder, (res) =>
    @sendInner {"cmd": "response", "to": message.id, "result": res} # Response to prompt
|
||||||
|
|
||||||
|
# Render or update a circular progress notification.
# type: notification id; body: html (already escaped by caller);
# percent: 0-100 progress, >= 100 completes it, negative marks an error.
displayProgress: (type, body, percent) ->
  percent = Math.min(100, percent)/100
  offset = 75-(percent*75) # svg stroke offset: 75 = empty, 0 = full circle
  circle = """
    <div class="circle"><svg class="circle-svg" width="30" height="30" viewport="0 0 30 30" version="1.1" xmlns="http://www.w3.org/2000/svg">
      <circle r="12" cx="15" cy="15" fill="transparent" class="circle-bg"></circle>
      <circle r="12" cx="15" cy="15" fill="transparent" class="circle-fg" style="stroke-dashoffset: #{offset}"></circle>
    </svg></div>
  """
  body = "<span class='message'>"+body+"</span>" + circle
  elem = $(".notification-#{type}")
  if elem.length # Update the existing notification in place
    width = $(".body .message", elem).outerWidth()
    $(".body .message", elem).html(body)
    if $(".body .message", elem).css("width") == ""
      $(".body .message", elem).css("width", width) # Pin width to avoid jumping
    $(".body .circle-fg", elem).css("stroke-dashoffset", offset)
  else
    elem = @notifications.add(type, "progress", $(body))
  if percent > 0
    $(".body .circle-bg", elem).css {"animation-play-state": "paused", "stroke-dasharray": "180px"}

  if $(".notification-icon", elem).data("done") # Already finalized earlier
    return false
  else if percent >= 1 # Done
    $(".circle-fg", elem).css("transition", "all 0.3s ease-in-out")
    setTimeout (->
      $(".notification-icon", elem).css {transform: "scale(1)", opacity: 1}
      $(".notification-icon .icon-success", elem).css {transform: "rotate(45deg) scale(1)"}
    ), 300
    setTimeout (=>
      @notifications.close elem
    ), 3000
    $(".notification-icon", elem).data("done", true)
  else if percent < 0 # Error
    # Fix: the transition value previously had a stray "transition: " prefix
    # ("transition: all 0.3s ease-in-out"), which is an invalid CSS value
    # and was silently ignored by browsers, so the stroke never animated.
    $(".body .circle-fg", elem).css("stroke", "#ec6f47").css("transition", "all 0.3s ease-in-out")
    setTimeout (=>
      $(".notification-icon", elem).css {transform: "scale(1)", opacity: 1}
      elem.removeClass("notification-done").addClass("notification-error")
      $(".notification-icon .icon-success", elem).removeClass("icon-success").html("!")
    ), 300
    $(".notification-icon", elem).data("done", true)
|
||||||
|
|
||||||
|
|
||||||
|
# Forward a progress-bar request (params: [type, body, percent]).
actionProgress: (message) ->
  safe_params = @toHtmlSafe(message.params) # Escape html
  message.params = safe_params
  @displayProgress(safe_params[0], safe_params[1], safe_params[2])
|
||||||
|
|
||||||
|
# Set (or create) the page's viewport meta tag with an html-escaped value.
actionSetViewport: (message) ->
  @log "actionSetViewport", message
  content = @toHtmlSafe message.params
  viewport = $("#viewport")
  if viewport.length > 0
    viewport.attr("content", content)
  else
    $('<meta name="viewport" id="viewport">').attr("content", content).appendTo("head")
|
||||||
|
|
||||||
|
# Reload the wrapper page, appending params[0] to the url if given.
actionReload: (message) ->
  @reload(message.params[0])
|
||||||
|
|
||||||
|
# Reload the page; url_post (if any) is appended as a query parameter
# after stripping any hash fragment from the current url.
reload: (url_post="") ->
  @log "Reload"
  current_url = window.location.toString().replace(/#.*/g, "")
  if not url_post
    window.location.reload()
  else
    separator = if current_url.indexOf("?") > 0 then "&" else "?"
    window.location = current_url + separator + url_post
|
||||||
|
|
||||||
|
|
||||||
|
# Return the site's localStorage blob to the inner frame, keyed by
# site address + auth_address (migrating old address-only keys).
actionGetLocalStorage: (message) ->
  $.when(@event_site_info).done =>
    data = localStorage.getItem "site.#{@site_info.address}.#{@site_info.auth_address}"
    if not data # Migrate from non auth_address based local storage
      data = localStorage.getItem "site.#{@site_info.address}"
      if data
        localStorage.setItem "site.#{@site_info.address}.#{@site_info.auth_address}", data
        localStorage.removeItem "site.#{@site_info.address}"
        @log "Migrated LocalStorage from global to auth_address based"
    if data then data = JSON.parse(data)
    @sendInner {"cmd": "response", "to": message.id, "result": data}
|
||||||
|
|
||||||
|
|
||||||
|
# Persist the inner frame's data under the site+auth_address scoped key.
actionSetLocalStorage: (message) ->
  $.when(@event_site_info).done =>
    storage_key = "site.#{@site_info.address}.#{@site_info.auth_address}"
    back = localStorage.setItem storage_key, JSON.stringify(message.params)
    @sendInner {"cmd": "response", "to": message.id, "result": back}
|
||||||
|
|
||||||
|
|
||||||
|
# EOF actions
|
||||||
|
|
||||||
|
|
||||||
|
# Websocket to UiServer opened: join change channels, notify the inner
# frame, fetch initial server/announcer info and (re)load site info.
onOpenWebsocket: (e) =>
  if window.show_loadingscreen # Get info on modifications
    @ws.cmd "channelJoin", {"channels": ["siteChanged", "serverChanged", "announcerChanged"]}
  else
    @ws.cmd "channelJoin", {"channels": ["siteChanged", "serverChanged"]}
  if not @wrapperWsInited and @inner_ready
    @sendInner {"cmd": "wrapperOpenedWebsocket"} # Send to inner frame
    @wrapperWsInited = true
  if window.show_loadingscreen
    @ws.cmd "serverInfo", [], (server_info) =>
      @server_info = server_info

    @ws.cmd "announcerInfo", [], (announcer_info) =>
      @setAnnouncerInfo(announcer_info)

  if @inner_loaded # Update site info
    @reloadSiteInfo()

  # If inner frame not loaded for 2 sec show peer informations on loading screen by loading site info
  setTimeout (=>
    if not @site_info then @reloadSiteInfo()
  ), 2000

  # A previous connection error notification exists: report recovery
  if @ws_error
    @notifications.add("connection", "done", "Connection with <b>UiServer Websocket</b> recovered.", 6000)
    @ws_error = null
|
||||||
|
|
||||||
|
|
||||||
|
# Websocket to UiServer closed: after a short grace period (the page
# itself may be closing), notify the inner frame and show an error.
onCloseWebsocket: (e) =>
  @wrapperWsInited = false
  setTimeout (=> # Wait a bit, maybe its page closing
    @sendInner {"cmd": "wrapperClosedWebsocket"} # Send to inner frame
    if e and e.code == 1000 and e.wasClean == false # Server error please reload page
      @ws_error = @notifications.add("connection", "error", "UiServer Websocket error, please reload the page.")
    else if e and e.code == 1001 and e.wasClean == true # Navigating to other page
      return
    else if not @ws_error
      @ws_error = @notifications.add("connection", "error", "Connection with <b>UiServer Websocket</b> was lost. Reconnecting...")
  ), 1000
|
||||||
|
|
||||||
|
|
||||||
|
# Iframe loaded: mark the inner frame loaded and refresh site info/title.
onPageLoad: (e) =>
  @log "onPageLoad"
  @inner_loaded = true
  if not @inner_ready then @sendInner {"cmd": "wrapperReady"} # Inner frame loaded before wrapper
  #if not @site_error then @loading.hideScreen() # Hide loading screen
  if @ws.ws.readyState == 1 and not @site_info # Ws opened
    @reloadSiteInfo()
  else if @site_info and @site_info.content?.title? and not @is_title_changed
    window.document.title = @site_info.content.title + " - ZeroNet"
    @log "Setting title to", window.document.title
|
||||||
|
|
||||||
|
# Wrapper page loaded: capture the per-page secrets, then scrub them
# from the global scope so site code cannot read them.
onWrapperLoad: =>
  @script_nonce = window.script_nonce
  @wrapper_key = window.wrapper_key
  # Cleanup secret variables
  for secret_name in ["wrapper", "wrapper_key", "script_nonce"]
    delete window[secret_name]
  $("#script_init").remove()
|
||||||
|
|
||||||
|
# Send message to innerframe
sendInner: (message) ->
  # Target origin "*": message authenticity is enforced via the
  # wrapper_nonce check on inbound messages (see onMessageInner),
  # not via postMessage's origin argument.
  @inner.postMessage(message, '*')
|
||||||
|
|
||||||
|
|
||||||
|
# Get site info from UiServer and apply it; also offers raising the
# size limit when the site exceeds it.
reloadSiteInfo: ->
  if @loading.screen_visible # Loading screen visible
    params = {"file_status": window.file_inner_path} # Query the current required file status
  else
    params = {}

  @ws.cmd "siteInfo", params, (site_info) =>
    @address = site_info.address
    @setSiteInfo site_info

    if site_info.settings.size > site_info.size_limit * 1024 * 1024 and not @loading.screen_visible # Site size too large and not displaying it yet
      @displayConfirm "Site is larger than allowed: #{(site_info.settings.size/1024/1024).toFixed(1)}MB/#{site_info.size_limit}MB", "Set limit to #{site_info.next_size_limit}MB", =>
        @ws.cmd "siteSetLimit", [site_info.next_size_limit], (res) =>
          if res == "ok"
            @notifications.add("size_limit", "done", "Site storage limit modified!", 5000)

    if site_info.content?.title? and not @is_title_changed
      window.document.title = site_info.content.title + " - ZeroNet"
      @log "Setting title to", window.document.title
|
||||||
|
|
||||||
|
|
||||||
|
# Got setSiteInfo from websocket UiServer: update the loading screen,
# title, size-limit prompts and modified-files panel, then store the
# info and resolve @event_site_info for waiting handlers.
setSiteInfo: (site_info) ->
  if site_info.event? # If loading screen visible add event to it
    # File started downloading
    if site_info.event[0] == "file_added" and site_info.bad_files
      @loading.printLine("#{site_info.bad_files} files needs to be downloaded")
    # File finished downloading
    else if site_info.event[0] == "file_done"
      @loading.printLine("#{site_info.event[1]} downloaded")
      if site_info.event[1] == window.file_inner_path # File downloaded we currently on
        @loading.hideScreen()
        if not @site_info then @reloadSiteInfo()
        if site_info.content and not @is_title_changed
          window.document.title = site_info.content.title + " - ZeroNet"
          @log "Required file #{window.file_inner_path} done, setting title to", window.document.title
        if not window.show_loadingscreen
          @notifications.add("modified", "info", "New version of this page has just released.<br>Reload to see the modified content.")
    # File failed downloading
    else if site_info.event[0] == "file_failed"
      @site_error = site_info.event[1]
      if site_info.settings.size > site_info.size_limit*1024*1024 # Site size too large and not displaying it yet
        @loading.showTooLarge(site_info)

      else
        @loading.printLine("#{site_info.event[1]} download failed", "error")
    # New peers found
    else if site_info.event[0] == "peers_added"
      @loading.printLine("Peers found: #{site_info.peers}")

  if @loading.screen_visible and not @site_info # First site info display current peers
    if site_info.peers > 1
      @loading.printLine "Peers found: #{site_info.peers}"
    else
      @site_error = "No peers found"
      @loading.printLine "No peers found"

  if not @site_info and not @loading.screen_visible and $("#inner-iframe").attr("src").replace("?wrapper=False", "").replace(/\?wrapper_nonce=[A-Za-z0-9]+/, "").indexOf("?") == -1 # First site info and we are on mainpage (does not have other parameter than wrapper)
    if site_info.size_limit*1.1 < site_info.next_size_limit # Need upgrade soon
      @displayConfirm "Running out of size limit (#{(site_info.settings.size/1024/1024).toFixed(1)}MB/#{site_info.size_limit}MB)", "Set limit to #{site_info.next_size_limit}MB", =>
        @ws.cmd "siteSetLimit", [site_info.next_size_limit], (res) =>
          if res == "ok"
            @notifications.add("size_limit", "done", "Site storage limit modified!", 5000)
        return false

  if @loading.screen_visible and @inner_loaded and site_info.settings.size < site_info.size_limit * 1024 * 1024 and site_info.settings.size > 0 # Loading screen still visible, but inner loaded
    @log "Loading screen visible, but inner loaded"
    @loading.hideScreen()

  # Own site with a new modified timestamp: refresh the modified-files panel
  if site_info?.settings?.own and site_info?.settings?.modified != @site_info?.settings?.modified
    @updateModifiedPanel()

  if @loading.screen_visible and site_info.settings.size > site_info.size_limit * 1024 * 1024
    @log "Site too large"
    @loading.showTooLarge(site_info)

  @site_info = site_info
  @event_site_info.resolve()
|
||||||
|
|
||||||
|
# Sign inner_path, using the privatekey stored in users.json when
# available, otherwise prompting the user for it.
# cb (optional) receives true on success, false on failure.
# Refactor: both branches previously duplicated the identical siteSign
# websocket call and loading-indicator handling; extracted to signContent.
siteSign: (inner_path, cb) =>
  if @site_info.privatekey
    # Privatekey stored in users.json
    @signContent("stored", inner_path, cb)
  else
    # Ask the user for privatekey
    @displayPrompt "Enter your private key:", "password", "Sign", "", (privatekey) => # Prompt the private key
      @signContent(privatekey, inner_path, cb)

# Helper: perform the siteSign websocket call with a loading indicator
# on the infopanel button. privatekey is "stored" or a user-entered key.
signContent: (privatekey, inner_path, cb) =>
  @infopanel.elem.find(".button").addClass("loading")
  @ws.cmd "siteSign", {privatekey: privatekey, inner_path: inner_path, update_changed_files: true}, (res) =>
    cb?(res == "ok")
    @infopanel.elem.find(".button").removeClass("loading")
|
||||||
|
|
||||||
|
# Publish an already-signed inner_path to peers (sign: false — signing
# is done separately by siteSign).
sitePublish: (inner_path) =>
  @ws.cmd "sitePublish", {"inner_path": inner_path, "sign": false}
|
||||||
|
|
||||||
|
# Refresh the "modified files" info panel from the server's list of
# changed-but-unsigned files; offers a Sign & Publish action.
updateModifiedPanel: =>
  @ws.cmd "siteListModifiedFiles", [], (res) =>
    num = res.modified_files?.length
    if num > 0
      closed = @site_info.settings.modified_files_notification == false
      @infopanel.show(closed)
      @infopanel.setTitle(
        "#{res.modified_files.length} modified file#{if num > 1 then 's' else ''}",
        res.modified_files.join(", ")
      )
      @infopanel.setClosedNum(num)
      @infopanel.setAction "Sign & Publish", =>
        @siteSign "content.json", (res) =>
          if (res)
            @notifications.add "sign", "done", "content.json Signed!", 5000
            @sitePublish("content.json")
        return false
    else
      @infopanel.hide()

    @log "siteListModifiedFiles", num, res
|
||||||
|
|
||||||
|
# Summarize tracker announce states on the loading screen; suggest the
# Tor-bridge help screen when most announces are failing.
setAnnouncerInfo: (announcer_info) ->
  status_db = {announcing: [], error: [], announced: []}
  for key, val of announcer_info.stats
    status_db[val.status].push(val) if val.status
  status_line = "Trackers announcing: #{status_db.announcing.length}, error: #{status_db.error.length}, done: #{status_db.announced.length}"
  # Update the existing loading-screen line in place when we already printed one
  if @announcer_line
    @announcer_line.text(status_line)
  else
    @announcer_line = @loading.printLine(status_line)

  ok_num = status_db.announced.length + status_db.announcing.length
  if status_db.error.length > ok_num and status_db.announced.length < 3
    @loading.showTrackerTorBridge(@server_info)
|
||||||
|
|
||||||
|
# Map the number of outstanding download tasks to a 0..1 value for the
# loading screen's progress bar; hide it when nothing is in flight.
updateProgress: (site_info) ->
  if site_info.tasks > 0 and site_info.started_task_num > 0
    remaining = Math.max(site_info.tasks, site_info.bad_files)
    @loading.setProgress 1 - (remaining / site_info.started_task_num)
  else
    @loading.hideProgress()
|
||||||
|
|
||||||
|
|
||||||
|
# Html-escape every value (recursing into nested arrays), then re-allow
# a small whitelist of formatting tags (br, b, u, i, small).
# Fix: this copy of the file had its html entities decoded by the
# archive extraction, turning every replace into a no-op (e.g.
# .replace(/&/g, '&')) and the tag-unescape regex into /<...>/ — i.e.
# the sanitizer sanitized nothing (XSS risk). Restored the entities.
# Note: & must be escaped first, or the later entities would be
# double-escaped.
toHtmlSafe: (values) ->
  if values not instanceof Array then values = [values] # Convert to array if its not
  for value, i in values
    if value instanceof Array
      value = @toHtmlSafe(value)
    else
      value = String(value).replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/"/g, '&quot;').replace(/'/g, '&#039;') # Escape dangerous characters
      value = value.replace(/&lt;([\/]{0,1}(br|b|u|i|small))&gt;/g, "<$1>") # Unescape b, i, u, br tags
    values[i] = value
  return values
|
||||||
|
|
||||||
|
|
||||||
|
# Raise the site's storage limit, then optionally reload the inner iframe.
setSizeLimit: (size_limit, reload=true) =>
  @log "setSizeLimit: #{size_limit}, reload: #{reload}"
  @inner_loaded = false # Inner frame not loaded, just a 404 page displayed
  @ws.cmd "siteSetLimit", [size_limit], (res) =>
    return false if res != "ok"
    @loading.printLine res
    @inner_loaded = false
    @reloadIframe() if reload
  return false
|
||||||
|
|
||||||
|
# Reload the inner iframe with a freshly issued wrapper_nonce.
reloadIframe: =>
  current_src = $("iframe").attr("src")
  @ws.cmd "serverGetWrapperNonce", [], (wrapper_nonce) =>
    new_src = current_src.replace(/wrapper_nonce=[A-Za-z0-9]+/, "wrapper_nonce=" + wrapper_nonce)
    @log "Reloading iframe using url", new_src
    $("iframe").attr "src", new_src
|
||||||
|
|
||||||
|
# Console logger with a "[Wrapper]" prefix.
log: (args...) ->
  console.log "[Wrapper]", args...
|
||||||
|
|
||||||
|
# Build the UiServer websocket url from the page origin (or an explicitly
# configured window.server_url) plus the per-page wrapper_key, then start
# the wrapper.
origin = window.server_url or window.location.href.replace(/(\:\/\/.*?)\/.*/, "$1")

# Match the websocket scheme (ws/wss) to the page's http/https scheme
if origin.indexOf("https:") == 0
  proto = { ws: 'wss', http: 'https' }
else
  proto = { ws: 'ws', http: 'http' }

ws_url = proto.ws + ":" + origin.replace(proto.http+":", "") + "/ZeroNet-Internal/Websocket?wrapper_key=" + window.wrapper_key

window.wrapper = new Wrapper(ws_url)
|
||||||
|
|
|
@ -0,0 +1,230 @@
|
||||||
|
/* Wrapper page base styles */
body { margin: 0; padding: 0; height: 100%; background-color: #D2CECD; overflow: hidden }
body.back { background-color: #090909 } /* Dark background for the .back state */
a { color: black }

/* Warning box shown on unsupported browsers */
.unsupported { text-align: center; z-index: 999; position: relative; margin: auto; width: 480px; background-color: white; padding: 20px; border-bottom: 2px solid #e74c3c; box-shadow: 0px 0px 15px #DDD; font-family: monospace; }
.template { display: none !important }

/* The site's iframe fills the whole viewport */
#inner-iframe { width: 100%; height: 100%; position: absolute; border: 0; } /*; transition: all 0.8s cubic-bezier(0.68, -0.55, 0.265, 1.55), opacity 0.8s ease-in-out*/
#inner-iframe.back { transform: scale(0.95) translate(-300px, 0); opacity: 0.4 }

/* Generic notification/dialog buttons */
.button {
  padding: 5px 10px; margin-left: 10px; background-color: #FFF85F; border-bottom: 2px solid #CDBD1E;
  border-radius: 2px; text-decoration: none; transition: all 0.5s; background-position: left center; white-space: nowrap;
}
.button:hover { background-color: #FFF400; border-bottom: 2px solid #4D4D4C; transition: none }
.button:active { position: relative; top: 1px }
.button:focus { outline: none }

/* Destructive-action variant (class derived from the button caption) */
.button-Delete { background-color: #e74c3c; border-bottom-color: #c0392b; color: white }
.button-Delete:hover { background-color: #FF5442; border-bottom-color: #8E2B21 }

/* In-progress state: text hidden, spinner shown, clicks disabled */
.button.loading {
  color: rgba(0,0,0,0); background: #999 url(img/loading.gif) no-repeat center center;
  transition: all 0.5s ease-out ; pointer-events: none; border-bottom: 2px solid #666
}
.button.disabled { pointer-events: none; border-bottom: 2px solid #666; background-color: #999; opacity: 0.5 }
/* Secondary (outline) button, used as the 2nd confirm choice */
.button.button-2 { background-color: transparent; border: 1px solid #EEE; color: #555 }
.button.button-2:hover { border: 1px solid #CCC; color: #000 }

/* Fixbutton */

.fixbutton {
  position: absolute; right: 35px; top: 15px; width: 40px; z-index: 999;
  text-align: center; color: white; font-family: Consolas, Monaco, monospace; font-size: 25px;
}
|
||||||
|
.fixbutton-bg {
|
||||||
|
border-radius: 80px; background-color: rgba(180, 180, 180, 0.5); cursor: pointer;
|
||||||
|
display: block; width: 80px; height: 80px; transition: background-color 0.2s, box-shadow 0.5s; transform: scale(0.6); margin-left: -20px; margin-top: -20px; /* 2x size to prevent blur on anim */
|
||||||
|
/*box-shadow: inset 105px 260px 0 -200px rgba(0,0,0,0.1);*/ /* box-shadow: inset -75px 183px 0 -200px rgba(0,0,0,0.1); */
|
||||||
|
}
|
||||||
|
.fixbutton-text { pointer-events: none; position: absolute; z-index: 999; width: 40px; backface-visibility: hidden; perspective: 1000px; line-height: 0; padding-top: 5px; opacity: 0.9 }
|
||||||
|
.fixbutton-burger { pointer-events: none; position: absolute; z-index: 999; width: 40px; opacity: 0; left: -20px; font-size: 40px; line-height: 0; font-family: Verdana, sans-serif; margin-top: 17px }
|
||||||
|
.fixbutton-bg:hover { background-color: #AF3BFF }
|
||||||
|
.fixbutton-bg:active { background-color: #9E2FEA; top: 1px; transition: none }
|
||||||
|
|
||||||
|
/* Notification */
|
||||||
|
|
||||||
|
.notifications { position: absolute; top: 0; right: 80px; display: inline-block; z-index: 999; white-space: nowrap }
|
||||||
|
.notification {
|
||||||
|
position: relative; float: right; clear: both; margin: 10px; box-sizing: border-box; overflow: hidden; backface-visibility: hidden;
|
||||||
|
perspective: 1000px; padding-bottom: 5px; color: #4F4F4F; font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif;
|
||||||
|
font-size: 14px; line-height: 20px; /*border: 1px solid rgba(210, 206, 205, 0.2)*/
|
||||||
|
}
|
||||||
|
.notification-icon {
|
||||||
|
display: block; width: 50px; height: 50px; position: absolute; float: left; z-index: 2;
|
||||||
|
text-align: center; background-color: #e74c3c; line-height: 45px; vertical-align: bottom; font-size: 40px; color: white;
|
||||||
|
}
|
||||||
|
.notification .body {
|
||||||
|
padding-left: 14px; padding-right: 60px; height: 50px; vertical-align: middle; display: table; padding-right: 20px; box-sizing: border-box;
|
||||||
|
background-color: white; left: 50px; top: 0; position: relative; padding-top: 5px; padding-bottom: 5px;
|
||||||
|
}
|
||||||
|
.notification .message-outer { display: table-row }
|
||||||
|
.notification .buttons { display: table-cell; vertical-align: top; padding-top: 9px; padding-right: 20px; text-align: right; }
|
||||||
|
.notification.long .body { padding-top: 10px; padding-bottom: 10px }
|
||||||
|
.notification .message { display: table-cell; vertical-align: middle; max-width: 500px; white-space: normal; }
|
||||||
|
|
||||||
|
.notification.visible { max-width: 350px }
|
||||||
|
|
||||||
|
.notification .close { position: absolute; top: 0; right: 0; font-size: 19px; line-height: 13px; color: #DDD; padding: 7px; text-decoration: none }
|
||||||
|
.notification .close:hover { color: black }
|
||||||
|
.notification .close:active, .notification .close:focus { color: #AF3BFF }
|
||||||
|
.notification small { color: #AAA }
|
||||||
|
.notification .multiline { white-space: normal; word-break: break-word; max-width: 300px; }
|
||||||
|
.body-white .notification { box-shadow: 0 1px 9px rgba(0,0,0,0.1) }
|
||||||
|
|
||||||
|
/* Notification select */
|
||||||
|
.notification .select {
|
||||||
|
display: block; padding: 10px; margin-right: -32px; text-decoration: none; border-left: 3px solid #EEE;
|
||||||
|
margin-top: 1px; transition: all 0.3s; color: #666
|
||||||
|
}
|
||||||
|
.notification .select:hover, .notification .select.active { background-color: #007AFF; border-left: 3px solid #5D68FF; color: white; transition: none }
|
||||||
|
.notification .select:active, .notification .select:focus { background-color: #3396FF; color: white; transition: none; border-left-color: #3396FF }
|
||||||
|
.notification .select.disabled { opacity: 0.5; pointer-events: none }
|
||||||
|
.notification .select small { color: inherit; }
|
||||||
|
|
||||||
|
/* Notification types */
|
||||||
|
.notification-ask .notification-icon { background-color: #f39c12; }
|
||||||
|
.notification-info .notification-icon { font-size: 22px; font-weight: bold; background-color: #2980b9; line-height: 48px }
|
||||||
|
.notification-done .notification-icon { font-size: 22px; background-color: #27ae60 }
|
||||||
|
|
||||||
|
/* Notification input */
|
||||||
|
.notification .input { padding: 6px; border: 1px solid #DDD; margin-left: 10px; border-bottom: 2px solid #DDD; border-radius: 1px; margin-right: -11px; transition: all 0.3s }
|
||||||
|
.notification .input:focus { border-color: #95a5a6; outline: none }
|
||||||
|
|
||||||
|
/* Notification progress */
|
||||||
|
.notification .circle { width: 50px; height: 50px; position: absolute; left: -50px; top: 0px; background-color: #e2e9ec; z-index: 1; background: linear-gradient(405deg, rgba(226, 233, 236, 0.8), #efefef); }
|
||||||
|
.notification .circle-svg { margin-left: 10px; margin-top: 10px; transform: rotateZ(-90deg); }
|
||||||
|
.notification .circle-bg { stroke: #FFF; stroke-width: 2px; animation: rolling 0.4s infinite linear; stroke-dasharray: 40px; transition: all 1s }
|
||||||
|
.notification .circle-fg { stroke-dashoffset: 200; stroke: #2ecc71; stroke-width: 2px; stroke-dasharray: 75px; transition: all 5s cubic-bezier(0.19, 1, 0.22, 1); }
|
||||||
|
.notification-progress .notification-icon { opacity: 0; transform: scale(0); transition: all 0.3s ease-in-out }
|
||||||
|
.notification-progress .icon-success { transform: rotate(45deg) scale(0); transition: all 0.6s cubic-bezier(0.68, -0.55, 0.265, 1.55); }
|
||||||
|
@keyframes rolling {
|
||||||
|
0% { stroke-dashoffset: 80px }
|
||||||
|
100% { stroke-dashoffset: 0px }
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Icons (based on http://nicolasgallagher.com/pure-css-gui-icons/demo/) */
|
||||||
|
|
||||||
|
.icon-success { left:6px; width:5px; height:12px; border-width:0 5px 5px 0; border-style:solid; border-color:white; margin-left: 20px; margin-top: 15px; transform:rotate(45deg) }
|
||||||
|
|
||||||
|
|
||||||
|
/* Infopanel */
|
||||||
|
.infopanel-container { width: 100%; height: 100%; overflow: hidden; position: absolute; display: none; }
|
||||||
|
.infopanel-container.visible { display: block; }
|
||||||
|
.infopanel {
|
||||||
|
position: absolute; z-index: 999; padding: 15px 15px; bottom: 25px; right: 50px; border: 1px solid #eff3fe;
|
||||||
|
font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif; box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17);
|
||||||
|
background-color: white; border-left: 4px solid #9a61f8; border-top-left-radius: 4px; border-bottom-left-radius: 4px;
|
||||||
|
transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1);
|
||||||
|
}
|
||||||
|
.infopanel.closed { box-shadow: none; transform: translateX(100%); right: 0px; cursor: pointer; }
|
||||||
|
.infopanel .message { font-size: 13px; line-height: 15px; display: inline-block; vertical-align: -9px; }
|
||||||
|
.infopanel .message .line { max-width: 200px; display: inline-block; white-space: nowrap; text-overflow: ellipsis; overflow: hidden; }
|
||||||
|
.infopanel .message .line-1 { font-weight: bold; }
|
||||||
|
.infopanel .close { font-size: 16px; text-decoration: none; color: #AAA; padding: 5px; margin-right: -12px; vertical-align: 1px; display: inline-block; }
|
||||||
|
.infopanel .close:hover { color: black }
|
||||||
|
.infopanel .close:active, .infopanel .close:focus { color: #AF3BFF }
|
||||||
|
.infopanel.closed .closed-num { opacity: 1; margin-left: -36px; pointer-events: inherit; }
|
||||||
|
.infopanel .closed-num {
|
||||||
|
position: absolute; margin-top: 6px; background-color: #6666663d; color: white; width: 10px; text-align: center;
|
||||||
|
padding: 4px; border-top-left-radius: 6px; border-bottom-left-radius: 6px; font-size: 10px;
|
||||||
|
opacity: 0; margin-left: 0px; pointer-events: none; transition: all 0.6s;
|
||||||
|
}
|
||||||
|
.infopanel.unfolded .message .line { overflow: visible; white-space: normal; }
|
||||||
|
.body-sidebar .infopanel { right: 425px; }
|
||||||
|
.body-sidebar .infopanel.closed { right: 0px; }
|
||||||
|
|
||||||
|
/* Loading screen */
|
||||||
|
|
||||||
|
.loadingscreen { width: 100%; height: 100%; position: absolute; background-color: #EEE; z-index: 1; overflow: auto; display: none }
|
||||||
|
.theme-dark .loadingscreen { background-color: #180922; }
|
||||||
|
.loading-text { text-align: center; vertical-align: middle; top: 50%; position: absolute; margin-top: 39px; width: 100% }
|
||||||
|
.loading-config {
|
||||||
|
margin: 20px; display: inline-block; text-transform: uppercase; font-family: Consolas, monospace; position: relative;;
|
||||||
|
text-decoration: none; letter-spacing: 1px; font-size: 12px; border-bottom: 1px solid #999; top: -60px; transition: all 1s cubic-bezier(1, 0, 0, 1); transition-delay: 0.3s;
|
||||||
|
}
|
||||||
|
.loading-config:hover { border-bottom-color: #000; transition: none; }
|
||||||
|
.theme-dark .loading-config { color: white }
|
||||||
|
.loadingscreen.ready .loading-config { top: 0px; }
|
||||||
|
|
||||||
|
|
||||||
|
/* Loading console */
|
||||||
|
.loadingscreen .console { line-height: 24px; font-family: monospace; font-size: 14px; color: #ADADAD; text-transform: uppercase; opacity: 0; transform: translateY(-20px); }
|
||||||
|
.loadingscreen .console-line:last-child { color: #6C6767 }
|
||||||
|
.loadingscreen .console .cursor {
|
||||||
|
background-color: #999; color: #999; animation: pulse 1.5s infinite ease-in-out; margin-right: -9px;
|
||||||
|
display: inline-block; width: 9px; height: 19px; vertical-align: -4px;
|
||||||
|
}
|
||||||
|
.loadingscreen .console .console-error { color: #e74c3c; font-weight: bold; animation: pulse 2s infinite linear }
|
||||||
|
.loadingscreen .console .console-warning { color: #8e44ad; }
|
||||||
|
.loadingscreen .console .button { margin: 20px; display: inline-block; text-transform: none; padding: 10px 20px }
|
||||||
|
|
||||||
|
|
||||||
|
/* Flipper loading anim */
|
||||||
|
.flipper-container { width: 40px; height: 40px; position: absolute; top: 0%; left: 50%; transform: translate3d(-50%, -50%, 0); perspective: 1200; opacity: 0 }
|
||||||
|
.flipper { position: relative; display: block; height: inherit; width: inherit; animation: flip 1.2s infinite ease-in-out; -webkit-transform-style: preserve-3d; }
|
||||||
|
.flipper .front, .flipper .back {
|
||||||
|
position: absolute; top: 0; left: 0; backface-visibility: hidden; /*transform-style: preserve-3d;*/ display: block;
|
||||||
|
background-color: #d50000; height: 100%; width: 100%; /* outline: 1px solid transparent; /* FF AA fix */
|
||||||
|
}
|
||||||
|
.flipper .back { background-color: white; z-index: 800; transform: rotateY(-180deg) }
|
||||||
|
|
||||||
|
/* Loading ready */
|
||||||
|
.loadingscreen.ready .console { opacity: 1; transform: translateY(0px); transition: all 0.3s }
|
||||||
|
.loadingscreen.ready .flipper-container { top: 50%; opacity: 1; transition: all 1s cubic-bezier(1, 0, 0, 1); }
|
||||||
|
|
||||||
|
|
||||||
|
/* Loading done */
|
||||||
|
.loadingscreen.done { height: 0%; transition: all 1s cubic-bezier(0.6, -0.28, 0.735, 0.045); }
|
||||||
|
.loadingscreen.done .console { transform: translateY(300px); opacity: 0; transition: all 1.5s }
|
||||||
|
.loadingscreen.done .flipper-container { opacity: 0; transition: all 1.5s }
|
||||||
|
|
||||||
|
|
||||||
|
.progressbar {
|
||||||
|
background: #26C281; position: fixed; width: 100%; z-index: 100; top: 0; left: 0; transform: scaleX(0); transform-origin: 0% 0%; transform:translate3d(0,0,0);
|
||||||
|
height: 2px; transition: transform 1s, opacity 1s; display: none; backface-visibility: hidden; transform-style: preserve-3d;
|
||||||
|
}
|
||||||
|
.progressbar .peg {
|
||||||
|
display: block; position: absolute; right: 0; width: 100px; height: 100%;
|
||||||
|
box-shadow: 0 0 10px #AF3BFF, 0 0 5px #29d; opacity: 1.0; transform: rotate(3deg) translate(0px, -4px);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Opener overlay */
|
||||||
|
.opener-overlay { position: fixed; z-index: 9999; width: 100%; text-align: center; background-color: rgba(100,100,100,0.5); height: 100%; vertical-align: middle; }
|
||||||
|
.opener-overlay .dialog { background-color: white; padding: 40px; display: inline-block; color: #4F4F4F; font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif; font-size: 14px; }
|
||||||
|
|
||||||
|
/* Icons */
|
||||||
|
.icon-profile { font-size: 6px; top: 0em; border-radius: 0.7em 0.7em 0 0; background: #FFFFFF; width: 1.5em; height: 0.7em; position: relative; display: inline-block; margin-right: 4px }
|
||||||
|
.icon-profile::before { position: absolute; content: ""; top: -1em; left: 0.38em; width: 0.8em; height: 0.85em; border-radius: 50%; background: #FFFFFF }
|
||||||
|
|
||||||
|
/* Animations */
|
||||||
|
|
||||||
|
@keyframes flip {
|
||||||
|
0% { transform: perspective(120px) rotateX(0deg) rotateY(0deg); }
|
||||||
|
50% { transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) }
|
||||||
|
100% { transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); }
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes pulse {
|
||||||
|
0% { opacity: 0 }
|
||||||
|
5% { opacity: 1 }
|
||||||
|
30% { opacity: 1 }
|
||||||
|
70% { opacity: 0 }
|
||||||
|
100% { opacity: 0 }
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Print styles */
|
||||||
|
@media print {
|
||||||
|
#inner-iframe { position: fixed; }
|
||||||
|
.progressbar, .fixbutton, .notifications, .loadingscreen { visibility: hidden; }
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Small screen */
|
||||||
|
@media screen and (max-width: 600px) {
|
||||||
|
.notification .message { white-space: normal; }
|
||||||
|
.notification .buttons { padding-right: 22px; padding-right: 40px; }
|
||||||
|
.notification .button { white-space: nowrap; }
|
||||||
|
.notification { margin: 0px; }
|
||||||
|
.notifications { right: 0px; max-width: 80%; }
|
||||||
|
}
|
|
@ -0,0 +1,22 @@
|
||||||
|
class WrapperZeroFrame
|
||||||
|
constructor: (wrapper) ->
|
||||||
|
@wrapperCmd = wrapper.cmd
|
||||||
|
@wrapperResponse = wrapper.ws.response
|
||||||
|
console.log "WrapperZeroFrame", wrapper
|
||||||
|
|
||||||
|
cmd: (cmd, params={}, cb=null) =>
|
||||||
|
@wrapperCmd(cmd, params, cb)
|
||||||
|
|
||||||
|
response: (to, result) =>
|
||||||
|
@wrapperResponse(to, result)
|
||||||
|
|
||||||
|
isProxyRequest: ->
|
||||||
|
return window.location.pathname == "/"
|
||||||
|
|
||||||
|
certSelectGotoSite: (elem) =>
|
||||||
|
href = $(elem).attr("href")
|
||||||
|
if @isProxyRequest() # Fix for proxy request
|
||||||
|
$(elem).attr("href", "http://zero#{href}")
|
||||||
|
|
||||||
|
|
||||||
|
window.zeroframe = new WrapperZeroFrame(window.wrapper)
|
|
@ -0,0 +1,49 @@
|
||||||
|
DARK = "(prefers-color-scheme: dark)"
|
||||||
|
LIGHT = "(prefers-color-scheme: light)"
|
||||||
|
|
||||||
|
mqDark = window.matchMedia(DARK)
|
||||||
|
mqLight = window.matchMedia(LIGHT)
|
||||||
|
|
||||||
|
|
||||||
|
changeColorScheme = (theme) ->
|
||||||
|
zeroframe.cmd "userGetGlobalSettings", [], (user_settings) ->
|
||||||
|
if user_settings.theme != theme
|
||||||
|
user_settings.theme = theme
|
||||||
|
zeroframe.cmd "userSetGlobalSettings", [user_settings], (status) ->
|
||||||
|
if status == "ok"
|
||||||
|
location.reload()
|
||||||
|
return
|
||||||
|
return
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
displayNotification = ({matches, media}) ->
|
||||||
|
if !matches
|
||||||
|
return
|
||||||
|
|
||||||
|
zeroframe.cmd "siteInfo", [], (site_info) ->
|
||||||
|
if "ADMIN" in site_info.settings.permissions
|
||||||
|
zeroframe.cmd "wrapperNotification", ["info", "Your system's theme has been changed.<br>Please reload site to use it."]
|
||||||
|
else
|
||||||
|
zeroframe.cmd "wrapperNotification", ["info", "Your system's theme has been changed.<br>Please open ZeroHello to use it."]
|
||||||
|
return
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
detectColorScheme = ->
|
||||||
|
if mqDark.matches
|
||||||
|
changeColorScheme("dark")
|
||||||
|
else if mqLight.matches
|
||||||
|
changeColorScheme("light")
|
||||||
|
|
||||||
|
mqDark.addListener(displayNotification)
|
||||||
|
mqLight.addListener(displayNotification)
|
||||||
|
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
zeroframe.cmd "userGetGlobalSettings", [], (user_settings) ->
|
||||||
|
if user_settings.use_system_theme == true
|
||||||
|
detectColorScheme()
|
||||||
|
|
||||||
|
return
|
|
@ -0,0 +1,269 @@
|
||||||
|
|
||||||
|
/* ---- Wrapper.css ---- */
|
||||||
|
|
||||||
|
|
||||||
|
body { margin: 0; padding: 0; height: 100%; background-color: #D2CECD; overflow: hidden }
|
||||||
|
body.back { background-color: #090909 }
|
||||||
|
a { color: black }
|
||||||
|
|
||||||
|
.unsupported { text-align: center; z-index: 999; position: relative; margin: auto; width: 480px; background-color: white; padding: 20px; border-bottom: 2px solid #e74c3c; -webkit-box-shadow: 0px 0px 15px #DDD; -moz-box-shadow: 0px 0px 15px #DDD; -o-box-shadow: 0px 0px 15px #DDD; -ms-box-shadow: 0px 0px 15px #DDD; box-shadow: 0px 0px 15px #DDD ; font-family: monospace; }
|
||||||
|
.template { display: none !important }
|
||||||
|
|
||||||
|
#inner-iframe { width: 100%; height: 100%; position: absolute; border: 0; } /*; transition: all 0.8s cubic-bezier(0.68, -0.55, 0.265, 1.55), opacity 0.8s ease-in-out*/
|
||||||
|
#inner-iframe.back { -webkit-transform: scale(0.95) translate(-300px, 0); -moz-transform: scale(0.95) translate(-300px, 0); -o-transform: scale(0.95) translate(-300px, 0); -ms-transform: scale(0.95) translate(-300px, 0); transform: scale(0.95) translate(-300px, 0) ; opacity: 0.4 }
|
||||||
|
|
||||||
|
.button {
|
||||||
|
padding: 5px 10px; margin-left: 10px; background-color: #FFF85F; border-bottom: 2px solid #CDBD1E;
|
||||||
|
-webkit-border-radius: 2px; -moz-border-radius: 2px; -o-border-radius: 2px; -ms-border-radius: 2px; border-radius: 2px ; text-decoration: none; -webkit-transition: all 0.5s; -moz-transition: all 0.5s; -o-transition: all 0.5s; -ms-transition: all 0.5s; transition: all 0.5s ; background-position: left center; white-space: nowrap;
|
||||||
|
}
|
||||||
|
.button:hover { background-color: #FFF400; border-bottom: 2px solid #4D4D4C; -webkit-transition: none ; -moz-transition: none ; -o-transition: none ; -ms-transition: none ; transition: none }
|
||||||
|
.button:active { position: relative; top: 1px }
|
||||||
|
.button:focus { outline: none }
|
||||||
|
|
||||||
|
.button-Delete { background-color: #e74c3c; border-bottom-color: #c0392b; color: white }
|
||||||
|
.button-Delete:hover { background-color: #FF5442; border-bottom-color: #8E2B21 }
|
||||||
|
|
||||||
|
.button.loading {
|
||||||
|
color: rgba(0,0,0,0); background: #999 url(img/loading.gif) no-repeat center center;
|
||||||
|
-webkit-transition: all 0.5s ease-out ; -moz-transition: all 0.5s ease-out ; -o-transition: all 0.5s ease-out ; -ms-transition: all 0.5s ease-out ; transition: all 0.5s ease-out ; pointer-events: none; border-bottom: 2px solid #666
|
||||||
|
}
|
||||||
|
.button.disabled { pointer-events: none; border-bottom: 2px solid #666; background-color: #999; opacity: 0.5 }
|
||||||
|
.button.button-2 { background-color: transparent; border: 1px solid #EEE; color: #555 }
|
||||||
|
.button.button-2:hover { border: 1px solid #CCC; color: #000 }
|
||||||
|
|
||||||
|
/* Fixbutton */
|
||||||
|
|
||||||
|
.fixbutton {
|
||||||
|
position: absolute; right: 35px; top: 15px; width: 40px; z-index: 999;
|
||||||
|
text-align: center; color: white; font-family: Consolas, Monaco, monospace; font-size: 25px;
|
||||||
|
}
|
||||||
|
.fixbutton-bg {
|
||||||
|
-webkit-border-radius: 80px; -moz-border-radius: 80px; -o-border-radius: 80px; -ms-border-radius: 80px; border-radius: 80px ; background-color: rgba(180, 180, 180, 0.5); cursor: pointer;
|
||||||
|
display: block; width: 80px; height: 80px; -webkit-transition: background-color 0.2s, box-shadow 0.5s; -moz-transition: background-color 0.2s, box-shadow 0.5s; -o-transition: background-color 0.2s, box-shadow 0.5s; -ms-transition: background-color 0.2s, box-shadow 0.5s; transition: background-color 0.2s, box-shadow 0.5s ; -webkit-transform: scale(0.6); -moz-transform: scale(0.6); -o-transform: scale(0.6); -ms-transform: scale(0.6); transform: scale(0.6) ; margin-left: -20px; margin-top: -20px; /* 2x size to prevent blur on anim */
|
||||||
|
/*box-shadow: inset 105px 260px 0 -200px rgba(0,0,0,0.1);*/ /* -webkit-box-shadow: inset -75px 183px 0 -200px rgba(0,0,0,0.1); -moz-box-shadow: inset -75px 183px 0 -200px rgba(0,0,0,0.1); -o-box-shadow: inset -75px 183px 0 -200px rgba(0,0,0,0.1); -ms-box-shadow: inset -75px 183px 0 -200px rgba(0,0,0,0.1); box-shadow: inset -75px 183px 0 -200px rgba(0,0,0,0.1) ; */
|
||||||
|
}
|
||||||
|
.fixbutton-text { pointer-events: none; position: absolute; z-index: 999; width: 40px; -webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; -o-backface-visibility: hidden; -ms-backface-visibility: hidden; backface-visibility: hidden ; -webkit-perspective: 1000px; -moz-perspective: 1000px; -o-perspective: 1000px; -ms-perspective: 1000px; perspective: 1000px ; line-height: 0; padding-top: 5px; opacity: 0.9 }
|
||||||
|
.fixbutton-burger { pointer-events: none; position: absolute; z-index: 999; width: 40px; opacity: 0; left: -20px; font-size: 40px; line-height: 0; font-family: Verdana, sans-serif; margin-top: 17px }
|
||||||
|
.fixbutton-bg:hover { background-color: #AF3BFF }
|
||||||
|
.fixbutton-bg:active { background-color: #9E2FEA; top: 1px; -webkit-transition: none ; -moz-transition: none ; -o-transition: none ; -ms-transition: none ; transition: none }
|
||||||
|
|
||||||
|
/* Notification */
|
||||||
|
|
||||||
|
.notifications { position: absolute; top: 0; right: 80px; display: inline-block; z-index: 999; white-space: nowrap }
|
||||||
|
.notification {
|
||||||
|
position: relative; float: right; clear: both; margin: 10px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; overflow: hidden; -webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; -o-backface-visibility: hidden; -ms-backface-visibility: hidden; backface-visibility: hidden ;
|
||||||
|
-webkit-perspective: 1000px; -moz-perspective: 1000px; -o-perspective: 1000px; -ms-perspective: 1000px; perspective: 1000px ; padding-bottom: 5px; color: #4F4F4F; font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif;
|
||||||
|
font-size: 14px; line-height: 20px; /*border: 1px solid rgba(210, 206, 205, 0.2)*/
|
||||||
|
}
|
||||||
|
.notification-icon {
|
||||||
|
display: block; width: 50px; height: 50px; position: absolute; float: left; z-index: 2;
|
||||||
|
text-align: center; background-color: #e74c3c; line-height: 45px; vertical-align: bottom; font-size: 40px; color: white;
|
||||||
|
}
|
||||||
|
.notification .body {
|
||||||
|
padding-left: 14px; padding-right: 60px; height: 50px; vertical-align: middle; display: table; padding-right: 20px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ;
|
||||||
|
background-color: white; left: 50px; top: 0; position: relative; padding-top: 5px; padding-bottom: 5px;
|
||||||
|
}
|
||||||
|
.notification .message-outer { display: table-row }
|
||||||
|
.notification .buttons { display: table-cell; vertical-align: top; padding-top: 9px; padding-right: 20px; text-align: right; }
|
||||||
|
.notification.long .body { padding-top: 10px; padding-bottom: 10px }
|
||||||
|
.notification .message { display: table-cell; vertical-align: middle; max-width: 500px; white-space: normal; }
|
||||||
|
|
||||||
|
.notification.visible { max-width: 350px }
|
||||||
|
|
||||||
|
.notification .close { position: absolute; top: 0; right: 0; font-size: 19px; line-height: 13px; color: #DDD; padding: 7px; text-decoration: none }
|
||||||
|
.notification .close:hover { color: black }
|
||||||
|
.notification .close:active, .notification .close:focus { color: #AF3BFF }
|
||||||
|
.notification small { color: #AAA }
|
||||||
|
.notification .multiline { white-space: normal; word-break: break-word; max-width: 300px; }
|
||||||
|
.body-white .notification { -webkit-box-shadow: 0 1px 9px rgba(0,0,0,0.1) ; -moz-box-shadow: 0 1px 9px rgba(0,0,0,0.1) ; -o-box-shadow: 0 1px 9px rgba(0,0,0,0.1) ; -ms-box-shadow: 0 1px 9px rgba(0,0,0,0.1) ; box-shadow: 0 1px 9px rgba(0,0,0,0.1) }
|
||||||
|
|
||||||
|
/* Notification select */
|
||||||
|
.notification .select {
|
||||||
|
display: block; padding: 10px; margin-right: -32px; text-decoration: none; border-left: 3px solid #EEE;
|
||||||
|
margin-top: 1px; -webkit-transition: all 0.3s; -moz-transition: all 0.3s; -o-transition: all 0.3s; -ms-transition: all 0.3s; transition: all 0.3s ; color: #666
|
||||||
|
}
|
||||||
|
.notification .select:hover, .notification .select.active { background-color: #007AFF; border-left: 3px solid #5D68FF; color: white; -webkit-transition: none ; -moz-transition: none ; -o-transition: none ; -ms-transition: none ; transition: none }
|
||||||
|
.notification .select:active, .notification .select:focus { background-color: #3396FF; color: white; -webkit-transition: none; -moz-transition: none; -o-transition: none; -ms-transition: none; transition: none ; border-left-color: #3396FF }
|
||||||
|
.notification .select.disabled { opacity: 0.5; pointer-events: none }
|
||||||
|
.notification .select small { color: inherit; }
|
||||||
|
|
||||||
|
/* Notification types */
|
||||||
|
.notification-ask .notification-icon { background-color: #f39c12; }
|
||||||
|
.notification-info .notification-icon { font-size: 22px; font-weight: bold; background-color: #2980b9; line-height: 48px }
|
||||||
|
.notification-done .notification-icon { font-size: 22px; background-color: #27ae60 }
|
||||||
|
|
||||||
|
/* Notification input */
|
||||||
|
.notification .input { padding: 6px; border: 1px solid #DDD; margin-left: 10px; border-bottom: 2px solid #DDD; -webkit-border-radius: 1px; -moz-border-radius: 1px; -o-border-radius: 1px; -ms-border-radius: 1px; border-radius: 1px ; margin-right: -11px; -webkit-transition: all 0.3s ; -moz-transition: all 0.3s ; -o-transition: all 0.3s ; -ms-transition: all 0.3s ; transition: all 0.3s }
|
||||||
|
.notification .input:focus { border-color: #95a5a6; outline: none }
|
||||||
|
|
||||||
|
/* Notification progress */
|
||||||
|
.notification .circle { width: 50px; height: 50px; position: absolute; left: -50px; top: 0px; background-color: #e2e9ec; z-index: 1; background: -webkit-linear-gradient(405deg, rgba(226, 233, 236, 0.8), #efefef);background: -moz-linear-gradient(405deg, rgba(226, 233, 236, 0.8), #efefef);background: -o-linear-gradient(405deg, rgba(226, 233, 236, 0.8), #efefef);background: -ms-linear-gradient(405deg, rgba(226, 233, 236, 0.8), #efefef);background: linear-gradient(405deg, rgba(226, 233, 236, 0.8), #efefef); }
|
||||||
|
.notification .circle-svg { margin-left: 10px; margin-top: 10px; -webkit-transform: rotateZ(-90deg); -moz-transform: rotateZ(-90deg); -o-transform: rotateZ(-90deg); -ms-transform: rotateZ(-90deg); transform: rotateZ(-90deg) ; }
|
||||||
|
.notification .circle-bg { stroke: #FFF; stroke-width: 2px; -webkit-animation: rolling 0.4s infinite linear; -moz-animation: rolling 0.4s infinite linear; -o-animation: rolling 0.4s infinite linear; -ms-animation: rolling 0.4s infinite linear; animation: rolling 0.4s infinite linear ; stroke-dasharray: 40px; -webkit-transition: all 1s ; -moz-transition: all 1s ; -o-transition: all 1s ; -ms-transition: all 1s ; transition: all 1s }
|
||||||
|
.notification .circle-fg { stroke-dashoffset: 200; stroke: #2ecc71; stroke-width: 2px; stroke-dasharray: 75px; -webkit-transition: all 5s cubic-bezier(0.19, 1, 0.22, 1); -moz-transition: all 5s cubic-bezier(0.19, 1, 0.22, 1); -o-transition: all 5s cubic-bezier(0.19, 1, 0.22, 1); -ms-transition: all 5s cubic-bezier(0.19, 1, 0.22, 1); transition: all 5s cubic-bezier(0.19, 1, 0.22, 1) ; }
|
||||||
|
.notification-progress .notification-icon { opacity: 0; -webkit-transform: scale(0); -moz-transform: scale(0); -o-transform: scale(0); -ms-transform: scale(0); transform: scale(0) ; -webkit-transition: all 0.3s ease-in-out ; -moz-transition: all 0.3s ease-in-out ; -o-transition: all 0.3s ease-in-out ; -ms-transition: all 0.3s ease-in-out ; transition: all 0.3s ease-in-out }
|
||||||
|
.notification-progress .icon-success { -webkit-transform: rotate(45deg) scale(0); -moz-transform: rotate(45deg) scale(0); -o-transform: rotate(45deg) scale(0); -ms-transform: rotate(45deg) scale(0); transform: rotate(45deg) scale(0) ; -webkit-transition: all 0.6s cubic-bezier(0.68, -0.55, 0.265, 1.55); -moz-transition: all 0.6s cubic-bezier(0.68, -0.55, 0.265, 1.55); -o-transition: all 0.6s cubic-bezier(0.68, -0.55, 0.265, 1.55); -ms-transition: all 0.6s cubic-bezier(0.68, -0.55, 0.265, 1.55); transition: all 0.6s cubic-bezier(0.68, -0.55, 0.265, 1.55) ; }
|
||||||
|
@keyframes rolling {
|
||||||
|
0% { stroke-dashoffset: 80px }
|
||||||
|
100% { stroke-dashoffset: 0px }
|
||||||
|
}
|
||||||
|
@-webkit-keyframes rolling {
|
||||||
|
0% { stroke-dashoffset: 80px }
|
||||||
|
100% { stroke-dashoffset: 0px }
|
||||||
|
}
|
||||||
|
@-moz-keyframes rolling {
|
||||||
|
0% { stroke-dashoffset: 80px }
|
||||||
|
100% { stroke-dashoffset: 0px }
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/* Icons (based on http://nicolasgallagher.com/pure-css-gui-icons/demo/) */
|
||||||
|
|
||||||
|
.icon-success { left:6px; width:5px; height:12px; border-width:0 5px 5px 0; border-style:solid; border-color:white; margin-left: 20px; margin-top: 15px; transform:rotate(45deg) }
|
||||||
|
|
||||||
|
|
||||||
|
/* Infopanel */
|
||||||
|
.infopanel-container { width: 100%; height: 100%; overflow: hidden; position: absolute; display: none; }
|
||||||
|
.infopanel-container.visible { display: block; }
|
||||||
|
.infopanel {
|
||||||
|
position: absolute; z-index: 999; padding: 15px 15px; bottom: 25px; right: 50px; border: 1px solid #eff3fe;
|
||||||
|
font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif; -webkit-box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); -moz-box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); -o-box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); -ms-box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17) ;
|
||||||
|
background-color: white; border-left: 4px solid #9a61f8; border-top-left-radius: 4px; border-bottom-left-radius: 4px;
|
||||||
|
-webkit-transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); -moz-transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); -o-transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); -ms-transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1) ;
|
||||||
|
}
|
||||||
|
.infopanel.closed { -webkit-box-shadow: none; -moz-box-shadow: none; -o-box-shadow: none; -ms-box-shadow: none; box-shadow: none ; -webkit-transform: translateX(100%); -moz-transform: translateX(100%); -o-transform: translateX(100%); -ms-transform: translateX(100%); transform: translateX(100%) ; right: 0px; cursor: pointer; }
|
||||||
|
.infopanel .message { font-size: 13px; line-height: 15px; display: inline-block; vertical-align: -9px; }
|
||||||
|
.infopanel .message .line { max-width: 200px; display: inline-block; white-space: nowrap; text-overflow: ellipsis; overflow: hidden; }
|
||||||
|
.infopanel .message .line-1 { font-weight: bold; }
|
||||||
|
.infopanel .close { font-size: 16px; text-decoration: none; color: #AAA; padding: 5px; margin-right: -12px; vertical-align: 1px; display: inline-block; }
|
||||||
|
.infopanel .close:hover { color: black }
|
||||||
|
.infopanel .close:active, .infopanel .close:focus { color: #AF3BFF }
|
||||||
|
.infopanel.closed .closed-num { opacity: 1; margin-left: -36px; pointer-events: inherit; }
|
||||||
|
.infopanel .closed-num {
|
||||||
|
position: absolute; margin-top: 6px; background-color: #6666663d; color: white; width: 10px; text-align: center;
|
||||||
|
padding: 4px; border-top-left-radius: 6px; border-bottom-left-radius: 6px; font-size: 10px;
|
||||||
|
opacity: 0; margin-left: 0px; pointer-events: none; -webkit-transition: all 0.6s; -moz-transition: all 0.6s; -o-transition: all 0.6s; -ms-transition: all 0.6s; transition: all 0.6s ;
|
||||||
|
}
|
||||||
|
.infopanel.unfolded .message .line { overflow: visible; white-space: normal; }
|
||||||
|
.body-sidebar .infopanel { right: 425px; }
|
||||||
|
.body-sidebar .infopanel.closed { right: 0px; }
|
||||||
|
|
||||||
|
/* Loading screen */
|
||||||
|
|
||||||
|
.loadingscreen { width: 100%; height: 100%; position: absolute; background-color: #EEE; z-index: 1; overflow: auto; display: none }
|
||||||
|
.theme-dark .loadingscreen { background-color: #180922; }
|
||||||
|
.loading-text { text-align: center; vertical-align: middle; top: 50%; position: absolute; margin-top: 39px; width: 100% }
|
||||||
|
/* "Config" link on the loading screen; slides down (top: -60px -> 0) once the
   screen gets the .ready class. Fix: removed stray double semicolon after
   `position: relative`. */
.loading-config {
    margin: 20px; display: inline-block; text-transform: uppercase; font-family: Consolas, monospace; position: relative;
    text-decoration: none; letter-spacing: 1px; font-size: 12px; border-bottom: 1px solid #999; top: -60px; -webkit-transition: all 1s cubic-bezier(1, 0, 0, 1); -moz-transition: all 1s cubic-bezier(1, 0, 0, 1); -o-transition: all 1s cubic-bezier(1, 0, 0, 1); -ms-transition: all 1s cubic-bezier(1, 0, 0, 1); transition: all 1s cubic-bezier(1, 0, 0, 1) ; transition-delay: 0.3s;
}
|
||||||
|
.loading-config:hover { border-bottom-color: #000; -webkit-transition: none; -moz-transition: none; -o-transition: none; -ms-transition: none; transition: none ; }
|
||||||
|
.theme-dark .loading-config { color: white }
|
||||||
|
.loadingscreen.ready .loading-config { top: 0px; }
|
||||||
|
|
||||||
|
|
||||||
|
/* Loading console */
|
||||||
|
.loadingscreen .console { line-height: 24px; font-family: monospace; font-size: 14px; color: #ADADAD; text-transform: uppercase; opacity: 0; -webkit-transform: translateY(-20px); -moz-transform: translateY(-20px); -o-transform: translateY(-20px); -ms-transform: translateY(-20px); transform: translateY(-20px) ; }
|
||||||
|
.loadingscreen .console-line:last-child { color: #6C6767 }
|
||||||
|
.loadingscreen .console .cursor {
|
||||||
|
background-color: #999; color: #999; -webkit-animation: pulse 1.5s infinite ease-in-out; -moz-animation: pulse 1.5s infinite ease-in-out; -o-animation: pulse 1.5s infinite ease-in-out; -ms-animation: pulse 1.5s infinite ease-in-out; animation: pulse 1.5s infinite ease-in-out ; margin-right: -9px;
|
||||||
|
display: inline-block; width: 9px; height: 19px; vertical-align: -4px;
|
||||||
|
}
|
||||||
|
.loadingscreen .console .console-error { color: #e74c3c; font-weight: bold; -webkit-animation: pulse 2s infinite linear ; -moz-animation: pulse 2s infinite linear ; -o-animation: pulse 2s infinite linear ; -ms-animation: pulse 2s infinite linear ; animation: pulse 2s infinite linear }
|
||||||
|
.loadingscreen .console .console-warning { color: #8e44ad; }
|
||||||
|
.loadingscreen .console .button { margin: 20px; display: inline-block; text-transform: none; padding: 10px 20px }
|
||||||
|
|
||||||
|
|
||||||
|
/* Flipper loading anim */
|
||||||
|
/* Fix: the standard `perspective` property requires a <length>; a unitless 1200
   is invalid and silently dropped by modern browsers (only legacy -webkit-
   accepted bare numbers). Added `px` to all variants. */
.flipper-container { width: 40px; height: 40px; position: absolute; top: 0%; left: 50%; -webkit-transform: translate3d(-50%, -50%, 0); -moz-transform: translate3d(-50%, -50%, 0); -o-transform: translate3d(-50%, -50%, 0); -ms-transform: translate3d(-50%, -50%, 0); transform: translate3d(-50%, -50%, 0) ; -webkit-perspective: 1200px; -moz-perspective: 1200px; -o-perspective: 1200px; -ms-perspective: 1200px; perspective: 1200px ; opacity: 0 }
|
||||||
|
.flipper { position: relative; display: block; height: inherit; width: inherit; -webkit-animation: flip 1.2s infinite ease-in-out; -moz-animation: flip 1.2s infinite ease-in-out; -o-animation: flip 1.2s infinite ease-in-out; -ms-animation: flip 1.2s infinite ease-in-out; animation: flip 1.2s infinite ease-in-out ; -webkit-transform-style: preserve-3d; }
|
||||||
|
.flipper .front, .flipper .back {
|
||||||
|
position: absolute; top: 0; left: 0; -webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; -o-backface-visibility: hidden; -ms-backface-visibility: hidden; backface-visibility: hidden ; /*transform-style: preserve-3d;*/ display: block;
|
||||||
|
background-color: #d50000; height: 100%; width: 100%; /* outline: 1px solid transparent; FF antialiasing fix */
|
||||||
|
}
|
||||||
|
.flipper .back { background-color: white; z-index: 800; -webkit-transform: rotateY(-180deg) ; -moz-transform: rotateY(-180deg) ; -o-transform: rotateY(-180deg) ; -ms-transform: rotateY(-180deg) ; transform: rotateY(-180deg) }
|
||||||
|
|
||||||
|
/* Loading ready */
|
||||||
|
.loadingscreen.ready .console { opacity: 1; -webkit-transform: translateY(0px); -moz-transform: translateY(0px); -o-transform: translateY(0px); -ms-transform: translateY(0px); transform: translateY(0px) ; -webkit-transition: all 0.3s ; -moz-transition: all 0.3s ; -o-transition: all 0.3s ; -ms-transition: all 0.3s ; transition: all 0.3s }
|
||||||
|
.loadingscreen.ready .flipper-container { top: 50%; opacity: 1; -webkit-transition: all 1s cubic-bezier(1, 0, 0, 1); -moz-transition: all 1s cubic-bezier(1, 0, 0, 1); -o-transition: all 1s cubic-bezier(1, 0, 0, 1); -ms-transition: all 1s cubic-bezier(1, 0, 0, 1); transition: all 1s cubic-bezier(1, 0, 0, 1) ; }
|
||||||
|
|
||||||
|
|
||||||
|
/* Loading done */
|
||||||
|
.loadingscreen.done { height: 0%; -webkit-transition: all 1s cubic-bezier(0.6, -0.28, 0.735, 0.045); -moz-transition: all 1s cubic-bezier(0.6, -0.28, 0.735, 0.045); -o-transition: all 1s cubic-bezier(0.6, -0.28, 0.735, 0.045); -ms-transition: all 1s cubic-bezier(0.6, -0.28, 0.735, 0.045); transition: all 1s cubic-bezier(0.6, -0.28, 0.735, 0.045) ; }
|
||||||
|
.loadingscreen.done .console { -webkit-transform: translateY(300px); -moz-transform: translateY(300px); -o-transform: translateY(300px); -ms-transform: translateY(300px); transform: translateY(300px) ; opacity: 0; -webkit-transition: all 1.5s ; -moz-transition: all 1.5s ; -o-transition: all 1.5s ; -ms-transition: all 1.5s ; transition: all 1.5s }
|
||||||
|
.loadingscreen.done .flipper-container { opacity: 0; -webkit-transition: all 1.5s ; -moz-transition: all 1.5s ; -o-transition: all 1.5s ; -ms-transition: all 1.5s ; transition: all 1.5s }
|
||||||
|
|
||||||
|
|
||||||
|
.progressbar {
|
||||||
|
background: #26C281; position: fixed; width: 100%; z-index: 100; top: 0; left: 0; -webkit-transform: scaleX(0); -moz-transform: scaleX(0); -o-transform: scaleX(0); -ms-transform: scaleX(0); transform: scaleX(0) ; transform-origin: 0% 0%; transform:translate3d(0,0,0);
|
||||||
|
height: 2px; -webkit-transition: transform 1s, opacity 1s; -moz-transition: transform 1s, opacity 1s; -o-transition: transform 1s, opacity 1s; -ms-transition: transform 1s, opacity 1s; transition: transform 1s, opacity 1s ; display: none; -webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; -o-backface-visibility: hidden; -ms-backface-visibility: hidden; backface-visibility: hidden ; transform-style: preserve-3d;
|
||||||
|
}
|
||||||
|
.progressbar .peg {
|
||||||
|
display: block; position: absolute; right: 0; width: 100px; height: 100%;
|
||||||
|
-webkit-box-shadow: 0 0 10px #AF3BFF, 0 0 5px #29d; -moz-box-shadow: 0 0 10px #AF3BFF, 0 0 5px #29d; -o-box-shadow: 0 0 10px #AF3BFF, 0 0 5px #29d; -ms-box-shadow: 0 0 10px #AF3BFF, 0 0 5px #29d; box-shadow: 0 0 10px #AF3BFF, 0 0 5px #29d ; opacity: 1.0; -webkit-transform: rotate(3deg) translate(0px, -4px); -moz-transform: rotate(3deg) translate(0px, -4px); -o-transform: rotate(3deg) translate(0px, -4px); -ms-transform: rotate(3deg) translate(0px, -4px); transform: rotate(3deg) translate(0px, -4px) ;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Opener overlay */
|
||||||
|
.opener-overlay { position: fixed; z-index: 9999; width: 100%; text-align: center; background-color: rgba(100,100,100,0.5); height: 100%; vertical-align: middle; }
|
||||||
|
.opener-overlay .dialog { background-color: white; padding: 40px; display: inline-block; color: #4F4F4F; font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif; font-size: 14px; }
|
||||||
|
|
||||||
|
/* Icons */
|
||||||
|
.icon-profile { font-size: 6px; top: 0em; -webkit-border-radius: 0.7em 0.7em 0 0; -moz-border-radius: 0.7em 0.7em 0 0; -o-border-radius: 0.7em 0.7em 0 0; -ms-border-radius: 0.7em 0.7em 0 0; border-radius: 0.7em 0.7em 0 0 ; background: #FFFFFF; width: 1.5em; height: 0.7em; position: relative; display: inline-block; margin-right: 4px }
|
||||||
|
.icon-profile::before { position: absolute; content: ""; top: -1em; left: 0.38em; width: 0.8em; height: 0.85em; -webkit-border-radius: 50%; -moz-border-radius: 50%; -o-border-radius: 50%; -ms-border-radius: 50%; border-radius: 50% ; background: #FFFFFF }
|
||||||
|
|
||||||
|
/* Animations */
|
||||||
|
|
||||||
|
@keyframes flip {
|
||||||
|
0% { -webkit-transform: perspective(120px) rotateX(0deg) rotateY(0deg); -moz-transform: perspective(120px) rotateX(0deg) rotateY(0deg); -o-transform: perspective(120px) rotateX(0deg) rotateY(0deg); -ms-transform: perspective(120px) rotateX(0deg) rotateY(0deg); transform: perspective(120px) rotateX(0deg) rotateY(0deg) ; }
|
||||||
|
50% { -webkit-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; -moz-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; -o-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; -ms-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) }
|
||||||
|
100% { -webkit-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); -moz-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); -o-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); -ms-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg) ; }
|
||||||
|
}
|
||||||
|
@-webkit-keyframes flip {
|
||||||
|
0% { -webkit-transform: perspective(120px) rotateX(0deg) rotateY(0deg); -moz-transform: perspective(120px) rotateX(0deg) rotateY(0deg); -o-transform: perspective(120px) rotateX(0deg) rotateY(0deg); -ms-transform: perspective(120px) rotateX(0deg) rotateY(0deg); transform: perspective(120px) rotateX(0deg) rotateY(0deg) ; }
|
||||||
|
50% { -webkit-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; -moz-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; -o-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; -ms-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) }
|
||||||
|
100% { -webkit-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); -moz-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); -o-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); -ms-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg) ; }
|
||||||
|
}
|
||||||
|
@-moz-keyframes flip {
|
||||||
|
0% { -webkit-transform: perspective(120px) rotateX(0deg) rotateY(0deg); -moz-transform: perspective(120px) rotateX(0deg) rotateY(0deg); -o-transform: perspective(120px) rotateX(0deg) rotateY(0deg); -ms-transform: perspective(120px) rotateX(0deg) rotateY(0deg); transform: perspective(120px) rotateX(0deg) rotateY(0deg) ; }
|
||||||
|
50% { -webkit-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; -moz-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; -o-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; -ms-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) }
|
||||||
|
100% { -webkit-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); -moz-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); -o-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); -ms-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg) ; }
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@keyframes pulse {
|
||||||
|
0% { opacity: 0 }
|
||||||
|
5% { opacity: 1 }
|
||||||
|
30% { opacity: 1 }
|
||||||
|
70% { opacity: 0 }
|
||||||
|
100% { opacity: 0 }
|
||||||
|
}
|
||||||
|
@-webkit-keyframes pulse {
|
||||||
|
0% { opacity: 0 }
|
||||||
|
5% { opacity: 1 }
|
||||||
|
30% { opacity: 1 }
|
||||||
|
70% { opacity: 0 }
|
||||||
|
100% { opacity: 0 }
|
||||||
|
}
|
||||||
|
@-moz-keyframes pulse {
|
||||||
|
0% { opacity: 0 }
|
||||||
|
5% { opacity: 1 }
|
||||||
|
30% { opacity: 1 }
|
||||||
|
70% { opacity: 0 }
|
||||||
|
100% { opacity: 0 }
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/* Print styles */
|
||||||
|
@media print {
|
||||||
|
#inner-iframe { position: fixed; }
|
||||||
|
.progressbar, .fixbutton, .notifications, .loadingscreen { visibility: hidden; }
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Small screen */
|
||||||
|
@media screen and (max-width: 600px) {
|
||||||
|
.notification .message { white-space: normal; }
|
||||||
|
.notification .buttons { padding-right: 40px; } /* duplicate padding-right: 22px removed; the later 40px declaration always won */
|
||||||
|
.notification .button { white-space: nowrap; }
|
||||||
|
.notification { margin: 0px; }
|
||||||
|
.notifications { right: 0px; max-width: 80%; }
|
||||||
|
}
|
After Width: | Height: | Size: 8.0 KiB |
After Width: | Height: | Size: 1.1 KiB |
After Width: | Height: | Size: 2.3 KiB |
After Width: | Height: | Size: 723 B |
|
@ -0,0 +1 @@
|
||||||
|
<svg version="1.1" viewBox="0 0 2050 2050" xmlns="http://www.w3.org/2000/svg"><g fill="white"><path d="m299 1211v-787.6l725.7-423.4 725.3 420-175.34 325.7-549.86-340.7-373.5 221v381.2z"/><path d="m1749.4 842.6v787.6l-725.4 423.3-724.5-419.5 219.38-278.79 505.12 293.39 373.2-221.2v-381z"/><path d="M299.5 1634L1750 786.4V420L299 1267.4z"/></g></svg>
|
After Width: | Height: | Size: 350 B |
After Width: | Height: | Size: 11 KiB |
|
@ -0,0 +1 @@
|
||||||
|
<svg version="1.1" viewBox="0 0 2049.3 2053.5" xmlns="http://www.w3.org/2000/svg"><defs><linearGradient id="c" x1="1524.9" x2="520.7" y1="706" y2="1333.5" gradientUnits="userSpaceOnUse"><stop stop-color="#9563f9" offset="0"/><stop stop-color="#b073ec" offset="1"/></linearGradient><linearGradient id="a" x1="393.5" x2="1020.1" y1="961.9" y2="367.9" gradientUnits="userSpaceOnUse"><stop stop-color="#9764f8" offset="0"/><stop stop-color="#b576e9" offset="1"/></linearGradient><linearGradient id="b" x1="979.5" x2="947.9" y1="1540.5" y2="1984.5" gradientUnits="userSpaceOnUse"><stop stop-color="#965ff9" offset="0"/><stop stop-color="#c076e9" offset="1"/></linearGradient></defs><g fill-rule="evenodd"><path d="m299 1211v-787.6l725.7-423.4 725.3 420-175.34 325.7-549.86-340.7-373.5 221v381.2z" fill="url(#a)"/><path d="m1749.4 842.6v787.6l-725.4 423.3-724.5-419.5 219.38-278.79 505.12 293.39 373.2-221.2v-381z" fill="url(#b)"/><path d="M299.5 1634L1750 786.4V420L299 1267.4z" fill="url(#c)"/></g></svg>
|
After Width: | Height: | Size: 1001 B |
|
@ -0,0 +1,14 @@
|
||||||
|
# Shared state for window.RateLimit: per-function interval timers and
# "call again when the interval ends" flags.
# NOTE(review): functions are used as plain object keys, so they are coerced to
# their source string — two distinct functions with identical source would
# share one slot. Kept as-is to preserve behavior.
limits = {}
call_after_interval = {}

# Run `fn` immediately, then at most once per `interval` ms while calls keep coming.
window.RateLimit = (interval, fn) ->
	if limits[fn]
		# Called again inside the interval window: remember to fire once it elapses
		call_after_interval[fn] = true
	else
		call_after_interval[fn] = false
		fn()  # First call is not delayed
		limits[fn] = setTimeout (->
			fn() if call_after_interval[fn]
			delete limits[fn]
			delete call_after_interval[fn]
		), interval
|
|
@ -0,0 +1 @@
|
||||||
|
# Translation stub: returns the given string unchanged (no-op gettext)
window._ = (s) -> s
|
|
@ -0,0 +1,95 @@
|
||||||
|
# Minimal ZeroFrame-style websocket client.
# Requests and responses are matched through incrementing message ids;
# messages sent while disconnected are queued and flushed when the socket opens.
# Optional user hooks: @onOpen, @onError, @onClose.
class ZeroWebsocket
	constructor: (url) ->
		@url = url
		@next_message_id = 1
		@waiting_cb = {}  # message id -> response callback
		@init()

	# Hook point for subclasses; returns the instance unchanged
	init: ->
		@

	# Open the websocket and wire up the event handlers
	connect: ->
		@ws = new WebSocket(@url)
		@ws.onmessage = @onMessage
		@ws.onopen = @onOpenWebsocket
		@ws.onerror = @onErrorWebsocket
		@ws.onclose = @onCloseWebsocket
		@connected = false
		@message_queue = []

	# Dispatch one incoming frame:
	# "response" -> stored callback, "ping" -> "pong", anything else -> route()
	onMessage: (e) =>
		message = JSON.parse(e.data)
		cmd = message.cmd
		if cmd == "response"
			if @waiting_cb[message.to]?
				@waiting_cb[message.to](message.result)
			else
				@log "Websocket callback not found:", message
		else if cmd == "ping"
			@response message.id, "pong"
		else
			@route cmd, message

	# Fallback handler for unrecognized commands; subclasses override this
	route: (cmd, message) =>
		@log "Unknown command", message

	# Reply to a server request identified by message id `to`
	response: (to, result) =>
		@send {"cmd": "response", "to": to, "result": result}

	# Send a command with optional parameters; `cb` receives the response result
	cmd: (cmd, params={}, cb=null) ->
		@send {"cmd": cmd, "params": params}, cb

	# Assign an id if needed, deliver (or queue) the message, register the callback
	send: (message, cb=null) ->
		unless message.id?
			message.id = @next_message_id
			@next_message_id += 1
		if @connected
			@ws.send(JSON.stringify(message))
		else
			@log "Not connected, adding message to queue"
			@message_queue.push(message)
		@waiting_cb[message.id] = cb if cb

	log: (args...) =>
		console.log "[ZeroWebsocket]", args...

	onOpenWebsocket: (e) =>
		@log "Open"
		@connected = true
		# Flush everything queued while the socket was still connecting
		@ws.send(JSON.stringify(queued)) for queued in @message_queue
		@message_queue = []
		if @onOpen? then @onOpen(e)

	onErrorWebsocket: (e) =>
		@log "Error", e
		if @onError? then @onError(e)

	# Closed by server or network; retry after `reconnect` ms unless the server
	# closed cleanly-coded (1000) but unclean, which signals a server error.
	onCloseWebsocket: (e, reconnect=10000) =>
		@log "Closed", e
		@connected = false
		if e and e.code == 1000 and e.wasClean == false
			@log "Server error, please reload the page", e.wasClean
		else  # Connection error
			setTimeout (=>
				@log "Reconnecting..."
				@connect()
			), reconnect
		if @onClose? then @onClose(e)


window.ZeroWebsocket = ZeroWebsocket
|
|
@ -0,0 +1,34 @@
|
||||||
|
// jQuery css hook so "scale" can be read and animated like a normal css
// property. Reads/writes the element's transform matrix via
// `transform_property`, which is feature-detected below (kept as an implicit
// global, matching the original behavior).
jQuery.cssHooks['scale'] = {
	get: function(elem, computed) {
		// First number of the computed matrix() is the horizontal scale factor
		var match = window.getComputedStyle(elem)[transform_property].match("[0-9\.]+");
		return match ? parseFloat(match[0]) : 1.0;
	},
	set: function(elem, val) {
		var transforms = window.getComputedStyle(elem)[transform_property].match(/[0-9\.]+/g);
		if (!transforms) {
			// No transform yet: start from a plain scale()
			elem.style[transform_property] = "scale(" + val + ")";
		} else {
			// Overwrite the scaleX (a) and scaleY (d) entries of the matrix
			transforms[0] = val;
			transforms[3] = val;
			elem.style[transform_property] = 'matrix(' + transforms.join(", ") + ')';
		}
	}
};

// Let jQuery animations step the custom "scale" property
jQuery.fx.step.scale = function(fx) {
	jQuery.cssHooks['scale'].set(fx.elem, fx.now);
};

// Feature-detect the transform property name (standard vs. old WebKit prefix)
if (window.getComputedStyle(document.body).transform) {
	transform_property = "transform";
} else {
	transform_property = "webkitTransform";
}
|
|
@ -0,0 +1,36 @@
|
||||||
|
# Remove then re-add a class on the next tick so its CSS animation restarts
jQuery.fn.readdClass = (class_name) ->
	elem = @
	elem.removeClass class_name
	setTimeout (-> elem.addClass class_name), 1
	return @

# Detach the element from the DOM after `time` ms
jQuery.fn.removeLater = (time = 500) ->
	elem = @
	setTimeout (-> elem.remove()), time
	return @

# After `time` ms, set display:none if the element has fully faded out
jQuery.fn.hideLater = (time = 500) ->
	elem = @
	setTimeout (->
		# NOTE(review): .css("opacity") returns a string; the loose == 0 is intentional
		elem.css("display", "none") if elem.css("opacity") == 0
	), time
	return @

# Add a class after a short delay (lets the initial styles paint first)
jQuery.fn.addClassLater = (class_name, time = 5) ->
	elem = @
	setTimeout (-> elem.addClass(class_name)), time
	return @

# Set a css property after `time` ms
jQuery.fn.cssLater = (name, val, time = 500) ->
	elem = @
	setTimeout (-> elem.css name, val), time
	return @
|
|
@ -0,0 +1,168 @@
|
||||||
|
/*
 * jQuery Easing v1.4.1 - http://gsgd.co.uk/sandbox/jquery/easing/
 * Open source under the BSD License.
 * Copyright © 2008 George McGinley Smith
 * All rights reserved.
 * https://raw.github.com/gdsmith/jquery-easing/master/LICENSE
 */

// UMD wrapper: AMD, CommonJS, or browser global jQuery.
(function (factory) {
	if (typeof define === "function" && define.amd) {
		define(['jquery'], function ($) {
			return factory($);
		});
	} else if (typeof module === "object" && typeof module.exports === "object") {
		// Fix: assign to module.exports — a bare `exports = ...` only rebinds the
		// local `exports` variable and exports nothing to the requiring module.
		module.exports = factory(require('jquery'));
	} else {
		factory(jQuery);
	}
})(function($){

// Preserve the original jQuery "swing" easing as "jswing"
if (typeof $.easing !== 'undefined') {
	$.easing['jswing'] = $.easing['swing'];
}

var pow = Math.pow,
	sqrt = Math.sqrt,
	sin = Math.sin,
	cos = Math.cos,
	PI = Math.PI,
	c1 = 1.70158,           // back easing overshoot amount
	c2 = c1 * 1.525,
	c3 = c1 + 1,
	c4 = ( 2 * PI ) / 3,    // elastic period (in/out)
	c5 = ( 2 * PI ) / 4.5;  // elastic period (in-out)

// x is the fraction of animation progress, in the range 0..1
function bounceOut(x) {
	var n1 = 7.5625,
		d1 = 2.75;
	if ( x < 1/d1 ) {
		return n1*x*x;
	} else if ( x < 2/d1 ) {
		return n1*(x-=(1.5/d1))*x + .75;
	} else if ( x < 2.5/d1 ) {
		return n1*(x-=(2.25/d1))*x + .9375;
	} else {
		return n1*(x-=(2.625/d1))*x + .984375;
	}
}

$.extend( $.easing,
{
	def: 'easeOutQuad',
	swing: function (x) {
		return $.easing[$.easing.def](x);
	},
	easeInQuad: function (x) {
		return x * x;
	},
	easeOutQuad: function (x) {
		return 1 - ( 1 - x ) * ( 1 - x );
	},
	easeInOutQuad: function (x) {
		return x < 0.5 ?
			2 * x * x :
			1 - pow( -2 * x + 2, 2 ) / 2;
	},
	easeInCubic: function (x) {
		return x * x * x;
	},
	easeOutCubic: function (x) {
		return 1 - pow( 1 - x, 3 );
	},
	easeInOutCubic: function (x) {
		return x < 0.5 ?
			4 * x * x * x :
			1 - pow( -2 * x + 2, 3 ) / 2;
	},
	easeInQuart: function (x) {
		return x * x * x * x;
	},
	easeOutQuart: function (x) {
		return 1 - pow( 1 - x, 4 );
	},
	easeInOutQuart: function (x) {
		return x < 0.5 ?
			8 * x * x * x * x :
			1 - pow( -2 * x + 2, 4 ) / 2;
	},
	easeInQuint: function (x) {
		return x * x * x * x * x;
	},
	easeOutQuint: function (x) {
		return 1 - pow( 1 - x, 5 );
	},
	easeInOutQuint: function (x) {
		return x < 0.5 ?
			16 * x * x * x * x * x :
			1 - pow( -2 * x + 2, 5 ) / 2;
	},
	easeInSine: function (x) {
		return 1 - cos( x * PI/2 );
	},
	easeOutSine: function (x) {
		return sin( x * PI/2 );
	},
	easeInOutSine: function (x) {
		return -( cos( PI * x ) - 1 ) / 2;
	},
	easeInExpo: function (x) {
		return x === 0 ? 0 : pow( 2, 10 * x - 10 );
	},
	easeOutExpo: function (x) {
		return x === 1 ? 1 : 1 - pow( 2, -10 * x );
	},
	easeInOutExpo: function (x) {
		return x === 0 ? 0 : x === 1 ? 1 : x < 0.5 ?
			pow( 2, 20 * x - 10 ) / 2 :
			( 2 - pow( 2, -20 * x + 10 ) ) / 2;
	},
	easeInCirc: function (x) {
		return 1 - sqrt( 1 - pow( x, 2 ) );
	},
	easeOutCirc: function (x) {
		return sqrt( 1 - pow( x - 1, 2 ) );
	},
	easeInOutCirc: function (x) {
		return x < 0.5 ?
			( 1 - sqrt( 1 - pow( 2 * x, 2 ) ) ) / 2 :
			( sqrt( 1 - pow( -2 * x + 2, 2 ) ) + 1 ) / 2;
	},
	easeInElastic: function (x) {
		return x === 0 ? 0 : x === 1 ? 1 :
			-pow( 2, 10 * x - 10 ) * sin( ( x * 10 - 10.75 ) * c4 );
	},
	easeOutElastic: function (x) {
		return x === 0 ? 0 : x === 1 ? 1 :
			pow( 2, -10 * x ) * sin( ( x * 10 - 0.75 ) * c4 ) + 1;
	},
	easeInOutElastic: function (x) {
		return x === 0 ? 0 : x === 1 ? 1 : x < 0.5 ?
			-( pow( 2, 20 * x - 10 ) * sin( ( 20 * x - 11.125 ) * c5 )) / 2 :
			pow( 2, -20 * x + 10 ) * sin( ( 20 * x - 11.125 ) * c5 ) / 2 + 1;
	},
	easeInBack: function (x) {
		return c3 * x * x * x - c1 * x * x;
	},
	easeOutBack: function (x) {
		return 1 + c3 * pow( x - 1, 3 ) + c1 * pow( x - 1, 2 );
	},
	easeInOutBack: function (x) {
		return x < 0.5 ?
			( pow( 2 * x, 2 ) * ( ( c2 + 1 ) * 2 * x - c2 ) ) / 2 :
			( pow( 2 * x - 2, 2 ) *( ( c2 + 1 ) * ( x * 2 - 2 ) + c2 ) + 2 ) / 2;
	},
	easeInBounce: function (x) {
		return 1 - bounceOut( 1 - x );
	},
	easeOutBounce: bounceOut,
	easeInOutBounce: function (x) {
		return x < 0.5 ?
			( 1 - bounceOut( 1 - 2 * x ) ) / 2 :
			( 1 + bounceOut( 2 * x - 1 ) ) / 2;
	}
});

});
|
|
@ -0,0 +1,40 @@
|
||||||
|
<!-- Confirmation page shown before a new site is added to the client.
     {braces} placeholders are substituted server-side; add_nonce guards the
     POST against cross-site request forgery. -->
<html>
<head>
	<title>Add new site</title>
</head>
<body>

<style>
 .content { line-height: 24px; font-family: monospace; font-size: 14px; color: #636363; text-transform: uppercase; top: 38%; position: relative; text-align: center; }
 .content h1, .content h2 { font-weight: normal; letter-spacing: 1px; }
 .content h2 { font-size: 15px; margin-bottom: 50px }
 .content #details {
 	text-align: left; display: inline-block; width: 350px; background-color: white; padding: 17px 27px; border-radius: 0px;
 	box-shadow: 0px 2px 7px -1px #d8d8d8; text-transform: none; margin: 15px; transform: scale(0) rotateX(90deg); transition: all 0.6s cubic-bezier(0.785, 0.135, 0.15, 0.86);
 }
 .content #details #added { font-size: 12px; text-align: right; color: #a9a9a9; }

 .button {
 	padding: 8px 20px; background-color: #FFF85F; border-bottom: 2px solid #CDBD1E; border-radius: 2px;
 	text-decoration: none; transition: all 0.5s; background-position: left center; color: black;
 	border-left: 0px; border-top: 0px; border-right: 0px; font-family: monospace; font-size: 14px;
 }
 .button:hover { background-color: #FFF400; border-bottom: 2px solid #4D4D4C; transition: none; }
 .button:active { position: relative; top: 1px; }
 .button:focus { outline: none; }
</style>

<div class="content">
	<h1>Add new site</h1>
	<h2>Please confirm before adding a new site to the client</h2>
	<form action="/add/" method="POST">
		<input type="hidden" name="add_nonce" value="{add_nonce}">
		<input type="hidden" name="address" value="{address}">
		<input type="hidden" name="url" value="{url}">
		<input type="submit" class="button button-submit" id="button" value="Load site"/>
	</form>
</div>

</body>
</html>
|
|
@ -0,0 +1,103 @@
|
||||||
|
<!DOCTYPE html>

<html>
<head>
 <!-- NOTE: {placeholder} tokens are substituted server-side by the wrapper renderer; do not rename them. -->
 <title>{title} - ZeroNet</title>
 <meta charset="utf-8" />
 <meta http-equiv="content-type" content="text/html; charset=utf-8" />
 <link rel="stylesheet" href="/uimedia/all.css?rev={rev}" />
 <link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png">
 {meta_tags}
</head>
<body style="{body_style}" class="{themeclass}">

<!-- Visible only without JavaScript: the inline script below hides it immediately when JS runs -->
<div class="unsupported" id="unsupported">
 <script nonce="{script_nonce}">document.getElementById('unsupported').style.display = "none"</script>
 <h3>ZeroNet requires JavaScript support.</h3>If you use NoScript/Tor browser: Click on toolbar icon with the notification and choose "Temp. TRUSTED" for 127.0.0.1.
</div>

<script nonce="{script_nonce}">
// If we are inside iframe escape from it
if (window.self !== window.top) {
 window.open(window.location.toString().replace(/([&?])wrapper=False/, "$1").replace(/&$/, "").replace(/[&?]wrapper_nonce=[A-Za-z0-9]+/, ""), "_top");
 window.stop();
 document.execCommand("Stop", false);
}

// We are opened as a parent-window
else if (window.opener && window.opener.location.toString()) {
 document.write("Opened as child-window, stopping...");
 window.stop();
 document.execCommand("Stop", false);
}
</script>

<div class="progressbar">
 <div class="peg"></div>
</div>

<!-- Fixed button -->
<div class='fixbutton'>
 <div class='fixbutton-text'><img width=30 src='/uimedia/img/logo-white.svg'/></div>
 <div class='fixbutton-burger'>≡</div>
 <a class='fixbutton-bg' href="{homepage}/"></a>
</div>

<!-- Notifications -->
<div class='notifications'>
 <div class='notification template'><span class='notification-icon'>!</span> <span class='body'>Test notification</span><a class="close" href="#Close">×</a><div style="clear: both"></div></div>
</div>

<!-- Infopanel -->
<div class='infopanel-container'>
 <div class='infopanel'>
  <span class='closed-num'>8</span>
  <div class="message">
   <span class='line line-1'>8 modified files</span><br><span class='line line-2'>content.json, data.json</span>
  </div>
  <a href="#Publish" class="button button-submit">Sign & Publish</a>
  <a href="#Close" class="close">×</a>
 </div>
</div>

<!-- Loadingscreen -->
<div class='loadingscreen'>
 <a href="/Config" class="loading-config">Config</a>
 <div class='loading-text console'>
 </div>
 <div class="flipper-container">
  <div class="flipper"> <div class="front"></div><div class="back"></div> </div>
 </div>
</div>


<!-- Site Iframe -->
<iframe src='about:blank' id='inner-iframe' sandbox="allow-forms allow-scripts allow-top-navigation allow-popups allow-modals allow-presentation allow-pointer-lock allow-popups-to-escape-sandbox {sandbox_permissions}" allowfullscreen="true" webkitallowfullscreen="true" mozallowfullscreen="true" oallowfullscreen="true" msallowfullscreen="true"></iframe>

<!-- Site info -->
<script id="script_init" nonce="{script_nonce}">
// Globals consumed by /uimedia/all.js (the wrapper client code)
iframe_src = "{file_url}{query_string}"
console.log("Changing url from " + document.getElementById("inner-iframe").src + " to " + iframe_src)
document.getElementById("inner-iframe").src = document.getElementById("inner-iframe").src // Workaround for Firefox back button bug
document.getElementById("inner-iframe").src = iframe_src
address = "{address}"
wrapper_nonce = "{wrapper_nonce}"
wrapper_key = "{wrapper_key}"
ajax_key = "{ajax_key}"
postmessage_nonce_security = {postmessage_nonce_security}
file_inner_path = "{file_inner_path}"
permissions = {permissions}
show_loadingscreen = {show_loadingscreen}
server_url = '{server_url}'
script_nonce = '{script_nonce}'

if (typeof WebSocket === "undefined") {
 tag = document.createElement('div');
 tag.innerHTML += "<div class='unsupported'>Your browser does not support <a href='https://caniuse.com/#search=websocket'>WebSocket connections</a>.<br>Please use the latest <a href='http://outdatedbrowser.com'>Chrome or Firefox</a> browser.</div>";
 document.body.appendChild(tag)
}
</script>
<script type="text/javascript" src="/uimedia/all.js?rev={rev}&lang={lang}" nonce="{script_nonce}"></script>
<script nonce="{script_nonce}">setTimeout(window.wrapper.onWrapperLoad, 1)</script>
</body>
</html>
|
|
@ -0,0 +1,176 @@
|
||||||
|
import logging
|
||||||
|
import json
|
||||||
|
import time
|
||||||
|
import binascii
|
||||||
|
|
||||||
|
import gevent
|
||||||
|
|
||||||
|
import util
|
||||||
|
from Crypt import CryptBitcoin
|
||||||
|
from Plugin import PluginManager
|
||||||
|
from Config import config
|
||||||
|
from util import helper
|
||||||
|
from Debug import Debug
|
||||||
|
|
||||||
|
|
||||||
|
@PluginManager.acceptPlugins
class User(object):
    """A ZeroNet user: a BIP32 master seed/address plus per-site auth keys and certs.

    Persisted under its master_address in data/users.json.
    """

    def __init__(self, master_address=None, master_seed=None, data=None):
        # Fix: `data` used to be a mutable default argument (`data={}`), which is
        # shared across every call. Use None as the sentinel instead.
        if data is None:
            data = {}
        if master_seed:
            self.master_seed = master_seed
            self.master_address = CryptBitcoin.privatekeyToAddress(self.master_seed)
        elif master_address:
            self.master_address = master_address
            self.master_seed = data.get("master_seed")
        else:
            # Brand new user: generate a fresh seed
            self.master_seed = CryptBitcoin.newSeed()
            self.master_address = CryptBitcoin.privatekeyToAddress(self.master_seed)
        self.sites = data.get("sites", {})  # site_address -> {"auth_address", "auth_privatekey", ...}
        self.certs = data.get("certs", {})  # cert domain -> cert node dict
        self.settings = data.get("settings", {})
        self.delayed_save_thread = None  # Pending gevent greenlet scheduled by saveDelayed()

        self.log = logging.getLogger("User:%s" % self.master_address)

    # Save to data/users.json
    @util.Noparallel(queue=True, ignore_class=True)
    def save(self):
        """Merge this user's state into data/users.json and write it atomically."""
        s = time.time()
        # Fix: the original `json.load(open(...))` never closed the file handle
        with open("%s/users.json" % config.data_dir) as f:
            users = json.load(f)
        if self.master_address not in users:
            users[self.master_address] = {}  # Create if not exist
        user_data = users[self.master_address]
        if self.master_seed:
            user_data["master_seed"] = self.master_seed
        user_data["sites"] = self.sites
        user_data["certs"] = self.certs
        user_data["settings"] = self.settings
        helper.atomicWrite("%s/users.json" % config.data_dir, helper.jsonDumps(users).encode("utf8"))
        self.log.debug("Saved in %.3fs" % (time.time() - s))
        self.delayed_save_thread = None

    def saveDelayed(self):
        """Schedule a save() in 5s unless one is already pending (coalesces bursts)."""
        if not self.delayed_save_thread:
            self.delayed_save_thread = gevent.spawn_later(5, self.save)

    def getAddressAuthIndex(self, address):
        """Map a site address string to a deterministic integer BIP32 child index."""
        return int(binascii.hexlify(address.encode()), 16)

    @util.Noparallel()
    def generateAuthAddress(self, address):
        """Derive and store the auth keypair for `address` from the master seed."""
        s = time.time()
        address_id = self.getAddressAuthIndex(address)  # Convert site address to int
        auth_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, address_id)
        self.sites[address] = {
            "auth_address": CryptBitcoin.privatekeyToAddress(auth_privatekey),
            "auth_privatekey": auth_privatekey
        }
        self.saveDelayed()
        self.log.debug("Added new site: %s in %.3fs" % (address, time.time() - s))
        return self.sites[address]

    # Get user site data
    # Return: {"auth_address": "xxx", "auth_privatekey": "xxx"}
    def getSiteData(self, address, create=True):
        if address not in self.sites:  # Generate new BIP32 child key based on site address
            if not create:
                return {"auth_address": None, "auth_privatekey": None}  # Dont create user yet
            self.generateAuthAddress(address)
        return self.sites[address]

    def deleteSiteData(self, address):
        """Forget the auth data for `address`; no-op if unknown."""
        if address in self.sites:
            del self.sites[address]
            self.saveDelayed()
            self.log.debug("Deleted site: %s" % address)

    def setSiteSettings(self, address, settings):
        """Attach per-site settings dict and persist (creates site data if missing)."""
        site_data = self.getSiteData(address)
        site_data["settings"] = settings
        self.saveDelayed()
        return site_data

    # Get data for a new, unique site
    # Return: [site_address, bip32_index, {"auth_address": "xxx", "auth_privatekey": "xxx", "privatekey": "xxx"}]
    def getNewSiteData(self):
        import random
        bip32_index = random.randrange(2 ** 256) % 100000000
        site_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, bip32_index)
        site_address = CryptBitcoin.privatekeyToAddress(site_privatekey)
        if site_address in self.sites:
            raise Exception("Random error: site exist!")
        # Save to sites
        self.getSiteData(site_address)
        self.sites[site_address]["privatekey"] = site_privatekey
        self.save()
        return site_address, bip32_index, self.sites[site_address]

    # Get BIP32 address from site address
    # Return: BIP32 auth address
    def getAuthAddress(self, address, create=True):
        # A cert bound to the site overrides the per-site derived key
        cert = self.getCert(address)
        if cert:
            return cert["auth_address"]
        else:
            return self.getSiteData(address, create)["auth_address"]

    def getAuthPrivatekey(self, address, create=True):
        """Return the auth privatekey for `address` (cert key wins over site key)."""
        cert = self.getCert(address)
        if cert:
            return cert["auth_privatekey"]
        else:
            return self.getSiteData(address, create)["auth_privatekey"]

    # Add cert for the user
    # Return: True = added, None = already present and identical, False = conflicts
    def addCert(self, auth_address, domain, auth_type, auth_user_name, cert_sign):
        # Find privatekey by auth address
        auth_privatekey = [site["auth_privatekey"] for site in list(self.sites.values()) if site["auth_address"] == auth_address][0]
        cert_node = {
            "auth_address": auth_address,
            "auth_privatekey": auth_privatekey,
            "auth_type": auth_type,
            "auth_user_name": auth_user_name,
            "cert_sign": cert_sign
        }
        # Check if we have already cert for that domain and its not the same
        if self.certs.get(domain) and self.certs[domain] != cert_node:
            return False
        elif self.certs.get(domain) == cert_node:  # Same, not updated
            return None
        else:  # Not exist yet, add
            self.certs[domain] = cert_node
            self.save()
            return True

    # Remove cert from user
    def deleteCert(self, domain):
        # Raises KeyError if the domain has no cert (original behavior, kept)
        del self.certs[domain]

    # Set active cert for a site
    def setCert(self, address, domain):
        """Bind (or, with a falsy domain, unbind) a cert domain to a site."""
        site_data = self.getSiteData(address)
        if domain:
            site_data["cert"] = domain
        else:
            if "cert" in site_data:
                del site_data["cert"]
        self.saveDelayed()
        return site_data

    # Get cert for the site address
    # Return: { "auth_address":.., "auth_privatekey":.., "auth_type": "web", "auth_user_name": "nofish", "cert_sign":.. } or None
    def getCert(self, address):
        site_data = self.getSiteData(address, create=False)
        if not site_data or "cert" not in site_data:
            return None  # Site dont have cert
        return self.certs.get(site_data["cert"])

    # Get cert user name for the site address
    # Return: user@certprovider.bit or None
    def getCertUserId(self, address):
        site_data = self.getSiteData(address, create=False)
        if not site_data or "cert" not in site_data:
            return None  # Site dont have cert
        cert = self.certs.get(site_data["cert"])
        if cert:
            return cert["auth_user_name"] + "@" + site_data["cert"]
|
|
@ -0,0 +1,77 @@
|
||||||
|
# Included modules
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
|
||||||
|
# ZeroNet Modules
|
||||||
|
from .User import User
|
||||||
|
from Plugin import PluginManager
|
||||||
|
from Config import config
|
||||||
|
|
||||||
|
|
||||||
|
@PluginManager.acceptPlugins
class UserManager(object):
    """Loads User objects from data/users.json and keeps them in sync with the file."""

    def __init__(self):
        self.users = {}  # master_address -> User
        self.log = logging.getLogger("UserManager")

    # Load all user from data/users.json
    def load(self):
        """Sync self.users with data/users.json: add new entries, drop deleted ones."""
        if not self.users:
            self.users = {}

        user_found = []
        added = 0
        s = time.time()
        # Load new users
        # Fix: json_path is now bound before the try block, so the except clause
        # can never hit a NameError when building its message.
        json_path = "%s/users.json" % config.data_dir
        try:
            # Fix: close the file handle (the original json.load(open(...)) leaked it)
            with open(json_path) as f:
                data = json.load(f)
        except Exception as err:
            raise Exception("Unable to load %s: %s" % (json_path, err))

        # Fix: loop variable renamed (was `data`, shadowing the dict being iterated)
        for master_address, user_data in list(data.items()):
            if master_address not in self.users:
                user = User(master_address, data=user_data)
                self.users[master_address] = user
                added += 1
            user_found.append(master_address)

        # Remove deleted adresses
        for master_address in list(self.users.keys()):
            if master_address not in user_found:
                del (self.users[master_address])
                self.log.debug("Removed user: %s" % master_address)

        if added:
            self.log.debug("Added %s users in %.3fs" % (added, time.time() - s))

    # Create new user
    # Return: User
    def create(self, master_address=None, master_seed=None):
        self.list()  # Load the users if it's not loaded yet
        user = User(master_address, master_seed)
        self.log.debug("Created user: %s" % user.master_address)
        if user.master_address:  # If successfully created
            self.users[user.master_address] = user
            user.saveDelayed()
        return user

    # List all users from data/users.json
    # Return: {"usermasteraddr": User}
    def list(self):
        if self.users == {}:  # Not loaded yet
            self.load()
        return self.users

    # Get user based on master_address
    # Return: User or None
    def get(self, master_address=None):
        # NOTE: the master_address parameter is accepted but ignored — single user
        # mode always returns the first user (original behavior, kept).
        users = self.list()
        if users:
            return list(users.values())[0]  # Single user mode, always return the first
        else:
            return None
|
||||||
|
|
||||||
|
|
||||||
|
user_manager = UserManager()  # Singleton instance shared by every importer of this module
|
|
@ -0,0 +1 @@
|
||||||
|
from .User import User
|
|
@ -0,0 +1,239 @@
|
||||||
|
import time
|
||||||
|
|
||||||
|
import gevent
|
||||||
|
import gevent.lock
|
||||||
|
|
||||||
|
from Debug import Debug
|
||||||
|
from Config import config
|
||||||
|
from Content.ContentManager import VerifyError
|
||||||
|
|
||||||
|
|
||||||
|
class WorkerDownloadError(Exception):
    """Raised when fetching a file from the peer fails or returns an empty buffer."""
    pass


class WorkerIOError(Exception):
    """Raised when writing a downloaded file into site storage fails."""
    pass


class WorkerStop(Exception):
    """Raised to terminate the worker's download loop (broken peer or worker disabled)."""
    pass
|
||||||
|
|
||||||
|
|
||||||
|
class Worker(object):
    """Downloads queued file tasks from a single peer inside its own greenlet."""

    def __init__(self, manager, peer):
        self.manager = manager  # Owning WorkerManager
        self.peer = peer  # Peer this worker downloads from
        self.task = None  # Task currently being processed
        self.key = None  # Key under which manager.workers stores this worker
        self.running = False  # downloader() loop condition
        self.thread = None  # Greenlet running downloader()
        self.num_downloaded = 0  # Files successfully downloaded and verified
        self.num_failed = 0  # Download/verify failures

    def __str__(self):
        return "Worker %s %s" % (self.manager.site.address_short, self.key)

    def __repr__(self):
        return "<%s>" % self.__str__()

    def waitForTask(self, task, timeout):  # Wait for other workers to finish the task
        # Polls in 0.1s steps up to `timeout` seconds; breaks early when the task
        # is done/abandoned or the worker holding it looks idle. Always returns True.
        for sleep_i in range(1, timeout * 10):
            time.sleep(0.1)
            if task["done"] or task["workers_num"] == 0:
                if config.verbose:
                    self.manager.log.debug("%s: %s, picked task free after %ss sleep. (done: %s)" % (
                        self.key, task["inner_path"], 0.1 * sleep_i, task["done"]
                    ))
                break

            if sleep_i % 10 == 0:  # Roughly once per second, check the other worker's connection
                workers = self.manager.findWorkers(task)
                if not workers or not workers[0].peer.connection:
                    break
                worker_idle = time.time() - workers[0].peer.connection.last_recv_time
                if worker_idle > 1:  # Nothing received for >1s: treat the holder as stalled
                    if config.verbose:
                        self.manager.log.debug("%s: %s, worker %s seems idle, picked up task after %ss sleep. (done: %s)" % (
                            self.key, task["inner_path"], workers[0].key, 0.1 * sleep_i, task["done"]
                        ))
                    break
        return True

    def pickTask(self):  # Find and select a new task for the worker
        # Returns the chosen task dict, or False when the manager has nothing left.
        task = self.manager.getTask(self.peer)
        if not task:  # No more task
            time.sleep(0.1)  # Wait a bit for new tasks
            task = self.manager.getTask(self.peer)
            if not task:  # Still no task, stop it
                stats = "downloaded files: %s, failed: %s" % (self.num_downloaded, self.num_failed)
                self.manager.log.debug("%s: No task found, stopping (%s)" % (self.key, stats))
                return False

        if not task["time_started"]:
            task["time_started"] = time.time()  # Task started now

        if task["workers_num"] > 0:  # Wait a bit if someone already working on it
            if task["peers"]:  # It's an update
                timeout = 3
            else:
                timeout = 1

            if task["size"] > 100 * 1024 * 1024:  # Large file (>100MB): allow more time
                timeout = timeout * 2

            if config.verbose:
                self.manager.log.debug("%s: Someone already working on %s (pri: %s), sleeping %s sec..." % (
                    self.key, task["inner_path"], task["priority"], timeout
                ))

            self.waitForTask(task, timeout)
        return task

    def downloadTask(self, task):
        """Fetch the task's file from the peer; raise WorkerDownloadError on any failure."""
        try:
            buff = self.peer.getFile(task["site"].address, task["inner_path"], task["size"])
        except Exception as err:
            self.manager.log.debug("%s: getFile error: %s" % (self.key, err))
            raise WorkerDownloadError(str(err))

        if not buff:
            raise WorkerDownloadError("No response")

        return buff

    def getTaskLock(self, task):
        # Lazily create the per-task write lock shared by all workers on this task
        if task["lock"] is None:
            task["lock"] = gevent.lock.Semaphore()
        return task["lock"]

    def writeTask(self, task, buff):
        """Persist the downloaded buffer into site storage; raise WorkerIOError on failure."""
        buff.seek(0)
        try:
            task["site"].storage.write(task["inner_path"], buff)
        except Exception as err:
            if type(err) == Debug.Notify:
                # Deliberate abort (e.g. worker killed) — log quietly
                self.manager.log.debug("%s: Write aborted: %s (%s: %s)" % (self.key, task["inner_path"], type(err), err))
            else:
                self.manager.log.error("%s: Error writing: %s (%s: %s)" % (self.key, task["inner_path"], type(err), err))
            raise WorkerIOError(str(err))

    def onTaskVerifyFail(self, task, error_message):
        # Record the failure against both the task and the peer; a peer with too
        # many hash failures or connection errors stops this worker entirely.
        self.num_failed += 1
        if self.manager.started_task_num < 50 or config.verbose:
            self.manager.log.debug(
                "%s: Verify failed: %s, error: %s, failed peers: %s, workers: %s" %
                (self.key, task["inner_path"], error_message, len(task["failed"]), task["workers_num"])
            )
        task["failed"].append(self.peer)
        self.peer.hash_failed += 1
        if self.peer.hash_failed >= max(len(self.manager.tasks), 3) or self.peer.connection_error > 10:
            # Broken peer: More fails than tasks number but atleast 3
            raise WorkerStop(
                "Too many errors (hash failed: %s, connection error: %s)" %
                (self.peer.hash_failed, self.peer.connection_error)
            )

    def handleTask(self, task):
        """Download, verify and write one task.

        Returns True on success, False on verify/download failure,
        None when the task was finished elsewhere mid-download.
        """
        download_err = write_err = False

        write_lock = None
        try:
            buff = self.downloadTask(task)

            if task["done"] is True:  # Task done, try to find new one
                return None

            if self.running is False:  # Worker no longer needed or got killed
                self.manager.log.debug("%s: No longer needed, returning: %s" % (self.key, task["inner_path"]))
                raise WorkerStop("Running got disabled")

            # Serialize verify+write across workers sharing this task
            write_lock = self.getTaskLock(task)
            write_lock.acquire()
            # NOTE(review): verifyFile returning None apparently means "file unchanged"
            # (is_same) while a truthy return means verified-new — confirm against
            # ContentManager.verifyFile before relying on this.
            if task["site"].content_manager.verifyFile(task["inner_path"], buff) is None:
                is_same = True
            else:
                is_same = False
            is_valid = True
        except (WorkerDownloadError, VerifyError) as err:
            download_err = err
            is_valid = False
            is_same = False

        if is_valid and not is_same:
            if self.manager.started_task_num < 50 or task["priority"] > 10 or config.verbose:
                self.manager.log.debug("%s: Verify correct: %s" % (self.key, task["inner_path"]))
            try:
                self.writeTask(task, buff)
            except WorkerIOError as err:
                write_err = err

        if not task["done"]:
            if write_err:
                self.manager.failTask(task, reason="Write error")
                self.num_failed += 1
                self.manager.log.error("%s: Error writing %s: %s" % (self.key, task["inner_path"], write_err))
            elif is_valid:
                self.manager.doneTask(task)
                self.num_downloaded += 1

        if write_lock is not None and write_lock.locked():
            write_lock.release()

        if not is_valid:
            self.onTaskVerifyFail(task, download_err)
            time.sleep(1)  # Back off before retrying with this peer
            return False

        return True

    def downloader(self):
        """Main greenlet loop: keep picking and handling tasks until stopped."""
        self.peer.hash_failed = 0  # Reset hash error counter
        while self.running:
            # Try to pickup free file download task
            task = self.pickTask()

            if not task:
                break

            if task["done"]:
                continue

            self.task = task

            self.manager.addTaskWorker(task, self)

            try:
                success = self.handleTask(task)
            except WorkerStop as err:
                self.manager.log.debug("%s: Worker stopped: %s" % (self.key, err))
                self.manager.removeTaskWorker(task, self)
                break

            self.manager.removeTaskWorker(task, self)

        self.peer.onWorkerDone()
        self.running = False
        self.manager.removeWorker(self)

    # Start the worker
    def start(self):
        self.running = True
        self.thread = gevent.spawn(self.downloader)

    # Skip current task
    def skip(self, reason="Unknown"):
        # Kill the greenlet with a Notify-type exception, then restart the loop
        self.manager.log.debug("%s: Force skipping (reason: %s)" % (self.key, reason))
        if self.thread:
            self.thread.kill(exception=Debug.createNotifyType("Worker skipping (reason: %s)" % reason))
        self.start()

    # Force stop the worker
    def stop(self, reason="Unknown"):
        self.manager.log.debug("%s: Force stopping (reason: %s)" % (self.key, reason))
        self.running = False
        if self.thread:
            self.thread.kill(exception=Debug.createNotifyType("Worker stopped (reason: %s)" % reason))
            del self.thread
        self.manager.removeWorker(self)
|
|
@ -0,0 +1,600 @@
|
||||||
|
import time
|
||||||
|
import logging
|
||||||
|
import collections
|
||||||
|
|
||||||
|
import gevent
|
||||||
|
|
||||||
|
from .Worker import Worker
|
||||||
|
from .WorkerTaskManager import WorkerTaskManager
|
||||||
|
from Config import config
|
||||||
|
from util import helper
|
||||||
|
from Plugin import PluginManager
|
||||||
|
from Debug.DebugLock import DebugLock
|
||||||
|
import util
|
||||||
|
|
||||||
|
|
||||||
|
@PluginManager.acceptPlugins
|
||||||
|
class WorkerManager(object):
|
||||||
|
|
||||||
|
def __init__(self, site):
    """Create the task queue and worker pool for one site and start the task checker."""
    self.site = site
    self.workers = {}  # Key: ip:port, Value: Worker.Worker
    self.tasks = WorkerTaskManager()
    self.next_task_id = 1
    self.lock_add_task = DebugLock(name="Lock AddTask:%s" % self.site.address_short)
    # Task dict shape:
    # {"id": 1, "evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "optional_hash_id": None,
    # "time_started": None, "time_added": time.time(), "peers": peers, "priority": 0, "failed": peer_ids, "lock": None or gevent.lock.RLock}
    self.started_task_num = 0  # Last added task num
    self.asked_peers = []  # Peers already asked for optional files
    self.running = True
    self.time_task_added = 0
    self.log = logging.getLogger("WorkerManager:%s" % self.site.address_short)
    # Background greenlet that times out stale tasks and spawns more workers
    self.site.greenlet_manager.spawn(self.checkTasks)
|
||||||
|
|
||||||
|
def __str__(self):
    # Short human-readable identity used in logs
    return "WorkerManager %s" % self.site.address_short
|
||||||
|
|
||||||
|
def __repr__(self):
    # Same as __str__, wrapped in angle brackets for debugger/container output
    return "<%s>" % self.__str__()
|
||||||
|
|
||||||
|
# Check expired tasks
|
||||||
|
# Check expired tasks
def checkTasks(self):
    """Background loop (every 15s): retire finished workers, time out stale tasks,
    hunt for more peers/workers for slow tasks, until self.running goes False."""
    while self.running:
        tasks = task = worker = workers = None  # Cleanup local variables
        announced = False  # Only announce(mode="more") once per sweep
        time.sleep(15)  # Check every 15 sec

        # Clean up workers
        for worker in list(self.workers.values()):
            if worker.task and worker.task["done"]:
                worker.skip(reason="Task done")  # Stop workers with task done

        if not self.tasks:
            continue

        tasks = self.tasks[:]  # Copy it so removing elements wont cause any problem
        num_tasks_started = len([task for task in tasks if task["time_started"]])

        self.log.debug(
            "Tasks: %s, started: %s, bad files: %s, total started: %s" %
            (len(tasks), num_tasks_started, len(self.site.bad_files), self.started_task_num)
        )

        for task in tasks:
            if task["time_started"] and time.time() >= task["time_started"] + 60:
                self.log.debug("Timeout, Skipping: %s" % task)  # Task taking too long time, skip it
                # Skip to next file workers
                workers = self.findWorkers(task)
                if workers:
                    for worker in workers:
                        worker.skip(reason="Task timeout")
                else:
                    self.failTask(task, reason="No workers")

            elif time.time() >= task["time_added"] + 60 and not self.workers:  # No workers left
                self.failTask(task, reason="Timeout")

            elif (task["time_started"] and time.time() >= task["time_started"] + 15) or not self.workers:
                # Find more workers: Task started more than 15 sec ago or no workers
                workers = self.findWorkers(task)
                self.log.debug(
                    "Slow task: %s, (workers: %s, optional_hash_id: %s, peers: %s, failed: %s, asked: %s)" %
                    (
                        task["inner_path"], len(workers), task["optional_hash_id"],
                        len(task["peers"] or []), len(task["failed"]), len(self.asked_peers)
                    )
                )
                if not announced and task["site"].isAddedRecently():
                    task["site"].announce(mode="more")  # Find more peers
                    announced = True
                if task["optional_hash_id"]:
                    if self.workers:
                        if not task["time_started"]:
                            ask_limit = 20
                        else:
                            # The longer the task runs, the more peers we may ask
                            ask_limit = max(10, time.time() - task["time_started"])
                        if len(self.asked_peers) < ask_limit and len(task["peers"] or []) <= len(task["failed"]) * 2:
                            # Re-search for high priority
                            self.startFindOptional(find_more=True)
                    if task["peers"]:
                        # NOTE(review): `peer not in workers` compares a peer against Worker
                        # objects, so it is likely always True — confirm intent.
                        peers_try = [peer for peer in task["peers"] if peer not in task["failed"] and peer not in workers]
                        if peers_try:
                            self.startWorkers(peers_try, force_num=5, reason="Task checker (optional, has peers)")
                        else:
                            self.startFindOptional(find_more=True)
                    else:
                        self.startFindOptional(find_more=True)
                else:
                    if task["peers"]:  # Release the peer lock
                        self.log.debug("Task peer lock release: %s" % task["inner_path"])
                        task["peers"] = []
                    self.startWorkers(reason="Task checker")

        if len(self.tasks) > len(self.workers) * 2 and len(self.workers) < self.getMaxWorkers():
            self.startWorkers(reason="Task checker (need more workers)")

    self.log.debug("checkTasks stopped running")
|
||||||
|
|
||||||
|
# Returns the next free or less worked task
|
||||||
|
def getTask(self, peer):
|
||||||
|
for task in self.tasks: # Find a task
|
||||||
|
if task["peers"] and peer not in task["peers"]:
|
||||||
|
continue # This peer not allowed to pick this task
|
||||||
|
if peer in task["failed"]:
|
||||||
|
continue # Peer already tried to solve this, but failed
|
||||||
|
if task["optional_hash_id"] and task["peers"] is None:
|
||||||
|
continue # No peers found yet for the optional task
|
||||||
|
if task["done"]:
|
||||||
|
continue
|
||||||
|
return task
|
||||||
|
|
||||||
|
def removeSolvedFileTasks(self, mark_as_good=True):
    """Drop queued tasks whose file is no longer in site.bad_files,
    resolving each task's event with `mark_as_good`."""
    for task in self.tasks[:]:  # Iterate a copy: tasks are removed inside the loop
        if task["inner_path"] not in self.site.bad_files:
            self.log.debug("No longer in bad_files, marking as %s: %s" % (mark_as_good, task["inner_path"]))
            task["done"] = True
            task["evt"].set(mark_as_good)  # Wake up whoever is waiting on the task
            self.tasks.remove(task)
    if not self.tasks:
        self.started_task_num = 0  # Queue empty: reset the progress counter
    self.site.updateWebsocket()
|
||||||
|
|
||||||
|
# New peers added to site
|
||||||
|
# New peers added to site
def onPeers(self):
    # Freshly discovered peers may be able to serve queued tasks
    self.startWorkers(reason="More peers found")
|
||||||
|
|
||||||
|
def getMaxWorkers(self):
    """Allowed number of parallel workers; tripled when the task queue is long."""
    queue_is_long = len(self.tasks) > 50
    return config.workers * (3 if queue_is_long else 1)
|
||||||
|
|
||||||
|
# Add new worker
|
||||||
|
# Add new worker
def addWorker(self, peer, multiplexing=False, force=False):
    """Start a Worker for `peer` if capacity allows and a task exists.

    Returns the new Worker, or False when over the limit, the peer already
    has a worker (unless multiplexing), or there is nothing to download.
    """
    if len(self.workers) > self.getMaxWorkers() and not force:
        return False  # Pool already at capacity

    worker_key = peer.key
    if multiplexing:  # Add even if we already have worker for this peer
        worker_key = "%s/%s" % (worker_key, len(self.workers))

    if worker_key in self.workers:
        return False  # Already have a worker under this key

    task = self.getTask(peer)
    if not task:
        return False  # Nothing this peer could download

    worker = Worker(self, peer)
    worker.key = worker_key
    self.workers[worker_key] = worker
    worker.start()
    return worker
|
||||||
|
|
||||||
|
def taskAddPeer(self, task, peer):
    """Register *peer* as a possible source for *task*.

    Returns False when the peer already failed this task; otherwise True
    (the peer is appended to task["peers"] unless already listed).
    """
    if task["peers"] is None:
        task["peers"] = []
    if peer in task["failed"]:
        return False
    known_peers = task["peers"]
    if peer not in known_peers:
        known_peers.append(peer)
    return True
|
||||||
|
|
||||||
|
# Start workers to process tasks
def startWorkers(self, peers=None, force_num=0, reason="Unknown"):
    # Spawn workers for the queued tasks, up to min(getMaxWorkers(), number
    # of site peers).
    # peers: optional iterable of peers to start workers for; bypasses the
    #        "already maxed" early exit. When empty, connected peers
    #        (topped up with recent peers) are used instead.
    # force_num: that many of the first added workers ignore the limit.
    # reason: free-text label, used only in the debug log.
    # Returns False when there is nothing to do, otherwise None.
    if not self.tasks:
        return False  # No task for workers
    max_workers = min(self.getMaxWorkers(), len(self.site.peers))
    if len(self.workers) >= max_workers and not peers:
        return False  # Workers number already maxed and no starting peers defined
    self.log.debug(
        "Starting workers (%s), tasks: %s, peers: %s, workers: %s" %
        (reason, len(self.tasks), len(peers or []), len(self.workers))
    )
    if not peers:
        peers = self.site.getConnectedPeers()
        if len(peers) < max_workers:
            peers += self.site.getRecentPeers(max_workers * 2)
    if type(peers) is set:
        peers = list(peers)  # Needs to be sortable

    # Sort by ping: idle, connected peers with a known ping first;
    # everything else gets a 9999 sentinel and sorts last
    peers.sort(key=lambda peer: peer.connection.last_ping_delay if peer.connection and peer.connection.last_ping_delay and len(peer.connection.waiting_requests) == 0 and peer.connection.connected else 9999)

    for peer in peers:  # One worker for every peer
        # NOTE(review): this check looks like dead code — we are iterating
        # `peers` itself, so `peer not in peers` can't be True; verify
        # before removing.
        if peers and peer not in peers:
            continue  # If peers defined and peer not valid

        if force_num:
            worker = self.addWorker(peer, force=True)
            force_num -= 1
        else:
            worker = self.addWorker(peer)

        if worker:
            self.log.debug("Added worker: %s (rep: %s), workers: %s/%s" % (peer.key, peer.reputation, len(self.workers), max_workers))
|
||||||
|
|
||||||
|
# Find peers for optional hash in local hash tables and add to task peers
def findOptionalTasks(self, optional_tasks, reset_task=False):
    """Match already-known peer hashfields against *optional_tasks*.

    Every peer whose local hashfield contains a task's optional hash id is
    attached to that task via taskAddPeer(). With reset_task=True a task's
    failed-peer list is cleared as soon as a holder is found.
    Returns {optional_hash_id: [peer, ...]} for the matched tasks.
    """
    found = collections.defaultdict(list)  # { found_hash: [peer1, peer2...], ...}

    for candidate in list(self.site.peers.values()):
        if not candidate.has_hashfield:
            continue

        known_hashes = set(candidate.hashfield)  # Set membership is much faster than a list scan
        for task in optional_tasks:
            hash_id = task["optional_hash_id"]
            if hash_id not in known_hashes:
                continue
            if reset_task and len(task["failed"]) > 0:
                task["failed"] = []
            if candidate in task["failed"]:
                continue
            if self.taskAddPeer(task, candidate):
                found[hash_id].append(candidate)

    return found
|
||||||
|
|
||||||
|
# Find peers for optional hash ids in local hash tables
def findOptionalHashIds(self, optional_hash_ids, limit=0):
    """Search the already-downloaded peer hashfields for *optional_hash_ids*.

    optional_hash_ids: mutable collection (set or list) of hash ids to look
        for. When *limit* is given, ids that reached the limit are removed
        from it, so the caller can see which ids still need peers.
    limit: stop collecting peers for a hash id after this many (0 = no limit).
    Returns {hash_id: [peer, ...]}.
    """
    found = collections.defaultdict(list)  # { found_hash_id: [peer1, peer2...], ...}

    for peer in list(self.site.peers.values()):
        if not peer.has_hashfield:
            continue

        hashfield_set = set(peer.hashfield)  # Finding in set is much faster
        # Iterate over a snapshot: satisfied ids are removed from
        # optional_hash_ids below, and mutating a set while iterating it
        # raises RuntimeError (a list would silently skip entries).
        for optional_hash_id in list(optional_hash_ids):
            if optional_hash_id in hashfield_set:
                found[optional_hash_id].append(peer)
                if limit and len(found[optional_hash_id]) >= limit:
                    optional_hash_ids.remove(optional_hash_id)

    return found
|
||||||
|
|
||||||
|
# Add peers to tasks from found result
def addOptionalPeers(self, found_ips):
    """Turn findHashIds results into task peers.

    found_ips: {hash_id: [(ip, port), ...]} as reported by remote peers.
    Each address is registered on the site and attached to the first queued
    task with that optional hash id. Returns {hash_id: [peer, ...]} for the
    peers that were actually added.
    """
    found = collections.defaultdict(list)
    for hash_id, peer_ips in found_ips.items():
        matching = [t for t in self.tasks if t["optional_hash_id"] == hash_id]
        if not matching:
            continue  # No queued task wants this hash anymore
        task = matching[0]  # Found task, lets take the first
        for peer_ip in peer_ips:
            peer = self.site.addPeer(peer_ip[0], peer_ip[1], return_peer=True, source="optional")
            if not peer:
                continue
            if self.taskAddPeer(task, peer):
                found[hash_id].append(peer)
                if peer.hashfield.appendHashId(hash_id):  # Peer has this file
                    peer.time_hashfield = None  # Peer hashfield probably outdated
    return found
|
||||||
|
|
||||||
|
# Start find peers for optional files
@util.Noparallel(blocking=False, ignore_args=True)
def startFindOptional(self, reset_task=False, find_more=False, high_priority=False):
    # Multi-stage peer discovery for queued optional-file tasks:
    #   1) match peers' locally cached hashfields,
    #   2) refresh hashfields of connected peers and match again,
    #   3) ask connected peers via findHashIds,
    #   4) finally ask connectable (not yet connected) peers.
    # Each stage that finds holders immediately starts workers for them.
    # Runs as a single non-parallel greenlet (util.Noparallel); reschedules
    # itself when new tasks arrived while it was running.
    # Wait for more file requests
    if len(self.tasks) < 20 or high_priority:
        time.sleep(0.01)
    elif len(self.tasks) > 90:
        time.sleep(5)
    else:
        time.sleep(0.5)

    optional_tasks = [task for task in self.tasks if task["optional_hash_id"]]
    if not optional_tasks:
        return False
    optional_hash_ids = set([task["optional_hash_id"] for task in optional_tasks])
    # Snapshot of the queue timestamp: compared later to detect new tasks
    time_tasks = self.time_task_added

    self.log.debug(
        "Finding peers for optional files: %s (reset_task: %s, find_more: %s)" %
        (optional_hash_ids, reset_task, find_more)
    )
    # Stage 1: peers whose hashfield we already know
    found = self.findOptionalTasks(optional_tasks, reset_task=reset_task)

    if found:
        found_peers = set([peer for peers in list(found.values()) for peer in peers])
        self.startWorkers(found_peers, force_num=3, reason="Optional found in local peers")

    if len(found) < len(optional_hash_ids) or find_more or (high_priority and any(len(peers) < 10 for peers in found.values())):
        self.log.debug("No local result for optional files: %s" % (optional_hash_ids - set(found)))

        # Stage 2: Query hashfield from connected peers
        threads = []
        peers = self.site.getConnectedPeers()
        if not peers:
            peers = self.site.getConnectablePeers()
        for peer in peers:
            threads.append(self.site.greenlet_manager.spawn(peer.updateHashfield, force=find_more))
        gevent.joinall(threads, timeout=5)

        if time_tasks != self.time_task_added:  # New task added since start
            optional_tasks = [task for task in self.tasks if task["optional_hash_id"]]
            optional_hash_ids = set([task["optional_hash_id"] for task in optional_tasks])

        found = self.findOptionalTasks(optional_tasks)
        self.log.debug("Found optional files after query hashtable connected peers: %s/%s" % (
            len(found), len(optional_hash_ids)
        ))

        if found:
            found_peers = set([peer for hash_id_peers in list(found.values()) for peer in hash_id_peers])
            self.startWorkers(found_peers, force_num=3, reason="Optional found in connected peers")

    if len(found) < len(optional_hash_ids) or find_more:
        self.log.debug(
            "No connected hashtable result for optional files: %s (asked: %s)" %
            (optional_hash_ids - set(found), len(self.asked_peers))
        )
        if not self.tasks:
            self.log.debug("No tasks, stopping finding optional peers")
            return

        # Stage 3: Try to query connected peers (at most 10 not asked yet)
        threads = []
        peers = [peer for peer in self.site.getConnectedPeers() if peer.key not in self.asked_peers][0:10]
        if not peers:
            peers = self.site.getConnectablePeers(ignore=self.asked_peers)

        for peer in peers:
            threads.append(self.site.greenlet_manager.spawn(peer.findHashIds, list(optional_hash_ids)))
            self.asked_peers.append(peer.key)

        # Poll the spawned queries once a second for up to 5s
        for i in range(5):
            time.sleep(1)

            thread_values = [thread.value for thread in threads if thread.value]
            if not thread_values:
                continue

            found_ips = helper.mergeDicts(thread_values)
            found = self.addOptionalPeers(found_ips)
            self.log.debug("Found optional files after findhash connected peers: %s/%s (asked: %s)" % (
                len(found), len(optional_hash_ids), len(threads)
            ))

            if found:
                found_peers = set([peer for hash_id_peers in list(found.values()) for peer in hash_id_peers])
                self.startWorkers(found_peers, force_num=3, reason="Optional found by findhash connected peers")

            if len(thread_values) == len(threads):
                # Got result from all started thread
                break

    if len(found) < len(optional_hash_ids):
        self.log.debug(
            "No findHash result, try random peers: %s (asked: %s)" %
            (optional_hash_ids - set(found), len(self.asked_peers))
        )
        # Stage 4: Try to query random (connectable, not yet asked) peers

        if time_tasks != self.time_task_added:  # New task added since start
            optional_tasks = [task for task in self.tasks if task["optional_hash_id"]]
            optional_hash_ids = set([task["optional_hash_id"] for task in optional_tasks])

        threads = []
        peers = self.site.getConnectablePeers(ignore=self.asked_peers)

        for peer in peers:
            threads.append(self.site.greenlet_manager.spawn(peer.findHashIds, list(optional_hash_ids)))
            self.asked_peers.append(peer.key)

        gevent.joinall(threads, timeout=15)

        found_ips = helper.mergeDicts([thread.value for thread in threads if thread.value])
        found = self.addOptionalPeers(found_ips)
        self.log.debug("Found optional files after findhash random peers: %s/%s" % (len(found), len(optional_hash_ids)))

        if found:
            found_peers = set([peer for hash_id_peers in list(found.values()) for peer in hash_id_peers])
            self.startWorkers(found_peers, force_num=3, reason="Option found using findhash random peers")

        if len(found) < len(optional_hash_ids):
            self.log.debug("No findhash result for optional files: %s" % (optional_hash_ids - set(found)))

    if time_tasks != self.time_task_added:  # New task added since start
        self.log.debug("New task since start, restarting...")
        self.site.greenlet_manager.spawnLater(0.1, self.startFindOptional)
    else:
        self.log.debug("startFindOptional ended")
|
||||||
|
|
||||||
|
# Stop all worker
def stopWorkers(self):
    """Stop every running worker and fail every queued task.

    Returns the number of workers that were told to stop.
    """
    stopped = 0
    for worker in list(self.workers.values()):
        worker.stop(reason="Stopping all workers")
        stopped += 1
    for task in self.tasks[:]:  # Work on a copy: failTask mutates self.tasks
        self.failTask(task, reason="Stopping all workers")
    return stopped
|
||||||
|
|
||||||
|
# Find workers by task
def findWorkers(self, task):
    """Return every worker currently assigned to *task*."""
    return [worker for worker in list(self.workers.values()) if worker.task == task]
|
||||||
|
|
||||||
|
# Ends and remove a worker
def removeWorker(self, worker):
    """Deregister *worker* and, when the pool got small, kick off recovery.

    With few workers left this either restarts optional-file peer discovery
    (if an optional task is queued) or respawns workers for normal tasks.
    """
    worker.running = False
    key = worker.key
    if key in self.workers:
        del self.workers[key]
        self.log.debug("Removed worker, workers: %s/%s" % (len(self.workers), self.getMaxWorkers()))
    pool_small = len(self.workers) <= self.getMaxWorkers() / 3
    if not (pool_small and len(self.asked_peers) < 10):
        return
    optional_task = next((task for task in self.tasks if task["optional_hash_id"]), None)
    if optional_task:
        # Ask for more peers once the pool runs completely dry
        self.startFindOptional(find_more=(len(self.workers) == 0))
    elif self.tasks and not self.workers and worker.task and len(worker.task["failed"]) < 20:
        self.log.debug("Starting new workers... (tasks: %s)" % len(self.tasks))
        self.startWorkers(reason="Removed worker")
|
||||||
|
|
||||||
|
# Tasks sorted by this
def getPriorityBoost(self, inner_path):
    """Return the scheduling priority boost for a file path.

    The order of checks matters: exact names first, then the "-default"
    (site-clone template) penalty, then extension-based boosts.
    """
    if inner_path == "content.json":
        return 9999  # Content.json always priority
    if inner_path == "index.html":
        return 9998  # index.html also important
    if "-default" in inner_path:
        return -4  # Default files are cloning not important
    if inner_path.endswith("all.css"):
        return 14  # boost css files priority
    if inner_path.endswith("all.js"):
        return 13  # boost js files priority
    if inner_path.endswith("dbschema.json"):
        return 12  # boost database specification
    if inner_path.endswith("content.json"):
        return 1  # boost included content.json files priority a bit
    if inner_path.endswith(".json"):
        # Short json paths are site data; long ones are per-user files
        return 11 if len(inner_path) < 50 else 2
    return 0
|
||||||
|
|
||||||
|
def addTaskUpdate(self, task, peer, priority=0):
    """Refresh an existing download task with a newer request.

    Raises the task priority when the new request outranks it. When *peer*
    announced the update, it is either added to the task's peer list or
    rehabilitated from the failed list, and a worker is started for it.
    """
    if priority > task["priority"]:
        self.tasks.updateItem(task, "priority", priority)
    if not peer:
        return
    if task["peers"]:  # This peer also has new version, add it to task possible peers
        task["peers"].append(peer)
        self.log.debug("Added peer %s to %s" % (peer.key, task["inner_path"]))
        self.startWorkers([peer], reason="Added new task (update received by peer)")
    elif peer in task["failed"]:
        task["failed"].remove(peer)  # New update arrived, remove the peer from failed peers
        self.log.debug("Removed peer %s from failed %s" % (peer.key, task["inner_path"]))
        self.startWorkers([peer], reason="Added new task (peer failed before)")
|
||||||
|
|
||||||
|
def addTaskCreate(self, inner_path, peer, priority=0, file_info=None):
    """Create a new download task for *inner_path* and start workers for it.

    peer: when given, the file is only downloaded from this peer.
    priority: base priority, boosted by getPriorityBoost().
    file_info: optional pre-fetched content-manager file info (avoids a lookup).
    Returns the task dict (possibly an existing one found under the lock).
    """
    evt = gevent.event.AsyncResult()
    if peer:
        peers = [peer]  # Only download from this peer
    else:
        peers = None
    if not file_info:
        file_info = self.site.content_manager.getFileInfo(inner_path)
    if file_info and file_info["optional"]:
        optional_hash_id = helper.toHashId(file_info["sha512"])
    else:
        optional_hash_id = None
    if file_info:
        size = file_info.get("size", 0)
    else:
        size = 0

    # Fixed: the previous acquire()/release() pair leaked the lock on the
    # duplicate-task early return below, deadlocking the next caller.
    # The with-statement guarantees release on every exit path.
    with self.lock_add_task:
        # Check again if we have task for this file
        task = self.tasks.findTask(inner_path)
        if task:
            self.addTaskUpdate(task, peer, priority)
            return task

        priority += self.getPriorityBoost(inner_path)

        if self.started_task_num == 0:  # Boost priority for first requested file
            priority += 1

        task = {
            "id": self.next_task_id, "evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False,
            "optional_hash_id": optional_hash_id, "time_added": time.time(), "time_started": None, "lock": None,
            "time_action": None, "peers": peers, "priority": priority, "failed": [], "size": size
        }

        self.tasks.append(task)

    self.next_task_id += 1
    self.started_task_num += 1
    if config.verbose:
        self.log.debug(
            "New task: %s, peer lock: %s, priority: %s, optional_hash_id: %s, tasks started: %s" %
            (task["inner_path"], peers, priority, optional_hash_id, self.started_task_num)
        )

    self.time_task_added = time.time()

    if optional_hash_id:
        if self.asked_peers:
            del self.asked_peers[:]  # Reset asked peers
        self.startFindOptional(high_priority=priority > 0)

        if peers:
            self.startWorkers(peers, reason="Added new optional task")

    else:
        self.startWorkers(peers, reason="Added new task")
    return task
|
||||||
|
|
||||||
|
# Create new task and return asyncresult
def addTask(self, inner_path, peer=None, priority=0, file_info=None):
    """Queue a download for *inner_path*, reusing an existing task if present.

    Returns the (new or updated) task dict; waiters block on task["evt"].
    """
    self.site.onFileStart(inner_path)  # First task, trigger site download started
    existing = self.tasks.findTask(inner_path)
    if existing is None:  # No task for that file yet
        return self.addTaskCreate(inner_path, peer, priority, file_info)
    # Already has task for that file
    self.addTaskUpdate(existing, peer, priority)
    return existing
|
||||||
|
|
||||||
|
def addTaskWorker(self, task, worker):
    """Record that *worker* started on *task* (increments workers_num).

    updateItem keeps the priority-sorted queue consistent; if the task
    already left the queue (ValueError) the counter is bumped in place.
    """
    new_num = task["workers_num"] + 1
    try:
        self.tasks.updateItem(task, "workers_num", new_num)
    except ValueError:
        task["workers_num"] = new_num
|
||||||
|
|
||||||
|
def removeTaskWorker(self, task, worker):
    """Record that *worker* stopped working on *task*.

    Decrements workers_num (in place when the task already left the queue)
    and fails the task once every current worker has already failed it.
    """
    new_num = task["workers_num"] - 1
    try:
        self.tasks.updateItem(task, "workers_num", new_num)
    except ValueError:
        task["workers_num"] = new_num
    failed_count = len(task["failed"])
    if failed_count >= len(self.workers):
        fail_reason = "Too many fails: %s (workers: %s)" % (failed_count, len(self.workers))
        self.failTask(task, reason=fail_reason)
|
||||||
|
|
||||||
|
# Wait for other tasks
def checkComplete(self):
    """After a short grace period, fire onComplete() if the queue is empty."""
    time.sleep(0.1)  # Give freshly-queued follow-up tasks a chance to appear first
    if self.tasks:
        return
    self.log.debug("Check complete: No tasks")
    self.onComplete()
|
||||||
|
|
||||||
|
def onComplete(self):
    """All downloads finished: reset the counters and notify the site."""
    self.started_task_num = 0
    self.asked_peers.clear()  # Allow asking these peers again next round
    self.site.onComplete()  # No more task trigger site complete
|
||||||
|
|
||||||
|
# Mark a task done
def doneTask(self, task):
    """Finish *task* successfully: dequeue it, wake waiters, notify the site."""
    task["done"] = True
    self.tasks.remove(task)  # Remove from queue
    if task["optional_hash_id"]:
        # Optional files also get registered in our own hashfield
        elapsed = time.time() - task["time_started"]
        self.log.debug(
            "Downloaded optional file in %.3fs, adding to hashfield: %s" %
            (elapsed, task["inner_path"])
        )
        self.site.content_manager.optionalDownloaded(task["inner_path"], task["optional_hash_id"], task["size"])
    self.site.onFileDone(task["inner_path"])
    task["evt"].set(True)
    if not self.tasks:
        # Queue drained: check asynchronously whether the site finished
        self.site.greenlet_manager.spawn(self.checkComplete)
|
||||||
|
|
||||||
|
# Mark a task failed
def failTask(self, task, reason="Unknown"):
    """Fail *task*: dequeue it, wake waiters with False, notify the site.

    Returns False without side effects when the task already left the queue.
    """
    try:
        self.tasks.remove(task)  # Remove from queue
    except ValueError:
        return False  # Already removed (finished or failed elsewhere)

    self.log.debug("Task %s failed (Reason: %s)" % (task["inner_path"], reason))
    task["done"] = True
    self.site.onFileFail(task["inner_path"])
    task["evt"].set(False)
    if not self.tasks:
        # Queue drained: check asynchronously whether the site finished
        self.site.greenlet_manager.spawn(self.checkComplete)
|
|
@ -0,0 +1,122 @@
|
||||||
|
import bisect
|
||||||
|
from collections.abc import MutableSequence
|
||||||
|
|
||||||
|
|
||||||
|
class CustomSortedList(MutableSequence):
    """A sequence kept permanently sorted via bisect.

    Entries are stored as (priority, id, value) triples while the sequence
    API exposes only the values. Subclasses override getPriority()/getId()
    to control the ordering.
    """

    def __init__(self):
        super().__init__()
        self.items = []  # (priority, added index, actual value)
        self.logging = False  # Print a note when index() falls back to a linear scan

    def __repr__(self):
        return "<{0} {1}>".format(self.__class__.__name__, self.items)

    def __len__(self):
        return len(self.items)

    def __getitem__(self, index):
        # Integer lookup returns one value; slices return a list of values
        if type(index) is int:
            return self.items[index][2]
        return [entry[2] for entry in self.items[index]]

    def __delitem__(self, index):
        del self.items[index]

    def __setitem__(self, index, value):
        # NOTE: places the item at *index* as-is, without re-sorting
        self.items[index] = self.valueToItem(value)

    def __str__(self):
        return str(self[:])

    def insert(self, index, value):
        # The position is dictated by the sort order, so *index* is ignored
        self.append(value)

    def append(self, value):
        bisect.insort(self.items, self.valueToItem(value))

    def updateItem(self, value, update_key=None, update_value=None):
        # Remove first, mutate, then re-insert so the sort invariant holds
        self.remove(value)
        if update_key is not None:
            value[update_key] = update_value
        self.append(value)

    def sort(self, *args, **kwargs):
        raise Exception("Sorted list can't be sorted")

    def valueToItem(self, value):
        # Wrap a value into its sortable (priority, id, value) triple
        return (self.getPriority(value), self.getId(value), value)

    def getPriority(self, value):
        return value

    def getId(self, value):
        return id(value)

    def indexSlow(self, value):
        # O(n) fallback scan; returns None when the value is absent
        for pos, entry in enumerate(self.items):
            if entry[2] == value:
                return pos
        return None

    def index(self, value):
        item = (self.getPriority(value), self.getId(value), value)
        bisect_pos = bisect.bisect(self.items, item) - 1
        if bisect_pos >= 0 and self.items[bisect_pos][2] == value:
            return bisect_pos

        # Item probably changed since added, switch to slow iteration
        pos = self.indexSlow(value)

        if self.logging:
            print("Slow index for %s in pos %s bisect: %s" % (item[2], pos, bisect_pos))

        if pos is None:
            raise ValueError("%r not in list" % value)
        return pos

    def __contains__(self, value):
        try:
            self.index(value)
        except ValueError:
            return False
        return True
|
||||||
|
|
||||||
|
|
||||||
|
class WorkerTaskManager(CustomSortedList):
    """Priority queue of download tasks plus an inner_path -> task index.

    Tasks sort by effective priority (higher first); every worker already
    assigned to a task lowers its urgency by 10 so idle tasks get picked.
    """

    def __init__(self):
        super().__init__()
        self.inner_paths = {}  # inner_path -> task, for O(1) findTask()

    def getPriority(self, value):
        # Negated so higher-priority tasks sort first; busy tasks sink
        return 0 - (value["priority"] - value["workers_num"] * 10)

    def getId(self, value):
        return value["id"]

    def __contains__(self, value):
        # Membership is by file path, served from the cache
        return value["inner_path"] in self.inner_paths

    def __delitem__(self, index):
        # Remove from inner path cache
        del self.inner_paths[self.items[index][2]["inner_path"]]
        super().__delitem__(index)

    def append(self, task):
        if task["inner_path"] in self.inner_paths:
            raise ValueError("File %s already has a task" % task["inner_path"])
        super().append(task)
        # Create inner path cache for faster lookup by filename
        self.inner_paths[task["inner_path"]] = task

    def remove(self, task):
        if task not in self:
            raise ValueError("%r not in list" % task)
        super().remove(task)

    # Fast task search by inner_path
    def findTask(self, inner_path):
        return self.inner_paths.get(inner_path, None)
|
|
@ -0,0 +1,2 @@
|
||||||
|
from .Worker import Worker
|
||||||
|
from .WorkerManager import WorkerManager
|
|
@ -0,0 +1,9 @@
|
||||||
|
# This file is for adding rules for selectively enabling debug logging
|
||||||
|
# when working on the code.
|
||||||
|
# Add your rules here and skip this file when committing changes.
|
||||||
|
|
||||||
|
#import re
|
||||||
|
#from util import SelectiveLogger
|
||||||
|
#
|
||||||
|
#SelectiveLogger.addLogLevelRaisingRule("ConnServer")
|
||||||
|
#SelectiveLogger.addLogLevelRaisingRule(re.compile(r'^Site:'))
|
|
@ -0,0 +1,603 @@
|
||||||
|
# Included modules
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import stat
|
||||||
|
import time
|
||||||
|
import logging
|
||||||
|
import loglevel_overrides
|
||||||
|
|
||||||
|
# Problems hit before logging is configured; re-reported via logging later
startup_errors = []


def startupError(msg):
    # Remember the error for later logging and print it immediately,
    # since the logging subsystem is not set up yet at this point.
    startup_errors.append(msg)
    print("Startup error: %s" % msg)
|
||||||
|
|
||||||
|
# Third party modules
import gevent

# On gevent 1.x force the libev loop if possible
if gevent.version_info.major <= 1:  # Workaround for random crash when libuv used with threads
    try:
        if "libev" not in str(gevent.config.loop):
            gevent.config.loop = "libev-cext"
    except Exception as err:
        # Non-fatal: continue with whatever loop gevent picked
        startupError("Unable to switch gevent loop to libev: %s" % err)

import gevent.monkey
# Patch the stdlib for cooperative I/O; real threads and subprocesses
# are left untouched
gevent.monkey.patch_all(thread=False, subprocess=False)
|
||||||
|
|
||||||
|
update_after_shutdown = False  # If set True then update and restart zeronet after main loop ended
restart_after_shutdown = False  # If set True then restart zeronet after main loop ended

# Load config
from Config import config
config.parse(silent=True)  # Plugins need to access the configuration
if not config.arguments:  # Config parse failed, show the help screen and exit
    config.parse()

# Create the data directory on first run, readable only by the owner
if not os.path.isdir(config.data_dir):
    os.mkdir(config.data_dir)
    try:
        os.chmod(config.data_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
    except Exception as err:
        startupError("Can't change permission of %s: %s" % (config.data_dir, err))

# Seed the site/user registries with empty JSON objects when missing
if not os.path.isfile("%s/sites.json" % config.data_dir):
    open("%s/sites.json" % config.data_dir, "w").write("{}")
if not os.path.isfile("%s/users.json" % config.data_dir):
    open("%s/users.json" % config.data_dir, "w").write("{}")
|
||||||
|
|
||||||
|
if config.action == "main":
    from util import helper
    try:
        # Single-instance guard: hold an exclusive lock on lock.pid
        lock = helper.openLocked("%s/lock.pid" % config.data_dir, "w")
        lock.write("%s" % os.getpid())
    except BlockingIOError as err:
        # Another instance already holds the lock: optionally open its UI
        # in a browser, then exit instead of starting a second client
        startupError("Can't open lock file, your ZeroNet client is probably already running, exiting... (%s)" % err)
        if config.open_browser and config.open_browser != "False":
            # Fixed: the value was passed as a second print() argument
            # instead of being %-formatted into the message
            print("Opening browser: %s..." % config.open_browser)
            import webbrowser
            try:
                if config.open_browser == "default_browser":
                    browser = webbrowser.get()
                else:
                    browser = webbrowser.get(config.open_browser)
                browser.open("http://%s:%s/%s" % (
                    config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port, config.homepage
                ), new=2)
            except Exception as err:
                startupError("Error starting browser: %s" % err)
        sys.exit()
|
||||||
|
|
||||||
|
config.initLogging()

# Debug dependent configuration
from Debug import DebugHook

# Load plugins
from Plugin import PluginManager
PluginManager.plugin_manager.loadPlugins()
config.loadPlugins()
config.parse()  # Parse again to add plugin configuration options

# Log current config
logging.debug("Config: %s" % config)

# Modify stack size on special hardwares
if config.stack_size:
    import threading
    threading.stack_size(config.stack_size)

# Use pure-python implementation of msgpack to save CPU
if config.msgpack_purepython:
    os.environ["MSGPACK_PUREPYTHON"] = "True"

# Fix console encoding on Windows
if sys.platform.startswith("win"):
    import subprocess
    try:
        # chcp 65001 switches the console code page to UTF-8
        chcp_res = subprocess.check_output("chcp 65001", shell=True).decode(errors="ignore").strip()
        logging.debug("Changed console encoding to utf8: %s" % chcp_res)
    except Exception as err:
        logging.error("Error changing console encoding to utf8: %s" % err)

# Socket monkey patch: route traffic through a SOCKS proxy / Tor, or bind
# outgoing sockets to a specific address, depending on configuration
if config.proxy:
    from util import SocksProxy
    import urllib.request
    logging.info("Patching sockets to socks proxy: %s" % config.proxy)
    if config.fileserver_ip == "*":
        config.fileserver_ip = '127.0.0.1'  # Do not accept connections anywhere but localhost
    config.disable_udp = True  # UDP not supported currently with proxy
    SocksProxy.monkeyPatch(*config.proxy.split(":"))
elif config.tor == "always":
    from util import SocksProxy
    import urllib.request
    logging.info("Patching sockets to tor socks proxy: %s" % config.tor_proxy)
    if config.fileserver_ip == "*":
        config.fileserver_ip = '127.0.0.1'  # Do not accept connections anywhere but localhost
    SocksProxy.monkeyPatch(*config.tor_proxy.split(":"))
    config.disable_udp = True
elif config.bind:
    bind = config.bind
    if ":" not in config.bind:
        bind += ":0"  # Default to an OS-assigned source port
    from util import helper
    helper.socketBindMonkeyPatch(*bind.split(":"))
|
||||||
|
|
||||||
|
# -- Actions --
|
||||||
|
|
||||||
|
|
||||||
|
@PluginManager.acceptPlugins
|
||||||
|
class Actions(object):
|
||||||
|
def call(self, function_name, kwargs):
    """Dispatch the parsed CLI action to the method named *function_name*.

    kwargs: argument dict built from the parsed command line.
    Prints the action's return value when it returns something truthy.
    """
    logging.info("Version: %s r%s, Python %s, Gevent: %s" % (config.version, config.rev, sys.version, gevent.__version__))

    action_func = getattr(self, function_name, None)
    result = action_func(**kwargs)
    if result:
        print(result)
|
||||||
|
|
||||||
|
# Default action: Start serving UiServer and FileServer
def main(self):
    # Create the FileServer and UiServer, report any errors collected
    # before logging was configured, then serve until both servers stop.
    # The servers are published as module-level globals so plugins and
    # other actions can reach them.
    global ui_server, file_server
    from File import FileServer
    from Ui import UiServer
    logging.info("Creating FileServer....")
    file_server = FileServer()
    logging.info("Creating UiServer....")
    ui_server = UiServer()
    file_server.ui_server = ui_server

    # Re-log the startup errors recorded by startupError()
    for startup_error in startup_errors:
        logging.error("Startup error: %s" % startup_error)

    logging.info("Removing old SSL certs...")
    from Crypt import CryptConnection
    CryptConnection.manager.removeCerts()

    logging.info("Starting servers....")
    # Blocks until both server greenlets finish
    gevent.joinall([gevent.spawn(ui_server.start), gevent.spawn(file_server.start)])
    logging.info("All servers stopped")
|
||||||
|
|
||||||
|
# Site commands

def siteCreate(self, use_master_seed=True):
    """Create a new site: generate keys, directory skeleton and content.json.

    use_master_seed: derive the site key from the users.json master seed
    (recoverable later) instead of a standalone random private key.
    """
    # Fixed: log the method parameter (which governs this call) rather than
    # config.use_master_seed, which may hold a different value
    logging.info("Generating new privatekey (use_master_seed: %s)..." % use_master_seed)
    from Crypt import CryptBitcoin
    if use_master_seed:
        from User import UserManager
        user = UserManager.user_manager.get()
        if not user:
            user = UserManager.user_manager.create()
        address, address_index, site_data = user.getNewSiteData()
        privatekey = site_data["privatekey"]
        logging.info("Generated using master seed from users.json, site index: %s" % address_index)
    else:
        privatekey = CryptBitcoin.newPrivatekey()
        address = CryptBitcoin.privatekeyToAddress(privatekey)
    logging.info("----------------------------------------------------------------------")
    logging.info("Site private key: %s" % privatekey)
    logging.info(" !!! ^ Save it now, required to modify the site ^ !!!")
    logging.info("Site address: %s" % address)
    logging.info("----------------------------------------------------------------------")

    # Standalone keys are unrecoverable: insist the user saved it
    # (redundant "True and" dropped from the loop condition)
    while not config.batch and not use_master_seed:
        if input("? Have you secured your private key? (yes, no) > ").lower() == "yes":
            break
        else:
            logging.info("Please, secure it now, you going to need it to modify your site!")

    logging.info("Creating directory structure...")
    from Site.Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()

    os.mkdir("%s/%s" % (config.data_dir, address))
    open("%s/%s/index.html" % (config.data_dir, address), "w").write("Hello %s!" % address)

    logging.info("Creating content.json...")
    site = Site(address)
    extend = {"postmessage_nonce_security": True}
    if use_master_seed:
        extend["address_index"] = address_index

    site.content_manager.sign(privatekey=privatekey, extend=extend)
    site.settings["own"] = True
    site.saveSettings()

    logging.info("Site created!")
||||||
|
def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False, remove_missing_optional=False):
    """Sign a site's content file and optionally publish it to peers.

    Private key fallback order: argument -> users.json -> interactive prompt.
    """
    from Site.Site import Site
    from Site import SiteManager
    from Debug import Debug
    SiteManager.site_manager.load()
    logging.info("Signing site: %s..." % address)
    site = Site(address, allow_create=False)  # Site must already exist on disk

    if not privatekey:  # If no privatekey defined
        from User import UserManager
        user = UserManager.user_manager.get()
        if user:
            site_data = user.getSiteData(address)
            privatekey = site_data.get("privatekey")
        else:
            privatekey = None
        if not privatekey:
            # Not found in users.json, ask from console
            import getpass
            privatekey = getpass.getpass("Private key (input hidden):")
    try:
        succ = site.content_manager.sign(
            inner_path=inner_path, privatekey=privatekey,
            update_changed_files=True, remove_missing_optional=remove_missing_optional
        )
    except Exception as err:
        logging.error("Sign error: %s" % Debug.formatException(err))
        succ = False
    if succ and publish:
        self.sitePublish(address, inner_path=inner_path)
|
||||||
|
|
||||||
|
def siteVerify(self, address):
    """Verify a site's content.json signatures and the sha512sum of every stored file."""
    import time
    from Site.Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()

    s_total = time.time()  # Kept separate: `s` below is reused per content file
    logging.info("Verifing site: %s..." % address)
    site = Site(address)
    bad_files = []

    for content_inner_path in site.content_manager.contents:
        s = time.time()
        logging.info("Verifing %s signature..." % content_inner_path)
        err = None
        try:
            file_correct = site.content_manager.verifyFile(
                content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False
            )
        except Exception as verify_err:
            # Copy to a separate name: the variable bound by `except ... as`
            # is deleted when the handler exits, which made the log line
            # below raise NameError in Python 3.
            err = verify_err
            file_correct = False

        if file_correct is True:
            logging.info("[OK] %s (Done in %.3fs)" % (content_inner_path, time.time() - s))
        else:
            logging.error("[ERROR] %s: invalid file: %s!" % (content_inner_path, err))
            input("Continue?")
            # append, not +=: `+=` extended the list with the path's characters
            bad_files.append(content_inner_path)

    logging.info("Verifying site files...")
    bad_files += site.storage.verifyFiles()["bad_files"]
    if not bad_files:
        # Report total time, not time since the last content file
        logging.info("[OK] All file sha512sum matches! (%.3fs)" % (time.time() - s_total))
    else:
        logging.error("[ERROR] Error during verifying site files!")
|
||||||
|
|
||||||
|
def dbRebuild(self, address):
    """Rebuild the SQL cache (content.db) of the given site from its files."""
    from Site.Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()

    logging.info("Rebuilding site sql cache: %s..." % address)
    site = SiteManager.site_manager.get(address)
    started = time.time()
    try:
        site.storage.rebuildDb()
    except Exception as err:
        logging.error(err)
    else:
        logging.info("Done in %.3fs" % (time.time() - started))
|
||||||
|
|
||||||
|
def dbQuery(self, address, query):
    """Run an SQL query against the site's content.db and print the rows as JSON."""
    import json
    from Site.Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()

    site = Site(address)
    # Collect every row as a plain dict for JSON serialization
    rows = [dict(row) for row in site.storage.query(query)]
    print(json.dumps(rows, indent=4))
|
||||||
|
|
||||||
|
def siteAnnounce(self, address):
    """Announce a site to its trackers and print the discovered peers."""
    from Site.Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()

    logging.info("Opening a simple connection server")
    global file_server
    from File import FileServer
    file_server = FileServer("127.0.0.1", 1234)  # Local helper server for the announce
    file_server.start()

    logging.info("Announcing site %s to tracker..." % address)
    site = Site(address)

    s = time.time()
    site.announce()  # Tracker round-trip
    print("Response time: %.3fs" % (time.time() - s))
    print(site.peers)
|
||||||
|
|
||||||
|
def siteDownload(self, address):
    """Download a full site from its peers (debug/maintenance command)."""
    from Site.Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()

    logging.info("Opening a simple connection server")
    global file_server
    from File import FileServer
    file_server = FileServer("127.0.0.1", 1234)
    # Serve in the background while we download
    file_server_thread = gevent.spawn(file_server.start, check_sites=False)

    site = Site(address)

    on_completed = gevent.event.AsyncResult()

    def onComplete(evt):
        # Resolve the AsyncResult when the site signals completion
        evt.set(True)

    # NOTE(review): on_completed is never waited on below; downloadContent
    # itself appears to block -- confirm before relying on this event.
    site.onComplete.once(lambda: onComplete(on_completed))
    print("Announcing...")
    site.announce()

    s = time.time()
    print("Downloading...")
    site.downloadContent("content.json", check_modifications=True)

    print("Downloaded in %.3fs" % (time.time()-s))
|
||||||
|
|
||||||
|
def siteNeedFile(self, address, inner_path):
    """Fetch a single file of a site from its peers and print whether it succeeded."""
    from Site.Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()

    def checker():
        # Debug helper: reports gevent scheduling latency once a second
        while 1:
            s = time.time()
            time.sleep(1)
            print("Switch time:", time.time() - s)
    gevent.spawn(checker)

    logging.info("Opening a simple connection server")
    global file_server
    from File import FileServer
    file_server = FileServer("127.0.0.1", 1234)
    file_server_thread = gevent.spawn(file_server.start, check_sites=False)

    site = Site(address)
    site.announce()  # Find peers first
    print(site.needFile(inner_path, update=True))
|
||||||
|
|
||||||
|
def siteCmd(self, address, cmd, parameters):
    """Send one websocket API command to a locally served site and return the result."""
    import json
    from Site import SiteManager

    site = SiteManager.site_manager.get(address)

    if not site:
        logging.error("Site not found: %s" % address)
        return None

    ws = self.getWebsocket(site)

    ws.send(json.dumps({"cmd": cmd, "params": parameters, "id": 1}))
    raw = ws.recv()

    try:
        response = json.loads(raw)
    except Exception as err:
        return {"error": "Invalid result: %s" % err, "res_raw": raw}

    # Unwrap the standard {"result": ...} envelope when present
    return response.get("result", response)
|
||||||
|
|
||||||
|
def getWebsocket(self, site):
    """Open a websocket connection to the locally running UI server for `site`."""
    import websocket

    url = "ws://%s:%s/Websocket?wrapper_key=%s" % (config.ui_ip, config.ui_port, site.settings["wrapper_key"])
    logging.info("Connecting to %s" % url)
    return websocket.create_connection(url)
|
||||||
|
|
||||||
|
def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"):
    """Publish an already-signed content file of a site to its peers.

    Prefers delegating to an already running local client over websocket;
    falls back to spawning a temporary FileServer and pushing directly.
    """
    global file_server
    from Site.Site import Site
    from Site import SiteManager
    from File import FileServer  # We need fileserver to handle incoming file requests
    from Peer import Peer
    file_server = FileServer()
    site = SiteManager.site_manager.get(address)
    logging.info("Loading site...")
    site.settings["serving"] = True  # Serving the site even if its disabled

    try:
        ws = self.getWebsocket(site)
        logging.info("Sending siteReload")
        self.siteCmd(address, "siteReload", inner_path)

        logging.info("Sending sitePublish")
        self.siteCmd(address, "sitePublish", {"inner_path": inner_path, "sign": False})
        logging.info("Done.")

    except Exception as err:
        # No running client: serve and push the update ourselves.
        # (This whole fallback lives in the except block: file_server_thread
        # is only defined here and is joined at the end.)
        logging.info("Can't connect to local websocket client: %s" % err)
        logging.info("Creating FileServer....")
        file_server_thread = gevent.spawn(file_server.start, check_sites=False)  # Dont check every site integrity
        time.sleep(0.001)

        # Started fileserver
        file_server.portCheck()
        if peer_ip:  # Announce ip specificed
            site.addPeer(peer_ip, peer_port)
        else:  # Just ask the tracker
            logging.info("Gathering peers from tracker")
            site.announce()  # Gather peers
        published = site.publish(5, inner_path)  # Push to peers
        if published > 0:
            time.sleep(3)
            logging.info("Serving files (max 60s)...")
            gevent.joinall([file_server_thread], timeout=60)
            logging.info("Done.")
        else:
            logging.info("No peers found, sitePublish command only works if you already have visitors serving your site")
|
||||||
|
|
||||||
|
# Crypto commands
|
||||||
|
def cryptPrivatekeyToAddress(self, privatekey=None):
    """Print the bitcoin address derived from a private key.

    Prompts on the console (hidden input) when no key was supplied.
    """
    from Crypt import CryptBitcoin

    key = privatekey
    if not key:  # If no privatekey in args then ask it now
        import getpass
        key = getpass.getpass("Private key (input hidden):")
    print(CryptBitcoin.privatekeyToAddress(key))
|
||||||
|
|
||||||
|
def cryptSign(self, message, privatekey):
    """Sign `message` with a bitcoin private key and print the signature."""
    from Crypt import CryptBitcoin
    signature = CryptBitcoin.sign(message, privatekey)
    print(signature)
|
||||||
|
|
||||||
|
def cryptVerify(self, message, sign, address):
    """Check a bitcoin-signed message against an address and print the result."""
    from Crypt import CryptBitcoin
    is_valid = CryptBitcoin.verify(message, address, sign)
    print(is_valid)
|
||||||
|
|
||||||
|
def cryptGetPrivatekey(self, master_seed, site_address_index=None):
    """Derive and print a site private key from a 64-character master seed."""
    from Crypt import CryptBitcoin

    if len(master_seed) != 64:  # Master seeds are 64 hex characters
        logging.error("Error: Invalid master seed length: %s (required: 64)" % len(master_seed))
        return False
    derived = CryptBitcoin.hdPrivatekey(master_seed, site_address_index)
    print("Requested private key: %s" % derived)
|
||||||
|
|
||||||
|
# Peer
|
||||||
|
def peerPing(self, peer_ip, peer_port=None):
    """Connect to a peer, print TLS details and measure ping round-trip times.

    Pings 5 times, then drops the connection and pings 5 more times to
    exercise the reconnect path. Returns False when the connection fails.
    """
    if not peer_port:
        peer_port = 15441
    logging.info("Opening a simple connection server")
    global file_server
    from Connection import ConnectionServer
    file_server = ConnectionServer("127.0.0.1", 1234)
    file_server.start(check_connections=False)
    from Crypt import CryptConnection
    CryptConnection.manager.loadCerts()

    from Peer import Peer
    logging.info("Pinging 5 times peer: %s:%s..." % (peer_ip, int(peer_port)))
    s = time.time()
    peer = Peer(peer_ip, peer_port)
    peer.connect()

    if not peer.connection:
        print("Error: Can't connect to peer (connection error: %s)" % peer.connection_error)
        return False
    # hasattr instead of `"x" in dir(...)`: same check without building the
    # full attribute list; sock lookup hoisted out of the repeated accesses.
    sock = peer.connection.sock
    if hasattr(sock, "shared_ciphers"):
        print("Shared ciphers:", sock.shared_ciphers())
    if hasattr(sock, "cipher"):
        print("Cipher:", sock.cipher()[0])
    if hasattr(sock, "version"):
        print("TLS version:", sock.version())
    print("Connection time: %.3fs (connection error: %s)" % (time.time() - s, peer.connection_error))

    for i in range(5):
        ping_delay = peer.ping()
        print("Response time: %.3fs" % ping_delay)
        time.sleep(1)
    peer.remove()
    print("Reconnect test...")
    peer = Peer(peer_ip, peer_port)
    for i in range(5):
        ping_delay = peer.ping()
        print("Response time: %.3fs" % ping_delay)
        time.sleep(1)
|
||||||
|
|
||||||
|
def peerGetFile(self, peer_ip, peer_port, site, filename, benchmark=False):
    """Download one file from a specific peer and print it (or benchmark 10 downloads)."""
    logging.info("Opening a simple connection server")
    global file_server
    from Connection import ConnectionServer
    file_server = ConnectionServer("127.0.0.1", 1234)
    file_server.start(check_connections=False)
    from Crypt import CryptConnection
    CryptConnection.manager.loadCerts()

    from Peer import Peer
    logging.info("Getting %s/%s from peer: %s:%s..." % (site, filename, peer_ip, peer_port))
    peer = Peer(peer_ip, peer_port)
    s = time.time()
    if benchmark:
        for i in range(10):
            # Result intentionally discarded; the original line ended with a
            # stray comma that wrapped it in a throwaway tuple.
            peer.getFile(site, filename)
        print("Response time: %.3fs" % (time.time() - s))  # Cumulative time for all 10 downloads
        input("Check memory")
    else:
        print(peer.getFile(site, filename).read())
|
||||||
|
|
||||||
|
def peerCmd(self, peer_ip, peer_port, cmd, parameters):
    """Send a raw protocol command to a peer and print the JSON response.

    `parameters` is a JSON-ish string (single quotes accepted) or empty.
    """
    logging.info("Opening a simple connection server")
    global file_server
    from Connection import ConnectionServer
    file_server = ConnectionServer()
    file_server.start(check_connections=False)
    from Crypt import CryptConnection
    CryptConnection.manager.loadCerts()

    from Peer import Peer
    peer = Peer(peer_ip, peer_port)

    import json
    if parameters:
        parameters = json.loads(parameters.replace("'", '"'))  # Accept single-quoted input
    else:
        parameters = {}
    res = None  # Defined up front: the except block below reports it
    try:
        res = peer.request(cmd, parameters)
        print(json.dumps(res, indent=2, ensure_ascii=False))
    except Exception as err:
        # Previously `res` was unbound here whenever request() itself raised,
        # turning the error report into a NameError.
        print("Unknown response (%s): %s" % (err, res))
|
||||||
|
|
||||||
|
def getConfig(self):
    """Print the current server configuration as pretty-printed JSON."""
    import json
    server_info = config.getServerInfo()
    print(json.dumps(server_info, indent=2, ensure_ascii=False))
|
||||||
|
|
||||||
|
def test(self, test_name, *args, **kwargs):
    """Run a named test* method of this class, or list available tests.

    Test methods are named test<Name>; `test_name` is the camelCase form
    without the "test" prefix. Generator-returning tests are iterated with
    their progress printed as it is yielded.
    """
    import types

    def funcToName(func_name):
        # "testSomeThing" -> "someThing"
        test_name = func_name.replace("test", "")
        return test_name[0].lower() + test_name[1:]

    test_names = [funcToName(name) for name in dir(self) if name.startswith("test") and name != "test"]
    if not test_name:
        # No test specificed, list tests
        print("\nNo test specified, possible tests:")
        for test_name in test_names:
            func_name = "test" + test_name[0].upper() + test_name[1:]
            func = getattr(self, func_name)
            if func.__doc__:
                print("- %s: %s" % (test_name, func.__doc__.strip()))
            else:
                print("- %s" % test_name)
        return None

    # Run tests
    func_name = "test" + test_name[0].upper() + test_name[1:]
    if hasattr(self, func_name):
        func = getattr(self, func_name)
        print("- Running test: %s" % test_name, end="")
        s = time.time()
        ret = func(*args, **kwargs)
        if type(ret) is types.GeneratorType:
            # Streaming test: print progress chunks as they arrive
            for progress in ret:
                print(progress, end="")
                sys.stdout.flush()
            print("\n* Test %s done in %.3fs" % (test_name, time.time() - s))
    else:
        print("Unknown test: %r (choose from: %s)" % (
            test_name, test_names
        ))
|
||||||
|
|
||||||
|
|
||||||
|
actions = Actions()
|
||||||
|
# Starts here when running zeronet.py
|
||||||
|
|
||||||
|
|
||||||
|
def start():
    """Entry point when running zeronet.py: dispatch the configured action."""
    # Call function
    kwargs = config.getActionArguments()
    actions.call(config.action, kwargs)
|
|
@ -0,0 +1,68 @@
|
||||||
|
import time
|
||||||
|
|
||||||
|
|
||||||
|
class Cached(object):
    """Decorator: memoize a function's results for `timeout` seconds.

    The cache key is built from the stringified call arguments; expired
    entries are purged lazily when a stale hit is detected. The decorated
    function gains an `emptyCache()` method for manual flushing.
    """

    def __init__(self, timeout):
        self.cache_db = {}  # key -> (cached value, expiry timestamp)
        self.timeout = timeout

    def __call__(self, func):
        def wrapper(*args, **kwargs):
            key = "%s %s" % (args, kwargs)
            entry = self.cache_db.get(key)
            if entry is not None:
                value, expires_at = entry
                if time.time() <= expires_at:
                    return value
                self.cleanupExpired()  # Stale hit: drop everything that timed out
            value = func(*args, **kwargs)
            self.cache_db[key] = (value, time.time() + self.timeout)
            return value

        wrapper.emptyCache = self.emptyCache  # Allow callers to flush manually
        return wrapper

    def cleanupExpired(self):
        """Drop every cache entry whose expiry timestamp has passed."""
        for key in list(self.cache_db.keys()):
            value, expires_at = self.cache_db[key]
            if time.time() > expires_at:
                del(self.cache_db[key])

    def emptyCache(self):
        """Clear the whole cache; return how many entries were removed."""
        num = len(self.cache_db)
        self.cache_db.clear()
        return num
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Manual demo/self-test of the Cached decorator
    from gevent import monkey
    monkey.patch_all()

    @Cached(timeout=2)
    def calcAdd(a, b):
        print("CalcAdd", a, b)  # Printed only on cache misses
        return a + b

    @Cached(timeout=1)
    def calcMultiply(a, b):
        print("calcMultiply", a, b)
        return a * b

    for i in range(5):
        print("---")
        print("Emptied", calcAdd.emptyCache())
        assert calcAdd(1, 2) == 3
        print("Emptied", calcAdd.emptyCache())
        assert calcAdd(1, 2) == 3
        assert calcAdd(2, 3) == 5
        assert calcMultiply(2, 3) == 6
        time.sleep(1)  # Let calcMultiply's 1s entries expire between rounds
|
|
@ -0,0 +1,34 @@
|
||||||
|
import random
|
||||||
|
|
||||||
|
class CircularIterator:
    """Iterate a (possibly changing) list round-robin, starting at a random position.

    `successive_count` tracks how many items were served since the last
    reset; isWrapped() tells whether a full revolution has completed.
    """

    def __init__(self):
        self.successive_count = 0  # Items served since last resetSuccessiveCount()
        self.last_size = 0         # Size of the list seen by the last next() call
        self.index = -1            # -1 = no position chosen yet

    def next(self, items):
        """Return the next item of `items` (round-robin), or None if it is empty."""
        self.last_size = len(items)

        if self.last_size == 0:
            return None

        if self.index < 0:
            # First call: pick a random start position. randint's upper bound
            # is inclusive, so use size - 1 -- the original randint(0, size)
            # made index 0 twice as likely after the modulo below.
            self.index = random.randint(0, self.last_size - 1)
        else:
            self.index += 1

        self.index = self.index % self.last_size  # The list may have shrunk since last call

        self.successive_count += 1

        return items[self.index]

    def resetSuccessiveCount(self):
        """Restart the wrap-detection counter."""
        self.successive_count = 0

    def getSuccessiveCount(self):
        """Number of items served since the last reset."""
        return self.successive_count

    def isWrapped(self):
        """True once at least one full pass over the list has been served."""
        return self.successive_count >= self.last_size
|
||||||
|
|
|
@ -0,0 +1,48 @@
|
||||||
|
import io
|
||||||
|
|
||||||
|
import difflib
|
||||||
|
|
||||||
|
|
||||||
|
def sumLen(lines):
    """Total length (bytes/chars) of an iterable of lines."""
    return sum(map(len, lines))


def diff(old, new, limit=False):
    """Compute a compact line-based patch transforming `old` into `new`.

    Returns a list of actions: ("=", n) keep n chars, ("-", n) delete n
    chars, ("+", [lines]) insert lines. When `limit` is set and the
    inserted payload exceeds it, returns False instead.
    """
    matcher = difflib.SequenceMatcher(None, old, new)
    actions = []
    size = 0
    for tag, old_from, old_to, new_from, new_to in matcher.get_opcodes():
        if tag == "insert":
            new_lines = new[new_from:new_to]
            actions.append(("+", new_lines))
            size += sumLen(new_lines)  # Was an inline sum(map(len, ...)); use the shared helper
        elif tag == "equal":
            actions.append(("=", sumLen(old[old_from:old_to])))
        elif tag == "delete":
            actions.append(("-", sumLen(old[old_from:old_to])))
        elif tag == "replace":
            actions.append(("-", sumLen(old[old_from:old_to])))
            new_lines = new[new_from:new_to]
            actions.append(("+", new_lines))
            size += sumLen(new_lines)
        if limit and size > limit:
            return False
    return actions


def patch(old_f, actions):
    """Apply actions produced by diff() to the open binary file `old_f`.

    Returns an io.BytesIO with the patched content (position left at end).
    Raises ValueError on an unknown action code.
    """
    new_f = io.BytesIO()
    for action, param in actions:
        if type(action) is bytes:  # Actions may arrive bytes-encoded from the network
            action = action.decode()
        if action == "=":  # Same lines
            new_f.write(old_f.read(param))
        elif action == "-":  # Delete lines
            old_f.seek(param, 1)  # Seek from current position
            continue
        elif action == "+":  # Add lines
            for add_line in param:
                new_f.write(add_line)
        else:
            # Raising a plain string is itself a TypeError in Python 3;
            # raise a real exception with the intended message instead.
            raise ValueError("Unknown action: %s" % action)
    return new_f
|
|
@ -0,0 +1,39 @@
|
||||||
|
import hashlib
|
||||||
|
import struct
|
||||||
|
|
||||||
|
|
||||||
|
# Electrum, the heck?!
|
||||||
|
|
||||||
|
def bchr(i):
    """Single byte from an int (Python 3 stand-in for Python 2 chr())."""
    return struct.pack("B", i)


def encode(val, base, minlen=0):
    """Big-endian encode integer `val` in `base`, left-padded with zero digits to `minlen`."""
    base, minlen = int(base), int(minlen)
    code_string = bytes(range(256))
    result = b""
    while val > 0:
        val, index = divmod(val, base)
        result = code_string[index:index + 1] + result
    padding = code_string[0:1] * max(minlen - len(result), 0)
    return padding + result


def insane_int(x):
    """Bitcoin varint encoding of a length (size-prefixed little-endian)."""
    x = int(x)
    if x < 253:
        return bchr(x)
    if x < 65536:
        return bchr(253) + encode(x, 256, 2)[::-1]
    if x < 4294967296:
        return bchr(254) + encode(x, 256, 4)[::-1]
    return bchr(255) + encode(x, 256, 8)[::-1]


def magic(message):
    """Prefix `message` with the standard Bitcoin signed-message header."""
    return b"\x18Bitcoin Signed Message:\n" + insane_int(len(message)) + message


def format(message):
    """sha256 digest of the magic-wrapped message."""
    return hashlib.sha256(magic(message)).digest()


def dbl_format(message):
    """Double sha256 digest of the magic-wrapped message."""
    return hashlib.sha256(format(message)).digest()
|
|
@ -0,0 +1,55 @@
|
||||||
|
# Based on http://stackoverflow.com/a/2022629
|
||||||
|
|
||||||
|
|
||||||
|
class Event(list):
    """A list of callbacks that can be fired like a function.

    Handlers registered with once() are removed right before their first
    invocation; handlers added with plain append() fire every time.
    Based on http://stackoverflow.com/a/2022629
    """

    def __call__(self, *args, **kwargs):
        # Iterate a snapshot: handlers may mutate the list while firing
        for handler in self[:]:
            if hasattr(handler, "once") and handler in self:
                self.remove(handler)
            handler(*args, **kwargs)

    def __repr__(self):
        return "Event(%s)" % list.__repr__(self)

    def once(self, func, name=None):
        """Register `func` to fire only on the next call.

        When `name` is given, a second once-handler with the same name is
        silently ignored (de-duplication by name).
        """
        func.once = True
        func.name = None
        if name:  # Dont function with same name twice
            existing = [f.name for f in self if hasattr(f, "once")]
            if name in existing:
                return self
            func.name = name
        self.append(func)
        return self
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Manual demos; note that only testBenchmark() is actually invoked below.
    def testBenchmark():
        def say(pre, text):
            print("%s Say: %s" % (pre, text))

        import time
        s = time.time()
        on_changed = Event()
        for i in range(1000):
            # De-duplicated by name: only the first registration survives
            on_changed.once(lambda pre: say(pre, "once"), "once")
        print("Created 1000 once in %.3fs" % (time.time() - s))
        on_changed("#1")

    def testUsage():
        # NOTE(review): defined but never called in this demo
        def say(pre, text):
            print("%s Say: %s" % (pre, text))

        on_changed = Event()
        on_changed.once(lambda pre: say(pre, "once"))
        on_changed.once(lambda pre: say(pre, "once"))
        on_changed.once(lambda pre: say(pre, "namedonce"), "namedonce")
        on_changed.once(lambda pre: say(pre, "namedonce"), "namedonce")
        on_changed.append(lambda pre: say(pre, "always"))
        on_changed("#1")
        on_changed("#2")
        on_changed("#3")

    testBenchmark()
|
|
@ -0,0 +1,22 @@
|
||||||
|
from collections import defaultdict
|
||||||
|
|
||||||
|
|
||||||
|
class Flag(object):
    """Collects permission/behavior flags for websocket API actions.

    Usage: @flag.admin above an action function records "admin" for that
    function's name in flag.db.
    """

    def __init__(self):
        # Flags an action is allowed to declare
        self.valid_flags = set([
            "admin",  # Only allowed to run sites with ADMIN permission
            "async_run",  # Action will be ran async with gevent.spawn
            "no_multiuser"  # Action disabled if Multiuser plugin running in open proxy mode
        ])
        self.db = defaultdict(set)  # function name -> set of flag names

    def __getattr__(self, key):
        # Attribute access produces a decorator that records `key` as a flag;
        # validity is checked at decoration time, matching the original.
        def decorator(f):
            if key not in self.valid_flags:
                raise Exception("Invalid flag: %s (valid: %s)" % (key, self.valid_flags))
            self.db[f.__name__].add(key)
            return f
        return decorator


flag = Flag()
|
|
@ -0,0 +1,44 @@
|
||||||
|
import gevent
|
||||||
|
from Debug import Debug
|
||||||
|
|
||||||
|
|
||||||
|
class GreenletManager:
    # Tracks greenlets spawned through it so they can be stopped together.
    # pool is either gevent.pool.Pool or GreenletManager.
    # if pool is None, new gevent.pool.Pool() is created.
    def __init__(self, pool=None):
        self.greenlets = set()  # Live greenlets spawned through this manager
        if not pool:
            # NOTE(review): relies on gevent.pool being accessible after
            # `import gevent` -- confirm gevent.pool is imported somewhere.
            pool = gevent.pool.Pool(None)
        self.pool = pool

    def _spawn_later(self, seconds, *args, **kwargs):
        # If pool is another GreenletManager, delegate to it.
        if hasattr(self.pool, 'spawnLater'):
            return self.pool.spawnLater(seconds, *args, **kwargs)

        # There's gevent.spawn_later(), but there isn't gevent.pool.Pool.spawn_later().
        # Doing manually.
        greenlet = self.pool.greenlet_class(*args, **kwargs)
        self.pool.add(greenlet)
        greenlet.start_later(seconds)
        return greenlet

    def _spawn(self, *args, **kwargs):
        return self.pool.spawn(*args, **kwargs)

    def spawnLater(self, *args, **kwargs):
        # Spawn a greenlet after a delay and track it until it finishes
        greenlet = self._spawn_later(*args, **kwargs)
        greenlet.link(lambda greenlet: self.greenlets.remove(greenlet))  # Untrack on exit
        self.greenlets.add(greenlet)
        return greenlet

    def spawn(self, *args, **kwargs):
        # Spawn a greenlet immediately and track it until it finishes
        greenlet = self._spawn(*args, **kwargs)
        greenlet.link(lambda greenlet: self.greenlets.remove(greenlet))  # Untrack on exit
        self.greenlets.add(greenlet)
        return greenlet

    def stopGreenlets(self, reason="Stopping all greenlets"):
        # Kill every tracked greenlet (non-blocking); return how many were killed
        num = len(self.greenlets)
        gevent.killall(list(self.greenlets), Debug.createNotifyType(reason), block=False)
        return num
|
|
@ -0,0 +1,101 @@
|
||||||
|
import os
|
||||||
|
import struct
|
||||||
|
import io
|
||||||
|
|
||||||
|
import msgpack
|
||||||
|
import msgpack.fallback
|
||||||
|
|
||||||
|
|
||||||
|
def msgpackHeader(size):
    """Build the msgpack bin-format family header for a binary payload of `size` bytes."""
    if size <= 2 ** 8 - 1:
        return b"\xc4" + struct.pack("B", size)   # bin 8
    if size <= 2 ** 16 - 1:
        return b"\xc5" + struct.pack(">H", size)  # bin 16
    if size <= 2 ** 32 - 1:
        return b"\xc6" + struct.pack(">I", size)  # bin 32
    raise Exception("huge binary string")
|
||||||
|
|
||||||
|
|
||||||
|
def stream(data, writer):
    """Serialize dict `data` with msgpack through `writer`, streaming file values.

    Values that are open file objects (e.g. FilePart) are emitted as a
    msgpack bin header followed by chunked reads of at most `read_bytes`
    bytes from the current position. Returns the size of the last streamed
    file part (0 when the payload contained no file).
    """
    packer = msgpack.Packer(use_bin_type=True)
    writer(packer.pack_map_header(len(data)))
    size = 0  # Was unbound when data contained no file object -> NameError on return
    for key, val in data.items():
        writer(packer.pack(key))
        if isinstance(val, io.IOBase):  # File obj
            max_size = os.fstat(val.fileno()).st_size - val.tell()
            size = min(max_size, val.read_bytes)
            bytes_left = size
            writer(msgpackHeader(size))
            buff = 1024 * 64  # 64KB chunks
            while 1:
                writer(val.read(min(bytes_left, buff)))
                bytes_left = bytes_left - buff
                if bytes_left <= 0:
                    break
        else:  # Simple
            writer(packer.pack(val))
    return size
|
||||||
|
|
||||||
|
|
||||||
|
class FilePart(object):
    """File wrapper recognized by the msgpack streaming code for partial uploads.

    Behaves like the underlying file object (attribute access delegates);
    `read_bytes` limits how many bytes stream() sends from the current
    position.
    """
    __slots__ = ("file", "read_bytes", "__class__")

    def __init__(self, *args, **kwargs):
        self.file = open(*args, **kwargs)
        # The original had `self.__enter__ == self.file.__enter__` here:
        # a no-op comparison (`==` where `=` was intended). The context
        # manager protocol is properly implemented by __enter__/__exit__
        # below, so the dead statement is simply removed.

    def __getattr__(self, attr):
        # Delegate everything not defined here to the wrapped file
        return getattr(self.file, attr)

    def __enter__(self, *args, **kwargs):
        return self.file.__enter__(*args, **kwargs)

    def __exit__(self, *args, **kwargs):
        return self.file.__exit__(*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
# Don't try to decode the value of these fields as utf8
bin_value_keys = ("hashfield_raw", "peers", "peers_ipv6", "peers_onion", "body", "sites", "bin")


def objectDecoderHook(obj):
    """msgpack pair-hook: decode keys and most values from bytes to str.

    Values of known-binary fields, non-bytes values and values under long
    (>= 64 char) keys are left untouched.
    """
    decoded = {}
    for key, val in obj:
        if type(key) is bytes:
            key = key.decode("utf8")
        if key in bin_value_keys or type(val) is not bytes or len(key) >= 64:
            decoded[key] = val
        else:
            decoded[key] = val.decode("utf8")
    return decoded
|
||||||
|
|
||||||
|
|
||||||
|
def getUnpacker(fallback=False, decode=True):
    """Create a configured msgpack Unpacker.

    fallback: use the pure-Python implementation instead of the C extension.
    decode: decode bytes keys/values to str via objectDecoderHook, for
    backward compatibility with peers not using the bin type.
    """
    if fallback:  # Pure Python
        unpacker = msgpack.fallback.Unpacker
    else:
        unpacker = msgpack.Unpacker

    extra_kwargs = {"max_buffer_size": 5 * 1024 * 1024}  # Cap at 5MB against oversized payloads
    if msgpack.version[0] >= 1:
        extra_kwargs["strict_map_key"] = False  # msgpack >= 1.0 rejects non-str map keys by default

    if decode:  # Workaround for backward compatibility: Try to decode bin to str
        unpacker = unpacker(raw=True, object_pairs_hook=objectDecoderHook, **extra_kwargs)
    else:
        unpacker = unpacker(raw=False, **extra_kwargs)

    return unpacker
|
||||||
|
|
||||||
|
|
||||||
|
def pack(data, use_bin_type=True):
    """msgpack-serialize `data` to bytes (binary values kept as bin type by default)."""
    packed = msgpack.packb(data, use_bin_type=use_bin_type)
    return packed
|
||||||
|
|
||||||
|
|
||||||
|
def unpack(data, decode=True):
    """Deserialize a single msgpack object from the `data` bytes."""
    parser = getUnpacker(decode=decode)
    parser.feed(data)
    return next(parser)
|
||||||
|
|
|
@ -0,0 +1,202 @@
|
||||||
|
import gevent
|
||||||
|
import time
|
||||||
|
from gevent.event import AsyncResult
|
||||||
|
|
||||||
|
from . import ThreadPool
|
||||||
|
|
||||||
|
|
||||||
|
class Noparallel: # Only allow function running once in same time
|
||||||
|
|
||||||
|
def __init__(self, blocking=True, ignore_args=False, ignore_class=False, queue=False):
    # threads: maps a call key to the greenlet/AsyncResult currently running it
    self.threads = {}
    self.blocking = blocking  # Blocking: Acts like normal function else thread returned
    self.queue = queue  # Execute again when blocking is done
    self.queued = False  # A queued re-run is pending
    self.ignore_args = ignore_args  # Block does not depend on function call arguments
    self.ignore_class = ignore_class  # Block does not depeds on class instance
|
||||||
|
|
||||||
|
def __call__(self, func):
|
||||||
|
def wrapper(*args, **kwargs):
|
||||||
|
if not ThreadPool.isMainThread():
|
||||||
|
return ThreadPool.main_loop.call(wrapper, *args, **kwargs)
|
||||||
|
|
||||||
|
if self.ignore_class:
|
||||||
|
key = func # Unique key only by function and class object
|
||||||
|
elif self.ignore_args:
|
||||||
|
key = (func, args[0]) # Unique key only by function and class object
|
||||||
|
else:
|
||||||
|
key = (func, tuple(args), str(kwargs)) # Unique key for function including parameters
|
||||||
|
if key in self.threads: # Thread already running (if using blocking mode)
|
||||||
|
if self.queue:
|
||||||
|
self.queued = True
|
||||||
|
thread = self.threads[key]
|
||||||
|
if self.blocking:
|
||||||
|
if self.queued:
|
||||||
|
res = thread.get() # Blocking until its finished
|
||||||
|
if key in self.threads:
|
||||||
|
return self.threads[key].get() # Queue finished since started running
|
||||||
|
self.queued = False
|
||||||
|
return wrapper(*args, **kwargs) # Run again after the end
|
||||||
|
else:
|
||||||
|
return thread.get() # Return the value
|
||||||
|
|
||||||
|
else: # No blocking
|
||||||
|
if thread.ready(): # Its finished, create a new
|
||||||
|
thread = gevent.spawn(func, *args, **kwargs)
|
||||||
|
self.threads[key] = thread
|
||||||
|
return thread
|
||||||
|
else: # Still running
|
||||||
|
return thread
|
||||||
|
else: # Thread not running
|
||||||
|
if self.blocking: # Wait for finish
|
||||||
|
asyncres = AsyncResult()
|
||||||
|
self.threads[key] = asyncres
|
||||||
|
try:
|
||||||
|
res = func(*args, **kwargs)
|
||||||
|
asyncres.set(res)
|
||||||
|
self.cleanup(key, asyncres)
|
||||||
|
return res
|
||||||
|
except Exception as err:
|
||||||
|
asyncres.set_exception(err)
|
||||||
|
self.cleanup(key, asyncres)
|
||||||
|
raise(err)
|
||||||
|
else: # No blocking just return the thread
|
||||||
|
thread = gevent.spawn(func, *args, **kwargs) # Spawning new thread
|
||||||
|
thread.link(lambda thread: self.cleanup(key, thread))
|
||||||
|
self.threads[key] = thread
|
||||||
|
return thread
|
||||||
|
wrapper.__name__ = func.__name__
|
||||||
|
|
||||||
|
return wrapper
|
||||||
|
|
||||||
|
# Cleanup finished threads
|
||||||
|
def cleanup(self, key, thread):
|
||||||
|
if key in self.threads:
|
||||||
|
del(self.threads[key])
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Manual test harness: run this module directly to exercise the decorator.

    class Test():

        @Noparallel()
        def count(self, num=5):
            for i in range(num):
                print(self, i)
                time.sleep(1)
            return "%s return:%s" % (self, i)

    class TestNoblock():

        @Noparallel(blocking=False)
        def count(self, num=5):
            for i in range(num):
                print(self, i)
                time.sleep(1)
            return "%s return:%s" % (self, i)

    def testBlocking():
        # Duplicate calls on the same instance should share one run
        test = Test()
        test2 = Test()
        print("Counting...")
        print("Creating class1/thread1")
        thread1 = gevent.spawn(test.count)
        print("Creating class1/thread2 (ignored)")
        thread2 = gevent.spawn(test.count)
        print("Creating class2/thread3")
        thread3 = gevent.spawn(test2.count)

        print("Joining class1/thread1")
        thread1.join()
        print("Joining class1/thread2")
        thread2.join()
        print("Joining class2/thread3")
        thread3.join()

        print("Creating class1/thread4 (its finished, allowed again)")
        thread4 = gevent.spawn(test.count)
        print("Joining thread4")
        thread4.join()

        print(thread1.value, thread2.value, thread3.value, thread4.value)
        print("Done.")

    def testNoblocking():
        # Non-blocking mode returns the shared gevent thread immediately
        test = TestNoblock()
        test2 = TestNoblock()
        print("Creating class1/thread1")
        thread1 = test.count()
        print("Creating class1/thread2 (ignored)")
        thread2 = test.count()
        print("Creating class2/thread3")
        thread3 = test2.count()
        print("Joining class1/thread1")
        thread1.join()
        print("Joining class1/thread2")
        thread2.join()
        print("Joining class2/thread3")
        thread3.join()

        print("Creating class1/thread4 (its finished, allowed again)")
        thread4 = test.count()
        print("Joining thread4")
        thread4.join()

        print(thread1.value, thread2.value, thread3.value, thread4.value)
        print("Done.")

    def testBenchmark():
        # Checks greenlet count before/after spawning to spot leaks
        import time

        def printThreadNum():
            import gc
            from greenlet import greenlet
            objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)]
            print("Greenlets: %s" % len(objs))

        printThreadNum()
        test = TestNoblock()
        s = time.time()
        for i in range(3):
            gevent.spawn(test.count, i + 1)
        print("Created in %.3fs" % (time.time() - s))
        printThreadNum()
        time.sleep(5)

    def testException():
        # Exceptions raised in the shared run must reach every waiting caller
        import time

        @Noparallel(blocking=True, queue=True)
        def count(self, num=5):
            s = time.time()
            # raise Exception("err")
            for i in range(num):
                print(self, i)
                time.sleep(1)
            return "%s return:%s" % (s, i)

        def caller():
            try:
                print("Ret:", count(5))
            except Exception as err:
                print("Raised:", repr(err))

        gevent.joinall([
            gevent.spawn(caller),
            gevent.spawn(caller),
            gevent.spawn(caller),
            gevent.spawn(caller)
        ])

    from gevent import monkey
    monkey.patch_all()

    testException()

    """
    testBenchmark()
    print("Testing blocking mode...")
    testBlocking()
    print("Testing noblocking mode...")
    testNoblocking()
    """
|
|
@ -0,0 +1,69 @@
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import ctypes.util
|
||||||
|
|
||||||
|
from Config import config
|
||||||
|
|
||||||
|
find_library_original = ctypes.util.find_library
|
||||||
|
|
||||||
|
|
||||||
|
def getOpensslPath():
    """Locate the OpenSSL crypto shared library.

    Returns a filesystem path, or a bare library name/filename that the
    dynamic loader is expected to resolve (Android / LD_LIBRARY_PATH cases).
    Search order: explicit config override, bundled per-platform paths,
    Android app libs, LD_LIBRARY_PATH entries, then ctypes.util.find_library.
    """
    # Explicit override from the command line / config file wins
    if config.openssl_lib_file:
        return config.openssl_lib_file

    # Platform-specific bundled library locations
    if sys.platform.startswith("win"):
        lib_paths = [
            os.path.join(os.getcwd(), "tools/openssl/libeay32.dll"),  # ZeroBundle Windows
            os.path.join(os.path.dirname(sys.executable), "DLLs/libcrypto-1_1-x64.dll"),
            os.path.join(os.path.dirname(sys.executable), "DLLs/libcrypto-1_1.dll")
        ]
    elif sys.platform == "cygwin":
        lib_paths = ["/bin/cygcrypto-1.0.0.dll"]
    else:
        lib_paths = [
            "../runtime/lib/libcrypto.so.1.1",  # ZeroBundle Linux
            "../../Frameworks/libcrypto.1.1.dylib",  # ZeroBundle macOS
            "/opt/lib/libcrypto.so.1.0.0",  # For optware and entware
            "/usr/local/ssl/lib/libcrypto.so"
        ]

    for lib_path in lib_paths:
        if os.path.isfile(lib_path):
            return lib_path

    # Android: search the app's native library directory.
    # NOTE(review): returns the bare filename, not lib_dir + filename —
    # presumably the loader resolves it; confirm against the ctypes caller.
    if "ANDROID_APP_PATH" in os.environ:
        try:
            lib_dir = os.environ["ANDROID_APP_PATH"] + "/../../lib"
            return [lib for lib in os.listdir(lib_dir) if "crypto" in lib][0]
        except Exception as err:
            logging.debug("OpenSSL lib not found in: %s (%s)" % (lib_dir, err))

    # Fall back to any libcrypto.so visible on LD_LIBRARY_PATH (bare name, see above)
    if "LD_LIBRARY_PATH" in os.environ:
        lib_dir_paths = os.environ["LD_LIBRARY_PATH"].split(":")
        for path in lib_dir_paths:
            try:
                return [lib for lib in os.listdir(path) if "libcrypto.so" in lib][0]
            except Exception as err:
                logging.debug("OpenSSL lib not found in: %s (%s)" % (path, err))

    # Last resort: the system's own find_library, then the legacy Windows name
    lib_path = (
        find_library_original('ssl.so') or find_library_original('ssl') or
        find_library_original('crypto') or find_library_original('libcrypto') or 'libeay32'
    )

    return lib_path
|
||||||
|
|
||||||
|
|
||||||
|
def patchCtypesOpensslFindLibrary():
    """Redirect ctypes.util.find_library so OpenSSL lookups use getOpensslPath().

    Any other library name falls through to the original implementation.
    """
    openssl_names = ("ssl", "crypto", "libeay32")

    def findLibraryPatched(name):
        if name in openssl_names:
            return getOpensslPath()
        return find_library_original(name)

    ctypes.util.find_library = findLibraryPatched
|
||||||
|
|
||||||
|
|
||||||
|
patchCtypesOpensslFindLibrary()
|
|
@ -0,0 +1,36 @@
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
|
||||||
|
|
||||||
|
def setMaxfilesopened(limit):
    """Try to raise the maximum number of open file handles to *limit*.

    Returns True if the limit was raised, False on failure, and None
    (implicitly) if the current limit was already high enough.
    """
    try:
        if sys.platform == "win32":
            # Windows: raise the CRT stdio handle limit via _setmaxstdio
            import ctypes
            dll = None
            last_err = None
            # Probe the MSVC runtimes that export _getmaxstdio/_setmaxstdio
            for dll_name in ["msvcr100", "msvcr110", "msvcr120"]:
                try:
                    dll = getattr(ctypes.cdll, dll_name)
                    break
                except OSError as err:
                    last_err = err

            if not dll:
                raise last_err

            maxstdio = dll._getmaxstdio()
            if maxstdio < limit:
                logging.debug("%s: Current maxstdio: %s, changing to %s..." % (dll, maxstdio, limit))
                dll._setmaxstdio(limit)
                return True
        else:
            # POSIX: raise the soft RLIMIT_NOFILE (capped by the hard limit)
            import resource
            soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
            if soft < limit:
                logging.debug("Current RLIMIT_NOFILE: %s (max: %s), changing to %s..." % (soft, hard, limit))
                resource.setrlimit(resource.RLIMIT_NOFILE, (limit, hard))
                return True

    except Exception as err:
        logging.error("Failed to modify max files open limit: %s" % err)
        return False
|
|
@ -0,0 +1,65 @@
|
||||||
|
import gevent.pool
|
||||||
|
|
||||||
|
|
||||||
|
class Pooled(object):
    """Decorator that limits the concurrency of an async function via a gevent pool.

    Callers get back an AsyncResult which is resolved once a pool slot has
    processed their call. NOTE(review): one Pooled instance supports only one
    decorated function — self.func is overwritten by each decoration.
    """

    def __init__(self, size=100):
        self.pool = gevent.pool.Pool(size)  # Caps the number of concurrent waiters
        self.pooler_running = False  # True while pooler() is draining the queue
        self.queue = []  # Pending (event, args, kwargs) calls
        self.func = None  # The decorated function, set in __call__

    def waiter(self, evt, args, kwargs):
        # Resolve evt with the (possibly async) result of the wrapped function
        res = self.func(*args, **kwargs)
        if type(res) == gevent.event.AsyncResult:
            evt.set(res.get())
        else:
            evt.set(res)

    def pooler(self):
        # Drain the queue into the pool; exits when the queue is empty
        while self.queue:
            evt, args, kwargs = self.queue.pop(0)
            self.pool.spawn(self.waiter, evt, args, kwargs)
        self.pooler_running = False

    def __call__(self, func):
        def wrapper(*args, **kwargs):
            evt = gevent.event.AsyncResult()  # Will hold the result of the func
            self.queue.append((evt, args, kwargs))
            if not self.pooler_running:  # Lazily start a single drainer greenlet
                self.pooler_running = True
                gevent.spawn(self.pooler)
            return evt
        wrapper.__name__ = func.__name__
        self.func = func

        return wrapper
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Manual test: 100 one-second tasks through a pool of 10 should take ~10s.
    import gevent
    import gevent.pool
    import gevent.queue
    import gevent.event
    import gevent.monkey
    import time

    gevent.monkey.patch_all()

    def addTask(inner_path):
        # Simulates an async operation resolved after one second
        evt = gevent.event.AsyncResult()
        gevent.spawn_later(1, lambda: evt.set(True))
        return evt

    def needFile(inner_path):
        return addTask(inner_path)

    @Pooled(10)
    def pooledNeedFile(inner_path):
        return needFile(inner_path)

    threads = []
    for i in range(100):
        threads.append(pooledNeedFile(i))

    s = time.time()
    gevent.joinall(threads)  # Should take 10 second
    print(time.time() - s)
|
|
@ -0,0 +1,67 @@
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import os
|
||||||
|
|
||||||
|
|
||||||
|
def queryFile(file_path, filter_path, filter_key=None, filter_val=None):
    """Load a json file and return the rows matching the filter.

    file_path: path of the json file to query
    filter_path: list of keys to descend into before matching
        (``['']`` returns the whole document as a single row)
    filter_key: column to compare when the target node is a list of rows
    filter_val: if set, only rows where row[filter_key] == filter_val are kept

    Returns a list of matching rows, or None when the path does not exist.
    Raises on unreadable/invalid json (callers handle this).
    """
    back = []
    # Bugfix: close the file handle instead of leaking it
    # (was: data = json.load(open(file_path)))
    with open(file_path) as file:
        data = json.load(file)
    if filter_path == ['']:
        return [data]
    for key in filter_path:  # Descend to the queried node
        data = data.get(key)
        if not data:
            return
    if type(data) == list:
        for row in data:
            if filter_val:  # Filter by value
                if row[filter_key] == filter_val:
                    back.append(row)
            else:
                back.append(row)
    else:
        # Scalar/dict node: wrap it so callers always get row dicts
        back.append({"value": data})

    return back
|
||||||
|
|
||||||
|
|
||||||
|
# Find in json files
|
||||||
|
# Return: [{u'body': u'Hello Topic 1!!', 'inner_path': '1KRxE1...beEp6', u'added': 1422740732, u'message_id': 1},...]
|
||||||
|
def query(path_pattern, filter):
    """Find rows in json files matching *path_pattern* and *filter*.

    filter: "" for no filtering, "path.to.key=value" to keep only rows
        where that key equals the (integer) value.
    path_pattern: may contain one "/*/" wildcard directory component.
    Yields each matching row with its "inner_path" added.
    """
    # Parse the filter expression
    if "=" in filter:  # Filter by value
        filter_path, filter_val = filter.split("=")
        filter_path = filter_path.split(".")
        filter_key = filter_path.pop()  # Last element is the key
        filter_val = int(filter_val)
    else:  # No filter
        filter_path = filter.split(".")
        filter_key = None
        filter_val = None

    # Split the pattern into the directory to walk and the file name to match
    normalized_pattern = path_pattern.replace("\\", "/")
    if "/*/" in normalized_pattern:  # Wildcard search
        root_dir, file_pattern = normalized_pattern.split("/*/")
    else:  # No wildcard
        root_dir, file_pattern = re.match("(.*)/(.*?)$", normalized_pattern).groups()

    for root, dirs, files in os.walk(root_dir, topdown=False):
        root = root.replace("\\", "/")
        inner_path = root.replace(root_dir, "").strip("/")
        for file_name in files:
            if file_pattern != file_name:
                continue

            try:
                rows = queryFile(root + "/" + file_name, filter_path, filter_key, filter_val)
            except Exception:  # Json load error
                continue
            if not rows:
                continue
            for row in rows:
                row["inner_path"] = inner_path
                yield row
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Ad-hoc manual test: dump every row of the example site's user data files
    for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "")):
        print(row)
|
|
@ -0,0 +1,128 @@
|
||||||
|
import time
|
||||||
|
import gevent
|
||||||
|
import logging
|
||||||
|
|
||||||
|
log = logging.getLogger("RateLimit")
|
||||||
|
|
||||||
|
called_db = {} # Holds events last call time
|
||||||
|
queue_db = {} # Commands queued to run
|
||||||
|
|
||||||
|
# Register event as called
|
||||||
|
# Return: None
|
||||||
|
|
||||||
|
|
||||||
|
def called(event, penalty=0):
    """Record *event* as called now, optionally pushed *penalty* seconds into the future."""
    called_db[event] = time.time() + penalty
|
||||||
|
|
||||||
|
|
||||||
|
# Check if calling event is allowed
|
||||||
|
# Return: True if allowed False if not
|
||||||
|
def isAllowed(event, allowed_again=10):
    """Return True if *event* may be called again.

    Allowed when it was never called, or its last call is at least
    *allowed_again* seconds old (the stale record is then dropped).
    """
    last_called = called_db.get(event)
    if not last_called:  # Never called before
        return True
    if time.time() - last_called >= allowed_again:
        del called_db[event]  # Forget the old timestamp to save memory
        return True
    return False
|
||||||
|
|
||||||
|
def delayLeft(event, allowed_again=10):
    """Return the seconds remaining until *event* may run again (0 if never called)."""
    last_called = called_db.get(event)
    return allowed_again - (time.time() - last_called) if last_called else 0
|
||||||
|
|
||||||
|
def callQueue(event):
    """Run the call queued for *event*: mark it called, remove it, invoke it."""
    log.debug("Calling: %s" % event)
    func, args, kwargs, thread = queue_db.pop(event)
    called(event)
    return func(*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
# Rate limit and delay function call if necessary
|
||||||
|
# If the function called again within the rate limit interval then previous queued call will be dropped
|
||||||
|
# Return: Immediately gevent thread
|
||||||
|
def callAsync(event, allowed_again=10, func=None, *args, **kwargs):
    """Rate-limited asynchronous call of *func*; returns a gevent thread immediately.

    If *event* ran within the last *allowed_again* seconds the call is queued
    for later; a newer call with the same event replaces the queued arguments.
    """
    if isAllowed(event, allowed_again):  # Not called recently, call it now
        called(event)
        # print "Calling now"
        return gevent.spawn(func, *args, **kwargs)
    else:  # Called recently, schedule it for later
        # NOTE(review): called() may store a future timestamp (penalty), making
        # time.time() - called_db[event] negative and time_left == allowed_again
        # — confirm this interaction is intended
        time_left = allowed_again - max(0, time.time() - called_db[event])
        log.debug("Added to queue (%.2fs left): %s " % (time_left, event))
        if not queue_db.get(event):  # Function call not queued yet
            thread = gevent.spawn_later(time_left, lambda: callQueue(event))  # Call this function later
            queue_db[event] = (func, args, kwargs, thread)
            return thread
        else:  # Function call already queued, just update the parameters
            thread = queue_db[event][3]
            queue_db[event] = (func, args, kwargs, thread)
            return thread
|
||||||
|
|
||||||
|
|
||||||
|
# Rate limit and delay function call if needed
|
||||||
|
# Return: Wait for execution/delay then return value
|
||||||
|
def call(event, allowed_again=10, func=None, *args, **kwargs):
    """Rate-limited synchronous call of *func*.

    If *event* ran within the last *allowed_again* seconds, sleeps until the
    interval has passed, then runs. Returns the return value of *func*.
    """
    # Bugfix: forward allowed_again (previously isAllowed() was called without
    # it, so the default 10s window was always used regardless of the argument)
    if isAllowed(event, allowed_again):  # Not called recently, call it now
        called(event)
        # print "Calling now", allowed_again
        return func(*args, **kwargs)

    else:  # Called recently, wait out the remaining interval then run
        time_left = max(0, allowed_again - (time.time() - called_db[event]))
        # print "Time left: %s" % time_left, args, kwargs
        log.debug("Calling sync (%.2fs left): %s" % (time_left, event))
        called(event, time_left)  # Penalize the event so parallel callers queue behind us
        time.sleep(time_left)
        back = func(*args, **kwargs)
        called(event)
        return back
|
||||||
|
|
||||||
|
|
||||||
|
# Cleanup expired events every 3 minutes
|
||||||
|
def rateLimitCleanup():
    """Background loop: every 3 minutes, drop call records older than 2 minutes."""
    while 1:
        expired = time.time() - 60 * 2  # Cleanup if older than 2 minutes
        # Iterate over a snapshot of keys since we delete while scanning
        for event in list(called_db.keys()):
            if called_db[event] < expired:
                del called_db[event]
        time.sleep(60 * 3)  # Every 3 minutes
gevent.spawn(rateLimitCleanup)  # Started once at import time
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Manual test harness: spam calls and verify the rate limiting + cleanup.
    from gevent import monkey
    monkey.patch_all()
    import random

    def publish(inner_path):
        print("Publishing %s..." % inner_path)
        return 1

    def cb(thread):
        print("Value:", thread.value)

    print("Testing async spam requests rate limit to 1/sec...")
    for i in range(3000):
        thread = callAsync("publish content.json", 1, publish, "content.json %s" % i)
        time.sleep(float(random.randint(1, 20)) / 100000)
    print(thread.link(cb))
    print("Done")

    time.sleep(2)

    print("Testing sync spam requests rate limit to 1/sec...")
    for i in range(5):
        call("publish data.json", 1, publish, "data.json %s" % i)
        time.sleep(float(random.randint(1, 100)) / 100)
    print("Done")

    print("Testing cleanup")
    thread = callAsync("publish content.json single", 1, publish, "content.json single")
    print("Needs to cleanup:", called_db, queue_db)
    print("Waiting 3min for cleanup process...")
    time.sleep(60 * 3)
    print("Cleaned up:", called_db, queue_db)
|
|
@ -0,0 +1,98 @@
|
||||||
|
import re
|
||||||
|
import logging
|
||||||
|
|
||||||
|
log = logging.getLogger("SafeRe")
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class UnsafePatternError(Exception):
    """Raised when a regex pattern is rejected as a potential ReDoS risk."""
    pass
|
||||||
|
|
||||||
|
max_cache_size = 1000
|
||||||
|
cached_patterns = {}
|
||||||
|
old_cached_patterns = {}
|
||||||
|
|
||||||
|
|
||||||
|
def isSafePattern(pattern):
    """Validate *pattern* against simple ReDoS heuristics.

    Returns True if the pattern looks safe; raises UnsafePatternError otherwise.
    """
    if len(pattern) > 255:
        raise UnsafePatternError("Pattern too long: %s characters in %s" % (len(pattern), pattern))

    # Repetition operators must be preceded by "." to avoid catastrophic backtracking
    risky_part = re.search(r"[^\.][\*\{\+]", pattern)
    if risky_part:
        raise UnsafePatternError("Potentially unsafe part of the pattern: %s in %s" % (risky_part.group(0), pattern))

    # Cap the total number of repetition constructs
    num_repetitions = len(re.findall(r"\.[\*\{\+]", pattern)) + len(re.findall(r"[^(][?]", pattern))
    if num_repetitions >= 10:
        raise UnsafePatternError("More than 10 repetitions in %s" % pattern)

    return True
|
||||||
|
|
||||||
|
|
||||||
|
def compilePattern(pattern):
    """Return a compiled regex for *pattern*, caching the result.

    Uses two cache generations: on overflow the active cache becomes
    old_cached_patterns, from which entries are promoted back on hit.
    Raises UnsafePatternError (via isSafePattern) for unsafe patterns.
    """
    global cached_patterns
    global old_cached_patterns

    cached_pattern = cached_patterns.get(pattern)
    if cached_pattern:
        return cached_pattern

    cached_pattern = old_cached_patterns.get(pattern)
    if cached_pattern:
        # Promote from the old generation back into the active cache
        del old_cached_patterns[pattern]
        cached_patterns[pattern] = cached_pattern
        return cached_pattern

    if isSafePattern(pattern):
        cached_pattern = re.compile(pattern)
        cached_patterns[pattern] = cached_pattern
        log.debug("Compiled new pattern: %s" % pattern)
        log.debug("Cache size: %d + %d" % (len(cached_patterns), len(old_cached_patterns)))

        if len(cached_patterns) > max_cache_size:
            # Rotate generations: recent entries stay reachable in old_cached_patterns
            old_cached_patterns = cached_patterns
            cached_patterns = {}
            log.debug("Size limit reached. Rotating cache.")
            log.debug("Cache size: %d + %d" % (len(cached_patterns), len(old_cached_patterns)))

    return cached_pattern
|
||||||
|
|
||||||
|
|
||||||
|
def match(pattern, *args, **kwargs):
    """Like re.match, but only for safety-checked, cached patterns."""
    return compilePattern(pattern).match(*args, **kwargs)
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
|
||||||
|
# TESTS
|
||||||
|
|
||||||
|
def testSafePattern(pattern):
    """Boolean wrapper over isSafePattern: True if safe, False if rejected."""
    try:
        return isSafePattern(pattern)
    except UnsafePatternError as err:
        return False


# Import-time sanity checks of the heuristics.
# Some real examples to make sure it works as expected
assert testSafePattern('(data/mp4/.*|updater/.*)')
assert testSafePattern('((js|css)/(?!all.(js|css)))|.git')


# Unsafe cases:

# ((?!json).)*$ not allowed, because of ) before the * character. Possible fix: .*(?!json)$
assert not testSafePattern('((?!json).)*$')
assert testSafePattern('.*(?!json)$')

# (.*.epub|.*.jpg|.*.jpeg|.*.png|data/.*.gif|.*.avi|.*.ogg|.*.webm|.*.mp4|.*.mp3|.*.mkv|.*.eot) not allowed, because it has 12 .* repetition patterns. Possible fix: .*(epub|jpg|jpeg|png|data/gif|avi|ogg|webm|mp4|mp3|mkv|eot)
assert not testSafePattern('(.*.epub|.*.jpg|.*.jpeg|.*.png|data/.*.gif|.*.avi|.*.ogg|.*.webm|.*.mp4|.*.mp3|.*.mkv|.*.eot)')
assert testSafePattern('.*(epub|jpg|jpeg|png|data/gif|avi|ogg|webm|mp4|mp3|mkv|eot)')

# https://github.com/HelloZeroNet/ZeroNet/issues/2757
assert not testSafePattern('a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')
assert not testSafePattern('a?a?a?a?a?a?a?x.{0,1}x.{0,1}x.{0,1}')
assert testSafePattern('a?a?a?a?a?a?a?x.{0,1}x.{0,1}')
assert not testSafePattern('a?a?a?a?a?a?a?x.*x.*x.*')
assert testSafePattern('a?a?a?a?a?a?a?x.*x.*')
|
||||||
|
|
||||||
|
################################################################################
|
|
@ -0,0 +1,43 @@
|
||||||
|
import logging
|
||||||
|
import re
|
||||||
|
|
||||||
|
log_level_raising_rules = []
|
||||||
|
|
||||||
|
def addLogLevelRaisingRule(rule, level=None):
    """Register a rule that raises the effective log level of matching loggers.

    rule: exact logger name, or a compiled regex matched against logger names.
    level: minimum level to enforce (defaults to logging.INFO).
    """
    log_level_raising_rules.append({
        "rule": rule,
        "level": logging.INFO if level is None else level,
    })
|
||||||
|
|
||||||
|
def matchLogLevelRaisingRule(name):
    """Return the raised level for logger *name*, or None if no rule matches."""
    for entry in log_level_raising_rules:
        rule = entry["rule"]
        if isinstance(rule, re.Pattern):
            matched = bool(rule.search(name))
        else:
            matched = rule == name
        if matched:
            return entry["level"]
    return None
|
||||||
|
|
||||||
|
class SelectiveLogger(logging.getLoggerClass()):
    """Logger subclass that can raise the level of records per matching rule.

    Rules come from matchLogLevelRaisingRule; installed globally via
    logging.setLoggerClass below.
    """

    def __init__(self, name, level=logging.NOTSET):
        return super().__init__(name, level)

    def raiseLevel(self, level):
        # Bump *level* up to the rule-mandated minimum for this logger name, if any
        raised_level = matchLogLevelRaisingRule(self.name)
        if raised_level is not None:
            if level < raised_level:
                level = raised_level
        return level

    def isEnabledFor(self, level):
        level = self.raiseLevel(level)
        return super().isEnabledFor(level)

    def _log(self, level, msg, args, **kwargs):
        # NOTE(review): overrides logging.Logger's private _log — verify on
        # Python version upgrades
        level = self.raiseLevel(level)
        return super()._log(level, msg, args, **kwargs)


logging.setLoggerClass(SelectiveLogger)
|
|
@ -0,0 +1,26 @@
|
||||||
|
import socket
|
||||||
|
|
||||||
|
import socks
|
||||||
|
from Config import config
|
||||||
|
|
||||||
|
def create_connection(address, timeout=None, source_address=None):
    """Proxy-aware replacement for socket.create_connection.

    Local addresses (config.ip_local) connect directly with the unpatched
    socket class; everything else goes through the configured SOCKS proxy.
    Bugfix: *timeout* and *source_address* were previously silently ignored,
    although this function is monkey-patched over socket.create_connection;
    they are now applied before connecting.
    """
    if address in config.ip_local:
        sock = socket.socket_noproxy(socket.AF_INET, socket.SOCK_STREAM)
    else:
        sock = socks.socksocket()
    # Honor the stdlib create_connection contract (skip the "use default" sentinel)
    if timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
        sock.settimeout(timeout)
    if source_address is not None:
        sock.bind(source_address)  # Bind the local endpoint before connecting
    sock.connect(address)
    return sock
|
||||||
|
|
||||||
|
|
||||||
|
# Dns queries using the proxy
|
||||||
|
def getaddrinfo(*args):
    """Fake getaddrinfo that defers DNS resolution to the SOCKS proxy.

    Returns a single TCP addrinfo entry pointing at the unresolved host/port.
    """
    host, port = args[0], args[1]
    return [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (host, port))]
|
||||||
|
|
||||||
|
|
||||||
|
def monkeyPatch(proxy_ip, proxy_port):
    """Route all new sockets and DNS lookups through the given SOCKS5 proxy.

    Keeps the unpatched socket class reachable as socket.socket_noproxy so
    create_connection can still dial local addresses directly.
    """
    socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, proxy_ip, int(proxy_port))
    socket.socket_noproxy = socket.socket  # Preserve the direct-connection class
    socket.socket = socks.socksocket
    socket.create_connection = create_connection
    socket.getaddrinfo = getaddrinfo
|
|
@ -0,0 +1,180 @@
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
import queue
|
||||||
|
|
||||||
|
import gevent
|
||||||
|
import gevent.monkey
|
||||||
|
import gevent.threadpool
|
||||||
|
import gevent._threading
|
||||||
|
|
||||||
|
|
||||||
|
class ThreadPool:
    """Wrapper around gevent.threadpool.ThreadPool that cooperates with the main loop.

    A max_size of 0 disables pooling: wrapped functions then run inline.
    Usable as a context manager (the pool is killed on exit).
    """

    def __init__(self, max_size, name=None):
        self.setMaxSize(max_size)
        if name:
            self.name = name
        else:
            self.name = "ThreadPool#%s" % id(self)

    def setMaxSize(self, max_size):
        # size <= 0 means "no pool": wrap() becomes a pass-through
        self.max_size = max_size
        if max_size > 0:
            self.pool = gevent.threadpool.ThreadPool(max_size)
        else:
            self.pool = None

    def wrap(self, func):
        # Decorator: run func in the pool when called from the main thread
        if self.pool is None:
            return func

        def wrapper(*args, **kwargs):
            if not isMainThread():  # Call directly if not in main thread
                return func(*args, **kwargs)
            res = self.apply(func, args, kwargs)
            return res

        return wrapper

    def spawn(self, *args, **kwargs):
        # NOTE(review): relies on the private pool._semaphore attribute of gevent
        if not isMainThread() and not self.pool._semaphore.ready():
            # Avoid semaphore error when spawning from other thread and the pool is full
            return main_loop.call(self.spawn, *args, **kwargs)
        res = self.pool.spawn(*args, **kwargs)
        return res

    # NOTE(review): mutable default kwargs={} is shared across calls — safe only
    # because it is never mutated here
    def apply(self, func, args=(), kwargs={}):
        # Run func in the pool and block the caller until it finishes
        t = self.spawn(func, *args, **kwargs)
        if self.pool._apply_immediately():  # Private gevent API — see note above
            return main_loop.call(t.get)
        else:
            return t.get()

    def kill(self):
        # Kill from the main loop with a timeout so a stuck worker can't hang us
        if self.pool is not None and self.pool.size > 0 and main_loop:
            main_loop.call(lambda: gevent.spawn(self.pool.kill).join(timeout=1))

        del self.pool
        self.pool = None

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.kill()
|
||||||
|
|
||||||
|
|
||||||
|
lock_pool = gevent.threadpool.ThreadPool(50)
|
||||||
|
main_thread_id = threading.current_thread().ident
|
||||||
|
|
||||||
|
|
||||||
|
def isMainThread():
    """Return True when running in the thread that imported this module (the gevent loop thread)."""
    return threading.current_thread().ident == main_thread_id
|
||||||
|
|
||||||
|
|
||||||
|
class Lock:
    """Lock usable from both gevent greenlets and real OS threads.

    NOTE(review): built on gevent._threading.Lock, a private gevent API —
    verify on gevent upgrades.
    """

    def __init__(self):
        self.lock = gevent._threading.Lock()
        self.locked = self.lock.locked  # Exposed directly from the raw lock
        self.release = self.lock.release
        self.time_lock = 0  # Timestamp of the last acquire attempt

    def acquire(self, *args, **kwargs):
        self.time_lock = time.time()
        if self.locked() and isMainThread():
            # Start in new thread to avoid blocking gevent loop
            return lock_pool.apply(self.lock.acquire, args, kwargs)
        else:
            return self.lock.acquire(*args, **kwargs)

    def __del__(self):
        # Fully release on garbage collection so waiters are not stranded
        while self.locked():
            self.release()
|
||||||
|
|
||||||
|
|
||||||
|
class Event:
    """Minimal one-shot result holder built on Lock, usable across threads.

    set() may be called exactly once; get() blocks until the value arrives.
    """

    def __init__(self):
        self.get_lock = Lock()
        self.res = None
        self.get_lock.acquire(False)  # Held until set() releases it
        self.done = False

    def set(self, res):
        if self.done:
            raise Exception("Event already has value")
        self.res = res
        self.get_lock.release()  # Wake up any get() waiter
        self.done = True

    def get(self):
        if not self.done:
            self.get_lock.acquire(True)  # Block until set() releases the lock
        if self.get_lock.locked():
            self.get_lock.release()  # Leave the lock free for subsequent get() calls
        back = self.res
        return back

    def __del__(self):
        self.res = None
        while self.get_lock.locked():
            self.get_lock.release()
|
||||||
|
|
||||||
|
|
||||||
|
# Execute function calls in main loop from other threads
|
||||||
|
class MainLoopCaller():
    """Proxies function calls from worker threads into the main gevent loop.

    Worker threads enqueue (func, args, kwargs, event); run() — spawned in the
    main loop — executes them and hands back (success, result-or-exception)
    through the event.
    """

    def __init__(self):
        self.queue_call = queue.Queue()  # Pending calls from other threads

        self.pool = gevent.threadpool.ThreadPool(1)  # Blocks on queue.get without freezing gevent
        self.num_direct = 0
        self.running = True

    def caller(self, func, args, kwargs, event_done):
        # Run func and report (success, result-or-error) to the waiting thread
        try:
            res = func(*args, **kwargs)
            event_done.set((True, res))
        except Exception as err:
            event_done.set((False, err))

    def start(self):
        gevent.spawn(self.run)
        time.sleep(0.001)  # Give run() a chance to start before callers arrive

    def run(self):
        while self.running:
            if self.queue_call.qsize() == 0:  # Get queue in new thread to avoid gevent blocking
                func, args, kwargs, event_done = self.pool.apply(self.queue_call.get)
            else:
                func, args, kwargs, event_done = self.queue_call.get()
            gevent.spawn(self.caller, func, args, kwargs, event_done)
            del func, args, kwargs, event_done  # Drop references promptly
        self.running = False

    def call(self, func, *args, **kwargs):
        if threading.current_thread().ident == main_thread_id:
            return func(*args, **kwargs)  # Already in the main thread: call directly
        else:
            event_done = Event()
            self.queue_call.put((func, args, kwargs, event_done))
            success, res = event_done.get()  # Block until the main loop ran it
            del event_done
            self.queue_call.task_done()
            if success:
                return res
            else:
                raise res
|
||||||
|
|
||||||
|
|
||||||
|
def patchSleep():  # Fix memory leak by using real sleep in threads
    """Replace time.sleep with a thread-aware version.

    Main thread: gevent.sleep (keeps the event loop responsive).
    Worker threads: the original blocking sleep (avoids the gevent leak
    the original comment refers to).
    """
    real_sleep = gevent.monkey.get_original("time", "sleep")

    def patched_sleep(seconds):
        if isMainThread():
            gevent.sleep(seconds)
        else:
            real_sleep(seconds)
    time.sleep = patched_sleep
|
||||||
|
|
||||||
|
|
||||||
|
# Module-level singleton: dispatcher for cross-thread calls into the main loop
main_loop = MainLoopCaller()
main_loop.start()
patchSleep()  # Install the thread-aware time.sleep replacement
|
|
@ -0,0 +1,395 @@
|
||||||
|
import re
|
||||||
|
import urllib.request
|
||||||
|
import http.client
|
||||||
|
import logging
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
from xml.dom.minidom import parseString
|
||||||
|
from xml.parsers.expat import ExpatError
|
||||||
|
|
||||||
|
from gevent import socket
|
||||||
|
import gevent
|
||||||
|
|
||||||
|
# Relevant UPnP spec:
|
||||||
|
# http://www.upnp.org/specs/gw/UPnP-gw-WANIPConnection-v1-Service.pdf
|
||||||
|
|
||||||
|
# General TODOs:
|
||||||
|
# Handle 0 or >1 IGDs
|
||||||
|
|
||||||
|
logger = logging.getLogger("Upnp")
|
||||||
|
|
||||||
|
class UpnpError(Exception):
    """Base error for UPnP port-mapping failures in this module."""
|
||||||
|
|
||||||
|
|
||||||
|
class IGDError(UpnpError):
    """
    Signifies a problem with the IGD (Internet Gateway Device) itself:
    a malformed discovery response, profile, or SOAP error reply.
    """
    pass
|
||||||
|
|
||||||
|
|
||||||
|
REMOVE_WHITESPACE = re.compile(r'>\s*<')
|
||||||
|
|
||||||
|
|
||||||
|
def perform_m_search(local_ip):
    """
    Broadcast a UDP SSDP M-SEARCH packet from *local_ip* and return the
    first response decoded as UTF-8.

    Raises UpnpError when no IGD answers within the timeout.
    """
    search_target = "urn:schemas-upnp-org:device:InternetGatewayDevice:1"

    ssdp_request = ''.join(
        ['M-SEARCH * HTTP/1.1\r\n',
         'HOST: 239.255.255.250:1900\r\n',
         'MAN: "ssdp:discover"\r\n',
         'MX: 2\r\n',
         'ST: {0}\r\n'.format(search_target),
         '\r\n']
    ).encode("utf8")

    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

    sock.bind((local_ip, 0))

    sock.sendto(ssdp_request, ('239.255.255.250', 1900))
    # Loopback can only reach a device on this host, so fail fast there
    if local_ip == "127.0.0.1":
        sock.settimeout(1)
    else:
        sock.settimeout(5)

    try:
        return sock.recv(2048).decode("utf8")
    except socket.error:
        raise UpnpError("No reply from IGD using {} as IP".format(local_ip))
    finally:
        sock.close()
|
||||||
|
|
||||||
|
|
||||||
|
def _retrieve_location_from_ssdp(response):
|
||||||
|
"""
|
||||||
|
Parse raw HTTP response to retrieve the UPnP location header
|
||||||
|
and return a ParseResult object.
|
||||||
|
"""
|
||||||
|
parsed_headers = re.findall(r'(?P<name>.*?): (?P<value>.*?)\r\n', response)
|
||||||
|
header_locations = [header[1]
|
||||||
|
for header in parsed_headers
|
||||||
|
if header[0].lower() == 'location']
|
||||||
|
|
||||||
|
if len(header_locations) < 1:
|
||||||
|
raise IGDError('IGD response does not contain a "location" header.')
|
||||||
|
|
||||||
|
return urlparse(header_locations[0])
|
||||||
|
|
||||||
|
|
||||||
|
def _retrieve_igd_profile(url):
    """
    Fetch and decode the IGD's UPnP device description document.

    *url* is a ParseResult; raises IGDError on network timeout/failure.
    """
    try:
        reply = urllib.request.urlopen(url.geturl(), timeout=5)
        return reply.read().decode('utf-8')
    except socket.error:
        raise IGDError('IGD profile query timed out')
|
||||||
|
|
||||||
|
|
||||||
|
def _get_first_child_data(node):
|
||||||
|
"""
|
||||||
|
Get the text value of the first child text node of a node.
|
||||||
|
"""
|
||||||
|
return node.childNodes[0].data
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_igd_profile(profile_xml):
|
||||||
|
"""
|
||||||
|
Traverse the profile xml DOM looking for either
|
||||||
|
WANIPConnection or WANPPPConnection and return
|
||||||
|
the 'controlURL' and the service xml schema.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
dom = parseString(profile_xml)
|
||||||
|
except ExpatError as e:
|
||||||
|
raise IGDError(
|
||||||
|
'Unable to parse IGD reply: {0} \n\n\n {1}'.format(profile_xml, e))
|
||||||
|
|
||||||
|
service_types = dom.getElementsByTagName('serviceType')
|
||||||
|
for service in service_types:
|
||||||
|
if _get_first_child_data(service).find('WANIPConnection') > 0 or \
|
||||||
|
_get_first_child_data(service).find('WANPPPConnection') > 0:
|
||||||
|
try:
|
||||||
|
control_url = _get_first_child_data(
|
||||||
|
service.parentNode.getElementsByTagName('controlURL')[0])
|
||||||
|
upnp_schema = _get_first_child_data(service).split(':')[-2]
|
||||||
|
return control_url, upnp_schema
|
||||||
|
except IndexError:
|
||||||
|
# Pass the error because any error here should raise the
|
||||||
|
# that's specified outside the for loop.
|
||||||
|
pass
|
||||||
|
raise IGDError(
|
||||||
|
'Could not find a control url or UPNP schema in IGD response.')
|
||||||
|
|
||||||
|
|
||||||
|
# Collect candidate local IPs using three concurrent strategies (UDP towards
# a multicast address, UDP towards a public DNS IP, hostname lookup),
# de-duplicated and sorted so 192.x.x.x LAN addresses come first.
def _get_local_ips():
    def method1():
        try:
            # get local ip using UDP and a broadcast address
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
            # Not using <broadcast> because gevents getaddrinfo doesn't like that
            # using port 1 as per hobbldygoop's comment about port 0 not working on osx:
            # https://github.com/sirMackk/ZeroNet/commit/fdcd15cf8df0008a2070647d4d28ffedb503fba2#commitcomment-9863928
            s.connect(('239.255.255.250', 1))
            return [s.getsockname()[0]]
        except:
            pass

    def method2():
        # Get ip by using UDP and a normal address (google dns ip)
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            s.connect(('8.8.8.8', 0))
            return [s.getsockname()[0]]
        except:
            pass

    def method3():
        # Get ip by '' hostname . Not supported on all platforms.
        try:
            return socket.gethostbyname_ex('')[2]
        except:
            pass

    # Run all three probes concurrently; a stuck probe is abandoned after 5s
    threads = [
        gevent.spawn(method1),
        gevent.spawn(method2),
        gevent.spawn(method3)
    ]

    gevent.joinall(threads, timeout=5)

    local_ips = []
    for thread in threads:
        if thread.value:
            local_ips += thread.value

    # Delete duplicates
    local_ips = list(set(local_ips))

    # Probably we looking for an ip starting with 192
    local_ips = sorted(local_ips, key=lambda a: a.startswith("192"), reverse=True)

    return local_ips
|
||||||
|
|
||||||
|
|
||||||
|
def _create_open_message(local_ip,
                         port,
                         description="UPnPPunch",
                         protocol="TCP",
                         upnp_schema='WANIPConnection'):
    """
    Build a SOAP AddPortMapping message.

    Maps external *port* to the same port on *local_ip* with an unlimited
    lease. Returns (message_body, action_name) ready for _send_requests.
    """

    soap_message = """<?xml version="1.0"?>
<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
<s:Body>
<u:AddPortMapping xmlns:u="urn:schemas-upnp-org:service:{upnp_schema}:1">
<NewRemoteHost></NewRemoteHost>
<NewExternalPort>{port}</NewExternalPort>
<NewProtocol>{protocol}</NewProtocol>
<NewInternalPort>{port}</NewInternalPort>
<NewInternalClient>{host_ip}</NewInternalClient>
<NewEnabled>1</NewEnabled>
<NewPortMappingDescription>{description}</NewPortMappingDescription>
<NewLeaseDuration>0</NewLeaseDuration>
</u:AddPortMapping>
</s:Body>
</s:Envelope>""".format(port=port,
                        protocol=protocol,
                        host_ip=local_ip,
                        description=description,
                        upnp_schema=upnp_schema)
    # Collapse inter-tag whitespace so the payload is compact on the wire
    return (REMOVE_WHITESPACE.sub('><', soap_message), 'AddPortMapping')
|
||||||
|
|
||||||
|
|
||||||
|
def _create_close_message(local_ip,
                          port,
                          description=None,
                          protocol='TCP',
                          upnp_schema='WANIPConnection'):
    """
    Build a SOAP DeletePortMapping message removing the mapping for
    external *port*/*protocol*. Returns (message_body, action_name).
    *local_ip* and *description* are unused but kept for signature parity
    with _create_open_message.
    """
    soap_message = """<?xml version="1.0"?>
<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
<s:Body>
<u:DeletePortMapping xmlns:u="urn:schemas-upnp-org:service:{upnp_schema}:1">
<NewRemoteHost></NewRemoteHost>
<NewExternalPort>{port}</NewExternalPort>
<NewProtocol>{protocol}</NewProtocol>
</u:DeletePortMapping>
</s:Body>
</s:Envelope>""".format(port=port,
                        protocol=protocol,
                        upnp_schema=upnp_schema)
    # Collapse inter-tag whitespace so the payload is compact on the wire
    return (REMOVE_WHITESPACE.sub('><', soap_message), 'DeletePortMapping')
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_for_errors(soap_response):
    """
    Check an http.client response for SOAP faults.

    Responses with status >= 400 are decoded (errorCode/errorDescription)
    and raised as IGDError; success responses pass through unchanged.
    """
    logger.debug(soap_response.status)
    if soap_response.status >= 400:
        response_data = soap_response.read()
        logger.debug(response_data)
        try:
            err_dom = parseString(response_data)
            err_code = _get_first_child_data(err_dom.getElementsByTagName(
                'errorCode')[0])
            err_msg = _get_first_child_data(
                err_dom.getElementsByTagName('errorDescription')[0]
            )
        except Exception as err:
            # Body wasn't valid SOAP-fault XML; surface it raw
            raise IGDError(
                'Unable to parse SOAP error: {0}. Got: "{1}"'.format(
                    err, response_data))
        raise IGDError(
            'SOAP request error: {0} - {1}'.format(err_code, err_msg)
        )
    return soap_response
|
||||||
|
|
||||||
|
|
||||||
|
def _send_soap_request(location, upnp_schema, control_path, soap_fn,
                       soap_message):
    """
    Send out SOAP request to UPnP device and return a response.

    *location* is the ParseResult of the IGD description URL; *soap_fn* is
    the SOAP action name (e.g. "AddPortMapping"). The response is run
    through _parse_for_errors, which raises IGDError on SOAP faults.
    """
    headers = {
        'SOAPAction': (
            '"urn:schemas-upnp-org:service:{schema}:'
            '1#{fn_name}"'.format(schema=upnp_schema, fn_name=soap_fn)
        ),
        'Content-Type': 'text/xml'
    }
    logger.debug("Sending UPnP request to {0}:{1}...".format(
        location.hostname, location.port))
    conn = http.client.HTTPConnection(location.hostname, location.port)
    conn.request('POST', control_path, soap_message, headers)

    response = conn.getresponse()
    conn.close()

    return _parse_for_errors(response)
|
||||||
|
|
||||||
|
|
||||||
|
def _collect_idg_data(ip_addr):
    """
    Discover the IGD reachable from *ip_addr* and gather everything needed
    to talk SOAP to it: description location, control path and schema.
    """
    ssdp_response = perform_m_search(ip_addr)
    location = _retrieve_location_from_ssdp(ssdp_response)
    control_path, upnp_schema = _parse_igd_profile(
        _retrieve_igd_profile(location))
    return {
        'location': location,
        'control_path': control_path,
        'upnp_schema': upnp_schema,
    }
|
||||||
|
|
||||||
|
|
||||||
|
def _send_requests(messages, location, upnp_schema, control_path):
    """
    Send every (body, action) SOAP message to the IGD; raise UpnpError
    unless all of them come back with HTTP 200.
    """
    responses = []
    for soap_body, action_name in messages:
        responses.append(
            _send_soap_request(location, upnp_schema, control_path,
                               action_name, soap_body))

    if not all(rsp.status == 200 for rsp in responses):
        raise UpnpError('Sending requests using UPnP failed.')
|
||||||
|
|
||||||
|
|
||||||
|
def _orchestrate_soap_request(ip, port, msg_fn, desc=None, protos=("TCP", "UDP")):
    """
    Discover the IGD reachable from *ip*, build one SOAP message per
    protocol with *msg_fn* and send them all.
    """
    logger.debug("Trying using local ip: %s" % ip)
    idg_data = _collect_idg_data(ip)

    soap_messages = []
    for proto in protos:
        soap_messages.append(msg_fn(ip, port, desc, proto, idg_data['upnp_schema']))

    _send_requests(soap_messages, **idg_data)
|
||||||
|
|
||||||
|
|
||||||
|
def _communicate_with_igd(port=15441,
                          desc="UpnpPunch",
                          retries=3,
                          fn=_create_open_message,
                          protos=("TCP", "UDP")):
    """
    Manage sending a message generated by 'fn'.

    Tries each detected local IP in parallel greenlets (starts staggered
    by 0.1s) and succeeds as soon as any of them gets the request through
    the IGD. Raises UpnpError when every candidate fails after *retries*
    attempts; returns True otherwise.
    """

    local_ips = _get_local_ips()
    success = False

    def job(local_ip):
        # Per-IP worker: retry the full SOAP exchange up to *retries* times
        for retry in range(retries):
            try:
                _orchestrate_soap_request(local_ip, port, fn, desc, protos)
                return True
            except Exception as e:
                logger.debug('Upnp request using "{0}" failed: {1}'.format(local_ip, e))
                gevent.sleep(1)
        return False

    threads = []

    for local_ip in local_ips:
        job_thread = gevent.spawn(job, local_ip)
        threads.append(job_thread)
        gevent.sleep(0.1)  # Stagger starts; earlier IPs get a head start
        if any([thread.value for thread in threads]):
            success = True
            break

    # Wait another 10sec for competition or any positive result
    for _ in range(10):
        all_done = all([thread.value is not None for thread in threads])
        any_succeed = any([thread.value for thread in threads])
        if all_done or any_succeed:
            break
        gevent.sleep(1)

    if any([thread.value for thread in threads]):
        success = True

    if not success:
        raise UpnpError(
            'Failed to communicate with igd using port {0} on local machine after {1} tries.'.format(
                port, retries))

    return success
|
||||||
|
|
||||||
|
|
||||||
|
def ask_to_open_port(port=15441, desc="UpnpPunch", retries=3, protos=("TCP", "UDP")):
    """
    Ask the local IGD to forward *port* to this machine for the given
    protocols. Returns True on success, raises UpnpError on failure.
    """
    logger.debug("Trying to open port %d." % port)
    return _communicate_with_igd(
        port=port,
        desc=desc,
        retries=retries,
        fn=_create_open_message,
        protos=protos,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def ask_to_close_port(port=15441, desc="UpnpPunch", retries=3, protos=("TCP", "UDP")):
    """Ask the local IGD to remove the mapping for *port*."""
    logger.debug("Trying to close port %d." % port)
    # retries=1 because multiple successes cause 500 response and failure
    # NOTE(review): despite the comment above, the caller-supplied *retries*
    # is forwarded unchanged — confirm whether it should be forced to 1.
    return _communicate_with_igd(port=port,
                                 desc=desc,
                                 retries=retries,
                                 fn=_create_close_message,
                                 protos=protos)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Manual smoke test: open then close a TCP mapping on the local IGD.
    from gevent import monkey
    monkey.patch_all()
    logging.basicConfig(level=logging.DEBUG)
    import time

    s = time.time()
    print("Opening port...")
    print("Success:", ask_to_open_port(15443, "ZeroNet", protos=["TCP"]))
    print("Done in", time.time() - s)

    print("Closing port...")
    print("Success:", ask_to_close_port(15443, "ZeroNet", protos=["TCP"]))
    print("Done in", time.time() - s)
|
||||||
|
|
|
@ -0,0 +1,5 @@
|
||||||
|
from .Cached import Cached
|
||||||
|
from .CircularIterator import CircularIterator
|
||||||
|
from .Event import Event
|
||||||
|
from .Noparallel import Noparallel
|
||||||
|
from .Pooled import Pooled
|
|
@ -0,0 +1,357 @@
|
||||||
|
import os
|
||||||
|
import stat
|
||||||
|
import socket
|
||||||
|
import struct
|
||||||
|
import re
|
||||||
|
import collections
|
||||||
|
import time
|
||||||
|
import logging
|
||||||
|
import base64
|
||||||
|
import json
|
||||||
|
|
||||||
|
import gevent
|
||||||
|
|
||||||
|
from Config import config
|
||||||
|
|
||||||
|
|
||||||
|
def atomicWrite(dest, content, mode="wb"):
    """Write *content* to *dest* atomically using tmpnew/tmpold renames.

    Returns True on success. On failure: logs the error, restores the
    previous file from the -tmpold copy when possible and returns False.
    """
    try:
        with open(dest + "-tmpnew", mode) as f:
            f.write(content)
            f.flush()
            os.fsync(f.fileno())  # Make sure data hits the disk before renaming
        if os.path.isfile(dest + "-tmpold"):  # Previous incomplete write
            os.rename(dest + "-tmpold", dest + "-tmpold-%s" % time.time())
        if os.path.isfile(dest):  # Rename old file to -tmpold
            os.rename(dest, dest + "-tmpold")
        os.rename(dest + "-tmpnew", dest)
        if os.path.isfile(dest + "-tmpold"):
            os.unlink(dest + "-tmpold")  # Remove old file
        return True
    except Exception as err:
        from Debug import Debug
        logging.error(
            "File %s write failed: %s, (%s) reverting..." %
            (dest, Debug.formatException(err), Debug.formatStack())
        )
        # Roll back: put the previous version back in place if we moved it
        if os.path.isfile(dest + "-tmpold") and not os.path.isfile(dest):
            os.rename(dest + "-tmpold", dest)
        return False
|
||||||
|
|
||||||
|
|
||||||
|
def jsonDumps(data):
    """Serialize *data* to pretty-printed JSON, then compact small dicts
    and lists onto single lines (smaller files, still diff-friendly)."""
    content = json.dumps(data, indent=1, sort_keys=True)

    # Make it a little more compact by removing unnecessary white space
    def compact_dict(match):
        # Strip internal newlines/indent from a small multi-line dict
        if "\n" in match.group(0):
            return match.group(0).replace(match.group(1), match.group(1).strip())
        else:
            return match.group(0)

    content = re.sub(r"\{(\n[^,\[\{]{10,100000}?)\}[, ]{0,2}\n", compact_dict, content, flags=re.DOTALL)

    def compact_list(match):
        # Strip internal newlines/indent from a multi-line list
        if "\n" in match.group(0):
            stripped_lines = re.sub("\n[ ]*", "", match.group(1))
            return match.group(0).replace(match.group(1), stripped_lines)
        else:
            return match.group(0)

    content = re.sub(r"\[([^\[\{]{2,100000}?)\][, ]{0,2}\n", compact_list, content, flags=re.DOTALL)

    # Remove end of line whitespace
    content = re.sub(r"(?m)[ ]+$", "", content)
    return content
|
||||||
|
|
||||||
|
|
||||||
|
def openLocked(path, mode="wb"):
    """Open *path* and take an exclusive, non-blocking lock on it.

    Uses fcntl on POSIX, msvcrt on Windows; other platforms get a plain
    open. Raises BlockingIOError when the file cannot be opened or the
    lock is already held elsewhere.
    """
    try:
        handle = open(path, mode)
        if os.name == "posix":
            import fcntl
            fcntl.flock(handle, fcntl.LOCK_EX | fcntl.LOCK_NB)
        elif os.name == "nt":
            import msvcrt
            msvcrt.locking(handle.fileno(), msvcrt.LK_NBLCK, 1)
    except (IOError, PermissionError, BlockingIOError) as err:
        raise BlockingIOError("Unable to lock file: %s" % err)
    return handle
|
||||||
|
|
||||||
|
|
||||||
|
def getFreeSpace():
    """Free bytes on the volume holding config.data_dir, or -1 on failure."""
    free_space = -1
    if "statvfs" in dir(os):  # Unix
        statvfs = os.statvfs(config.data_dir.encode("utf8"))
        free_space = statvfs.f_frsize * statvfs.f_bavail  # Non-root-reserved free space
    else:  # Windows
        try:
            import ctypes
            free_space_pointer = ctypes.c_ulonglong(0)
            ctypes.windll.kernel32.GetDiskFreeSpaceExW(
                ctypes.c_wchar_p(config.data_dir), None, None, ctypes.pointer(free_space_pointer)
            )
            free_space = free_space_pointer.value
        except Exception as err:
            logging.error("GetFreeSpace error: %s" % err)
    return free_space
|
||||||
|
|
||||||
|
|
||||||
|
def sqlquote(value):
    """Quote *value* for inline use in an SQL statement.

    Ints pass through as-is; everything else is single-quoted with
    embedded quotes doubled. NOTE(review): prefer parameterized queries
    where the call site allows it.
    """
    if type(value) is int:  # deliberate exact-type check (bool stays quoted)
        return str(value)
    return "'%s'" % value.replace("'", "''")
|
||||||
|
|
||||||
|
|
||||||
|
def shellquote(*args):
    """Wrap each argument in double quotes, stripping any embedded ones.

    One argument returns a string; several return a tuple.
    NOTE(review): this removes quotes rather than escaping them — not a
    full shell-injection defense; prefer subprocess with a list argv.
    """
    quoted = ['"%s"' % arg.replace('"', "") for arg in args]
    if len(args) == 1:
        return quoted[0]
    return tuple(quoted)
|
||||||
|
|
||||||
|
|
||||||
|
def packPeers(peers):
    """Group peers by address type and pack each peer's address.

    Returns {"ipv4": [...], "ipv6": [...], "onion": [...]}. Peers whose
    address cannot be classified/packed are logged and skipped (best
    effort) — NOTE(review): peer objects are assumed to expose .ip and
    .packMyAddress(); confirm against the Peer class.
    """
    packed_peers = {"ipv4": [], "ipv6": [], "onion": []}
    for peer in peers:
        try:
            ip_type = getIpType(peer.ip)
            if ip_type in packed_peers:
                packed_peers[ip_type].append(peer.packMyAddress())
        except Exception:
            logging.debug("Error packing peer address: %s" % peer)
    return packed_peers
|
||||||
|
|
||||||
|
|
||||||
|
# ip, port to packed 6byte or 18byte format
def packAddress(ip, port):
    """Pack ip + port: 4+2 bytes for IPv4, 16+2 bytes for IPv6."""
    port_packed = struct.pack("H", port)
    if ":" in ip:
        return socket.inet_pton(socket.AF_INET6, ip) + port_packed
    return socket.inet_aton(ip) + port_packed
|
||||||
|
|
||||||
|
|
||||||
|
# From 6byte or 18byte format to ip, port
def unpackAddress(packed):
    """Decode a 6 byte (IPv4) or 18 byte (IPv6) packed address to (ip, port)."""
    if len(packed) == 18:
        ip = socket.inet_ntop(socket.AF_INET6, packed[0:16])
        port = struct.unpack_from("H", packed, 16)[0]
    else:
        if len(packed) != 6:
            raise Exception("Invalid length ip4 packed address: %s" % len(packed))
        ip = socket.inet_ntoa(packed[0:4])
        port = struct.unpack_from("H", packed, 4)[0]
    return ip, port
|
||||||
|
|
||||||
|
|
||||||
|
# onion, port to packed 12byte format
def packOnionAddress(onion, port):
    """Pack "xxx.onion" + port: base32-decoded hostname followed by 2 byte port."""
    hostname = onion.replace(".onion", "")
    return base64.b32decode(hostname.upper()) + struct.pack("H", port)
|
||||||
|
|
||||||
|
|
||||||
|
# From 12byte format to ip, port
def unpackOnionAddress(packed):
    """Inverse of packOnionAddress: packed bytes -> ("xxx.onion", port)."""
    hostname = base64.b32encode(packed[0:-2]).lower().decode()
    port = struct.unpack("H", packed[-2:])[0]
    return hostname + ".onion", port
|
||||||
|
|
||||||
|
|
||||||
|
# Get dir from file
# Return: data/site/content.json -> data/site/
def getDirname(path):
    """Directory part of *path* with trailing slash; leading slashes stripped."""
    head, sep, _tail = path.rpartition("/")
    if not sep:
        return ""
    return (head + sep).lstrip("/")
|
||||||
|
|
||||||
|
|
||||||
|
# Get filename from path
# Return: data/site/content.json -> content.json
def getFilename(path):
    """Last path component of *path* (the whole string if there is no "/")."""
    return path.rpartition("/")[2]
|
||||||
|
|
||||||
|
|
||||||
|
def getFilesize(path):
    """Size of the regular file at *path*; None if missing or not a regular file."""
    try:
        info = os.stat(path)
    except Exception:
        return None
    # Only report sizes for regular files (not dirs, sockets, ...)
    return info.st_size if stat.S_ISREG(info.st_mode) else None
|
||||||
|
|
||||||
|
|
||||||
|
# Convert hash to hashid for hashfield
def toHashId(hash):
    """First 4 hex characters of *hash* interpreted as an int (0..65535)."""
    return int(hash[:4], 16)
|
||||||
|
|
||||||
|
|
||||||
|
# Merge dict values
def mergeDicts(dicts):
    """Union the iterable values of *dicts* into sets, keyed by dict key."""
    merged = collections.defaultdict(set)
    for source in dicts:
        for key, values in source.items():
            merged[key].update(values)
    return dict(merged)
|
||||||
|
|
||||||
|
|
||||||
|
# Request https url using gevent SSL error workaround
def httpRequest(url, as_file=False):
    """Fetch *url* and return the HTTP response.

    HTTPS goes through a manual socket + SSL wrap to work around gevent
    SSL errors. Follows redirects; with as_file=True the body is read
    (capped per-chunk) into a BytesIO and returned instead.
    """
    if url.startswith("http://"):
        import urllib.request
        response = urllib.request.urlopen(url)
    else:  # Hack to avoid Python gevent ssl errors
        import socket
        import http.client
        import ssl

        host, request = re.match("https://(.*?)(/.*?)$", url).groups()

        conn = http.client.HTTPSConnection(host)
        sock = socket.create_connection((conn.host, conn.port), conn.timeout, conn.source_address)
        # ssl.wrap_socket() was removed in Python 3.12; build an explicit
        # context instead. Verification is disabled to match the original
        # wrap_socket() behavior (it did not verify certificates either).
        context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        context.check_hostname = False
        context.verify_mode = ssl.CERT_NONE
        conn.sock = context.wrap_socket(sock, server_hostname=host)
        conn.request("GET", request)
        response = conn.getresponse()
    if response.status in [301, 302, 303, 307, 308]:
        logging.info("Redirect to: %s" % response.getheader('Location'))
        response = httpRequest(response.getheader('Location'))

    if as_file:
        import io
        data = io.BytesIO()
        while True:
            buff = response.read(1024 * 16)
            if not buff:
                break
            data.write(buff)
        return data
    else:
        return response
|
||||||
|
|
||||||
|
|
||||||
|
def timerCaller(secs, func, *args, **kwargs):
    """One tick of a repeating timer: re-arm, then run func."""
    # Re-arm first so the timer keeps firing even if func() raises
    gevent.spawn_later(secs, timerCaller, secs, func, *args, **kwargs)
    func(*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def timer(secs, func, *args, **kwargs):
    """Start a repeating timer: call func every *secs* seconds (first call
    after *secs*). Returns the initial greenlet."""
    return gevent.spawn_later(secs, timerCaller, secs, func, *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def create_connection(address, timeout=None, source_address=None):
    """Replacement for socket.create_connection installed by
    socketBindMonkeyPatch(): non-local destinations are forced to bind to
    socket.bind_addr; local ones keep the caller's source address."""
    if address in config.ip_local:
        sock = socket.create_connection_original(address, timeout, source_address)
    else:
        sock = socket.create_connection_original(address, timeout, socket.bind_addr)
    return sock
|
||||||
|
|
||||||
|
|
||||||
|
def socketBindMonkeyPatch(bind_ip, bind_port):
    """Force outgoing sockets to bind (bind_ip, bind_port) by swapping
    socket.create_connection for this module's create_connection wrapper.
    The original is kept as socket.create_connection_original."""
    import socket
    logging.info("Monkey patching socket to bind to: %s:%s" % (bind_ip, bind_port))
    socket.bind_addr = (bind_ip, int(bind_port))
    socket.create_connection_original = socket.create_connection
    socket.create_connection = create_connection
|
||||||
|
|
||||||
|
|
||||||
|
def limitedGzipFile(*args, **kwargs):
    """GzipFile whose read() never returns more than 25 MB at once —
    guards against decompression bombs. Accepts the same arguments as
    gzip.GzipFile."""
    import gzip

    CAP = 1024 * 1024 * 25

    class LimitedGzipFile(gzip.GzipFile):
        def read(self, size=-1):
            # The original ignored *size* entirely, so read(5) could
            # return up to 25 MB. Honor the caller's size, capped at CAP.
            if size is None or size < 0 or size > CAP:
                size = CAP
            return super(LimitedGzipFile, self).read(size)
    return LimitedGzipFile(*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def avg(items):
    """Arithmetic mean of *items*; 0 for an empty sequence."""
    return sum(items) / len(items) if len(items) > 0 else 0
|
||||||
|
|
||||||
|
|
||||||
|
def isIp(ip):
    """Return True when *ip* parses as an IPv6 (contains ":") or IPv4
    address, False otherwise."""
    if ":" in ip:  # IPv6
        try:
            socket.inet_pton(socket.AF_INET6, ip)
        except Exception:
            return False
        return True
    # IPv4 — inet_aton is deliberately lenient (accepts e.g. "1.2.3"),
    # matching the historical behavior of this helper.
    try:
        socket.inet_aton(ip)
    except Exception:
        return False
    return True
|
||||||
|
|
||||||
|
|
||||||
|
# Loopback, RFC1918 private ranges, link-local and IPv6 loopback/link-local
local_ip_pattern = re.compile(r"^127\.|192\.168\.|10\.|172\.1[6-9]\.|172\.2[0-9]\.|172\.3[0-1]\.|169\.254\.|::1$|fe80")


def isPrivateIp(ip):
    """Truthy (a regex match) when *ip* is in a private/loopback/link-local range."""
    return local_ip_pattern.match(ip)
|
||||||
|
|
||||||
|
# XXX: Deprecated. Use ConnectionServer.getIpType() instead.
# To be removed in 0.9.0
def getIpType(ip):
    """Classify an address string: "onion", "ipv6", "ipv4" or "unknown"."""
    if ip.endswith(".onion"):
        return "onion"
    if ":" in ip:
        return "ipv6"
    if re.match(r"[0-9\.]+$", ip):
        return "ipv4"
    return "unknown"
|
||||||
|
|
||||||
|
|
||||||
|
def createSocket(ip, sock_type=socket.SOCK_STREAM):
    """Create a socket whose address family matches *ip* (AF_INET6 for
    ipv6 addresses, AF_INET otherwise)."""
    family = socket.AF_INET6 if getIpType(ip) == "ipv6" else socket.AF_INET
    return socket.socket(family, sock_type)
|
||||||
|
|
||||||
|
|
||||||
|
def getInterfaceIps(ip_type="ipv4"):
    """Best-effort list of this machine's interface IPs of *ip_type*.

    Probes with connected UDP sockets (no packets are sent) plus a
    hostname lookup; the results are filtered to valid addresses of the
    requested type and de-duplicated.
    """
    res = []
    if ip_type == "ipv6":
        test_ips = ["ff0e::c", "2606:4700:4700::1111"]  # multicast + Cloudflare DNS
    else:
        test_ips = ['239.255.255.250', "8.8.8.8"]  # SSDP multicast + Google DNS

    for test_ip in test_ips:
        try:
            s = createSocket(test_ip, sock_type=socket.SOCK_DGRAM)
            s.connect((test_ip, 1))  # UDP connect only selects the source IP
            res.append(s.getsockname()[0])
        except Exception:
            pass

    try:
        res += [ip[4][0] for ip in socket.getaddrinfo(socket.gethostname(), 1)]
    except Exception:
        pass

    # Strip IPv6 zone ids (…%eth0) and keep only valid addresses of the type
    res = [re.sub("%.*", "", ip) for ip in res if getIpType(ip) == ip_type and isIp(ip)]
    return list(set(res))
|
||||||
|
|
||||||
|
|
||||||
|
def cmp(a, b):
    """Python 2 style three-way comparison: -1, 0 or 1.

    Kept as the boolean-subtraction form so unordered operands (e.g. NaN)
    yield 0 exactly as before.
    """
    return (a > b) - (a < b)
|
||||||
|
|
||||||
|
|
||||||
|
def encodeResponse(func):  # Encode returned data from utf8 to bytes
    """Decorator: yield the wrapped function's output as bytes.

    Works for both plain return values and generators; str parts are
    UTF-8 encoded, bytes parts pass through unchanged. The wrapper is
    always a generator.
    """
    def wrapper(*args, **kwargs):
        back = func(*args, **kwargs)
        # hasattr/isinstance instead of the original dir()/type()==
        # membership checks — same semantics, idiomatic and faster.
        if hasattr(back, "__next__"):  # generator/iterator: stream the parts
            for part in back:
                yield part if isinstance(part, bytes) else part.encode()
        else:
            yield back if isinstance(back, bytes) else back.encode()

    return wrapper
|